/* SPDX-License-Identifier: GPL-2.0-or-later */

#include <device/pci_ops.h>
#include <device/smbus_host.h>
#include <cbmem.h>
#include <cf9_reset.h>
#include <console/console.h>
#include <arch/cpu.h>
#include <spd.h>
#include <string.h>
#include <device/dram/ddr2.h>
#include <device/dram/ddr3.h>
#include <mrc_cache.h>
#include <timestamp.h>
#include <types.h>

#include "raminit.h"
#include "x4x.h"

#define MRC_CACHE_VERSION 0

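/*
 * The CRC helpers below read only the module-unique SPD bytes (manufacturer
 * ID, manufacturing date and serial number areas) and feed them to the
 * generic CRC routines. The resulting checksum is stored with the cached
 * training data so a later boot can tell whether a DIMM was swapped.
 */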
static u16 ddr2_get_crc(u8 device, u8 len)
{
	u8 raw_spd[128] = {};
	i2c_eeprom_read(device, 64, 9, &raw_spd[64]);
	i2c_eeprom_read(device, 93, 6, &raw_spd[93]);
	return spd_ddr2_calc_unique_crc(raw_spd, len);
}

static u16 ddr3_get_crc(u8 device, u8 len)
{
	u8 raw_spd[256] = {};
	i2c_eeprom_read(device, 117, 11, &raw_spd[117]);
	return spd_ddr3_calc_unique_crc(raw_spd, len);
}

static enum cb_err verify_spds(const u8 *spd_map, const struct sysinfo *ctrl_cached)
{
	int i;
	u16 crc;

	for (i = 0; i < TOTAL_DIMMS; i++) {
		if (!(spd_map[i]))
			continue;
		int len = smbus_read_byte(spd_map[i], 0);
		if (len < 0 && ctrl_cached->dimms[i].card_type == RAW_CARD_UNPOPULATED)
			continue;
		if (len > 0 && ctrl_cached->dimms[i].card_type == RAW_CARD_UNPOPULATED)
			return CB_ERR;

		if (ctrl_cached->spd_type == DDR2)
			crc = ddr2_get_crc(spd_map[i], len);
		else
			crc = ddr3_get_crc(spd_map[i], len);

		if (crc != ctrl_cached->dimms[i].spd_crc)
			return CB_ERR;
	}
	return CB_SUCCESS;
}

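/*
 * Worst-case (i.e. largest minimum) timings accumulated over all populated
 * DIMMs, in the time units used by the SPD decoders. min_tCLK_cas[n] is the
 * shortest cycle time usable at CAS latency n; cas_supported is the
 * intersection of the CAS masks of all DIMMs.
 */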
struct abs_timings {
	u32 min_tclk;
	u32 min_tRAS;
	u32 min_tRP;
	u32 min_tRCD;
	u32 min_tWR;
	u32 min_tRFC;
	u32 min_tWTR;
	u32 min_tRRD;
	u32 min_tRTP;
	u32 min_tAA;
	u32 min_tCLK_cas[8];
	u32 cas_supported;
};

#define CTRL_MIN_TCLK_DDR2 TCK_400MHZ

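/*
 * Select the DDR2 CAS latency and memory frequency: starting from the highest
 * CAS latency the DIMMs have in common, step down while a lower CAS does not
 * require a longer cycle time, or while the required cycle time is still
 * faster than the controller supports (TCK_400MHZ, i.e. DDR2-800).
 */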
static void select_cas_dramfreq_ddr2(struct sysinfo *s, const struct abs_timings *saved_timings)
{
	u8 try_cas;
	/* Currently only these CAS are supported */
	u8 cas_mask = SPD_CAS_LATENCY_DDR2_5 | SPD_CAS_LATENCY_DDR2_6;

	cas_mask &= saved_timings->cas_supported;
	try_cas = spd_get_msbs(cas_mask);

	while (cas_mask & (1 << try_cas) && try_cas > 0) {
		s->selected_timings.CAS = try_cas;
		s->selected_timings.tclk = saved_timings->min_tCLK_cas[try_cas];
		if (s->selected_timings.tclk >= CTRL_MIN_TCLK_DDR2 &&
				saved_timings->min_tCLK_cas[try_cas] !=
				saved_timings->min_tCLK_cas[try_cas - 1])
			break;
		try_cas--;
	}

	if ((s->selected_timings.CAS < 3) || (s->selected_timings.tclk == 0))
		die("Could not find common memory frequency and CAS\n");

	switch (s->selected_timings.tclk) {
	case TCK_200MHZ:
	case TCK_266MHZ:
		/* FIXME: this works on vendor BIOS */
		die("Selected dram frequency not supported\n");
	case TCK_333MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_667MHz;
		break;
	case TCK_400MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_800MHz;
		break;
	}
}

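/*
 * Report the CPU core count and the northbridge capabilities. In the CAPID
 * registers at config offsets 0xe4/0xe8 a cleared bit means the corresponding
 * feature (iTPM, ME, AMT, VT-d) is enabled, hence the inverted tests below.
 */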
static void mchinfo_ddr2(struct sysinfo *s)
{
	const u32 eax = cpuid_ext(0x04, 0).eax;
	printk(BIOS_WARNING, "%d CPU cores\n", ((eax >> 26) & 0x3f) + 1);

	u32 capid = pci_read_config16(HOST_BRIDGE, 0xe8);
	if (!(capid & (1 << (79 - 64))))
		printk(BIOS_WARNING, "iTPM enabled\n");

	capid = pci_read_config32(HOST_BRIDGE, 0xe4);
	if (!(capid & (1 << (57 - 32))))
		printk(BIOS_WARNING, "ME enabled\n");

	if (!(capid & (1 << (56 - 32))))
		printk(BIOS_WARNING, "AMT enabled\n");

	if (!(capid & (1 << (48 - 32))))
		printk(BIOS_WARNING, "VT-d enabled\n");
}

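/*
 * Decode one DDR2 SPD: record the DIMM geometry (width, page size, banks,
 * ranks, rows, columns), fold its minimum timings into the running worst-case
 * set, and store the unique-SPD CRC for later fast-boot validation. Returns
 * CB_ERR for unsupported or undecodable modules.
 */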
static enum cb_err ddr2_save_dimminfo(u8 dimm_idx, u8 *raw_spd,
			struct abs_timings *saved_timings, struct sysinfo *s)
{
	struct dimm_attr_ddr2_st decoded_dimm;
	int i;

	if (spd_decode_ddr2(&decoded_dimm, raw_spd) != SPD_STATUS_OK) {
		printk(BIOS_DEBUG, "Problems decoding SPD\n");
		return CB_ERR;
	}

	if (CONFIG(DEBUG_RAM_SETUP))
		dram_print_spd_ddr2(&decoded_dimm);

	if (!(decoded_dimm.width & (0x08 | 0x10))) {
		printk(BIOS_ERR, "DIMM%d Unsupported width: x%d. Disabling dimm\n",
			dimm_idx, s->dimms[dimm_idx].width);
		return CB_ERR;
	}
	s->dimms[dimm_idx].width = (decoded_dimm.width >> 3) - 1;
	/*
	 * This boils down to:
	 * "Except for the x16 configuration, all DDR2 devices have a
	 * 1KB page size. For the x16 configuration, the page size is 2KB
	 * for all densities except the 256Mb device, which has a 1KB page
	 * size." Micron, 'TN-47-16 Designing for High-Density DDR2 Memory'
	 * The formula is page size in bytes = width * 2^col_bits / 8.
	 */
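	/* For example, an x8 device with 10 column bits: 8 * 2^10 / 8 = 1024 bytes (1 KiB page). */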
	s->dimms[dimm_idx].page_size = decoded_dimm.width * (1 << decoded_dimm.col_bits) / 8;

	switch (decoded_dimm.banks) {
	case 4:
		s->dimms[dimm_idx].n_banks = N_BANKS_4;
		break;
	case 8:
		s->dimms[dimm_idx].n_banks = N_BANKS_8;
		break;
	default:
		printk(BIOS_ERR, "DIMM%d Unsupported #banks: x%d. Disabling dimm\n",
			dimm_idx, decoded_dimm.banks);
		return CB_ERR;
	}

	s->dimms[dimm_idx].ranks = decoded_dimm.ranks;
	s->dimms[dimm_idx].rows = decoded_dimm.row_bits;
	s->dimms[dimm_idx].cols = decoded_dimm.col_bits;

	saved_timings->cas_supported &= decoded_dimm.cas_supported;

	saved_timings->min_tRAS = MAX(saved_timings->min_tRAS, decoded_dimm.tRAS);
	saved_timings->min_tRP = MAX(saved_timings->min_tRP, decoded_dimm.tRP);
	saved_timings->min_tRCD = MAX(saved_timings->min_tRCD, decoded_dimm.tRCD);
	saved_timings->min_tWR = MAX(saved_timings->min_tWR, decoded_dimm.tWR);
	saved_timings->min_tRFC = MAX(saved_timings->min_tRFC, decoded_dimm.tRFC);
	saved_timings->min_tWTR = MAX(saved_timings->min_tWTR, decoded_dimm.tWTR);
	saved_timings->min_tRRD = MAX(saved_timings->min_tRRD, decoded_dimm.tRRD);
	saved_timings->min_tRTP = MAX(saved_timings->min_tRTP, decoded_dimm.tRTP);
	for (i = 0; i < 8; i++) {
		if (!(saved_timings->cas_supported & (1 << i)))
			saved_timings->min_tCLK_cas[i] = 0;
		else
			saved_timings->min_tCLK_cas[i] =
				MAX(saved_timings->min_tCLK_cas[i],
					decoded_dimm.cycle_time[i]);
	}

	s->dimms[dimm_idx].spd_crc = spd_ddr2_calc_unique_crc(raw_spd,
				spd_decode_spd_size_ddr2(raw_spd[0]));
	return CB_SUCCESS;
}

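/*
 * Round a minimum cycle time up to the nearest cycle time the controller
 * supports (666/533/400 MHz memory clock). A result of 0 means even the
 * slowest supported clock would still be too fast for the DIMMs.
 */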
static void normalize_tCLK(u32 *tCLK)
{
	if (*tCLK <= TCK_666MHZ)
		*tCLK = TCK_666MHZ;
	else if (*tCLK <= TCK_533MHZ)
		*tCLK = TCK_533MHZ;
	else if (*tCLK <= TCK_400MHZ)
		*tCLK = TCK_400MHZ;
	else
		*tCLK = 0;
}

static void select_cas_dramfreq_ddr3(struct sysinfo *s, struct abs_timings *saved_timings)
{
	/*
	 * Various constraints must be fulfilled:
	 *   CAS * tCK < 20ns == 160MTB
	 *   tCK_max >= tCK >= tCK_min
	 *   CAS >= roundup(tAA_min / tCK)
	 *   CAS supported
	 * Also note: Clock(MT/s) = 2000 / tCK(ns); Intel uses MT/s but calls them MHz.
	 */
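	/*
	 * Example with illustrative numbers: at tCK = 1.5 ns (DDR3-1333) the
	 * 20 ns window allows CAS <= 13, while a module with tAA(min) = 13.5 ns
	 * needs CAS >= roundup(13.5 / 1.5) = 9.
	 */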

	u32 min_tCLK;
	u8 try_CAS;
	u16 capid = (pci_read_config16(HOST_BRIDGE, 0xea) >> 4) & 0x3f;

	switch (s->max_fsb) {
	default:
	case FSB_CLOCK_800MHz:
		min_tCLK = TCK_400MHZ;
		break;
	case FSB_CLOCK_1066MHz:
		min_tCLK = TCK_533MHZ;
		break;
	case FSB_CLOCK_1333MHz:
		min_tCLK = TCK_666MHZ;
		break;
	}

	switch (capid >> 3) {
	default: /* Should not happen */
		min_tCLK = TCK_400MHZ;
		break;
	case 1:
		min_tCLK = MAX(min_tCLK, TCK_400MHZ);
		break;
	case 2:
		min_tCLK = MAX(min_tCLK, TCK_533MHZ);
		break;
	case 3: /* Only on P45 */
	case 0:
		min_tCLK = MAX(min_tCLK, TCK_666MHZ);
		break;
	}

	min_tCLK = MAX(min_tCLK, saved_timings->min_tclk);
	if (min_tCLK == 0) {
		printk(BIOS_ERR,
			"DRAM frequency is under lowest supported frequency (400 MHz).\n"
			"Increasing to 400 MHz as last resort.\n");
		min_tCLK = TCK_400MHZ;
	}

	while (1) {
		normalize_tCLK(&min_tCLK);
		if (min_tCLK == 0)
			die("Couldn't find compatible clock / CAS settings.\n");
		try_CAS = DIV_ROUND_UP(saved_timings->min_tAA, min_tCLK);
		printk(BIOS_SPEW, "Trying CAS %u, tCK %u.\n", try_CAS, min_tCLK);
		for (; try_CAS <= DDR3_MAX_CAS; try_CAS++) {
			/*
			 * cas_supported is encoded like the SPD, which starts
			 * at CAS = 4.
			 */
			if ((saved_timings->cas_supported << 4) & (1 << try_CAS))
				break;
		}
		if ((try_CAS <= DDR3_MAX_CAS) && (try_CAS * min_tCLK < 20 * 256)) {
			/* Found good CAS. */
			printk(BIOS_SPEW, "Found compatible tCLK / CAS pair: %u / %u.\n",
				min_tCLK, try_CAS);
			break;
		}
		/*
		 * If no valid tCLK / CAS pair could be found for a tCLK,
		 * increase it, after which it gets normalised. This means
		 * that a lower frequency gets tried.
		 */
		min_tCLK++;
	}

	s->selected_timings.tclk = min_tCLK;
	s->selected_timings.CAS = try_CAS;

	switch (s->selected_timings.tclk) {
	case TCK_400MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_800MHz;
		break;
	case TCK_533MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_1066MHz;
		break;
	case TCK_666MHZ:
		s->selected_timings.mem_clk = MEM_CLOCK_1333MHz;
		break;
	}
}

/*
 * With DDR3, a 533 MHz memory clock and the internal graphics device enabled,
 * the display is not usable in non-stacked mode, so select stacked mode
 * accordingly.
 */
static void workaround_stacked_mode(struct sysinfo *s)
{
	u32 deven;
	/* Only a problem on DDR3 */
	if (s->spd_type == DDR2)
		return;
	/* Does not matter if only one channel is populated */
	if (!CHANNEL_IS_POPULATED(s->dimms, 0) || !CHANNEL_IS_POPULATED(s->dimms, 1))
		return;
	if (s->selected_timings.mem_clk != MEM_CLOCK_1066MHz)
		return;
	/* IGD0EN gets disabled if not present before this code runs */
	deven = pci_read_config32(HOST_BRIDGE, D0F0_DEVEN);
	if (deven & IGD0EN)
		s->stacked_mode = 1;
}

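/*
 * Decode one DDR3 SPD: record the DIMM geometry, fold its minimum timings
 * (including tAA for CAS selection) into the running worst-case set, note
 * whether the rank address pins are mirrored, and store the unique-SPD CRC
 * for later fast-boot validation.
 */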
static enum cb_err ddr3_save_dimminfo(u8 dimm_idx, u8 *raw_spd,
			struct abs_timings *saved_timings, struct sysinfo *s)
{
	struct dimm_attr_ddr3_st decoded_dimm;

	if (spd_decode_ddr3(&decoded_dimm, raw_spd) != SPD_STATUS_OK)
		return CB_ERR;

	if (CONFIG(DEBUG_RAM_SETUP))
		dram_print_spd_ddr3(&decoded_dimm);

	/* x4 DIMMs are not supported (true for both DDR2 and DDR3) */
	if (!(decoded_dimm.width & (0x8 | 0x10))) {
		printk(BIOS_ERR, "DIMM%d Unsupported width: x%d. Disabling dimm\n",
			dimm_idx, s->dimms[dimm_idx].width);
		return CB_ERR;
	}
	s->dimms[dimm_idx].width = (decoded_dimm.width >> 3) - 1;
	/*
	 * This boils down to:
	 * "Except for the x16 configuration, all DDR3 devices have a
	 * 1KB page size. For the x16 configuration, the page size is 2KB
	 * for all densities except the 256Mb device, which has a 1KB page size."
	 * Micron, 'TN-47-16 Designing for High-Density DDR2 Memory'
	 */
	s->dimms[dimm_idx].page_size = decoded_dimm.width * (1 << decoded_dimm.col_bits) / 8;

	s->dimms[dimm_idx].n_banks = N_BANKS_8; /* DDR3 devices always have 8 banks */

	s->dimms[dimm_idx].ranks = decoded_dimm.ranks;
	s->dimms[dimm_idx].rows = decoded_dimm.row_bits;
	s->dimms[dimm_idx].cols = decoded_dimm.col_bits;

	saved_timings->min_tRAS = MAX(saved_timings->min_tRAS, decoded_dimm.tRAS);
	saved_timings->min_tRP = MAX(saved_timings->min_tRP, decoded_dimm.tRP);
	saved_timings->min_tRCD = MAX(saved_timings->min_tRCD, decoded_dimm.tRCD);
	saved_timings->min_tWR = MAX(saved_timings->min_tWR, decoded_dimm.tWR);
	saved_timings->min_tRFC = MAX(saved_timings->min_tRFC, decoded_dimm.tRFC);
	saved_timings->min_tWTR = MAX(saved_timings->min_tWTR, decoded_dimm.tWTR);
	saved_timings->min_tRRD = MAX(saved_timings->min_tRRD, decoded_dimm.tRRD);
	saved_timings->min_tRTP = MAX(saved_timings->min_tRTP, decoded_dimm.tRTP);
	saved_timings->min_tAA = MAX(saved_timings->min_tAA, decoded_dimm.tAA);
	saved_timings->cas_supported &= decoded_dimm.cas_supported;

	s->dimms[dimm_idx].spd_crc = spd_ddr3_calc_unique_crc(raw_spd, raw_spd[0]);

	s->dimms[dimm_idx].mirrored = decoded_dimm.flags.pins_mirrored;

	return CB_SUCCESS;
}

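/*
 * Convert the accumulated worst-case timings from time units into clock
 * cycles at the selected tCK, rounding up.
 */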
static void select_discrete_timings(struct sysinfo *s, const struct abs_timings *timings)
{
	s->selected_timings.tRAS = DIV_ROUND_UP(timings->min_tRAS, s->selected_timings.tclk);
	s->selected_timings.tRP = DIV_ROUND_UP(timings->min_tRP, s->selected_timings.tclk);
	s->selected_timings.tRCD = DIV_ROUND_UP(timings->min_tRCD, s->selected_timings.tclk);
	s->selected_timings.tWR = DIV_ROUND_UP(timings->min_tWR, s->selected_timings.tclk);
	s->selected_timings.tRFC = DIV_ROUND_UP(timings->min_tRFC, s->selected_timings.tclk);
	s->selected_timings.tWTR = DIV_ROUND_UP(timings->min_tWTR, s->selected_timings.tclk);
	s->selected_timings.tRRD = DIV_ROUND_UP(timings->min_tRRD, s->selected_timings.tclk);
	s->selected_timings.tRTP = DIV_ROUND_UP(timings->min_tRTP, s->selected_timings.tclk);
}

static void print_selected_timings(struct sysinfo *s)
{
	printk(BIOS_DEBUG, "Selected timings:\n");
	printk(BIOS_DEBUG, "\tFSB: %dMHz\n", fsb_to_mhz(s->selected_timings.fsb_clk));
	printk(BIOS_DEBUG, "\tDDR: %dMHz\n", ddr_to_mhz(s->selected_timings.mem_clk));

	printk(BIOS_DEBUG, "\tCAS: %d\n", s->selected_timings.CAS);
	printk(BIOS_DEBUG, "\ttRAS: %d\n", s->selected_timings.tRAS);
	printk(BIOS_DEBUG, "\ttRP: %d\n", s->selected_timings.tRP);
	printk(BIOS_DEBUG, "\ttRCD: %d\n", s->selected_timings.tRCD);
	printk(BIOS_DEBUG, "\ttWR: %d\n", s->selected_timings.tWR);
	printk(BIOS_DEBUG, "\ttRFC: %d\n", s->selected_timings.tRFC);
	printk(BIOS_DEBUG, "\ttWTR: %d\n", s->selected_timings.tWTR);
	printk(BIOS_DEBUG, "\ttRRD: %d\n", s->selected_timings.tRRD);
	printk(BIOS_DEBUG, "\ttRTP: %d\n", s->selected_timings.tRTP);
}

static void find_fsb_speed(struct sysinfo *s)
{
	switch ((mchbar_read32(CLKCFG_MCHBAR) & CLKCFG_FSBCLK_MASK) >> CLKCFG_FSBCLK_SHIFT) {
	case 0x0:
		s->max_fsb = FSB_CLOCK_1066MHz;
		break;
	case 0x2:
		s->max_fsb = FSB_CLOCK_800MHz;
		break;
	case 0x4:
		s->max_fsb = FSB_CLOCK_1333MHz;
		break;
	default:
		s->max_fsb = FSB_CLOCK_800MHz;
		printk(BIOS_WARNING, "Can't detect FSB, setting 800MHz\n");
		break;
	}
	s->selected_timings.fsb_clk = s->max_fsb;
}

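/*
 * Read the SPD of every DIMM (EEPROM block read with an SMBus byte-read
 * fallback), refuse mixed DDR2/DDR3 populations, accumulate the worst-case
 * timings, then select the memory frequency, CAS latency and discrete
 * timings, and apply the stacked-mode workaround if needed.
 */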
static void decode_spd_select_timings(struct sysinfo *s)
{
	unsigned int device;
	u8 dram_type_mask = (1 << DDR2) | (1 << DDR3);
	u8 dimm_mask = 0;
	u8 raw_spd[256];
	int i, j;
	struct abs_timings saved_timings;
	memset(&saved_timings, 0, sizeof(saved_timings));
	saved_timings.cas_supported = UINT32_MAX;

	FOR_EACH_DIMM(i) {
		s->dimms[i].card_type = RAW_CARD_POPULATED;
		device = s->spd_map[i];
		if (!device) {
			s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
			continue;
		}
		switch (smbus_read_byte(s->spd_map[i], SPD_MEMORY_TYPE)) {
		case DDR2SPD:
			dram_type_mask &= 1 << DDR2;
			s->spd_type = DDR2;
			break;
		case DDR3SPD:
			dram_type_mask &= 1 << DDR3;
			s->spd_type = DDR3;
			break;
		default:
			s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
			continue;
		}
		if (!dram_type_mask)
			die("Mixing up dimm types is not supported!\n");

		printk(BIOS_DEBUG, "Decoding dimm %d\n", i);
		if (i2c_eeprom_read(device, 0, 128, raw_spd) != 128) {
			printk(BIOS_DEBUG,
				"i2c block operation failed, trying smbus byte operation.\n");
			for (j = 0; j < 128; j++)
				raw_spd[j] = smbus_read_byte(device, j);
		}

		if (s->spd_type == DDR2) {
			if (ddr2_save_dimminfo(i, raw_spd, &saved_timings, s)) {
				printk(BIOS_WARNING,
					"Encountered problems with SPD, skipping this DIMM.\n");
				s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
				continue;
			}
		} else { /* DDR3 */
			if (ddr3_save_dimminfo(i, raw_spd, &saved_timings, s)) {
				printk(BIOS_WARNING,
					"Encountered problems with SPD, skipping this DIMM.\n");
				/* something in decoded SPD was unsupported */
				s->dimms[i].card_type = RAW_CARD_UNPOPULATED;
				continue;
			}
		}
		dimm_mask |= (1 << i);
	}
	if (!dimm_mask)
		die("No memory installed.\n");

	if (s->spd_type == DDR2)
		select_cas_dramfreq_ddr2(s, &saved_timings);
	else
		select_cas_dramfreq_ddr3(s, &saved_timings);
	select_discrete_timings(s, &saved_timings);
	workaround_stacked_mode(s);
}

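/*
 * Encode the DIMM configuration of each channel: two bits per DIMM slot,
 * where 1 = single-rank x8, 2 = dual-rank x8 and 3 = single-rank x16.
 * Dual-rank x16 modules are not supported.
 */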
static void find_dimm_config(struct sysinfo *s)
{
	int chan, i;

	FOR_EACH_POPULATED_CHANNEL(s->dimms, chan) {
		FOR_EACH_POPULATED_DIMM_IN_CHANNEL(s->dimms, chan, i) {
			int dimm_config;
			if (s->dimms[i].ranks == 1) {
				if (s->dimms[i].width == 0) /* x8 */
					dimm_config = 1;
				else /* x16 */
					dimm_config = 3;
			} else {
				if (s->dimms[i].width == 0) /* x8 */
					dimm_config = 2;
				else
					die("Dual-rank x16 not supported\n");
			}
			s->dimm_config[chan] |= dimm_config << (i % DIMMS_PER_CHANNEL) * 2;
		}
		printk(BIOS_DEBUG, " Config[CH%d] : %d\n", chan, s->dimm_config[chan]);
	}
}

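/*
 * Bit 7 at offset 0xa2 of the LPC bridge is used as a "raminit in progress"
 * flag: it is set here before training and cleared at the end of
 * sdram_initialize(). If it is still set on entry, the previous attempt did
 * not complete, so the flag is cleared, bit 2 of host bridge register 0xf0 is
 * pulsed and a full reset is issued. On the reset/resume paths this also
 * reports channels that do not appear to be in self refresh.
 */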
static void checkreset_ddr2(int boot_path)
{
	u8 pmcon2;
	u32 pmsts;

	if (boot_path >= 1) {
		pmsts = mchbar_read32(PMSTS_MCHBAR);
		if (!(pmsts & 1))
			printk(BIOS_DEBUG, "Channel 0 possibly not in self refresh\n");
		if (!(pmsts & 2))
			printk(BIOS_DEBUG, "Channel 1 possibly not in self refresh\n");
	}

	pmcon2 = pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa2);

	if (pmcon2 & 0x80) {
		pmcon2 &= ~0x80;
		pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, pmcon2);

		/* do magic 0xf0 thing. */
		pci_and_config8(HOST_BRIDGE, 0xf0, ~(1 << 2));

		pci_or_config8(HOST_BRIDGE, 0xf0, (1 << 2));

		full_reset();
	}
	pmcon2 |= 0x80;
	pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, pmcon2);
}

/**
 * @param boot_path: 0 = normal, 1 = reset, 2 = resume from s3
 */
void sdram_initialize(int boot_path, const u8 *spd_map)
{
	struct sysinfo s, *ctrl_cached;
	u8 reg8;
	int fast_boot, cbmem_was_inited;
	size_t mrc_size;

	timestamp_add_now(TS_INITRAM_START);
	printk(BIOS_DEBUG, "Setting up RAM controller.\n");

	pci_write_config8(HOST_BRIDGE, 0xdf, 0xff);

	memset(&s, 0, sizeof(struct sysinfo));

	ctrl_cached = mrc_cache_current_mmap_leak(MRC_TRAINING_DATA,
						  MRC_CACHE_VERSION,
						  &mrc_size);

	if (!ctrl_cached || mrc_size < sizeof(s)) {
		if (boot_path == BOOT_PATH_RESUME) {
			/* Failed S3 resume, reset to come up cleanly */
			system_reset();
		} else if (boot_path == BOOT_PATH_WARM_RESET) {
			/* On a warm reset some of the DRAM calibrations fail,
			   therefore valid cached settings are required */
			full_reset();
		}
	}

	/* Verify the MRC cache for fast boot */
	if (boot_path != BOOT_PATH_RESUME && ctrl_cached) {
		/* Check the SPD checksums to make sure the DIMMs haven't been replaced */
		fast_boot = verify_spds(spd_map, ctrl_cached) == CB_SUCCESS;
		if (!fast_boot) {
			printk(BIOS_DEBUG,
				"SPD checksums don't match, DIMMs have been replaced\n");
		} else {
			find_fsb_speed(&s);
			fast_boot = s.max_fsb == ctrl_cached->max_fsb;
			if (!fast_boot)
				printk(BIOS_DEBUG,
					"CPU FSB does not match, CPU has been replaced\n");
		}
	} else {
		fast_boot = boot_path == BOOT_PATH_RESUME;
	}

	if (fast_boot) {
		printk(BIOS_DEBUG, "Using cached raminit settings\n");
		memcpy(&s, ctrl_cached, sizeof(s));
		s.boot_path = boot_path;
		mchinfo_ddr2(&s);
		print_selected_timings(&s);
	} else {
		s.boot_path = boot_path;
		s.spd_map[0] = spd_map[0];
		s.spd_map[1] = spd_map[1];
		s.spd_map[2] = spd_map[2];
		s.spd_map[3] = spd_map[3];
		checkreset_ddr2(s.boot_path);

		/* Detect DIMMs per channel */
		reg8 = pci_read_config8(HOST_BRIDGE, 0xe9);
		printk(BIOS_DEBUG, "Dimms per channel: %d\n", (reg8 & 0x10) ? 1 : 2);

		mchinfo_ddr2(&s);

		find_fsb_speed(&s);
		decode_spd_select_timings(&s);
		print_selected_timings(&s);
		find_dimm_config(&s);
	}

	do_raminit(&s, fast_boot);

	pci_and_config8(PCI_DEV(0, 0x1f, 0), 0xa2, (u8)~0x80);

	pci_or_config8(HOST_BRIDGE, 0xf4, 1);

	timestamp_add_now(TS_INITRAM_END);

	printk(BIOS_DEBUG, "RAM initialization finished.\n");

	int s3resume = boot_path == BOOT_PATH_RESUME;

	cbmem_was_inited = !cbmem_recovery(s3resume);
	if (!fast_boot)
		mrc_cache_stash_data(MRC_TRAINING_DATA, MRC_CACHE_VERSION, &s, sizeof(s));

	if (s3resume && !cbmem_was_inited) {
		/* Failed S3 resume, reset to come up cleanly */
		system_reset();
	}

	printk(BIOS_DEBUG, "Memory initialized\n");
}