blob: 1f667609f451edb8b66c5a60f4b345037d50d46e [file] [log] [blame]
Patrick Georgi2efc8802012-11-06 11:03:53 +01001/*
2 * This file is part of the coreboot project.
3 *
4 * Copyright (C) 2012 secunet Security Networks AG
5 *
6 * This program is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU General Public License as
8 * published by the Free Software Foundation; version 2 of
9 * the License.
10 *
11 * This program is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 * GNU General Public License for more details.
15 *
16 * You should have received a copy of the GNU General Public License
17 * along with this program; if not, write to the Free Software
18 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston,
19 * MA 02110-1301 USA
20 */
21
22#include <stdint.h>
Kyösti Mälkki931c1dc2014-06-30 09:40:19 +030023#include <stdlib.h>
Patrick Georgi2efc8802012-11-06 11:03:53 +010024#include <arch/cpu.h>
25#include <arch/io.h>
Patrick Georgi2efc8802012-11-06 11:03:53 +010026#include <device/pci_def.h>
27#include <device/pnp_def.h>
28#include <spd.h>
29#include <console/console.h>
30#include <lib.h>
31#include "delay.h"
32#include "gm45.h"
33
/*
 * GMCH variant lookup table, indexed by
 * [gfx_variant (CAPID0 bits 44:42)][render-frequency code 0..4].
 * The column order matches the render_freq encoding built in
 * get_gmch_info(); GMCH_UNKNOWN marks fuse combinations that do not
 * correspond to a known product.
 */
static const gmch_gfx_t gmch_gfx_types[][5] = {
/*  MAX_667MHz    MAX_533MHz    MAX_400MHz    MAX_333MHz    MAX_800MHz    */
  { GMCH_UNKNOWN, GMCH_UNKNOWN, GMCH_UNKNOWN, GMCH_UNKNOWN, GMCH_UNKNOWN },
  { GMCH_GM47,    GMCH_GM45,    GMCH_UNKNOWN, GMCH_UNKNOWN, GMCH_GM49    },
  { GMCH_GE45,    GMCH_GE45,    GMCH_GE45,    GMCH_GE45,    GMCH_GE45    },
  { GMCH_UNKNOWN, GMCH_GL43,    GMCH_GL40,    GMCH_UNKNOWN, GMCH_UNKNOWN },
  { GMCH_UNKNOWN, GMCH_GS45,    GMCH_GS40,    GMCH_UNKNOWN, GMCH_UNKNOWN },
  { GMCH_UNKNOWN, GMCH_UNKNOWN, GMCH_UNKNOWN, GMCH_UNKNOWN, GMCH_UNKNOWN },
  { GMCH_UNKNOWN, GMCH_UNKNOWN, GMCH_UNKNOWN, GMCH_UNKNOWN, GMCH_UNKNOWN },
  { GMCH_PM45,    GMCH_PM45,    GMCH_PM45,    GMCH_PM45,    GMCH_PM45    },
};
45
/*
 * Probe chipset stepping, CPU core count and the GMCH capability fuses
 * (D0F0 CAPID0) and fill in sysinfo: GMCH variant, maximum DDR2/DDR3,
 * FSB and render clocks, and feature enables (iTPM, ME, AMT, VT-d, TXT,
 * IGD, PEG). Dies on unknown stepping or FSB capability.
 */
void get_gmch_info(sysinfo_t *sysinfo)
{
	/* The stepping is encoded in the revision ID of device 0:0.0. */
	sysinfo->stepping = pci_read_config8(PCI_DEV(0, 0, 0), PCI_CLASS_REVISION);
	if ((sysinfo->stepping > STEPPING_B3) &&
			(sysinfo->stepping != STEPPING_CONVERSION_A1))
		die("Unknown stepping.\n");
	if (sysinfo->stepping <= STEPPING_B3)
		printk(BIOS_DEBUG, "Stepping %c%d\n", 'A' + sysinfo->stepping / 4, sysinfo->stepping % 4);
	else
		printk(BIOS_DEBUG, "Conversion stepping A1\n");

	/* CPUID leaf 4: EAX[31:26] holds (maximum core count - 1). */
	const u32 eax = cpuid_ext(0x04, 0).eax;
	sysinfo->cores = ((eax >> 26) & 0x3f) + 1;
	printk(BIOS_SPEW, "%d CPU cores\n", sysinfo->cores);

	/* CAPID0 is a wide fuse register read in 32/16-bit chunks; the
	   arithmetic in the shift expressions (e.g. 79-64) documents the
	   bit position within the whole register. All feature fuses are
	   active-low "disable" bits. */
	u32 capid = pci_read_config16(PCI_DEV(0, 0, 0), D0F0_CAPID0+8);
	if (!(capid & (1<<(79-64)))) { /* CAPID0 bit 79: iTPM disable */
		printk(BIOS_SPEW, "iTPM enabled\n");
	}

	capid = pci_read_config32(PCI_DEV(0, 0, 0), D0F0_CAPID0+4);
	if (!(capid & (1<<(57-32)))) { /* CAPID0 bit 57: ME disable */
		printk(BIOS_SPEW, "ME enabled\n");
	}

	if (!(capid & (1<<(56-32)))) { /* CAPID0 bit 56: AMT disable */
		printk(BIOS_SPEW, "AMT enabled\n");
	}

	/* CAPID0 bit 53: DDR2 speed cap (set = 667 MHz max, clear = 800). */
	sysinfo->max_ddr2_mhz = (capid & (1<<(53-32)))?667:800;
	printk(BIOS_SPEW, "capable of DDR2 of %d MHz or lower\n", sysinfo->max_ddr2_mhz);

	if (!(capid & (1<<(48-32)))) { /* CAPID0 bit 48: VT-d disable */
		printk(BIOS_SPEW, "VT-d enabled\n");
	}

	/* Bits 44:42 select the graphics variant, bit 50 plus bits 36:35
	   encode the maximum render clock; together they index
	   gmch_gfx_types[][] above. */
	const u32 gfx_variant = (capid>>(42-32)) & 0x7;
	const u32 render_freq = ((capid>>(50-32) & 0x1) << 2) | ((capid>>(35-32)) & 0x3);
	if (render_freq <= 4)
		sysinfo->gfx_type = gmch_gfx_types[gfx_variant][render_freq];
	else
		sysinfo->gfx_type = GMCH_UNKNOWN;
	/* GS45 defaults to low-power mode; may be overridden by mainboard. */
	sysinfo->gs45_low_power_mode = 0;
	switch (sysinfo->gfx_type) {
	case GMCH_GM45:
		printk(BIOS_SPEW, "GMCH: GM45\n");
		break;
	case GMCH_GM47:
		printk(BIOS_SPEW, "GMCH: GM47\n");
		break;
	case GMCH_GM49:
		printk(BIOS_SPEW, "GMCH: GM49\n");
		break;
	case GMCH_GE45:
		printk(BIOS_SPEW, "GMCH: GE45\n");
		break;
	case GMCH_GL40:
		printk(BIOS_SPEW, "GMCH: GL40\n");
		break;
	case GMCH_GL43:
		printk(BIOS_SPEW, "GMCH: GL43\n");
		break;
	case GMCH_GS40:
		printk(BIOS_SPEW, "GMCH: GS40\n");
		break;
	case GMCH_GS45:
		printk(BIOS_SPEW, "GMCH: GS45, using low power mode by default\n");
		sysinfo->gs45_low_power_mode = 1;
		break;
	case GMCH_PM45:
		printk(BIOS_SPEW, "GMCH: PM45\n");
		break;
	case GMCH_UNKNOWN:
		printk(BIOS_SPEW, "unknown GMCH\n");
		break;
	}

	/* CAPID0 bit 37: TXT disable (active low). */
	sysinfo->txt_enabled = !(capid & (1 << (37-32)));
	if (sysinfo->txt_enabled) {
		printk(BIOS_SPEW, "TXT enabled\n");
	}

	/* Decode maximum render clock from the same render_freq code. */
	switch (render_freq) {
	case 4:
		sysinfo->max_render_mhz = 800;
		break;
	case 0:
		sysinfo->max_render_mhz = 667;
		break;
	case 1:
		sysinfo->max_render_mhz = 533;
		break;
	case 2:
		sysinfo->max_render_mhz = 400;
		break;
	case 3:
		sysinfo->max_render_mhz = 333;
		break;
	default:
		printk(BIOS_SPEW, "Unknown render frequency\n");
		sysinfo->max_render_mhz = 0;
		break;
	}
	if (sysinfo->max_render_mhz != 0) {
		printk(BIOS_SPEW, "Render frequency: %d MHz\n", sysinfo->max_render_mhz);
	}

	if (!(capid & (1<<(33-32)))) { /* CAPID0 bit 33: IGD disable */
		printk(BIOS_SPEW, "IGD enabled\n");
	}

	if (!(capid & (1<<(32-32)))) { /* CAPID0 bit 32: PEG disable */
		printk(BIOS_SPEW, "PCIe-to-GMCH enabled\n");
	}

	/* Lowest 32 bits of CAPID0. */
	capid = pci_read_config32(PCI_DEV(0, 0, 0), D0F0_CAPID0);

	/* Bits 31:30: DDR3 speed cap. */
	u32 ddr_cap = capid>>30 & 0x3;
	switch (ddr_cap) {
	case 0:
		sysinfo->max_ddr3_mt = 1067;
		break;
	case 1:
		sysinfo->max_ddr3_mt = 800;
		break;
	case 2:
	case 3:
		printk(BIOS_SPEW, "GMCH not DDR3 capable\n");
		sysinfo->max_ddr3_mt = 0;
		break;
	}
	if (sysinfo->max_ddr3_mt != 0) {
		printk(BIOS_SPEW, "GMCH supports DDR3 with %d MT or less\n", sysinfo->max_ddr3_mt);
	}

	/* Bits 29:28: FSB speed cap; 0 is reserved/unknown here. */
	const unsigned max_fsb = (capid >> 28) & 0x3;
	switch (max_fsb) {
	case 1:
		sysinfo->max_fsb_mhz = 1067;
		break;
	case 2:
		sysinfo->max_fsb_mhz = 800;
		break;
	case 3:
		sysinfo->max_fsb_mhz = 667;
		break;
	default:
		die("unknown FSB capability\n");
		break;
	}
	if (sysinfo->max_fsb_mhz != 0) {
		printk(BIOS_SPEW, "GMCH supports FSB with up to %d MHz\n", sysinfo->max_fsb_mhz);
	}
	/* Store as 0-based index (0 = 1067, 1 = 800, 2 = 667). */
	sysinfo->max_fsb = max_fsb - 1;
}
201
/*
 * Detect if the system went through an interrupted RAM init or is
 * inconsistent. If so, initiate a cold reboot. Otherwise mark the system
 * to be in RAM init, so this function would detect it on an erroneous
 * reboot.
 */
void enter_raminit_or_reset(void)
{
	/* Interrupted RAM init or inconsistent system?
	   Register 0xa2 of the LPC bridge (0:1f.0) holds the relevant
	   status bits; bit 7 is (ab)used as an "in RAM init" flag.
	   NOTE(review): presumably GEN_PMCON_3 — confirm against ICH9M
	   documentation. */
	u8 reg8 = pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa2);

	if (reg8 & (1 << 2)) { /* S4-assertion-width violation */
		/* Ignore S4-assertion-width violation like original BIOS. */
		printk(BIOS_WARNING,
			"WARNING: Ignoring S4-assertion-width violation.\n");
		/* Bit2 is R/WC, so it will clear itself below. */
	}

	if (reg8 & (1 << 7)) { /* interrupted RAM init */
		/* Don't enable S4-assertion stretch. Makes trouble on roda/rk9.
		reg8 = pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa4);
		pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa4, reg8 | 0x08);
		*/

		/* Clear bit7. */
		pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, reg8 & ~(1 << 7));

		printk(BIOS_INFO, "Interrupted RAM init, reset required.\n");
		gm45_early_reset();
	}
	/* Mark system to be in RAM init. */
	pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, reg8 | (1 << 7));
}
234
235
236/* For a detected DIMM, test the value of an SPD byte to
237 match the expected value after masking some bits. */
238static int test_dimm(int dimm, int addr, int bitmask, int expected)
239{
240 return (smbus_read_byte(DIMM0 + dimm, addr) & bitmask) == expected;
241}
242
/* This function dies if dimm is unsuitable for the chipset.
   The byte offsets checked below follow the JEDEC DDR3 SPD layout:
   3 = module type, 4 = density/banks, 7 = organization (ranks/width),
   8 = bus width/ECC, 10/11 = medium timebase, 62 = reference raw card. */
static void verify_ddr3_dimm(int dimm)
{
	/* Byte 3, bits 3:0 = 3: SO-DIMM module type. */
	if (!test_dimm(dimm, 3, 15, 3))
		die("Chipset only supports SO-DIMM\n");

	/* Byte 8, bits 4:3: bus width extension; must be 0 (no ECC). */
	if (!test_dimm(dimm, 8, 0x18, 0))
		die("Chipset doesn't support ECC RAM\n");

	/* Byte 7, bits 5:3: rank count code; 0 = 1 rank, 1 = 2 ranks. */
	if (!test_dimm(dimm, 7, 0x38, 0) &&
			!test_dimm(dimm, 7, 0x38, 8))
		die("Chipset wants single or double sided DIMMs\n");

	/* Byte 7, bits 2:0: device width; 1 = x8, 2 = x16. */
	if (!test_dimm(dimm, 7, 7, 1) &&
			!test_dimm(dimm, 7, 7, 2))
		die("Chipset requires x8 or x16 width\n");

	/* Byte 4, bits 3:0: density code 0..3 = 256Mb..2Gb. */
	if (!test_dimm(dimm, 4, 0x0f, 0) &&
			!test_dimm(dimm, 4, 0x0f, 1) &&
			!test_dimm(dimm, 4, 0x0f, 2) &&
			!test_dimm(dimm, 4, 0x0f, 3))
		die("Chipset requires 256Mb, 512Mb, 1Gb or 2Gb chips.");

	/* Byte 4, bits 6:4 = 0: 8 internal banks. */
	if (!test_dimm(dimm, 4, 0x70, 0))
		die("Chipset requires 8 banks on DDR3\n");

	/* How to check if burst length is 8?
	   Other values are not supported, are they even possible? */

	/* Bytes 10/11: medium timebase dividend/divisor; the timing code
	   below assumes MTB = 1/8 ns, i.e. 8000/clock conversions. */
	if (!test_dimm(dimm, 10, 0xff, 1))
		die("Code assumes 1/8ns MTB\n");

	if (!test_dimm(dimm, 11, 0xff, 8))
		die("Code assumes 1/8ns MTB\n");

	/* Byte 62, bits 4:0 (and bit 7 via mask 0x9f): raw card A/B/C/D/F. */
	if (!test_dimm(dimm, 62, 0x9f, 0) &&
			!test_dimm(dimm, 62, 0x9f, 1) &&
			!test_dimm(dimm, 62, 0x9f, 2) &&
			!test_dimm(dimm, 62, 0x9f, 3) &&
			!test_dimm(dimm, 62, 0x9f, 5))
		die("Only raw card types A, B, C, D and F are supported.\n");
}
285
/* Walk the DIMM bitmask and validate every populated slot against the
   chipset's DDR3 constraints (dies on the first violation). */
static void verify_ddr3(int mask)
{
	int dimm;

	for (dimm = 0; mask != 0; mask >>= 1, ++dimm) {
		if (mask & 1)
			verify_ddr3_dimm(dimm);
	}
}
298
299
/* RAM characteristics read from SPD, one entry per channel.
   Timing fields (tAAmin, tCKmin, tRAS, tRP, tRCD, tWR) are in SPD
   medium-timebase units (1/8 ns, enforced by verify_ddr3_dimm). */
typedef struct {
	int dimm_mask;	/* One bit per SMBus slot; only even bits are used. */
	struct {
		unsigned int rows;		/* Row address bits. */
		unsigned int cols;		/* Column address bits. */
		unsigned int chip_capacity;	/* SPD density code: 0=256Mb .. 3=2Gb. */
		unsigned int banks;		/* Always 8 for DDR3 here. */
		unsigned int ranks;
		unsigned int cas_latencies;	/* Bitmask, bit x set = CAS x supported. */
		unsigned int tAAmin;		/* Minimum CAS latency time, in MTB. */
		unsigned int tCKmin;		/* Minimum cycle time, in MTB. */
		unsigned int width;		/* SPD device-width code: 1=x8, 2=x16. */
		unsigned int tRAS;
		unsigned int tRP;
		unsigned int tRCD;
		unsigned int tWR;
		unsigned int page_size;		/* Per-chip page size in bytes. */
		unsigned int raw_card;		/* Reference raw card, 0 = 'A'. */
	} channel[2];
} spdinfo_t;
/*
 * This function collects RAM characteristics from SPD, assuming that RAM
 * is generally within chipset's requirements, since verify_ddr3() passed.
 * Byte offsets follow the JEDEC DDR3 SPD layout.
 */
static void collect_ddr3(spdinfo_t *const config)
{
	int mask = config->dimm_mask;
	int cur = 0;
	while (mask != 0) {
		if (mask & 1) {
			int tmp;
			const int smb_addr = DIMM0 + cur*2;

			/* Byte 5: rows in bits 5:3 (+12), columns in bits 2:0 (+9). */
			config->channel[cur].rows = ((smbus_read_byte(smb_addr, 5) >> 3) & 7) + 12;
			config->channel[cur].cols = (smbus_read_byte(smb_addr, 5) & 7) + 9;

			/* Byte 4, bits 3:0: density code (0=256Mb .. 3=2Gb). */
			config->channel[cur].chip_capacity = smbus_read_byte(smb_addr, 4) & 0xf;

			config->channel[cur].banks = 8; /* GM45 only accepts this for DDR3.
							   verify_ddr3() fails for other values. */
			/* Byte 7, bits 5:3: rank count - 1. */
			config->channel[cur].ranks = ((smbus_read_byte(smb_addr, 7) >> 3) & 7) + 1;

			/* Bytes 14/15: supported-CAS bitmask, starting at CL4. */
			config->channel[cur].cas_latencies =
				((smbus_read_byte(smb_addr, 15) << 8) | smbus_read_byte(smb_addr, 14))
				<< 4; /* so bit x is CAS x */
			config->channel[cur].tAAmin = smbus_read_byte(smb_addr, 16); /* in MTB */
			config->channel[cur].tCKmin = smbus_read_byte(smb_addr, 12); /* in MTB */

			/* Byte 7, bits 2:0: device-width code (1=x8, 2=x16). */
			config->channel[cur].width = smbus_read_byte(smb_addr, 7) & 7;
			config->channel[cur].page_size = config->channel[cur].width *
				(1 << config->channel[cur].cols); /* in Bytes */

			/* tRAS spans 12 bits: high nibble in byte 21, low byte in 22. */
			tmp = smbus_read_byte(smb_addr, 21);
			config->channel[cur].tRAS = smbus_read_byte(smb_addr, 22) | ((tmp & 0xf) << 8);
			config->channel[cur].tRP = smbus_read_byte(smb_addr, 20);
			config->channel[cur].tRCD = smbus_read_byte(smb_addr, 18);
			config->channel[cur].tWR = smbus_read_byte(smb_addr, 17);

			config->channel[cur].raw_card = smbus_read_byte(smb_addr, 62) & 0x1f;
		}
		cur++;
		mask >>= 2; /* Only every other address is used. */
	}
}
364
/* Ceiling division; ROUNDUP_DIV_THIS updates its first argument in place. */
#define ROUNDUP_DIV(val, by) CEIL_DIV(val, by)
#define ROUNDUP_DIV_THIS(val, by) val = ROUNDUP_DIV(val, by)
367static fsb_clock_t read_fsb_clock(void)
368{
369 switch (MCHBAR32(CLKCFG_MCHBAR) & CLKCFG_FSBCLK_MASK) {
370 case 6:
371 return FSB_CLOCK_1067MHz;
372 case 2:
373 return FSB_CLOCK_800MHz;
374 case 3:
375 return FSB_CLOCK_667MHz;
376 default:
377 die("Unsupported FSB clock.\n");
378 }
379}
380static mem_clock_t clock_index(const unsigned int clock)
381{
382 switch (clock) {
383 case 533: return MEM_CLOCK_533MHz;
384 case 400: return MEM_CLOCK_400MHz;
385 case 333: return MEM_CLOCK_333MHz;
386 default: die("Unknown clock value.\n");
387 }
388 return -1; /* Won't be reached. */
389}
/* Round *clock down to the highest supported DRAM clock
   (533, 400 or 333 MHz); anything below 333 becomes 0. */
static void normalize_clock(unsigned int *const clock)
{
	static const unsigned int supported[] = { 533, 400, 333 };
	unsigned int idx;

	for (idx = 0; idx < sizeof(supported) / sizeof(supported[0]); ++idx) {
		if (*clock >= supported[idx]) {
			*clock = supported[idx];
			return;
		}
	}
	*clock = 0;
}
/* Step *clock down to the next lower supported value (or 0). */
static void lower_clock(unsigned int *const clock)
{
	*clock -= 1;
	normalize_clock(clock);
}
/*
 * Select the highest DRAM clock and matching CAS latency that all
 * populated DIMMs, the chipset and the FSB support. Stores the result
 * in sysinfo->selected_timings and returns the chosen tCK in MTB
 * (1/8 ns) units. Dies if no compatible pair exists.
 */
static unsigned int find_common_clock_cas(sysinfo_t *const sysinfo,
					  const spdinfo_t *const spdinfo)
{
	/* various constraints must be fulfilled:
	   CAS * tCK < 20ns == 160MTB
	   tCK_max >= tCK >= tCK_min
	   CAS >= roundup(tAA_min/tCK)
	   CAS supported
	   Clock(MHz) = 1000 / tCK(ns)
	   Clock(MHz) = 8000 / tCK(MTB)
	   AND BTW: Clock(MT) = 2000 / tCK(ns) - intel uses MTs but calls them MHz
	 */
	int i;

	/* Calculate common cas_latencies mask, tCKmin and tAAmin.
	   (Worst case over the populated channels: intersection of CAS
	   masks, maximum of the per-DIMM minimum timings.) */
	unsigned int cas_latencies = (unsigned int)-1;
	unsigned int tCKmin = 0, tAAmin = 0;
	FOR_EACH_POPULATED_CHANNEL(sysinfo->dimms, i) {
		cas_latencies &= spdinfo->channel[i].cas_latencies;
		if (spdinfo->channel[i].tCKmin > tCKmin)
			tCKmin = spdinfo->channel[i].tCKmin;
		if (spdinfo->channel[i].tAAmin > tAAmin)
			tAAmin = spdinfo->channel[i].tAAmin;
	}

	/* Get actual value of fsb clock. */
	sysinfo->selected_timings.fsb_clock = read_fsb_clock();
	unsigned int fsb_mhz = 0;
	switch (sysinfo->selected_timings.fsb_clock) {
	case FSB_CLOCK_1067MHz: fsb_mhz = 1067; break;
	case FSB_CLOCK_800MHz: fsb_mhz = 800; break;
	case FSB_CLOCK_667MHz: fsb_mhz = 667; break;
	}

	/* DRAM clock (in MHz) may not exceed half the DDR3 transfer rate
	   nor half the FSB rate. */
	unsigned int clock = 8000 / tCKmin;
	if ((clock > sysinfo->max_ddr3_mt / 2) || (clock > fsb_mhz / 2)) {
		int new_clock = min(sysinfo->max_ddr3_mt / 2, fsb_mhz / 2);
		printk(BIOS_SPEW, "DIMMs support %d MHz, but chipset only runs at up to %d. Limiting...\n",
			clock, new_clock);
		clock = new_clock;
	}
	normalize_clock(&clock);

	/* Find compatible clock / CAS pair: for each candidate clock
	   (highest first), take the smallest supported CAS that meets
	   tAAmin; accept if CAS*tCK stays below 20ns (160 MTB). */
	unsigned int tCKproposed;
	unsigned int CAS;
	while (1) {
		if (!clock)
			die("Couldn't find compatible clock / CAS settings.\n");
		tCKproposed = 8000 / clock;
		CAS = ROUNDUP_DIV(tAAmin, tCKproposed);
		printk(BIOS_SPEW, "Trying CAS %u, tCK %u.\n", CAS, tCKproposed);
		for (; CAS <= DDR3_MAX_CAS; ++CAS)
			if (cas_latencies & (1 << CAS))
				break;
		if ((CAS <= DDR3_MAX_CAS) && (CAS * tCKproposed < 160)) {
			/* Found good CAS. */
			printk(BIOS_SPEW, "Found compatible clock / CAS pair: %u / %u.\n", clock, CAS);
			break;
		}
		lower_clock(&clock);
	}
	sysinfo->selected_timings.CAS = CAS;
	sysinfo->selected_timings.mem_clock = clock_index(clock);

	return tCKproposed;
}
473
/*
 * Derive the remaining DRAM timings (tRAS/tRP/tRCD/tWR/tRFC/tRD/tRRD/
 * tFAW/tWL) in DRAM-clock units from the SPD data and the clock/CAS
 * selected by find_common_clock_cas(), and store them in
 * sysinfo->selected_timings.
 *
 * tCLK is the selected cycle time in MTB (1/8 ns) units.
 */
static void calculate_derived_timings(sysinfo_t *const sysinfo,
				      const unsigned int tCLK,
				      const spdinfo_t *const spdinfo)
{
	int i;

	/* Calculate common tRASmin, tRPmin, tRCDmin and tWRmin: the
	   maximum of the per-DIMM minimums, i.e. the worst case all
	   DIMMs can satisfy. Values are in MTB until converted below. */
	unsigned int tRASmin = 0, tRPmin = 0, tRCDmin = 0, tWRmin = 0;
	FOR_EACH_POPULATED_CHANNEL(sysinfo->dimms, i) {
		if (spdinfo->channel[i].tRAS > tRASmin)
			tRASmin = spdinfo->channel[i].tRAS;
		if (spdinfo->channel[i].tRP > tRPmin)
			tRPmin = spdinfo->channel[i].tRP;
		if (spdinfo->channel[i].tRCD > tRCDmin)
			tRCDmin = spdinfo->channel[i].tRCD;
		if (spdinfo->channel[i].tWR > tWRmin)
			tWRmin = spdinfo->channel[i].tWR;
	}
	/* Convert from MTB to DRAM clock cycles, rounding up. */
	ROUNDUP_DIV_THIS(tRASmin, tCLK);
	ROUNDUP_DIV_THIS(tRPmin, tCLK);
	ROUNDUP_DIV_THIS(tRCDmin, tCLK);
	ROUNDUP_DIV_THIS(tWRmin, tCLK);

	/* Lookup tRFC and calculate common tRFCmin.
	   Rows are indexed by the mem_clock enumerator (533/400/333),
	   columns by the SPD density code. */
	const unsigned int tRFC_from_clock_and_cap[][4] = {
	/*         CAP_256M CAP_512M CAP_1G CAP_2G */
	/* 533MHz */ {  40,      56,     68,    104 },
	/* 400MHz */ {  30,      42,     51,     78 },
	/* 333MHz */ {  25,      35,     43,     65 },
	};
	unsigned int tRFCmin = 0;
	FOR_EACH_POPULATED_CHANNEL(sysinfo->dimms, i) {
		const unsigned int tRFC = tRFC_from_clock_and_cap
			[sysinfo->selected_timings.mem_clock][spdinfo->channel[i].chip_capacity];
		if (tRFC > tRFCmin)
			tRFCmin = tRFC;
	}

	/* Calculate common tRD from CAS and FSB and DRAM clocks:
	   CAS plus an FSB-speed dependent penalty. */
	unsigned int tRDmin = sysinfo->selected_timings.CAS;
	switch (sysinfo->selected_timings.fsb_clock) {
	case FSB_CLOCK_667MHz:
		tRDmin += 1;
		break;
	case FSB_CLOCK_800MHz:
		tRDmin += 2;
		break;
	case FSB_CLOCK_1067MHz:
		tRDmin += 3;
		if (sysinfo->selected_timings.mem_clock == MEM_CLOCK_1067MT)
			tRDmin += 1;
		break;
	}

	/* Calculate common tRRDmin from the page size (doubled impact
	   at 1067MT). */
	unsigned int tRRDmin = 0;
	FOR_EACH_POPULATED_CHANNEL(sysinfo->dimms, i) {
		unsigned int tRRD = 2 + (spdinfo->channel[i].page_size / 1024);
		if (sysinfo->selected_timings.mem_clock == MEM_CLOCK_1067MT)
			tRRD += (spdinfo->channel[i].page_size / 1024);
		if (tRRD > tRRDmin)
			tRRDmin = tRRD;
	}

	/* Lookup and calculate common tFAWmin.
	   Rows: page size (1K or 2K); columns: mem_clock enumerator. */
	unsigned int tFAW_from_pagesize_and_clock[][3] = {
	/*       533MHz 400MHz 333MHz */
	/* 1K */ {  20,    15,    13  },
	/* 2K */ {  27,    20,    17  },
	};
	unsigned int tFAWmin = 0;
	FOR_EACH_POPULATED_CHANNEL(sysinfo->dimms, i) {
		const unsigned int tFAW = tFAW_from_pagesize_and_clock
			[spdinfo->channel[i].page_size / 1024 - 1]
			[sysinfo->selected_timings.mem_clock];
		if (tFAW > tFAWmin)
			tFAWmin = tFAW;
	}

	/* Refresh rate is fixed. Write latency depends only on the
	   DRAM clock: 6 cycles at 1067MT, otherwise 5. */
	unsigned int tWL;
	if (sysinfo->selected_timings.mem_clock == MEM_CLOCK_1067MT) {
		tWL = 6;
	} else {
		tWL = 5;
	}

	printk(BIOS_SPEW, "Timing values:\n"
		" tCLK:  %3u\n"
		" tRAS:  %3u\n"
		" tRP:   %3u\n"
		" tRCD:  %3u\n"
		" tRFC:  %3u\n"
		" tWR:   %3u\n"
		" tRD:   %3u\n"
		" tRRD:  %3u\n"
		" tFAW:  %3u\n"
		" tWL:   %3u\n",
		tCLK, tRASmin, tRPmin, tRCDmin, tRFCmin, tWRmin, tRDmin, tRRDmin, tFAWmin, tWL);

	sysinfo->selected_timings.tRAS = tRASmin;
	sysinfo->selected_timings.tRP = tRPmin;
	sysinfo->selected_timings.tRCD = tRCDmin;
	sysinfo->selected_timings.tRFC = tRFCmin;
	sysinfo->selected_timings.tWR = tWRmin;
	sysinfo->selected_timings.tRD = tRDmin;
	sysinfo->selected_timings.tRRD = tRRDmin;
	sysinfo->selected_timings.tFAW = tFAWmin;
	sysinfo->selected_timings.tWL = tWL;
}
584
/*
 * Detect populated DIMM slots via SPD, verify and collect their
 * characteristics, select common clock/CAS and derived timings, and
 * fill in sysinfo->dimms and sysinfo->selected_timings.
 * Dies when no DIMM is found, DIMM types are mixed, or the type is
 * not DDR3.
 */
static void collect_dimm_config(sysinfo_t *const sysinfo)
{
	int i;
	spdinfo_t spdinfo;

	spdinfo.dimm_mask = 0;
	sysinfo->spd_type = 0;

	/* at most 2 dimms, on even slots */
	for (i = 0; i < 4; i += 2) {
		/* SPD byte 2: memory type (7, 8 and 0xb accepted here). */
		const u8 spd = smbus_read_byte(DIMM0 + i, 2);
		if ((spd == 7) || (spd == 8) || (spd == 0xb)) {
			spdinfo.dimm_mask |= 1 << i;
			if (sysinfo->spd_type && sysinfo->spd_type != spd) {
				die("Multiple types of DIMM installed in the system, don't do that!\n");
			}
			sysinfo->spd_type = spd;
		}
	}
	if (spdinfo.dimm_mask == 0) {
		die("Could not find any DIMM.\n");
	}

	/* Normalize spd_type to 1, 2, 3 (maps 7->1, 8->2, 0xb->3,
	   compared against the DDR2/DDR3 constants below). */
	sysinfo->spd_type = (sysinfo->spd_type & 1) | ((sysinfo->spd_type & 8) >> 2);
	printk(BIOS_SPEW, "DDR mask %x, DDR %d\n", spdinfo.dimm_mask, sysinfo->spd_type);

	if (sysinfo->spd_type == DDR2) {
		die("DDR2 not supported at this time.\n");
	} else if (sysinfo->spd_type == DDR3) {
		verify_ddr3(spdinfo.dimm_mask);
		collect_ddr3(&spdinfo);
	} else {
		die("Will never support DDR1.\n");
	}

	/* Dump the collected SPD data for each populated channel. */
	for (i = 0; i < 2; i++) {
		if ((spdinfo.dimm_mask >> (i*2)) & 1) {
			printk(BIOS_SPEW, "Bank %d populated:\n"
					  " Raw card type: %4c\n"
					  " Row addr bits: %4u\n"
					  " Col addr bits: %4u\n"
					  " byte width: %4u\n"
					  " page size: %4u\n"
					  " banks: %4u\n"
					  " ranks: %4u\n"
					  " tAAmin: %3u\n"
					  " tCKmin: %3u\n"
					  "  Max clock: %3u MHz\n"
					  " CAS: 0x%04x\n",
				i, spdinfo.channel[i].raw_card + 'A',
				spdinfo.channel[i].rows, spdinfo.channel[i].cols,
				spdinfo.channel[i].width, spdinfo.channel[i].page_size,
				spdinfo.channel[i].banks, spdinfo.channel[i].ranks,
				spdinfo.channel[i].tAAmin, spdinfo.channel[i].tCKmin,
				8000 / spdinfo.channel[i].tCKmin, spdinfo.channel[i].cas_latencies);
		}
	}

	/* Record raw card type per channel; 0 marks an empty channel
	   (populated cards get raw_card + 0xa). */
	FOR_EACH_CHANNEL(i) {
		sysinfo->dimms[i].card_type =
			(spdinfo.dimm_mask & (1 << (i * 2))) ? spdinfo.channel[i].raw_card + 0xa : 0;
	}

	/* Find common memory clock and CAS. */
	const unsigned int tCLK = find_common_clock_cas(sysinfo, &spdinfo);

	/* Calculate other timings from clock and CAS. */
	calculate_derived_timings(sysinfo, tCLK, &spdinfo);

	/* Initialize DIMM infos. */
	/* Always prefer interleaved over async channel mode. */
	FOR_EACH_CHANNEL(i) {
		IF_CHANNEL_POPULATED(sysinfo->dimms, i) {
			sysinfo->dimms[i].banks = spdinfo.channel[i].banks;
			sysinfo->dimms[i].ranks = spdinfo.channel[i].ranks;

			/* .width is 1 for x8 or 2 for x16, bus width is 8 bytes. */
			const unsigned int chips_per_rank = 8 / spdinfo.channel[i].width;

			sysinfo->dimms[i].chip_width = spdinfo.channel[i].width;
			sysinfo->dimms[i].chip_capacity = spdinfo.channel[i].chip_capacity;
			sysinfo->dimms[i].page_size = spdinfo.channel[i].page_size * chips_per_rank;
			sysinfo->dimms[i].rank_capacity_mb =
				/* offset of chip_capacity is 8 (256M), therefore, add 8
				   chip_capacity is in Mbit, we want MByte, therefore, subtract 3 */
				(1 << (spdinfo.channel[i].chip_capacity + 8 - 3)) * chips_per_rank;
		}
	}
	if (CHANNEL_IS_POPULATED(sysinfo->dimms, 0) &&
			CHANNEL_IS_POPULATED(sysinfo->dimms, 1))
		sysinfo->selected_timings.channel_mode = CHANNEL_MODE_DUAL_INTERLEAVED;
	else
		sysinfo->selected_timings.channel_mode = CHANNEL_MODE_SINGLE;
}
680
/*
 * On a warm boot, DRAM must have been held in self refresh; if the
 * status bits say it was not, the contents are untrustworthy and a
 * cold reset is forced.
 */
static void reset_on_bad_warmboot(void)
{
	/* Check self refresh channel status. */
	const u32 reg = MCHBAR32(PMSTS_MCHBAR);
	/* Clear status bits. R/WC */
	MCHBAR32(PMSTS_MCHBAR) = reg;
	if ((reg & PMSTS_WARM_RESET) && !(reg & PMSTS_BOTH_SELFREFRESH)) {
		printk(BIOS_INFO, "DRAM was not in self refresh "
			"during warm boot, reset required.\n");
		gm45_early_reset();
	}
}
693
/*
 * Program the DRAM clock in CLKCFG to match the selected memory
 * frequency, using the set/latch/clear CLKCFG_UPDATE handshake.
 * The 1067MHz-FSB/667MT-DRAM combination needs an extra set of PLL
 * parameter writes (magic values from vendor reference code —
 * undocumented; do not change).
 */
static void set_system_memory_frequency(const timings_t *const timings)
{
	MCHBAR16(CLKCFG_MCHBAR + 0x60) &= ~(1 << 15);
	MCHBAR16(CLKCFG_MCHBAR + 0x48) &= ~(1 << 15);

	/* Calculate wanted frequency setting (enum is inverted:
	   higher mem_clock value = lower encoding). */
	const int want_freq = 6 - timings->mem_clock;

	/* Read current memory frequency. */
	const u32 clkcfg = MCHBAR32(CLKCFG_MCHBAR);
	int cur_freq = (clkcfg & CLKCFG_MEMCLK_MASK) >> CLKCFG_MEMCLK_SHIFT;
	if (0 == cur_freq) {
		/* Try memory frequency from scratchpad. */
		printk(BIOS_DEBUG, "Reading current memory frequency from scratchpad.\n");
		cur_freq = (MCHBAR16(SSKPD_MCHBAR) & SSKPD_CLK_MASK) >> SSKPD_CLK_SHIFT;
	}

	if (cur_freq != want_freq) {
		printk(BIOS_DEBUG, "Changing memory frequency: old %x, new %x.\n", cur_freq, want_freq);
		/* When writing new frequency setting, reset, then set update bit. */
		MCHBAR32(CLKCFG_MCHBAR) = (MCHBAR32(CLKCFG_MCHBAR) & ~(CLKCFG_UPDATE | CLKCFG_MEMCLK_MASK)) |
						(want_freq << CLKCFG_MEMCLK_SHIFT);
		MCHBAR32(CLKCFG_MCHBAR) = (MCHBAR32(CLKCFG_MCHBAR) & ~CLKCFG_MEMCLK_MASK) |
						(want_freq << CLKCFG_MEMCLK_SHIFT) | CLKCFG_UPDATE;
		/* Reset update bit. */
		MCHBAR32(CLKCFG_MCHBAR) &= ~CLKCFG_UPDATE;
	}

	/* Special PLL setup for FSB 1067 MHz with DRAM at 667 MT. */
	if ((timings->fsb_clock == FSB_CLOCK_1067MHz) && (timings->mem_clock == MEM_CLOCK_667MT)) {
		MCHBAR32(CLKCFG_MCHBAR + 0x16) = 0x000030f0;
		MCHBAR32(CLKCFG_MCHBAR + 0x64) = 0x000050c1;

		/* Toggle bit 12 with bit 17 held to latch the values.
		   NOTE(review): exact semantics of bits 12/17 unknown. */
		MCHBAR32(CLKCFG_MCHBAR) = (MCHBAR32(CLKCFG_MCHBAR) & ~(1 << 12)) | (1 << 17);
		MCHBAR32(CLKCFG_MCHBAR) |= (1 << 17) | (1 << 12);
		MCHBAR32(CLKCFG_MCHBAR) &= ~(1 << 12);

		MCHBAR32(CLKCFG_MCHBAR + 0x04) = 0x9bad1f1f;
		MCHBAR8(CLKCFG_MCHBAR + 0x08) = 0xf4;
		MCHBAR8(CLKCFG_MCHBAR + 0x0a) = 0x43;
		MCHBAR8(CLKCFG_MCHBAR + 0x0c) = 0x10;
		MCHBAR8(CLKCFG_MCHBAR + 0x0d) = 0x80;
		MCHBAR32(CLKCFG_MCHBAR + 0x50) = 0x0b0e151b;
		MCHBAR8(CLKCFG_MCHBAR + 0x54) = 0xb4;
		MCHBAR8(CLKCFG_MCHBAR + 0x55) = 0x10;
		MCHBAR8(CLKCFG_MCHBAR + 0x56) = 0x08;

		/* Pulse bits 10 and 11. */
		MCHBAR32(CLKCFG_MCHBAR) |= (1 << 10);
		MCHBAR32(CLKCFG_MCHBAR) |= (1 << 11);
		MCHBAR32(CLKCFG_MCHBAR) &= ~(1 << 10);
		MCHBAR32(CLKCFG_MCHBAR) &= ~(1 << 11);
	}

	MCHBAR32(CLKCFG_MCHBAR + 0x48) |= 0x3f << 24;
}
748
749int raminit_read_vco_index(void)
750{
751 switch (MCHBAR8(0x0c0f) & 0x7) {
752 case VCO_2666:
753 return 0;
754 case VCO_3200:
755 return 1;
756 case VCO_4000:
757 return 2;
758 case VCO_5333:
759 return 3;
760 default:
761 die("Unknown VCO frequency.\n");
762 return 0;
763 }
764}
/*
 * Program the IGD render and sampler clock dividers (GCFGC register in
 * the IGD PCI config space) according to GMCH variant and current VCO,
 * using GCFGC's set/latch/clear UPDATE handshake, and select the
 * display clock. Skipped when the IGD PCI function is absent.
 */
static void set_igd_memory_frequencies(const sysinfo_t *const sysinfo)
{
	/* GS45 in performance (non-low-power) mode uses the extra
	   "(perf)" column appended after GMCH_GS45. */
	const int gfx_idx = ((sysinfo->gfx_type == GMCH_GS45) &&
				!sysinfo->gs45_low_power_mode)
		? (GMCH_GS45 + 1) : sysinfo->gfx_type;

	/* Render and sampler frequency values seem to be some kind of factor. */
	const u16 render_freq_from_vco_and_gfxtype[][10] = {
		/*                GM45  GM47  GM49  GE45  GL40  GL43  GS40  GS45 (perf) */
		/* VCO 2666 */ { 0xd,  0xd,  0xe,  0xd,  0xb,  0xd,  0xb,  0xa,  0xd },
		/* VCO 3200 */ { 0xd,  0xe,  0xf,  0xd,  0xb,  0xd,  0xb,  0x9,  0xd },
		/* VCO 4000 */ { 0xc,  0xd,  0xf,  0xc,  0xa,  0xc,  0xa,  0x9,  0xc },
		/* VCO 5333 */ { 0xb,  0xc,  0xe,  0xb,  0x9,  0xb,  0x9,  0x8,  0xb },
	};
	const u16 sampler_freq_from_vco_and_gfxtype[][10] = {
		/*                GM45  GM47  GM49  GE45  GL40  GL43  GS40  GS45 (perf) */
		/* VCO 2666 */ { 0xc,  0xc,  0xd,  0xc,  0x9,  0xc,  0x9,  0x8,  0xc },
		/* VCO 3200 */ { 0xc,  0xd,  0xe,  0xc,  0x9,  0xc,  0x9,  0x8,  0xc },
		/* VCO 4000 */ { 0xa,  0xc,  0xd,  0xa,  0x8,  0xa,  0x8,  0x8,  0xa },
		/* VCO 5333 */ { 0xa,  0xa,  0xc,  0xa,  0x7,  0xa,  0x7,  0x6,  0xa },
	};
	const u16 display_clock_select_from_gfxtype[] = {
		/* GM45  GM47  GM49  GE45  GL40  GL43  GS40  GS45 (perf) */
		   1,    1,    1,    1,    1,    1,    1,    0,   1
	};

	/* Config offset 0 is the vendor ID: not 0x8086 means the IGD
	   function is disabled/absent, so there is nothing to program. */
	if (pci_read_config16(GCFGC_PCIDEV, 0) != 0x8086) {
		printk(BIOS_DEBUG, "Skipping IGD memory frequency setting.\n");
		return;
	}

	/* Magic unlock/enable sequence from vendor reference code. */
	MCHBAR16(0x119e) = 0xa800;
	MCHBAR16(0x11c0) = (MCHBAR16(0x11c0) & ~0xff00) | (0x01 << 8);
	MCHBAR16(0x119e) = 0xb800;
	MCHBAR8(0x0f10) |= 1 << 7;

	/* Read VCO. */
	const int vco_idx = raminit_read_vco_index();
	printk(BIOS_DEBUG, "Setting IGD memory frequencies for VCO #%d.\n", vco_idx);

	const u32 freqcfg =
		((render_freq_from_vco_and_gfxtype[vco_idx][gfx_idx]
		  << GCFGC_CR_SHIFT) & GCFGC_CR_MASK) |
		((sampler_freq_from_vco_and_gfxtype[vco_idx][gfx_idx]
		  << GCFGC_CS_SHIFT) & GCFGC_CS_MASK);

	/* Set frequencies, clear update bit. */
	u32 gcfgc = pci_read_config16(GCFGC_PCIDEV, GCFGC_OFFSET);
	gcfgc &= ~(GCFGC_CS_MASK | GCFGC_UPDATE | GCFGC_CR_MASK);
	gcfgc |= freqcfg;
	pci_write_config16(GCFGC_PCIDEV, GCFGC_OFFSET, gcfgc);

	/* Set frequencies, set update bit. */
	gcfgc = pci_read_config16(GCFGC_PCIDEV, GCFGC_OFFSET);
	gcfgc &= ~(GCFGC_CS_MASK | GCFGC_CR_MASK);
	gcfgc |= freqcfg | GCFGC_UPDATE;
	pci_write_config16(GCFGC_PCIDEV, GCFGC_OFFSET, gcfgc);

	/* Clear update bit. */
	pci_write_config16(GCFGC_PCIDEV, GCFGC_OFFSET,
		pci_read_config16(GCFGC_PCIDEV, GCFGC_OFFSET) & ~GCFGC_UPDATE);

	/* Set display clock select bit. */
	pci_write_config16(GCFGC_PCIDEV, GCFGC_OFFSET,
		(pci_read_config16(GCFGC_PCIDEV, GCFGC_OFFSET) & ~GCFGC_CD_MASK) |
		(display_clock_select_from_gfxtype[gfx_idx] << GCFGC_CD_SHIFT));
}
832
/*
 * Program the per-channel DRAM control registers CxDRC0..2: enable the
 * populated ranks, set the refresh rate, and set the 1067MT mode bit
 * where applicable.
 */
static void configure_dram_control_mode(const timings_t *const timings, const dimminfo_t *const dimms)
{
	int ch, r;

	FOR_EACH_CHANNEL(ch) {
		/* CxDRC0: rank-enable bits plus refresh mode select. */
		unsigned int mchbar = CxDRC0_MCHBAR(ch);
		u32 cxdrc = MCHBAR32(mchbar);
		cxdrc &= ~CxDRC0_RANKEN_MASK;
		FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms, ch, r)
			cxdrc |= CxDRC0_RANKEN(r);
		cxdrc = (cxdrc & ~CxDRC0_RMS_MASK) |
				/* Always 7.8us for DDR3: */
				CxDRC0_RMS_78US;
		MCHBAR32(mchbar) = cxdrc;

		/* CxDRC1: clear not-populated bits for existing ranks. */
		mchbar = CxDRC1_MCHBAR(ch);
		cxdrc = MCHBAR32(mchbar);
		cxdrc |= CxDRC1_NOTPOP_MASK;
		FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms, ch, r)
			cxdrc &= ~CxDRC1_NOTPOP(r);
		cxdrc |= CxDRC1_MUSTWR;
		MCHBAR32(mchbar) = cxdrc;

		/* CxDRC2: as CxDRC1, plus the 1067MT clock flag. */
		mchbar = CxDRC2_MCHBAR(ch);
		cxdrc = MCHBAR32(mchbar);
		cxdrc |= CxDRC2_NOTPOP_MASK;
		FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms, ch, r)
			cxdrc &= ~CxDRC2_NOTPOP(r);
		cxdrc |= CxDRC2_MUSTWR;
		if (timings->mem_clock == MEM_CLOCK_1067MT)
			cxdrc |= CxDRC2_CLK1067MT;
		MCHBAR32(mchbar) = cxdrc;
	}
}
867
/*
 * Program RCOMP (resistive compensation) codes for DDR3, run the
 * calibration, execute two manual RCOMP cycles and finally enable
 * periodic RCOMP. The per-offset register values come from vendor
 * reference code and are otherwise undocumented.
 */
static void rcomp_initialization(const stepping_t stepping, const int sff)
{
	/* Program RCOMP codes. */
	if (sff)
		die("SFF platform unsupported in RCOMP initialization.\n");
	/* Values are for DDR3. */
	MCHBAR8(0x6ac) &= ~0x0f;
	MCHBAR8(0x6b0) = 0x55;
	MCHBAR8(0x6ec) &= ~0x0f;
	MCHBAR8(0x6f0) = 0x66;
	MCHBAR8(0x72c) &= ~0x0f;
	MCHBAR8(0x730) = 0x66;
	MCHBAR8(0x76c) &= ~0x0f;
	MCHBAR8(0x770) = 0x66;
	MCHBAR8(0x7ac) &= ~0x0f;
	MCHBAR8(0x7b0) = 0x66;
	MCHBAR8(0x7ec) &= ~0x0f;
	MCHBAR8(0x7f0) = 0x66;
	MCHBAR8(0x86c) &= ~0x0f;
	MCHBAR8(0x870) = 0x55;
	MCHBAR8(0x8ac) &= ~0x0f;
	MCHBAR8(0x8b0) = 0x66;
	/* ODT multiplier bits. */
	MCHBAR32(0x04d0) = (MCHBAR32(0x04d0) & ~((7 << 3) | (7 << 0))) | (2 << 3) | (2 << 0);

	/* Perform RCOMP calibration for DDR3. */
	raminit_rcomp_calibration(stepping);

	/* Run initial RCOMP: bit 0 of 0x400 starts a cycle and clears
	   itself on completion. */
	MCHBAR32(0x418) |= 1 << 17;
	MCHBAR32(0x40c) &= ~(1 << 23);
	MCHBAR32(0x41c) &= ~((1 << 7) | (1 << 3));
	MCHBAR32(0x400) |= 1;
	while (MCHBAR32(0x400) & 1) {}

	/* Run second RCOMP. */
	MCHBAR32(0x40c) |= 1 << 19;
	MCHBAR32(0x400) |= 1;
	while (MCHBAR32(0x400) & 1) {}

	/* Cleanup and start periodic RCOMP. */
	MCHBAR32(0x40c) &= ~(1 << 19);
	MCHBAR32(0x40c) |= 1 << 23;
	MCHBAR32(0x418) &= ~(1 << 17);
	MCHBAR32(0x41c) |= (1 << 7) | (1 << 3);
	MCHBAR32(0x400) |= (1 << 1);
}
915
/*
 * Execute the DRAM power-up sequence: wait 200us from reset, then
 * toggle the power/clock-enable bits in MCHBAR 0x1434 with the
 * required settling delays. On S3 resume the long delays and reset
 * pulses are skipped so self-refreshed contents are preserved.
 */
static void dram_powerup(const int resume)
{
	/* JEDEC requires a delay after power-up before CKE assertion. */
	udelay_from_reset(200);
	MCHBAR32(CLKCFG_MCHBAR) = (MCHBAR32(CLKCFG_MCHBAR) & ~(1 << 3)) | (3 << 21);
	if (!resume) {
		MCHBAR32(0x1434) |= (1 << 10);
		ns100delay(2);
	}
	MCHBAR32(0x1434) |= (1 << 6);
	if (!resume) {
		ns100delay(1);
		MCHBAR32(0x1434) |= (1 << 9);
		MCHBAR32(0x1434) &= ~(1 << 10);
		udelay(500);
	}
}
/* Program the DRAM timing registers CxDRT0..CxDRT6 of both channels.
 *
 * All constants are for DDR3 (burst length 8, fixed tWTR/tRTP).
 * Several fields use a different encoding at 1067MT than at 667/800MT.
 *
 * NOTE(review): the fixed field values below are vendor magic; comments
 * only describe the visible arithmetic, not the hardware meaning. */
static void dram_program_timings(const timings_t *const timings)
{
	/* Values are for DDR3. */
	const int burst_length = 8;
	const int tWTR = 4, tRTP = 1;
	int i;

	FOR_EACH_CHANNEL(i) {
		u32 reg = MCHBAR32(CxDRT0_MCHBAR(i));
		/* Back-to-back write-to-precharge / write-to-read delays:
		   write latency + half a burst + tWR resp. tWTR. */
		const int btb_wtp = timings->tWL + burst_length/2 + timings->tWR;
		const int btb_wtr = timings->tWL + burst_length/2 + tWTR;
		reg = (reg & ~(CxDRT0_BtB_WtP_MASK | CxDRT0_BtB_WtR_MASK)) |
			((btb_wtp << CxDRT0_BtB_WtP_SHIFT) & CxDRT0_BtB_WtP_MASK) |
			((btb_wtr << CxDRT0_BtB_WtR_SHIFT) & CxDRT0_BtB_WtR_MASK);
		/* CAS-derived fields; base constants differ at 1067MT. */
		if (timings->mem_clock != MEM_CLOCK_1067MT) {
			reg = (reg & ~(0x7 << 15)) | ((9 - timings->CAS) << 15);
			reg = (reg & ~(0xf << 10)) | ((timings->CAS - 3) << 10);
		} else {
			reg = (reg & ~(0x7 << 15)) | ((10 - timings->CAS) << 15);
			reg = (reg & ~(0xf << 10)) | ((timings->CAS - 4) << 10);
		}
		reg = (reg & ~(0x7 << 5)) | (3 << 5);
		reg = (reg & ~(0x7 << 0)) | (1 << 0);
		MCHBAR32(CxDRT0_MCHBAR(i)) = reg;

		/* CxDRT1: tRTP, tRAS, and tRRD/tRCD/tRP (minus-2 encoded). */
		reg = MCHBAR32(CxDRT1_MCHBAR(i));
		reg = (reg & ~(0x03 << 28)) | ((tRTP & 0x03) << 28);
		reg = (reg & ~(0x1f << 21)) | ((timings->tRAS & 0x1f) << 21);
		reg = (reg & ~(0x07 << 10)) | (((timings->tRRD - 2) & 0x07) << 10);
		reg = (reg & ~(0x07 << 5)) | (((timings->tRCD - 2) & 0x07) << 5);
		reg = (reg & ~(0x07 << 0)) | (((timings->tRP - 2) & 0x07) << 0);
		MCHBAR32(CxDRT1_MCHBAR(i)) = reg;

		/* CxDRT2: tFAW plus fixed, clock-dependent fields. */
		reg = MCHBAR32(CxDRT2_MCHBAR(i));
		reg = (reg & ~(0x1f << 17)) | ((timings->tFAW & 0x1f) << 17);
		if (timings->mem_clock != MEM_CLOCK_1067MT) {
			reg = (reg & ~(0x7 << 12)) | (0x2 << 12);
			reg = (reg & ~(0xf << 6)) | (0x9 << 6);
		} else {
			reg = (reg & ~(0x7 << 12)) | (0x3 << 12);
			reg = (reg & ~(0xf << 6)) | (0xc << 6);
		}
		reg = (reg & ~(0x1f << 0)) | (0x13 << 0);
		MCHBAR32(CxDRT2_MCHBAR(i)) = reg;

		/* CxDRT3: CAS (minus-3), tRFC, tWL (minus-2). */
		reg = MCHBAR32(CxDRT3_MCHBAR(i));
		reg |= 0x3 << 28;
		reg = (reg & ~(0x03 << 26));
		reg = (reg & ~(0x07 << 23)) | (((timings->CAS - 3) & 0x07) << 23);
		reg = (reg & ~(0xff << 13)) | ((timings->tRFC & 0xff) << 13);
		reg = (reg & ~(0x07 << 0)) | (((timings->tWL - 2) & 0x07) << 0);
		MCHBAR32(CxDRT3_MCHBAR(i)) = reg;

		/* CxDRT4: four fixed values selected by memory clock. */
		reg = MCHBAR32(CxDRT4_MCHBAR(i));
		static const u8 timings_by_clock[4][3] = {
			/* 333MHz 400MHz 533MHz
			    667MT  800MT 1067MT */
			{ 0x07, 0x0a, 0x0d },
			{ 0x3a, 0x46, 0x5d },
			{ 0x0c, 0x0e, 0x18 },
			{ 0x21, 0x28, 0x35 },
		};
		/* The mem_clock enum is ordered such that (2 - value)
		   indexes the 667/800/1067 columns above. */
		const int clk_idx = 2 - timings->mem_clock;
		reg = (reg & ~(0x01f << 27)) | (timings_by_clock[0][clk_idx] << 27);
		reg = (reg & ~(0x3ff << 17)) | (timings_by_clock[1][clk_idx] << 17);
		reg = (reg & ~(0x03f << 10)) | (timings_by_clock[2][clk_idx] << 10);
		reg = (reg & ~(0x1ff << 0)) | (timings_by_clock[3][clk_idx] << 0);
		MCHBAR32(CxDRT4_MCHBAR(i)) = reg;

		/* CxDRT5: burst-/CAS-derived fields plus fixed values. */
		reg = MCHBAR32(CxDRT5_MCHBAR(i));
		if (timings->mem_clock == MEM_CLOCK_1067MT)
			reg = (reg & ~(0xf << 28)) | (0x8 << 28);
		reg = (reg & ~(0x00f << 22)) | ((burst_length/2 + timings->CAS + 2) << 22);
		reg = (reg & ~(0x3ff << 12)) | (0x190 << 12);
		reg = (reg & ~(0x00f << 4)) | ((timings->CAS - 2) << 4);
		reg = (reg & ~(0x003 << 2)) | (0x001 << 2);
		reg = (reg & ~(0x003 << 0));
		MCHBAR32(CxDRT5_MCHBAR(i)) = reg;

		reg = MCHBAR32(CxDRT6_MCHBAR(i));
		reg = (reg & ~(0xffff << 16)) | (0x066a << 16); /* always 7.8us refresh rate for DDR3 */
		reg |= (1 << 2);
		MCHBAR32(CxDRT6_MCHBAR(i)) = reg;
	}
}
1017
1018static void dram_program_banks(const dimminfo_t *const dimms)
1019{
1020 int ch, r;
1021
1022 FOR_EACH_CHANNEL(ch) {
1023 const int tRPALL = dimms[ch].banks == 8;
1024
1025 u32 reg = MCHBAR32(CxDRT1_MCHBAR(ch)) & ~(0x01 << 15);
1026 IF_CHANNEL_POPULATED(dimms, ch)
1027 reg |= tRPALL << 15;
1028 MCHBAR32(CxDRT1_MCHBAR(ch)) = reg;
1029
1030 reg = MCHBAR32(CxDRA_MCHBAR(ch)) & ~CxDRA_BANKS_MASK;
1031 FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms, ch, r) {
1032 reg |= CxDRA_BANKS(r, dimms[ch].banks);
1033 }
1034 MCHBAR32(CxDRA_MCHBAR(ch)) = reg;
1035 }
1036}
1037
/* Program On-Die Termination related fields (CxODT_HIGH/CxODT_LOW).
 *
 * @sff small-form-factor (GS45 low-power) platform flag; only alters
 *      the topmost field for clocks below 1067MT.
 *
 * Bit positions in the HIGH half are written as (bit - 32) because the
 * register is the upper dword of a 64-bit pair. */
static void odt_setup(const timings_t *const timings, const int sff)
{
	/* Values are for DDR3. */
	int ch;

	FOR_EACH_CHANNEL(ch) {
		u32 reg = MCHBAR32(CxODT_HIGH(ch));
		if (sff && (timings->mem_clock != MEM_CLOCK_1067MT))
			reg &= ~(0x3 << (61 - 32));
		else
			reg |= 0x3 << (61 - 32);
		reg = (reg & ~(0x3 << (52 - 32))) | (0x2 << (52 - 32));
		/* CAS-derived fields (minus-3 encoding, clock dependent). */
		reg = (reg & ~(0x7 << (48 - 32))) | ((timings->CAS - 3) << (48 - 32));
		reg = (reg & ~(0xf << (44 - 32))) | (0x7 << (44 - 32));
		if (timings->mem_clock != MEM_CLOCK_1067MT) {
			reg = (reg & ~(0xf << (40 - 32))) | ((12 - timings->CAS) << (40 - 32));
			reg = (reg & ~(0xf << (36 - 32))) | (( 2 + timings->CAS) << (36 - 32));
		} else {
			reg = (reg & ~(0xf << (40 - 32))) | ((13 - timings->CAS) << (40 - 32));
			reg = (reg & ~(0xf << (36 - 32))) | (( 1 + timings->CAS) << (36 - 32));
		}
		reg = (reg & ~(0xf << (32 - 32))) | (0x7 << (32 - 32));
		MCHBAR32(CxODT_HIGH(ch)) = reg;

		/* Lower dword: fixed values, plus one field selected by
		   the memory clock. */
		reg = MCHBAR32(CxODT_LOW(ch));
		reg = (reg & ~(0x7 << 28)) | (0x2 << 28);
		reg = (reg & ~(0x3 << 22)) | (0x2 << 22);
		reg = (reg & ~(0x7 << 12)) | (0x2 << 12);
		reg = (reg & ~(0x7 << 4)) | (0x2 << 4);
		switch (timings->mem_clock) {
		case MEM_CLOCK_667MT:
			reg = (reg & ~0x7);
			break;
		case MEM_CLOCK_800MT:
			reg = (reg & ~0x7) | 0x2;
			break;
		case MEM_CLOCK_1067MT:
			reg = (reg & ~0x7) | 0x5;
			break;
		}
		MCHBAR32(CxODT_LOW(ch)) = reg;
	}
}
1081
/* Miscellaneous timing/arbiter settings.
 *
 * The 0x12xx registers belong to channel 0, the 0x13xx mirrors to
 * channel 1 (identical layout). Unnamed constants are vendor magic. */
static void misc_settings(const timings_t *const timings,
			  const stepping_t stepping)
{
	/* tRD into bits 4:0 (bit 24 cleared) of 0x1260/0x1360. */
	MCHBAR32(0x1260) = (MCHBAR32(0x1260) & ~((1 << 24) | 0x1f)) | timings->tRD;
	MCHBAR32(0x1360) = (MCHBAR32(0x1360) & ~((1 << 24) | 0x1f)) | timings->tRD;

	/* Write latency into the low nibble of 0x1268/0x1368. */
	MCHBAR8(0x1268) = (MCHBAR8(0x1268) & ~(0xf)) | timings->tWL;
	MCHBAR8(0x1368) = (MCHBAR8(0x1368) & ~(0xf)) | timings->tWL;
	MCHBAR8(0x12a0) = (MCHBAR8(0x12a0) & ~(0xf)) | 0xa;
	MCHBAR8(0x13a0) = (MCHBAR8(0x13a0) & ~(0xf)) | 0xa;

	MCHBAR32(0x218) = (MCHBAR32(0x218) & ~((7 << 29) | (7 << 25) | (3 << 22) | (3 << 10))) |
			(4 << 29) | (3 << 25) | (0 << 22) | (1 << 10);
	MCHBAR32(0x220) = (MCHBAR32(0x220) & ~(7 << 16)) | (1 << 21) | (1 << 16);
	MCHBAR32(0x224) = (MCHBAR32(0x224) & ~(7 << 8)) | (3 << 8);
	/* This bit is only set on stepping B1 and newer. */
	if (stepping >= STEPPING_B1)
		MCHBAR8(0x234) |= (1 << 3);
}
1100
/* Program FSB <-> memory-clock crossing registers.
 *
 * Values are table-indexed by the (FSB clock, DDR3 clock) pair.
 * All-zero rows are combinations that cannot occur (the memory clock
 * never exceeds the FSB clock on these parts). */
static void clock_crossing_setup(const fsb_clock_t fsb,
				 const mem_clock_t ddr3clock,
				 const dimminfo_t *const dimms)
{
	int ch;

	static const u32 values_from_fsb_and_mem[][3][4] = {
	/* FSB 1067MHz */{
		/* DDR3-1067 */ { 0x00000000, 0x00000000, 0x00180006, 0x00810060 },
		/* DDR3-800 */ { 0x00000000, 0x00000000, 0x0000001c, 0x000300e0 },
		/* DDR3-667 */ { 0x00000000, 0x00001c00, 0x03c00038, 0x0007e000 },
	},
	/* FSB 800MHz */{
		/* DDR3-1067 */ { 0, 0, 0, 0 },
		/* DDR3-800 */ { 0x00000000, 0x00000000, 0x0030000c, 0x000300c0 },
		/* DDR3-667 */ { 0x00000000, 0x00000380, 0x0060001c, 0x00030c00 },
	},
	/* FSB 667MHz */{
		/* DDR3-1067 */ { 0, 0, 0, 0 },
		/* DDR3-800 */ { 0, 0, 0, 0 },
		/* DDR3-667 */ { 0x00000000, 0x00000000, 0x0030000c, 0x000300c0 },
	},
	};

	const u32 *data = values_from_fsb_and_mem[fsb][ddr3clock];
	MCHBAR32(0x0208) = data[3];
	MCHBAR32(0x020c) = data[2];
	/* 0x0210 only gets programmed for DDR3-667 on the faster FSBs. */
	if (((fsb == FSB_CLOCK_1067MHz) || (fsb == FSB_CLOCK_800MHz)) && (ddr3clock == MEM_CLOCK_667MT))
		MCHBAR32(0x0210) = data[1];

	static const u32 from_fsb_and_mem[][3] = {
		   /* DDR3-1067 DDR3-800 DDR3-667 */
	/* FSB 1067MHz */{ 0x40100401, 0x10040220, 0x08040110, },
	/* FSB 800MHz */{ 0x00000000, 0x40100401, 0x00080201, },
	/* FSB 667MHz */{ 0x00000000, 0x00000000, 0x40100401, },
	};
	FOR_EACH_CHANNEL(ch) {
		/* Per-channel registers at 0x1258 / 0x1358. */
		const unsigned int mchbar = 0x1258 + (ch * 0x0100);
		/* Raw-card-F channels get a special value for the
		   FSB-1067/DDR3-800 combination. */
		if ((fsb == FSB_CLOCK_1067MHz) && (ddr3clock == MEM_CLOCK_800MT) && CHANNEL_IS_CARDF(dimms, ch))
			MCHBAR32(mchbar) = 0x08040120;
		else
			MCHBAR32(mchbar) = from_fsb_and_mem[fsb][ddr3clock];
		MCHBAR32(mchbar + 4) = 0x00000000;
	}
}
1146
1147/* Program egress VC1 timings. */
1148static void vc1_program_timings(const fsb_clock_t fsb)
1149{
1150 const u32 timings_by_fsb[][2] = {
1151 /* FSB 1067MHz */ { 0x1a, 0x01380138 },
1152 /* FSB 800MHz */ { 0x14, 0x00f000f0 },
1153 /* FSB 667MHz */ { 0x10, 0x00c000c0 },
1154 };
1155 EPBAR8(0x2c) = timings_by_fsb[fsb][0];
1156 EPBAR32(0x38) = timings_by_fsb[fsb][1];
1157 EPBAR32(0x3c) = timings_by_fsb[fsb][1];
1158}
1159
1160/* @prejedec if not zero, set rank size to 128MB and page size to 4KB. */
1161static void program_memory_map(const dimminfo_t *const dimms, const channel_mode_t mode, const int prejedec)
1162{
1163 int ch, r;
1164
1165 /* Program rank boundaries (CxDRBy). */
1166 unsigned int base = 0; /* start of next rank in MB */
1167 unsigned int total_mb[2] = { 0, 0 }; /* total memory per channel in MB */
1168 FOR_EACH_CHANNEL(ch) {
1169 if (mode == CHANNEL_MODE_DUAL_INTERLEAVED)
1170 /* In interleaved mode, start every channel from 0. */
1171 base = 0;
1172 for (r = 0; r < RANKS_PER_CHANNEL; r += 2) {
1173 /* Fixed capacity for pre-jedec config. */
1174 const unsigned int rank_capacity_mb =
1175 prejedec ? 128 : dimms[ch].rank_capacity_mb;
1176 u32 reg = 0;
1177
1178 /* Program bounds in CxDRBy. */
1179 IF_RANK_POPULATED(dimms, ch, r) {
1180 base += rank_capacity_mb;
1181 total_mb[ch] += rank_capacity_mb;
1182 }
1183 reg |= CxDRBy_BOUND_MB(r, base);
1184 IF_RANK_POPULATED(dimms, ch, r+1) {
1185 base += rank_capacity_mb;
1186 total_mb[ch] += rank_capacity_mb;
1187 }
1188 reg |= CxDRBy_BOUND_MB(r+1, base);
1189
1190 MCHBAR32(CxDRBy_MCHBAR(ch, r)) = reg;
1191 }
1192 }
1193
1194 /* Program page size (CxDRA). */
1195 FOR_EACH_CHANNEL(ch) {
1196 u32 reg = MCHBAR32(CxDRA_MCHBAR(ch)) & ~CxDRA_PAGESIZE_MASK;
1197 FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms, ch, r) {
1198 /* Fixed page size for pre-jedec config. */
1199 const unsigned int page_size = /* dimm page size in bytes */
1200 prejedec ? 4096 : dimms[ch].page_size;
1201 reg |= CxDRA_PAGESIZE(r, log2(page_size));
1202 /* deferred to f5_27: reg |= CxDRA_BANKS(r, dimms[ch].banks); */
1203 }
1204 MCHBAR32(CxDRA_MCHBAR(ch)) = reg;
1205 }
1206
1207 /* Calculate memory mapping, all values in MB. */
1208 const unsigned int MMIOstart = 0x0c00; /* 3GB, makes MTRR configuration small. */
1209 const unsigned int ME_SIZE = 0;
1210 const unsigned int usedMEsize = (total_mb[0] != total_mb[1]) ? ME_SIZE : 2 * ME_SIZE;
1211 const unsigned int claimCapable =
1212 !(pci_read_config32(PCI_DEV(0, 0, 0), D0F0_CAPID0 + 4) & (1 << (47 - 32)));
1213
1214 const unsigned int TOM = total_mb[0] + total_mb[1];
1215 unsigned int TOMminusME = TOM - usedMEsize;
1216 unsigned int TOLUD = (TOMminusME < MMIOstart) ? TOMminusME : MMIOstart;
1217 unsigned int TOUUD = TOMminusME;
1218 unsigned int REMAPbase = 0xffff, REMAPlimit = 0;
1219
1220 if (claimCapable && (TOMminusME >= (MMIOstart + 64))) {
1221 /* 64MB alignment: We'll lose some MBs here, if ME is on. */
1222 TOMminusME &= ~(64 - 1);
1223 /* 64MB alignment: Loss will be reclaimed. */
1224 TOLUD &= ~(64 - 1);
1225 if (TOMminusME > 4096) {
1226 REMAPbase = TOMminusME;
1227 REMAPlimit = REMAPbase + (4096 - TOLUD);
1228 } else {
1229 REMAPbase = 4096;
1230 REMAPlimit = REMAPbase + (TOMminusME - TOLUD);
1231 }
1232 TOUUD = REMAPlimit;
1233 /* REMAPlimit is an inclusive bound, all others exclusive. */
1234 REMAPlimit -= 64;
1235 }
1236
1237 pci_write_config16(PCI_DEV(0, 0, 0), D0F0_TOM, (TOM >> 7) & 0x1ff);
1238 pci_write_config16(PCI_DEV(0, 0, 0), D0F0_TOLUD, TOLUD << 4);
1239 pci_write_config16(PCI_DEV(0, 0, 0), D0F0_TOUUD, TOUUD);
1240 pci_write_config16(PCI_DEV(0, 0, 0), D0F0_REMAPBASE, (REMAPbase >> 6) & 0x03ff);
1241 pci_write_config16(PCI_DEV(0, 0, 0), D0F0_REMAPLIMIT, (REMAPlimit >> 6) & 0x03ff);
1242
1243 /* Program channel mode. */
1244 switch (mode) {
1245 case CHANNEL_MODE_SINGLE:
1246 printk(BIOS_DEBUG, "Memory configured in single-channel mode.\n");
1247 MCHBAR32(DCC_MCHBAR) &= ~DCC_INTERLEAVED;
1248 break;
1249 case CHANNEL_MODE_DUAL_ASYNC:
1250 printk(BIOS_DEBUG, "Memory configured in dual-channel assymetric mode.\n");
1251 MCHBAR32(DCC_MCHBAR) &= ~DCC_INTERLEAVED;
1252 break;
1253 case CHANNEL_MODE_DUAL_INTERLEAVED:
1254 printk(BIOS_DEBUG, "Memory configured in dual-channel interleaved mode.\n");
1255 MCHBAR32(DCC_MCHBAR) &= ~(DCC_NO_CHANXOR | (1 << 9));
1256 MCHBAR32(DCC_MCHBAR) |= DCC_INTERLEAVED;
1257 break;
1258 }
1259
1260 printk(BIOS_SPEW, "Memory map:\n"
1261 "TOM = %5uMB\n"
1262 "TOLUD = %5uMB\n"
1263 "TOUUD = %5uMB\n"
1264 "REMAP:\t base = %5uMB\n"
1265 "\t limit = %5uMB\n",
1266 TOM, TOLUD, TOUUD, REMAPbase, REMAPlimit);
1267}
1268static void prejedec_memory_map(const dimminfo_t *const dimms, channel_mode_t mode)
1269{
1270 /* Never use dual-interleaved mode in pre-jedec config. */
1271 if (CHANNEL_MODE_DUAL_INTERLEAVED == mode)
1272 mode = CHANNEL_MODE_DUAL_ASYNC;
1273
1274 program_memory_map(dimms, mode, 1);
1275 MCHBAR32(DCC_MCHBAR) |= DCC_NO_CHANXOR;
1276}
1277
/* Select the DDR3 clock multiplexers per channel.
 *
 * Mux fields live in bits 13:11 of the eight dwords at 0x14b0 (ch 0)
 * resp. 0x15b0 (ch 1). Selection depends on whether the channel holds
 * a raw-card-F DIMM and whether the clock is 1067MT; channel B gets an
 * extra "mixed" bit when channel A is empty or differently populated.
 *
 * Only stepping B1 and later is supported. */
static void ddr3_select_clock_mux(const mem_clock_t ddr3clock,
				  const dimminfo_t *const dimms,
				  const stepping_t stepping)
{
	const int clk1067 = (ddr3clock == MEM_CLOCK_1067MT);
	const int cardF[] = { CHANNEL_IS_CARDF(dimms, 0), CHANNEL_IS_CARDF(dimms, 1) };

	int ch;

	if (stepping < STEPPING_B1)
		die("Stepping <B1 unsupported in clock-multiplexer selection.\n");

	FOR_EACH_POPULATED_CHANNEL(dimms, ch) {
		int mixed = 0;
		/* Mixed population: second channel differs from (or has
		   no counterpart in) the first. */
		if ((1 == ch) && (!CHANNEL_IS_POPULATED(dimms, 0) || (cardF[0] != cardF[1])))
			mixed = 4 << 11;
		const unsigned int b = 0x14b0 + (ch * 0x0100);
		MCHBAR32(b+0x1c) = (MCHBAR32(b+0x1c) & ~(7 << 11)) |
					((( cardF[ch])?1:0) << 11) | mixed;
		MCHBAR32(b+0x18) = (MCHBAR32(b+0x18) & ~(7 << 11)) | mixed;
		MCHBAR32(b+0x14) = (MCHBAR32(b+0x14) & ~(7 << 11)) |
					(((!clk1067 && !cardF[ch])?0:1) << 11) | mixed;
		MCHBAR32(b+0x10) = (MCHBAR32(b+0x10) & ~(7 << 11)) |
					((( clk1067 && !cardF[ch])?1:0) << 11) | mixed;
		MCHBAR32(b+0x0c) = (MCHBAR32(b+0x0c) & ~(7 << 11)) |
					((( cardF[ch])?3:2) << 11) | mixed;
		MCHBAR32(b+0x08) = (MCHBAR32(b+0x08) & ~(7 << 11)) |
					(2 << 11) | mixed;
		MCHBAR32(b+0x04) = (MCHBAR32(b+0x04) & ~(7 << 11)) |
					(((!clk1067 && !cardF[ch])?2:3) << 11) | mixed;
		MCHBAR32(b+0x00) = (MCHBAR32(b+0x00) & ~(7 << 11)) |
					((( clk1067 && !cardF[ch])?3:2) << 11) | mixed;
	}
}
/* Program the per-channel write-I/O registers (CxWRTy).
 *
 * Values are table-driven: at 667/800MT they depend on the stepping
 * (before/after conversion stepping A1) and the raw-card type; at
 * 1067MT they depend on the channel and the raw-card type. Channel B
 * is skipped when it is populated identically to channel A.
 *
 * Only stepping B1+ on non-SFF platforms is supported. */
static void ddr3_write_io_init(const mem_clock_t ddr3clock,
			       const dimminfo_t *const dimms,
			       const stepping_t stepping,
			       const int sff)
{
	const int a1step = stepping >= STEPPING_CONVERSION_A1;
	const int cardF[] = { CHANNEL_IS_CARDF(dimms, 0), CHANNEL_IS_CARDF(dimms, 1) };

	int ch;

	if (stepping < STEPPING_B1)
		die("Stepping <B1 unsupported in write i/o initialization.\n");
	if (sff)
		die("SFF platform unsupported in write i/o initialization.\n");

	/* Indexed [a1step][2 - ddr3clock][cardF]; inner rows are the
	   four CxWRTy register values. */
	static const u32 ddr3_667_800_by_stepping_ddr3_and_card[][2][2][4] = {
	{ /* Stepping B3 and below */
		{ /* 667 MHz */
			{ 0xa3255008, 0x26888209, 0x26288208, 0x6188040f },
			{ 0x7524240b, 0xa5255608, 0x232b8508, 0x5528040f },
		},
		{ /* 800 MHz */
			{ 0xa6255308, 0x26888209, 0x212b7508, 0x6188040f },
			{ 0x7524240b, 0xa6255708, 0x132b7508, 0x5528040f },
		},
	},
	{ /* Conversion stepping A1 and above */
		{ /* 667 MHz */
			{ 0xc5257208, 0x26888209, 0x26288208, 0x6188040f },
			{ 0x7524240b, 0xc5257608, 0x232b8508, 0x5528040f },
		},
		{ /* 800 MHz */
			{ 0xb6256308, 0x26888209, 0x212b7508, 0x6188040f },
			{ 0x7524240b, 0xb6256708, 0x132b7508, 0x5528040f },
		}
	}};

	/* Indexed [channel][cardF] for the 1067MT case. */
	static const u32 ddr3_1067_by_channel_and_card[][2][4] = {
	{ /* Channel A */
		{ 0xb2254708, 0x002b7408, 0x132b8008, 0x7228060f },
		{ 0xb0255008, 0xa4254108, 0x4528b409, 0x9428230f },
	},
	{ /* Channel B */
		{ 0xa4254208, 0x022b6108, 0x132b8208, 0x9228210f },
		{ 0x6024140b, 0x92244408, 0x252ba409, 0x9328360c },
	},
	};

	FOR_EACH_POPULATED_CHANNEL(dimms, ch) {
		if ((1 == ch) && CHANNEL_IS_POPULATED(dimms, 0) && (cardF[0] == cardF[1]))
			/* Only write if second channel population differs. */
			continue;
		const u32 *const data = (ddr3clock != MEM_CLOCK_1067MT)
			? ddr3_667_800_by_stepping_ddr3_and_card[a1step][2 - ddr3clock][cardF[ch]]
			: ddr3_1067_by_channel_and_card[ch][cardF[ch]];
		MCHBAR32(CxWRTy_MCHBAR(ch, 0)) = data[0];
		MCHBAR32(CxWRTy_MCHBAR(ch, 1)) = data[1];
		MCHBAR32(CxWRTy_MCHBAR(ch, 2)) = data[2];
		MCHBAR32(CxWRTy_MCHBAR(ch, 3)) = data[3];
	}

	/* Fixed values for both channels (0x14xx = ch 0, 0x15xx = ch 1). */
	MCHBAR32(0x1490) = 0x00e70067;
	MCHBAR32(0x1494) = 0x000d8000;
	MCHBAR32(0x1590) = 0x00e70067;
	MCHBAR32(0x1594) = 0x000d8000;
}
/* Program per-channel read-I/O settings.
 *
 * Walks the eight dwords at 0x14b0/0x15b0 (from +0x1c down to +0x00)
 * and programs clock-dependent fields in bits 16-23/25-27; at 1067MT
 * the value additionally depends on the SFF platform flag. */
static void ddr3_read_io_init(const mem_clock_t ddr3clock,
			      const dimminfo_t *const dimms,
			      const int sff)
{
	int ch;

	FOR_EACH_POPULATED_CHANNEL(dimms, ch) {
		u32 addr, tmp;
		const unsigned int base = 0x14b0 + (ch * 0x0100);
		/* Descend through the eight per-channel dwords. */
		for (addr = base + 0x1c; addr >= base; addr -= 4) {
			tmp = MCHBAR32(addr);
			tmp &= ~((3 << 25) | (1 << 8) | (7 << 16) | (0xf << 20) | (1 << 27));
			tmp |= (1 << 27);
			switch (ddr3clock) {
			case MEM_CLOCK_667MT:
				tmp |= (1 << 16) | (4 << 20);
				break;
			case MEM_CLOCK_800MT:
				tmp |= (2 << 16) | (3 << 20);
				break;
			case MEM_CLOCK_1067MT:
				if (!sff)
					tmp |= (2 << 16) | (1 << 20);
				else
					tmp |= (2 << 16) | (2 << 20);
				break;
			default:
				die("Wrong clock");
			}
			MCHBAR32(addr) = tmp;
		}
	}
}
1411
/* Perform system-memory I/O initialization.
 *
 * Programs global buffer/DLL configuration registers (0x14xx and
 * 0x4xx ranges, several with clock-dependent fields), then runs the
 * clock-mux selection and the write/read I/O init for both channels.
 *
 * Only stepping B1 and later is supported. The unnamed register bits
 * are vendor magic; comments describe only the visible structure. */
static void memory_io_init(const mem_clock_t ddr3clock,
			   const dimminfo_t *const dimms,
			   const stepping_t stepping,
			   const int sff)
{
	u32 tmp;

	if (stepping < STEPPING_B1)
		die("Stepping <B1 unsupported in "
			"system-memory i/o initialization.\n");

	tmp = MCHBAR32(0x1400);
	tmp &= ~(3<<13);
	tmp |= (1<<9) | (1<<13);
	MCHBAR32(0x1400) = tmp;

	/* 0x140c: fixed bits plus a clock-selected field in 31:28. */
	tmp = MCHBAR32(0x140c);
	tmp &= ~(0xff | (1<<11) | (1<<12) |
		 (1<<16) | (1<<18) | (1<<27) | (0xf<<28));
	tmp |= (1<<7) | (1<<11) | (1<<16);
	switch (ddr3clock) {
	case MEM_CLOCK_667MT:
		tmp |= 9 << 28;
		break;
	case MEM_CLOCK_800MT:
		tmp |= 7 << 28;
		break;
	case MEM_CLOCK_1067MT:
		tmp |= 8 << 28;
		break;
	}
	MCHBAR32(0x140c) = tmp;

	MCHBAR32(0x1440) &= ~1;

	/* 0x1414: clock-selected fields in 27:24 and 19:16. */
	tmp = MCHBAR32(0x1414);
	tmp &= ~((1<<20) | (7<<11) | (0xf << 24) | (0xf << 16));
	tmp |= (3<<11);
	switch (ddr3clock) {
	case MEM_CLOCK_667MT:
		tmp |= (2 << 24) | (10 << 16);
		break;
	case MEM_CLOCK_800MT:
		tmp |= (3 << 24) | (7 << 16);
		break;
	case MEM_CLOCK_1067MT:
		tmp |= (4 << 24) | (4 << 16);
		break;
	}
	MCHBAR32(0x1414) = tmp;

	MCHBAR32(0x1418) &= ~((1<<3) | (1<<11) | (1<<19) | (1<<27));

	MCHBAR32(0x141c) &= ~((1<<3) | (1<<11) | (1<<19) | (1<<27));

	MCHBAR32(0x1428) |= 1<<14;

	/* 0x142c: clock-selected fields in 11:8 and 3:0. */
	tmp = MCHBAR32(0x142c);
	tmp &= ~((0xf << 8) | (0x7 << 20) | 0xf | (0xf << 24));
	tmp |= (0x3 << 20) | (5 << 24);
	switch (ddr3clock) {
	case MEM_CLOCK_667MT:
		tmp |= (2 << 8) | 0xc;
		break;
	case MEM_CLOCK_800MT:
		tmp |= (3 << 8) | 0xa;
		break;
	case MEM_CLOCK_1067MT:
		tmp |= (4 << 8) | 0x7;
		break;
	}
	MCHBAR32(0x142c) = tmp;

	tmp = MCHBAR32(0x400);
	tmp &= ~((3 << 4) | (3 << 16) | (3 << 30));
	tmp |= (2 << 4) | (2 << 16);
	MCHBAR32(0x400) = tmp;

	MCHBAR32(0x404) &= ~(0xf << 20);

	MCHBAR32(0x40c) &= ~(1 << 6);

	tmp = MCHBAR32(0x410);
	tmp &= ~(7 << 28);
	tmp |= 2 << 28;
	MCHBAR32(0x410) = tmp;

	tmp = MCHBAR32(0x41c);
	tmp &= ~0x77;
	tmp |= 0x11;
	MCHBAR32(0x41c) = tmp;

	/* Per-channel clock-mux and write/read I/O programming. */
	ddr3_select_clock_mux(ddr3clock, dimms, stepping);

	ddr3_write_io_init(ddr3clock, dimms, stepping, sff);

	ddr3_read_io_init(ddr3clock, dimms, sff);
}
1510
/* Perform JEDEC initialization of all populated ranks.
 *
 * The DCC register selects the command type (NOP / EMRS / MRS); the
 * mode-register value itself is conveyed on the address lines of a
 * dummy read from the rank's base address, so each read32() below
 * issues one mode-register-set command to that rank. */
static void jedec_init(const timings_t *const timings,
		       const dimminfo_t *const dimms)
{
	/* wr_lut below only covers tWR values of 5..12. */
	if ((timings->tWR < 5) || (timings->tWR > 12))
		die("tWR value unsupported in Jedec initialization.\n");

	/* Pre-jedec settings */
	MCHBAR32(0x40) |= (1 << 1);
	MCHBAR32(0x230) |= (3 << 1);
	MCHBAR32(0x238) |= (3 << 24);
	MCHBAR32(0x23c) |= (3 << 24);

	/* Normal write pointer operation */
	MCHBAR32(0x14f0) |= (1 << 9);
	MCHBAR32(0x15f0) |= (1 << 9);

	MCHBAR32(DCC_MCHBAR) = (MCHBAR32(DCC_MCHBAR) & ~DCC_CMD_MASK) | DCC_CMD_NOP;

	/* Toggle bit 2 of D0F0 config register 0xf0 (off, then on). */
	u8 reg8 = pci_read_config8(PCI_DEV(0, 0, 0), 0xf0);
	pci_write_config8(PCI_DEV(0, 0, 0), 0xf0, reg8 & ~(1 << 2));
	reg8 = pci_read_config8(PCI_DEV(0, 0, 0), 0xf0);
	pci_write_config8(PCI_DEV(0, 0, 0), 0xf0, reg8 | (1 << 2));
	udelay(2);

	/* Encoded write-recovery values for tWR of:
				     5  6  7  8  9  10 11 12 */
	static const u8 wr_lut[] = { 1, 2, 3, 4, 5, 5, 6, 6 };

	/* Mode-register fields, pre-shifted to their address-line
	   positions. */
	const int WL = ((timings->tWL - 5) & 7) << 6;
	const int ODT_120OHMS = (1 << 9);
	const int ODS_34OHMS = (1 << 4);
	const int WR = (wr_lut[timings->tWR - 5] & 7) << 12;
	const int DLL1 = 1 << 11;
	const int CAS = ((timings->CAS - 4) & 7) << 7;
	const int INTERLEAVED = 1 << 6;/* This is READ Burst Type == interleaved. */

	int ch, r;
	FOR_EACH_POPULATED_RANK(dimms, ch, r) {
		/* We won't do this in dual-interleaved mode,
		   so don't care about the offset. */
		const u32 rankaddr = raminit_get_rank_addr(ch, r);
		printk(BIOS_DEBUG, "Performing Jedec initialization at address 0x%08x.\n", rankaddr);
		/* EMRS(2) with write latency, EMRS(3), EMRS(1) with
		   ODT/drive strength, then MRS twice: first with the
		   DLL1 bit set, then without. */
		MCHBAR32(DCC_MCHBAR) = (MCHBAR32(DCC_MCHBAR) & ~DCC_SET_EREG_MASK) | DCC_SET_EREGx(2);
		read32(rankaddr | WL);
		MCHBAR32(DCC_MCHBAR) = (MCHBAR32(DCC_MCHBAR) & ~DCC_SET_EREG_MASK) | DCC_SET_EREGx(3);
		read32(rankaddr);
		MCHBAR32(DCC_MCHBAR) = (MCHBAR32(DCC_MCHBAR) & ~DCC_SET_EREG_MASK) | DCC_SET_EREGx(1);
		read32(rankaddr | ODT_120OHMS | ODS_34OHMS);
		MCHBAR32(DCC_MCHBAR) = (MCHBAR32(DCC_MCHBAR) & ~DCC_CMD_MASK) | DCC_SET_MREG;
		read32(rankaddr | WR | DLL1 | CAS | INTERLEAVED);
		MCHBAR32(DCC_MCHBAR) = (MCHBAR32(DCC_MCHBAR) & ~DCC_CMD_MASK) | DCC_SET_MREG;
		read32(rankaddr | WR | CAS | INTERLEAVED);
	}
}
1564
1565static void ddr3_calibrate_zq(void) {
1566 udelay(2);
1567
1568 u32 tmp = MCHBAR32(DCC_MCHBAR);
1569 tmp &= ~(7 << 16);
1570 tmp |= (5 << 16); /* ZQ calibration mode */
1571 MCHBAR32(DCC_MCHBAR) = tmp;
1572
1573 MCHBAR32(CxDRT6_MCHBAR(0)) |= (1 << 3);
1574 MCHBAR32(CxDRT6_MCHBAR(1)) |= (1 << 3);
1575
1576 udelay(1);
1577
1578 MCHBAR32(CxDRT6_MCHBAR(0)) &= ~(1 << 3);
1579 MCHBAR32(CxDRT6_MCHBAR(1)) &= ~(1 << 3);
1580
1581 MCHBAR32(DCC_MCHBAR) |= (7 << 16); /* Normal operation */
1582}
1583
/* Programming steps after JEDEC initialization: reverts the
 * pre-jedec settings from jedec_init() and programs further
 * (undocumented magic) fields in 0x238/0x23c and the per-channel
 * 0x1250/0x1350 registers. Quad-core parts get extra bits in 0xb14. */
static void post_jedec_sequence(const int cores) {
	const int quadcore = cores == 4;

	/* Undo the pre-jedec settings. */
	MCHBAR32(0x0040) &= ~(1 << 1);
	MCHBAR32(0x0230) &= ~(3 << 1);
	MCHBAR32(0x0230) |= 1 << 15;
	MCHBAR32(0x0230) &= ~(1 << 19);
	/* Per-channel values (0x12xx = ch 0, 0x13xx = ch 1). */
	MCHBAR32(0x1250) = 0x6c4;
	MCHBAR32(0x1350) = 0x6c4;
	MCHBAR32(0x1254) = 0x871a066d;
	MCHBAR32(0x1354) = 0x871a066d;
	MCHBAR32(0x0238) |= 1 << 26;
	MCHBAR32(0x0238) &= ~(3 << 24);
	MCHBAR32(0x0238) |= 1 << 23;
	MCHBAR32(0x0238) = (MCHBAR32(0x238) & ~(7 << 20)) | (3 << 20);
	MCHBAR32(0x0238) = (MCHBAR32(0x238) & ~(7 << 17)) | (6 << 17);
	MCHBAR32(0x0238) = (MCHBAR32(0x238) & ~(7 << 14)) | (6 << 14);
	MCHBAR32(0x0238) = (MCHBAR32(0x238) & ~(7 << 11)) | (6 << 11);
	MCHBAR32(0x0238) = (MCHBAR32(0x238) & ~(7 << 8)) | (6 << 8);
	MCHBAR32(0x023c) &= ~(3 << 24);
	MCHBAR32(0x023c) &= ~(1 << 23);
	MCHBAR32(0x023c) = (MCHBAR32(0x23c) & ~(7 << 20)) | (3 << 20);
	MCHBAR32(0x023c) = (MCHBAR32(0x23c) & ~(7 << 17)) | (6 << 17);
	MCHBAR32(0x023c) = (MCHBAR32(0x23c) & ~(7 << 14)) | (6 << 14);
	MCHBAR32(0x023c) = (MCHBAR32(0x23c) & ~(7 << 11)) | (6 << 11);
	MCHBAR32(0x023c) = (MCHBAR32(0x23c) & ~(7 << 8)) | (6 << 8);

	if (quadcore) {
		MCHBAR32(0xb14) |= (0xbfbf << 16);
	}
}
1615
1616static void dram_optimizations(const timings_t *const timings,
1617 const dimminfo_t *const dimms)
1618{
1619 int ch;
1620
1621 FOR_EACH_POPULATED_CHANNEL(dimms, ch) {
1622 const unsigned int mchbar = CxDRC1_MCHBAR(ch);
1623 u32 cxdrc1 = MCHBAR32(mchbar);
1624 cxdrc1 &= ~CxDRC1_SSDS_MASK;
1625 if (dimms[ch].ranks == 1)
1626 cxdrc1 |= CxDRC1_SS;
1627 else
1628 cxdrc1 |= CxDRC1_DS;
1629 MCHBAR32(mchbar) = cxdrc1;
1630 }
1631}
1632
1633u32 raminit_get_rank_addr(unsigned int channel, unsigned int rank)
1634{
1635 if (!channel && !rank)
1636 return 0; /* Address of first rank */
1637
1638 /* Read the bound of the previous rank. */
1639 if (rank > 0) {
1640 rank--;
1641 } else {
1642 rank = 3; /* Highest rank per channel */
1643 channel--;
1644 }
1645 const u32 reg = MCHBAR32(CxDRBy_MCHBAR(channel, rank));
1646 /* Bound is in 32MB. */
1647 return ((reg & CxDRBy_BOUND_MASK(rank)) >> CxDRBy_BOUND_SHIFT(rank)) << 25;
1648}
1649
1650void raminit_reset_readwrite_pointers(void) {
1651 MCHBAR32(0x1234) |= (1 << 6);
1652 MCHBAR32(0x1234) &= ~(1 << 6);
1653 MCHBAR32(0x1334) |= (1 << 6);
1654 MCHBAR32(0x1334) &= ~(1 << 6);
1655 MCHBAR32(0x14f0) &= ~(1 << 9);
1656 MCHBAR32(0x14f0) |= (1 << 9);
1657 MCHBAR32(0x14f0) |= (1 << 10);
1658 MCHBAR32(0x15f0) &= ~(1 << 9);
1659 MCHBAR32(0x15f0) |= (1 << 9);
1660 MCHBAR32(0x15f0) |= (1 << 10);
1661}
1662
/* Top-level DDR3 RAM initialization.
 *
 * @sysinfo  collected platform/DIMM information (filled in here by
 *           collect_dimm_config()).
 * @s3resume non-zero on S3 resume; skips JEDEC init and power-up
 *           toggling that must not be repeated on a warm path.
 *
 * The order of the steps below is significant. */
void raminit(sysinfo_t *const sysinfo, const int s3resume)
{
	const dimminfo_t *const dimms = sysinfo->dimms;
	const timings_t *const timings = &sysinfo->selected_timings;
	/* Only the GS45 (small form factor) needs special handling. */
	const int sff = sysinfo->gfx_type == GMCH_GS45;

	int ch;
	u8 reg8;


	/* Wait for some bit, maybe TXT clear. */
	if (sysinfo->txt_enabled) {
		while (!(read8(0xfed40000) & (1 << 7))) {}
	}

	/* Enable SMBUS. */
	enable_smbus();

	/* Collect information about DIMMs and find common settings. */
	collect_dimm_config(sysinfo);

	/* Check for bad warm boot. */
	reset_on_bad_warmboot();


	/***** From now on, program according to collected infos: *****/

	/* Program DRAM type. */
	switch (sysinfo->spd_type) {
	case DDR2:
		MCHBAR8(0x1434) |= (1 << 7);
		break;
	case DDR3:
		MCHBAR8(0x1434) |= (3 << 0);
		break;
	}

	/* Program system memory frequency. */
	set_system_memory_frequency(timings);
	/* Program IGD memory frequency. */
	set_igd_memory_frequencies(sysinfo);

	/* Configure DRAM control mode for populated channels. */
	configure_dram_control_mode(timings, dimms);

	/* Initialize RCOMP. */
	rcomp_initialization(sysinfo->stepping, sff);

	/* Power-up DRAM. */
	dram_powerup(s3resume);
	/* Program DRAM timings. */
	dram_program_timings(timings);
	/* Program number of banks. */
	dram_program_banks(dimms);
	/* Enable DRAM clock pairs for populated DIMMs. */
	FOR_EACH_POPULATED_CHANNEL(dimms, ch)
		MCHBAR32(CxDCLKDIS_MCHBAR(ch)) |= CxDCLKDIS_ENABLE;

	/* Enable On-Die Termination. */
	odt_setup(timings, sff);
	/* Miscellaneous settings. */
	misc_settings(timings, sysinfo->stepping);
	/* Program clock crossing registers. */
	clock_crossing_setup(timings->fsb_clock, timings->mem_clock, dimms);
	/* Program egress VC1 timings. */
	vc1_program_timings(timings->fsb_clock);
	/* Perform system-memory i/o initialization. */
	memory_io_init(timings->mem_clock, dimms, sysinfo->stepping, sff);

	/* Initialize memory map with dummy values of 128MB per rank with a
	   page size of 4KB. This makes the JEDEC initialization code easier. */
	prejedec_memory_map(dimms, timings->channel_mode);
	if (!s3resume)
		/* Perform JEDEC initialization of DIMMS. */
		jedec_init(timings, dimms);
	/* Some programming steps after JEDEC initialization. */
	post_jedec_sequence(sysinfo->cores);

	/* Announce normal operation, initialization completed. */
	MCHBAR32(DCC_MCHBAR) |= (0x7 << 16) | (0x1 << 19);
	/* Toggle bit 2 of D0F0 config register 0xf0 (on, then off). */
	reg8 = pci_read_config8(PCI_DEV(0, 0, 0), 0xf0);
	pci_write_config8(PCI_DEV(0, 0, 0), 0xf0, reg8 | (1 << 2));
	reg8 = pci_read_config8(PCI_DEV(0, 0, 0), 0xf0);
	pci_write_config8(PCI_DEV(0, 0, 0), 0xf0, reg8 & ~(1 << 2));


	/* Take a breath (the reader). */


	/* Perform ZQ calibration for DDR3. */
	ddr3_calibrate_zq();

	/* Perform receive-enable calibration. */
	raminit_receive_enable_calibration(timings, dimms);
	/* Lend clock values from receive-enable calibration. */
	MCHBAR32(0x1224) = (MCHBAR32(0x1224) & ~(0xf0)) |
			   ((((MCHBAR32(0x121c) >> 7) - 1) & 0xf) << 4);
	MCHBAR32(0x1324) = (MCHBAR32(0x1324) & ~(0xf0)) |
			   ((((MCHBAR32(0x131c) >> 7) - 1) & 0xf) << 4);

	/* Perform read/write training for high clock rate. */
	if (timings->mem_clock == MEM_CLOCK_1067MT) {
		raminit_read_training(dimms, s3resume);
		raminit_write_training(timings->mem_clock, dimms, s3resume);
	}

	/* Program final memory map (with real values). */
	program_memory_map(dimms, timings->channel_mode, 0);

	/* Some last optimizations. */
	dram_optimizations(timings, dimms);

	/* Mark raminit being finished. :-) */
	u8 tmp8 = pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa2) & ~(1 << 7);
	pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, tmp8);
}