/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2013 Vladimir Serbinenko.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

/* Please don't remove this. It's needed to do debugging and reverse
   engineering to support more Nehalem variants in the future. */
#ifndef REAL
#define REAL 1
#endif
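
/*
 * Note (inferred from the !REAL paths below, not stated in the original):
 * building with REAL set to 1 targets the actual hardware, while REAL set
 * to 0 pulls in raminit_fake.c and plain typedefs instead, presumably so
 * the register sequence can be replayed and debugged off-target.
 */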

#if REAL
#include <stdlib.h>
#include <console/console.h>
#include <string.h>
#include <arch/io.h>
#include <cpu/x86/msr.h>
#include <cbmem.h>
#include <arch/cbfs.h>
#include <cbfs.h>
#include <ip_checksum.h>
#include <pc80/mc146818rtc.h>
#include <device/pci_def.h>
#include <device/device.h>
#include <arch/cpu.h>
#include <halt.h>
#include <spd.h>
#include "raminit.h"
#include "chip.h"
#include <timestamp.h>
#include <cpu/x86/mtrr.h>
#include <cpu/intel/speedstep.h>
#include <cpu/intel/turbo.h>
#include <northbridge/intel/common/mrc_cache.h>
#endif

#if !REAL
typedef unsigned char u8;
typedef unsigned short u16;
typedef unsigned int u32;
typedef u32 device_t;
#endif

#include "nehalem.h"

#include "southbridge/intel/ibexpeak/me.h"

#if REAL
#include <delay.h>
#endif

#define NORTHBRIDGE PCI_DEV(0, 0, 0)
#define SOUTHBRIDGE PCI_DEV(0, 0x1f, 0)
#define GMA PCI_DEV(0, 0x2, 0x0)
#define HECIDEV PCI_DEV(0, 0x16, 0)
#define HECIBAR 0x10

#define FOR_ALL_RANKS \
	for (channel = 0; channel < NUM_CHANNELS; channel++) \
		for (slot = 0; slot < NUM_SLOTS; slot++) \
			for (rank = 0; rank < NUM_RANKS; rank++)

#define FOR_POPULATED_RANKS \
	for (channel = 0; channel < NUM_CHANNELS; channel++) \
		for (slot = 0; slot < NUM_SLOTS; slot++) \
			for (rank = 0; rank < NUM_RANKS; rank++) \
				if (info->populated_ranks[channel][slot][rank])

#define FOR_POPULATED_RANKS_BACKWARDS \
	for (channel = NUM_CHANNELS - 1; channel >= 0; channel--) \
		for (slot = 0; slot < NUM_SLOTS; slot++) \
			for (rank = 0; rank < NUM_RANKS; rank++) \
				if (info->populated_ranks[channel][slot][rank])

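/*
 * Usage sketch for the iterator macros above (illustration, not part of the
 * original code): they expect int variables named channel, slot and rank in
 * the enclosing scope, and the *_POPULATED_* variants also expect a struct
 * raminfo pointer named info:
 *
 *	int channel, slot, rank;
 *	FOR_POPULATED_RANKS {
 *		printk(BIOS_DEBUG, "rank %d/%d/%d populated\n",
 *		       channel, slot, rank);
 *	}
 */
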
/* [REG_178][CHANNEL][2 * SLOT + RANK][LANE] */
typedef struct {
	u8 smallest;
	u8 largest;
} timing_bounds_t[2][2][2][9];

struct ram_training {
	/* [TM][CHANNEL][SLOT][RANK][LANE] */
	u16 lane_timings[4][2][2][2][9];
	u16 reg_178;
	u16 reg_10b;

	u8 reg178_center;
	u8 reg178_smallest;
	u8 reg178_largest;
	timing_bounds_t timing_bounds[2];
	u16 timing_offset[2][2][2][9];
	u16 timing2_offset[2][2][2][9];
	u16 timing2_bounds[2][2][2][9][2];
	u8 reg274265[2][3];	/* [CHANNEL][REGISTER] */
	u8 reg2ca9_bit0;
	u32 reg_6dc;
	u32 reg_6e8;
};

#if !REAL
#include "raminit_fake.c"
#else

#include <lib.h>		/* Prototypes */

static inline void write_mchbar32(u32 addr, u32 val)
{
	MCHBAR32(addr) = val;
}

static inline void write_mchbar16(u32 addr, u16 val)
{
	MCHBAR16(addr) = val;
}

static inline void write_mchbar8(u32 addr, u8 val)
{
	MCHBAR8(addr) = val;
}

static inline u32 read_mchbar32(u32 addr)
{
	return MCHBAR32(addr);
}

static inline u16 read_mchbar16(u32 addr)
{
	return MCHBAR16(addr);
}

static inline u8 read_mchbar8(u32 addr)
{
	return MCHBAR8(addr);
}

static void clflush(u32 addr)
{
	asm volatile ("clflush (%0)"::"r" (addr));
}

typedef struct _u128 {
	u64 lo;
	u64 hi;
} u128;

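/*
 * read128() below does a single 16-byte load with SSE (movdqa), saving and
 * restoring %xmm0 around it, and returns the value as two u64s in out[0]
 * (low) and out[1] (high). Note that movdqa requires addr to be 16-byte
 * aligned; callers presumably guarantee this.
 */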
static void read128(u32 addr, u64 * out)
{
	u128 ret;
	u128 stor;
	asm volatile ("movdqu %%xmm0, %0\n"
		      "movdqa (%2), %%xmm0\n"
		      "movdqu %%xmm0, %1\n"
		      "movdqu %0, %%xmm0":"+m" (stor), "=m"(ret):"r"(addr));
	out[0] = ret.lo;
	out[1] = ret.hi;
}

#endif

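/*
 * write_1d0()/read_1d0() below drive what appears to be an indirect register
 * window in MCHBAR space: a command written to 0x1d0 (bit 30 = write,
 * bit 31 = read, bit 23 polled as busy), write data in 0x1d4 and read data
 * returned in 0x1d8. The 0x361/0x246 offsets and the double write to 0x33d
 * come from the reverse-engineered sequence and are not further documented.
 */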
/* OK */
static void write_1d0(u32 val, u16 addr, int bits, int flag)
{
	write_mchbar32(0x1d0, 0);
	while (read_mchbar32(0x1d0) & 0x800000);
	write_mchbar32(0x1d4,
		       (val & ((1 << bits) - 1)) | (2 << bits) | (flag <<
								  bits));
	write_mchbar32(0x1d0, 0x40000000 | addr);
	while (read_mchbar32(0x1d0) & 0x800000);
}

/* OK */
static u16 read_1d0(u16 addr, int split)
{
	u32 val;
	write_mchbar32(0x1d0, 0);
	while (read_mchbar32(0x1d0) & 0x800000);
	write_mchbar32(0x1d0,
		       0x80000000 | (((read_mchbar8(0x246) >> 2) & 3) +
				     0x361 - addr));
	while (read_mchbar32(0x1d0) & 0x800000);
	val = read_mchbar32(0x1d8);
	write_1d0(0, 0x33d, 0, 0);
	write_1d0(0, 0x33d, 0, 0);
	val &= ((1 << split) - 1);
	// printk (BIOS_ERR, "R1D0C [%x] => %x\n", addr, val);
	return val;
}

static void write32p(uintptr_t addr, uint32_t val)
{
	write32((void *)addr, val);
}

static uint32_t read32p(uintptr_t addr)
{
	return read32((void *)addr);
}

static void sfence(void)
{
#if REAL
	asm volatile ("sfence");
#endif
}

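/*
 * The two helpers below compute the per-lane register addresses used with
 * the per-channel indirect window (see read_500/write_500 further down):
 * each lane occupies a 0x124-byte block, with additional per-slot/per-rank
 * offsets and a separate offset per timing register set (tm 0..3). The
 * layout is inferred from the formula; it is not documented anywhere.
 */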
static inline u16 get_lane_offset(int slot, int rank, int lane)
{
	return 0x124 * lane + ((lane & 4) ? 0x23e : 0) + 11 * rank + 22 * slot -
	    0x452 * (lane == 8);
}

static inline u16 get_timing_register_addr(int lane, int tm, int slot, int rank)
{
	const u16 offs[] = { 0x1d, 0xa8, 0xe6, 0x5c };
	return get_lane_offset(slot, rank, lane) + offs[(tm + 3) % 4];
}

#if REAL
static u32 gav_real(int line, u32 in)
{
	// printk (BIOS_DEBUG, "%d: GAV: %x\n", line, in);
	return in;
}

#define gav(x) gav_real(__LINE__, (x))
#endif
struct raminfo {
	u16 clock_speed_index;	/* clock_speed (REAL, not DDR) / 133.(3) - 3 */
	u16 fsb_frequency;	/* in 1.(1)/2 MHz.  */
	u8 is_x16_module[2][2];	/* [CHANNEL][SLOT] */
	u8 density[2][2];	/* [CHANNEL][SLOT] */
	u8 populated_ranks[2][2][2];	/* [CHANNEL][SLOT][RANK] */
	int rank_start[2][2][2];
	u8 cas_latency;
	u8 board_lane_delay[9];
	u8 use_ecc;
	u8 revision;
	u8 max_supported_clock_speed_index;
	u8 uma_enabled;
	u8 spd[2][2][151];	/* [CHANNEL][SLOT][BYTE] */
	u8 silicon_revision;
	u8 populated_ranks_mask[2];
	u8 max_slots_used_in_channel;
	u8 mode4030[2];
	u16 avg4044[2];
	u16 max4048[2];
	unsigned total_memory_mb;
	unsigned interleaved_part_mb;
	unsigned non_interleaved_part_mb;

	u32 heci_bar;
	u64 heci_uma_addr;
	unsigned memory_reserved_for_heci_mb;

	struct ram_training training;
	u32 last_500_command[2];

	u32 delay46_ps[2];
	u32 delay54_ps[2];
	u8 revision_flag_1;
	u8 some_delay_1_cycle_floor;
	u8 some_delay_2_halfcycles_ceil;
	u8 some_delay_3_ps_rounded;

	const struct ram_training *cached_training;
};

static void
write_500(struct raminfo *info, int channel, u32 val, u16 addr, int bits,
	  int flag);

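/*
 * read_500()/write_500() below mirror the 0x1d0 mechanism on a per-channel
 * basis: the command register sits at MCHBAR 0x500 + (channel << 10), write
 * data at 0x504, read data at 0x508, with the same busy bit (0x800000).
 * The 0xb88 bias and the 0xb61 dummy write appear to come straight from the
 * traced vendor sequence and are not otherwise understood.
 */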
/* OK */
static u16
read_500(struct raminfo *info, int channel, u16 addr, int split)
{
	u32 val;
	info->last_500_command[channel] = 0x80000000;
	write_mchbar32(0x500 + (channel << 10), 0);
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
	write_mchbar32(0x500 + (channel << 10),
		       0x80000000 |
		       (((read_mchbar8(0x246 + (channel << 10)) >> 2) &
			 3) + 0xb88 - addr));
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
	val = read_mchbar32(0x508 + (channel << 10));
	return val & ((1 << split) - 1);
}

/* OK */
static void
write_500(struct raminfo *info, int channel, u32 val, u16 addr, int bits,
	  int flag)
{
	if (info->last_500_command[channel] == 0x80000000) {
		info->last_500_command[channel] = 0x40000000;
		write_500(info, channel, 0, 0xb61, 0, 0);
	}
	write_mchbar32(0x500 + (channel << 10), 0);
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
	write_mchbar32(0x504 + (channel << 10),
		       (val & ((1 << bits) - 1)) | (2 << bits) | (flag <<
								  bits));
	write_mchbar32(0x500 + (channel << 10), 0x40000000 | addr);
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
}

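/*
 * rw_test() appears to be a quick per-byte-lane sanity check of one rank:
 * it writes an all-zeros/all-ones byte pattern into the rank's window at
 * physical address (rank << 28) and returns an 8-bit mask, bits 0-3 for the
 * even dword and bits 4-7 for the odd one, with a bit cleared for every
 * byte lane that read back wrong; 0xff therefore means all lanes are good.
 */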
static int rw_test(int rank)
{
	const u32 mask = 0xf00fc33c;
	int ok = 0xff;
	int i;
	for (i = 0; i < 64; i++)
		write32p((rank << 28) | (i << 2), 0);
	sfence();
	for (i = 0; i < 64; i++)
		gav(read32p((rank << 28) | (i << 2)));
	sfence();
	for (i = 0; i < 32; i++) {
		u32 pat = (((mask >> i) & 1) ? 0xffffffff : 0);
		write32p((rank << 28) | (i << 3), pat);
		write32p((rank << 28) | (i << 3) | 4, pat);
	}
	sfence();
	for (i = 0; i < 32; i++) {
		u8 pat = (((mask >> i) & 1) ? 0xff : 0);
		int j;
		u32 val;
		gav(val = read32p((rank << 28) | (i << 3)));
		for (j = 0; j < 4; j++)
			if (((val >> (j * 8)) & 0xff) != pat)
				ok &= ~(1 << j);
		gav(val = read32p((rank << 28) | (i << 3) | 4));
		for (j = 0; j < 4; j++)
			if (((val >> (j * 8)) & 0xff) != pat)
				ok &= ~(16 << j);
	}
	sfence();
	for (i = 0; i < 64; i++)
		write32p((rank << 28) | (i << 2), 0);
	sfence();
	for (i = 0; i < 64; i++)
		gav(read32p((rank << 28) | (i << 2)));

	return ok;
}

360static void
361program_timings(struct raminfo *info, u16 base, int channel, int slot, int rank)
362{
363 int lane;
364 for (lane = 0; lane < 8; lane++) {
365 write_500(info, channel,
366 base +
367 info->training.
368 lane_timings[2][channel][slot][rank][lane],
369 get_timing_register_addr(lane, 2, slot, rank), 9, 0);
370 write_500(info, channel,
371 base +
372 info->training.
373 lane_timings[3][channel][slot][rank][lane],
374 get_timing_register_addr(lane, 3, slot, rank), 9, 0);
375 }
376}
377
378static void write_26c(int channel, u16 si)
379{
380 write_mchbar32(0x26c + (channel << 10), 0x03243f35);
381 write_mchbar32(0x268 + (channel << 10), 0xcfc00000 | (si << 9));
382 write_mchbar16(0x2b9 + (channel << 10), si);
383}
384
385static u32 get_580(int channel, u8 addr)
386{
387 u32 ret;
388 gav(read_1d0(0x142, 3));
389 write_mchbar8(0x5ff, 0x0); /* OK */
390 write_mchbar8(0x5ff, 0x80); /* OK */
391 write_mchbar32(0x580 + (channel << 10), 0x8493c012 | addr);
392 write_mchbar8(0x580 + (channel << 10),
393 read_mchbar8(0x580 + (channel << 10)) | 1);
Elyes HAOUAS7db506c2016-10-02 11:56:39 +0200394 while (!((ret = read_mchbar32(0x580 + (channel << 10))) & 0x10000));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +0100395 write_mchbar8(0x580 + (channel << 10),
396 read_mchbar8(0x580 + (channel << 10)) & ~1);
397 return ret;
398}
399
400const int cached_config = 0;
401
402#define NUM_CHANNELS 2
403#define NUM_SLOTS 2
404#define NUM_RANKS 2
405#define RANK_SHIFT 28
406#define CHANNEL_SHIFT 10
407
408#include "raminit_tables.c"
409
410static void seq9(struct raminfo *info, int channel, int slot, int rank)
411{
412 int i, lane;
413
414 for (i = 0; i < 2; i++)
415 for (lane = 0; lane < 8; lane++)
416 write_500(info, channel,
417 info->training.lane_timings[i +
418 1][channel][slot]
419 [rank][lane], get_timing_register_addr(lane,
420 i + 1,
421 slot,
422 rank),
423 9, 0);
424
425 write_1d0(1, 0x103, 6, 1);
426 for (lane = 0; lane < 8; lane++)
427 write_500(info, channel,
428 info->training.
429 lane_timings[0][channel][slot][rank][lane],
430 get_timing_register_addr(lane, 0, slot, rank), 9, 0);
431
432 for (i = 0; i < 2; i++) {
433 for (lane = 0; lane < 8; lane++)
434 write_500(info, channel,
435 info->training.lane_timings[i +
436 1][channel][slot]
437 [rank][lane], get_timing_register_addr(lane,
438 i + 1,
439 slot,
440 rank),
441 9, 0);
442 gav(get_580(channel, ((i + 1) << 2) | (rank << 5)));
443 }
444
445 gav(read_1d0(0x142, 3)); // = 0x10408118
446 write_mchbar8(0x5ff, 0x0); /* OK */
447 write_mchbar8(0x5ff, 0x80); /* OK */
448 write_1d0(0x2, 0x142, 3, 1);
449 for (lane = 0; lane < 8; lane++) {
450 // printk (BIOS_ERR, "before: %x\n", info->training.lane_timings[2][channel][slot][rank][lane]);
451 info->training.lane_timings[2][channel][slot][rank][lane] =
452 read_500(info, channel,
453 get_timing_register_addr(lane, 2, slot, rank), 9);
454 //printk (BIOS_ERR, "after: %x\n", info->training.lane_timings[2][channel][slot][rank][lane]);
455 info->training.lane_timings[3][channel][slot][rank][lane] =
456 info->training.lane_timings[2][channel][slot][rank][lane] +
457 0x20;
458 }
459}
460
461static int count_ranks_in_channel(struct raminfo *info, int channel)
462{
463 int slot, rank;
464 int res = 0;
465 for (slot = 0; slot < NUM_SLOTS; slot++)
466 for (rank = 0; rank < NUM_SLOTS; rank++)
467 res += info->populated_ranks[channel][slot][rank];
468 return res;
469}
470
471static void
472config_rank(struct raminfo *info, int s3resume, int channel, int slot, int rank)
473{
474 int add;
475
476 write_1d0(0, 0x178, 7, 1);
477 seq9(info, channel, slot, rank);
478 program_timings(info, 0x80, channel, slot, rank);
479
480 if (channel == 0)
481 add = count_ranks_in_channel(info, 1);
482 else
483 add = 0;
484 if (!s3resume)
485 gav(rw_test(rank + add));
486 program_timings(info, 0x00, channel, slot, rank);
487 if (!s3resume)
488 gav(rw_test(rank + add));
489 if (!s3resume)
490 gav(rw_test(rank + add));
491 write_1d0(0, 0x142, 3, 1);
492 write_1d0(0, 0x103, 6, 1);
493
494 gav(get_580(channel, 0xc | (rank << 5)));
495 gav(read_1d0(0x142, 3));
496
497 write_mchbar8(0x5ff, 0x0); /* OK */
498 write_mchbar8(0x5ff, 0x80); /* OK */
499}
500
501static void set_4cf(struct raminfo *info, int channel, u8 val)
502{
503 gav(read_500(info, channel, 0x4cf, 4)); // = 0xc2300cf9
504 write_500(info, channel, val, 0x4cf, 4, 1);
505 gav(read_500(info, channel, 0x659, 4)); // = 0x80300839
506 write_500(info, channel, val, 0x659, 4, 1);
507 gav(read_500(info, channel, 0x697, 4)); // = 0x80300839
508 write_500(info, channel, val, 0x697, 4, 1);
509}
510
511static void set_334(int zero)
512{
513 int j, k, channel;
514 const u32 val3[] = { 0x2a2b2a2b, 0x26272627, 0x2e2f2e2f, 0x2a2b };
515 u32 vd8[2][16];
516
517 for (channel = 0; channel < NUM_CHANNELS; channel++) {
518 for (j = 0; j < 4; j++) {
519 u32 a = (j == 1) ? 0x29292929 : 0x31313131;
520 u32 lmask = (j == 3) ? 0xffff : 0xffffffff;
521 u16 c;
522 if ((j == 0 || j == 3) && zero)
523 c = 0;
524 else if (j == 3)
525 c = 0x5f;
526 else
527 c = 0x5f5f;
528
529 for (k = 0; k < 2; k++) {
530 write_mchbar32(0x138 + 8 * k,
531 (channel << 26) | (j << 24));
532 gav(vd8[1][(channel << 3) | (j << 1) | k] =
533 read_mchbar32(0x138 + 8 * k));
534 gav(vd8[0][(channel << 3) | (j << 1) | k] =
535 read_mchbar32(0x13c + 8 * k));
536 }
537
538 write_mchbar32(0x334 + (channel << 10) + (j * 0x44),
539 zero ? 0 : val3[j]);
540 write_mchbar32(0x32c + (channel << 10) + (j * 0x44),
541 zero ? 0 : (0x18191819 & lmask));
542 write_mchbar16(0x34a + (channel << 10) + (j * 0x44), c);
543 write_mchbar32(0x33c + (channel << 10) + (j * 0x44),
544 zero ? 0 : (a & lmask));
545 write_mchbar32(0x344 + (channel << 10) + (j * 0x44),
546 zero ? 0 : (a & lmask));
547 }
548 }
549
550 write_mchbar32(0x130, read_mchbar32(0x130) | 1); /* OK */
Elyes HAOUAS7db506c2016-10-02 11:56:39 +0200551 while (read_mchbar8(0x130) & 1); /* OK */
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +0100552}
553
554static void rmw_1d0(u16 addr, u32 and, u32 or, int split, int flag)
555{
556 u32 v;
557 v = read_1d0(addr, split);
558 write_1d0((v & and) | or, addr, split, flag);
559}
560
561static int find_highest_bit_set(u16 val)
562{
563 int i;
564 for (i = 15; i >= 0; i--)
565 if (val & (1 << i))
566 return i;
567 return -1;
568}
569
570static int find_lowest_bit_set32(u32 val)
571{
572 int i;
573 for (i = 0; i < 32; i++)
574 if (val & (1 << i))
575 return i;
576 return -1;
577}
578
enum {
	DEVICE_TYPE = 2,
	MODULE_TYPE = 3,
	DENSITY = 4,
	RANKS_AND_DQ = 7,
	MEMORY_BUS_WIDTH = 8,
	TIMEBASE_DIVIDEND = 10,
	TIMEBASE_DIVISOR = 11,
	CYCLETIME = 12,

	CAS_LATENCIES_LSB = 14,
	CAS_LATENCIES_MSB = 15,
	CAS_LATENCY_TIME = 16,
	THERMAL_AND_REFRESH = 31,
	REFERENCE_RAW_CARD_USED = 62,
	RANK1_ADDRESS_MAPPING = 63
};

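/*
 * The SPD offsets above follow the standard DDR3 SPD layout: medium
 * timebase dividend/divisor in bytes 10/11, tCKmin in byte 12, the
 * supported CAS latency bitmap in bytes 14/15, tAAmin in byte 16 and the
 * rank-1 address mirroring flag in byte 63. calculate_timings() below uses
 * them to pick a common clock and CAS latency for all populated DIMMs.
 */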
597static void calculate_timings(struct raminfo *info)
598{
599 unsigned cycletime;
600 unsigned cas_latency_time;
601 unsigned supported_cas_latencies;
602 unsigned channel, slot;
603 unsigned clock_speed_index;
604 unsigned min_cas_latency;
605 unsigned cas_latency;
606 unsigned max_clock_index;
607
608 /* Find common CAS latency */
609 supported_cas_latencies = 0x3fe;
610 for (channel = 0; channel < NUM_CHANNELS; channel++)
611 for (slot = 0; slot < NUM_SLOTS; slot++)
612 if (info->populated_ranks[channel][slot][0])
613 supported_cas_latencies &=
614 2 *
615 (info->
616 spd[channel][slot][CAS_LATENCIES_LSB] |
617 (info->
618 spd[channel][slot][CAS_LATENCIES_MSB] <<
619 8));
620
621 max_clock_index = min(3, info->max_supported_clock_speed_index);
622
623 cycletime = min_cycletime[max_clock_index];
624 cas_latency_time = min_cas_latency_time[max_clock_index];
625
626 for (channel = 0; channel < NUM_CHANNELS; channel++)
627 for (slot = 0; slot < NUM_SLOTS; slot++)
628 if (info->populated_ranks[channel][slot][0]) {
629 unsigned timebase;
630 timebase =
631 1000 *
632 info->
633 spd[channel][slot][TIMEBASE_DIVIDEND] /
634 info->spd[channel][slot][TIMEBASE_DIVISOR];
635 cycletime =
636 max(cycletime,
637 timebase *
638 info->spd[channel][slot][CYCLETIME]);
639 cas_latency_time =
640 max(cas_latency_time,
641 timebase *
642 info->
643 spd[channel][slot][CAS_LATENCY_TIME]);
644 }
645 for (clock_speed_index = 0; clock_speed_index < 3; clock_speed_index++) {
646 if (cycletime == min_cycletime[clock_speed_index])
647 break;
648 if (cycletime > min_cycletime[clock_speed_index]) {
649 clock_speed_index--;
650 cycletime = min_cycletime[clock_speed_index];
651 break;
652 }
653 }
Edward O'Callaghan7116ac82014-07-08 01:53:24 +1000654 min_cas_latency = CEIL_DIV(cas_latency_time, cycletime);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +0100655 cas_latency = 0;
656 while (supported_cas_latencies) {
657 cas_latency = find_highest_bit_set(supported_cas_latencies) + 3;
658 if (cas_latency <= min_cas_latency)
659 break;
660 supported_cas_latencies &=
661 ~(1 << find_highest_bit_set(supported_cas_latencies));
662 }
663
664 if (cas_latency != min_cas_latency && clock_speed_index)
665 clock_speed_index--;
666
667 if (cas_latency * min_cycletime[clock_speed_index] > 20000)
668 die("Couldn't configure DRAM");
669 info->clock_speed_index = clock_speed_index;
670 info->cas_latency = cas_latency;
671}
672
673static void program_base_timings(struct raminfo *info)
674{
675 unsigned channel;
676 unsigned slot, rank, lane;
677 unsigned extended_silicon_revision;
678 int i;
679
680 extended_silicon_revision = info->silicon_revision;
681 if (info->silicon_revision == 0)
682 for (channel = 0; channel < NUM_CHANNELS; channel++)
683 for (slot = 0; slot < NUM_SLOTS; slot++)
684 if ((info->
685 spd[channel][slot][MODULE_TYPE] & 0xF) ==
686 3)
687 extended_silicon_revision = 4;
688
689 for (channel = 0; channel < NUM_CHANNELS; channel++) {
690 for (slot = 0; slot < NUM_SLOTS; slot++)
691 for (rank = 0; rank < NUM_SLOTS; rank++) {
692 int card_timing_2;
693 if (!info->populated_ranks[channel][slot][rank])
694 continue;
695
696 for (lane = 0; lane < 9; lane++) {
697 int tm_reg;
698 int card_timing;
699
700 card_timing = 0;
701 if ((info->
702 spd[channel][slot][MODULE_TYPE] &
703 0xF) == 3) {
704 int reference_card;
705 reference_card =
706 info->
707 spd[channel][slot]
708 [REFERENCE_RAW_CARD_USED] &
709 0x1f;
710 if (reference_card == 3)
711 card_timing =
712 u16_ffd1188[0][lane]
713 [info->
714 clock_speed_index];
715 if (reference_card == 5)
716 card_timing =
717 u16_ffd1188[1][lane]
718 [info->
719 clock_speed_index];
720 }
721
722 info->training.
723 lane_timings[0][channel][slot][rank]
724 [lane] =
725 u8_FFFD1218[info->
726 clock_speed_index];
727 info->training.
728 lane_timings[1][channel][slot][rank]
729 [lane] = 256;
730
731 for (tm_reg = 2; tm_reg < 4; tm_reg++)
732 info->training.
733 lane_timings[tm_reg]
734 [channel][slot][rank][lane]
735 =
736 u8_FFFD1240[channel]
737 [extended_silicon_revision]
738 [lane][2 * slot +
739 rank][info->
740 clock_speed_index]
741 + info->max4048[channel]
742 +
743 u8_FFFD0C78[channel]
744 [extended_silicon_revision]
745 [info->
746 mode4030[channel]][slot]
747 [rank][info->
748 clock_speed_index]
749 + card_timing;
750 for (tm_reg = 0; tm_reg < 4; tm_reg++)
751 write_500(info, channel,
752 info->training.
753 lane_timings[tm_reg]
754 [channel][slot][rank]
755 [lane],
756 get_timing_register_addr
757 (lane, tm_reg, slot,
758 rank), 9, 0);
759 }
760
761 card_timing_2 = 0;
762 if (!(extended_silicon_revision != 4
763 || (info->
764 populated_ranks_mask[channel] & 5) ==
765 5)) {
766 if ((info->
767 spd[channel][slot]
768 [REFERENCE_RAW_CARD_USED] & 0x1F)
769 == 3)
770 card_timing_2 =
771 u16_FFFE0EB8[0][info->
772 clock_speed_index];
773 if ((info->
774 spd[channel][slot]
775 [REFERENCE_RAW_CARD_USED] & 0x1F)
776 == 5)
777 card_timing_2 =
778 u16_FFFE0EB8[1][info->
779 clock_speed_index];
780 }
781
782 for (i = 0; i < 3; i++)
783 write_500(info, channel,
784 (card_timing_2 +
785 info->max4048[channel]
786 +
787 u8_FFFD0EF8[channel]
788 [extended_silicon_revision]
789 [info->
790 mode4030[channel]][info->
791 clock_speed_index]),
792 u16_fffd0c50[i][slot][rank],
793 8, 1);
794 write_500(info, channel,
795 (info->max4048[channel] +
796 u8_FFFD0C78[channel]
797 [extended_silicon_revision][info->
798 mode4030
799 [channel]]
800 [slot][rank][info->
801 clock_speed_index]),
802 u16_fffd0c70[slot][rank], 7, 1);
803 }
804 if (!info->populated_ranks_mask[channel])
805 continue;
806 for (i = 0; i < 3; i++)
807 write_500(info, channel,
808 (info->max4048[channel] +
809 info->avg4044[channel]
810 +
811 u8_FFFD17E0[channel]
812 [extended_silicon_revision][info->
813 mode4030
814 [channel]][info->
815 clock_speed_index]),
816 u16_fffd0c68[i], 8, 1);
817 }
818}
819
static unsigned int fsbcycle_ps(struct raminfo *info)
{
	return 900000 / info->fsb_frequency;
}

/* The time of DDR transfer in ps.  */
static unsigned int halfcycle_ps(struct raminfo *info)
{
	return 3750 / (info->clock_speed_index + 3);
}

/* The time of clock cycle in ps.  */
static unsigned int cycle_ps(struct raminfo *info)
{
	return 2 * halfcycle_ps(info);
}

/* Frequency in 1.(1)=10/9 MHz units. */
static unsigned frequency_11(struct raminfo *info)
{
	return (info->clock_speed_index + 3) * 120;
}

/* Frequency in 0.1 MHz units. */
static unsigned frequency_01(struct raminfo *info)
{
	return 100 * frequency_11(info) / 9;
}

static unsigned ps_to_halfcycles(struct raminfo *info, unsigned int ps)
{
	return (frequency_11(info) * 2) * ps / 900000;
}

static unsigned ns_to_cycles(struct raminfo *info, unsigned int ns)
{
	return (frequency_11(info)) * ns / 900;
}

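/*
 * Worked example for the unit helpers above (derived from the formulas, not
 * from the original sources): clock_speed_index 0/1/2 corresponds to a DRAM
 * clock of 400/533/667 MHz, i.e. DDR3-800/1066/1333. For index 0,
 * halfcycle_ps() = 3750 / 3 = 1250 ps, cycle_ps() = 2500 ps and
 * frequency_11() = 3 * 120 = 360 units of 10/9 MHz = 400 MHz, so
 * frequency_01() = 100 * 360 / 9 = 4000 units of 0.1 MHz.
 */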
859static void compute_derived_timings(struct raminfo *info)
860{
861 unsigned channel, slot, rank;
862 int extended_silicon_revision;
863 int some_delay_1_ps;
864 int some_delay_2_ps;
865 int some_delay_2_halfcycles_ceil;
866 int some_delay_2_halfcycles_floor;
867 int some_delay_3_ps;
868 int some_delay_3_halfcycles;
869 int some_delay_3_ps_rounded;
870 int some_delay_1_cycle_ceil;
871 int some_delay_1_cycle_floor;
872
873 some_delay_3_halfcycles = 0;
874 some_delay_3_ps_rounded = 0;
875 extended_silicon_revision = info->silicon_revision;
876 if (!info->silicon_revision)
877 for (channel = 0; channel < NUM_CHANNELS; channel++)
878 for (slot = 0; slot < NUM_SLOTS; slot++)
879 if ((info->
880 spd[channel][slot][MODULE_TYPE] & 0xF) ==
881 3)
882 extended_silicon_revision = 4;
883 if (info->board_lane_delay[7] < 5)
884 info->board_lane_delay[7] = 5;
885 info->revision_flag_1 = 2;
886 if (info->silicon_revision == 2 || info->silicon_revision == 3)
887 info->revision_flag_1 = 0;
888 if (info->revision < 16)
889 info->revision_flag_1 = 0;
890
891 if (info->revision < 8)
892 info->revision_flag_1 = 0;
893 if (info->revision >= 8 && (info->silicon_revision == 0
894 || info->silicon_revision == 1))
895 some_delay_2_ps = 735;
896 else
897 some_delay_2_ps = 750;
898
899 if (info->revision >= 0x10 && (info->silicon_revision == 0
900 || info->silicon_revision == 1))
901 some_delay_1_ps = 3929;
902 else
903 some_delay_1_ps = 3490;
904
905 some_delay_1_cycle_floor = some_delay_1_ps / cycle_ps(info);
906 some_delay_1_cycle_ceil = some_delay_1_ps / cycle_ps(info);
907 if (some_delay_1_ps % cycle_ps(info))
908 some_delay_1_cycle_ceil++;
909 else
910 some_delay_1_cycle_floor--;
911 info->some_delay_1_cycle_floor = some_delay_1_cycle_floor;
912 if (info->revision_flag_1)
913 some_delay_2_ps = halfcycle_ps(info) >> 6;
914 some_delay_2_ps +=
915 max(some_delay_1_ps - 30,
916 2 * halfcycle_ps(info) * (some_delay_1_cycle_ceil - 1) + 1000) +
917 375;
918 some_delay_3_ps =
919 halfcycle_ps(info) - some_delay_2_ps % halfcycle_ps(info);
920 if (info->revision_flag_1) {
921 if (some_delay_3_ps < 150)
922 some_delay_3_halfcycles = 0;
923 else
924 some_delay_3_halfcycles =
925 (some_delay_3_ps << 6) / halfcycle_ps(info);
926 some_delay_3_ps_rounded =
927 halfcycle_ps(info) * some_delay_3_halfcycles >> 6;
928 }
929 some_delay_2_halfcycles_ceil =
930 (some_delay_2_ps + halfcycle_ps(info) - 1) / halfcycle_ps(info) -
931 2 * (some_delay_1_cycle_ceil - 1);
932 if (info->revision_flag_1 && some_delay_3_ps < 150)
933 some_delay_2_halfcycles_ceil++;
934 some_delay_2_halfcycles_floor = some_delay_2_halfcycles_ceil;
935 if (info->revision < 0x10)
936 some_delay_2_halfcycles_floor =
937 some_delay_2_halfcycles_ceil - 1;
938 if (!info->revision_flag_1)
939 some_delay_2_halfcycles_floor++;
940 info->some_delay_2_halfcycles_ceil = some_delay_2_halfcycles_ceil;
941 info->some_delay_3_ps_rounded = some_delay_3_ps_rounded;
942 if ((info->populated_ranks[0][0][0] && info->populated_ranks[0][1][0])
943 || (info->populated_ranks[1][0][0]
944 && info->populated_ranks[1][1][0]))
945 info->max_slots_used_in_channel = 2;
946 else
947 info->max_slots_used_in_channel = 1;
948 for (channel = 0; channel < 2; channel++)
949 write_mchbar32(0x244 + (channel << 10),
950 ((info->revision < 8) ? 1 : 0x200)
951 | ((2 - info->max_slots_used_in_channel) << 17) |
952 (channel << 21) | (info->
953 some_delay_1_cycle_floor <<
954 18) | 0x9510);
955 if (info->max_slots_used_in_channel == 1) {
956 info->mode4030[0] = (count_ranks_in_channel(info, 0) == 2);
957 info->mode4030[1] = (count_ranks_in_channel(info, 1) == 2);
958 } else {
959 info->mode4030[0] = ((count_ranks_in_channel(info, 0) == 1) || (count_ranks_in_channel(info, 0) == 2)) ? 2 : 3; /* 2 if 1 or 2 ranks */
960 info->mode4030[1] = ((count_ranks_in_channel(info, 1) == 1)
961 || (count_ranks_in_channel(info, 1) ==
962 2)) ? 2 : 3;
963 }
964 for (channel = 0; channel < NUM_CHANNELS; channel++) {
965 int max_of_unk;
966 int min_of_unk_2;
967
968 int i, count;
969 int sum;
970
971 if (!info->populated_ranks_mask[channel])
972 continue;
973
974 max_of_unk = 0;
975 min_of_unk_2 = 32767;
976
977 sum = 0;
978 count = 0;
979 for (i = 0; i < 3; i++) {
980 int unk1;
981 if (info->revision < 8)
982 unk1 =
983 u8_FFFD1891[0][channel][info->
984 clock_speed_index]
985 [i];
986 else if (!
987 (info->revision >= 0x10
988 || info->revision_flag_1))
989 unk1 =
990 u8_FFFD1891[1][channel][info->
991 clock_speed_index]
992 [i];
993 else
994 unk1 = 0;
995 for (slot = 0; slot < NUM_SLOTS; slot++)
996 for (rank = 0; rank < NUM_RANKS; rank++) {
997 int a = 0;
998 int b = 0;
999
1000 if (!info->
1001 populated_ranks[channel][slot]
1002 [rank])
1003 continue;
1004 if (extended_silicon_revision == 4
1005 && (info->
1006 populated_ranks_mask[channel] &
1007 5) != 5) {
1008 if ((info->
1009 spd[channel][slot]
1010 [REFERENCE_RAW_CARD_USED] &
1011 0x1F) == 3) {
1012 a = u16_ffd1178[0]
1013 [info->
1014 clock_speed_index];
1015 b = u16_fe0eb8[0][info->
1016 clock_speed_index];
1017 } else
1018 if ((info->
1019 spd[channel][slot]
1020 [REFERENCE_RAW_CARD_USED]
1021 & 0x1F) == 5) {
1022 a = u16_ffd1178[1]
1023 [info->
1024 clock_speed_index];
1025 b = u16_fe0eb8[1][info->
1026 clock_speed_index];
1027 }
1028 }
1029 min_of_unk_2 = min(min_of_unk_2, a);
1030 min_of_unk_2 = min(min_of_unk_2, b);
1031 if (rank == 0) {
1032 sum += a;
1033 count++;
1034 }
1035 {
1036 int t;
1037 t = b +
1038 u8_FFFD0EF8[channel]
1039 [extended_silicon_revision]
1040 [info->
1041 mode4030[channel]][info->
1042 clock_speed_index];
1043 if (unk1 >= t)
1044 max_of_unk =
1045 max(max_of_unk,
1046 unk1 - t);
1047 }
1048 }
1049 {
1050 int t =
1051 u8_FFFD17E0[channel]
1052 [extended_silicon_revision][info->
1053 mode4030
1054 [channel]]
1055 [info->clock_speed_index] + min_of_unk_2;
1056 if (unk1 >= t)
1057 max_of_unk = max(max_of_unk, unk1 - t);
1058 }
1059 }
1060
1061 info->avg4044[channel] = sum / count;
1062 info->max4048[channel] = max_of_unk;
1063 }
1064}
1065
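/*
 * jedec_read() below appears to issue a JEDEC mode-register-set command by
 * doing a dummy read inside the rank's address window while MCHBAR 0x271/
 * 0x671 switch the channels into command mode: addr3 selects the mode
 * register and the MR value is carried on the address bits (hence
 * value << 3). Odd ranks with address mirroring (SPD byte 63, bit 0) get
 * their address/bank bits swapped first, as DDR3 mirroring requires.
 */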
static void jedec_read(struct raminfo *info,
		       int channel, int slot, int rank,
		       int total_rank, u8 addr3, unsigned int value)
{
	/* Handle mirrored mapping.  */
	if ((rank & 1) && (info->spd[channel][slot][RANK1_ADDRESS_MAPPING] & 1))
		addr3 =
		    (addr3 & 0xCF) | ((addr3 & 0x10) << 1) | ((addr3 >> 1) &
							      0x10);
	write_mchbar8(0x271, addr3 | (read_mchbar8(0x271) & 0xC1));
	write_mchbar8(0x671, addr3 | (read_mchbar8(0x671) & 0xC1));

	/* Handle mirrored mapping.  */
	if ((rank & 1) && (info->spd[channel][slot][RANK1_ADDRESS_MAPPING] & 1))
		value =
		    (value & ~0x1f8) | ((value >> 1) & 0xa8) | ((value & 0xa8)
								<< 1);

	read32p((value << 3) | (total_rank << 28));

	write_mchbar8(0x271, (read_mchbar8(0x271) & 0xC3) | 2);
	write_mchbar8(0x671, (read_mchbar8(0x671) & 0xC3) | 2);

	read32p(total_rank << 28);
}
1091
1092enum {
1093 MR1_RZQ12 = 512,
1094 MR1_RZQ2 = 64,
1095 MR1_RZQ4 = 4,
1096 MR1_ODS34OHM = 2
1097};
1098
1099enum {
1100 MR0_BT_INTERLEAVED = 8,
1101 MR0_DLL_RESET_ON = 256
1102};
1103
1104enum {
1105 MR2_RTT_WR_DISABLED = 0,
1106 MR2_RZQ2 = 1 << 10
1107};
1108
1109static void jedec_init(struct raminfo *info)
1110{
1111 int write_recovery;
1112 int channel, slot, rank;
1113 int total_rank;
1114 int dll_on;
1115 int self_refresh_temperature;
1116 int auto_self_refresh;
1117
1118 auto_self_refresh = 1;
1119 self_refresh_temperature = 1;
1120 if (info->board_lane_delay[3] <= 10) {
1121 if (info->board_lane_delay[3] <= 8)
1122 write_recovery = info->board_lane_delay[3] - 4;
1123 else
1124 write_recovery = 5;
1125 } else {
1126 write_recovery = 6;
1127 }
1128 FOR_POPULATED_RANKS {
1129 auto_self_refresh &=
1130 (info->spd[channel][slot][THERMAL_AND_REFRESH] >> 2) & 1;
1131 self_refresh_temperature &=
1132 info->spd[channel][slot][THERMAL_AND_REFRESH] & 1;
1133 }
1134 if (auto_self_refresh == 1)
1135 self_refresh_temperature = 0;
1136
1137 dll_on = ((info->silicon_revision != 2 && info->silicon_revision != 3)
1138 || (info->populated_ranks[0][0][0]
1139 && info->populated_ranks[0][1][0])
1140 || (info->populated_ranks[1][0][0]
1141 && info->populated_ranks[1][1][0]));
1142
1143 total_rank = 0;
1144
1145 for (channel = NUM_CHANNELS - 1; channel >= 0; channel--) {
1146 int rtt, rtt_wr = MR2_RTT_WR_DISABLED;
1147 int rzq_reg58e;
1148
1149 if (info->silicon_revision == 2 || info->silicon_revision == 3) {
1150 rzq_reg58e = 64;
1151 rtt = MR1_RZQ2;
1152 if (info->clock_speed_index != 0) {
1153 rzq_reg58e = 4;
1154 if (info->populated_ranks_mask[channel] == 3)
1155 rtt = MR1_RZQ4;
1156 }
1157 } else {
1158 if ((info->populated_ranks_mask[channel] & 5) == 5) {
1159 rtt = MR1_RZQ12;
1160 rzq_reg58e = 64;
1161 rtt_wr = MR2_RZQ2;
1162 } else {
1163 rzq_reg58e = 4;
1164 rtt = MR1_RZQ4;
1165 }
1166 }
1167
1168 write_mchbar16(0x588 + (channel << 10), 0x0);
1169 write_mchbar16(0x58a + (channel << 10), 0x4);
1170 write_mchbar16(0x58c + (channel << 10), rtt | MR1_ODS34OHM);
1171 write_mchbar16(0x58e + (channel << 10), rzq_reg58e | 0x82);
1172 write_mchbar16(0x590 + (channel << 10), 0x1282);
1173
1174 for (slot = 0; slot < NUM_SLOTS; slot++)
1175 for (rank = 0; rank < NUM_RANKS; rank++)
1176 if (info->populated_ranks[channel][slot][rank]) {
1177 jedec_read(info, channel, slot, rank,
1178 total_rank, 0x28,
1179 rtt_wr | (info->
1180 clock_speed_index
1181 << 3)
1182 | (auto_self_refresh << 6) |
1183 (self_refresh_temperature <<
1184 7));
1185 jedec_read(info, channel, slot, rank,
1186 total_rank, 0x38, 0);
1187 jedec_read(info, channel, slot, rank,
1188 total_rank, 0x18,
1189 rtt | MR1_ODS34OHM);
1190 jedec_read(info, channel, slot, rank,
1191 total_rank, 6,
1192 (dll_on << 12) |
1193 (write_recovery << 9)
1194 | ((info->cas_latency - 4) <<
1195 4) | MR0_BT_INTERLEAVED |
1196 MR0_DLL_RESET_ON);
1197 total_rank++;
1198 }
1199 }
1200}
1201
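/*
 * program_modules_memory_map() below sizes each populated rank (256 MiB
 * scaled by the SPD density code and halved for x16 organisation, or a flat
 * 256 MiB per rank before JEDEC init) into per-rank registers at MCHBAR
 * 0x208/0x200, then splits the total into the channel-interleaved and
 * non-interleaved portions written to 0x100/0x104. The register meanings
 * are inferred from the code, not from documentation.
 */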
1202static void program_modules_memory_map(struct raminfo *info, int pre_jedec)
1203{
1204 unsigned channel, slot, rank;
1205 unsigned int total_mb[2] = { 0, 0 }; /* total memory per channel in MB */
1206 unsigned int channel_0_non_interleaved;
1207
1208 FOR_ALL_RANKS {
1209 if (info->populated_ranks[channel][slot][rank]) {
1210 total_mb[channel] +=
1211 pre_jedec ? 256 : (256 << info->
1212 density[channel][slot] >> info->
1213 is_x16_module[channel][slot]);
1214 write_mchbar8(0x208 + rank + 2 * slot + (channel << 10),
1215 (pre_jedec ? (1 | ((1 + 1) << 1))
1216 : (info->
1217 is_x16_module[channel][slot] |
1218 ((info->density[channel][slot] +
1219 1) << 1))) | 0x80);
1220 }
1221 write_mchbar16(0x200 + (channel << 10) + 4 * slot + 2 * rank,
1222 total_mb[channel] >> 6);
1223 }
1224
1225 info->total_memory_mb = total_mb[0] + total_mb[1];
1226
1227 info->interleaved_part_mb =
1228 pre_jedec ? 0 : 2 * min(total_mb[0], total_mb[1]);
1229 info->non_interleaved_part_mb =
1230 total_mb[0] + total_mb[1] - info->interleaved_part_mb;
1231 channel_0_non_interleaved = total_mb[0] - info->interleaved_part_mb / 2;
1232 write_mchbar32(0x100,
1233 channel_0_non_interleaved | (info->
1234 non_interleaved_part_mb <<
1235 16));
1236 if (!pre_jedec)
1237 write_mchbar16(0x104, info->interleaved_part_mb);
1238}
1239
1240static void program_board_delay(struct raminfo *info)
1241{
1242 int cas_latency_shift;
1243 int some_delay_ns;
1244 int some_delay_3_half_cycles;
1245
1246 unsigned channel, i;
1247 int high_multiplier;
1248 int lane_3_delay;
1249 int cas_latency_derived;
1250
1251 high_multiplier = 0;
1252 some_delay_ns = 200;
1253 some_delay_3_half_cycles = 4;
1254 cas_latency_shift = info->silicon_revision == 0
1255 || info->silicon_revision == 1 ? 1 : 0;
1256 if (info->revision < 8) {
1257 some_delay_ns = 600;
1258 cas_latency_shift = 0;
1259 }
1260 {
1261 int speed_bit;
1262 speed_bit =
1263 ((info->clock_speed_index > 1
1264 || (info->silicon_revision != 2
1265 && info->silicon_revision != 3))) ^ (info->revision >=
1266 0x10);
1267 write_500(info, 0, speed_bit | ((!info->use_ecc) << 1), 0x60e,
1268 3, 1);
1269 write_500(info, 1, speed_bit | ((!info->use_ecc) << 1), 0x60e,
1270 3, 1);
1271 if (info->revision >= 0x10 && info->clock_speed_index <= 1
1272 && (info->silicon_revision == 2
1273 || info->silicon_revision == 3))
1274 rmw_1d0(0x116, 5, 2, 4, 1);
1275 }
1276 write_mchbar32(0x120,
1277 (1 << (info->max_slots_used_in_channel + 28)) |
1278 0x188e7f9f);
1279
1280 write_mchbar8(0x124,
1281 info->board_lane_delay[4] +
1282 ((frequency_01(info) + 999) / 1000));
1283 write_mchbar16(0x125, 0x1360);
1284 write_mchbar8(0x127, 0x40);
1285 if (info->fsb_frequency < frequency_11(info) / 2) {
1286 unsigned some_delay_2_half_cycles;
1287 high_multiplier = 1;
1288 some_delay_2_half_cycles = ps_to_halfcycles(info,
1289 ((3 *
1290 fsbcycle_ps(info))
1291 >> 1) +
1292 (halfcycle_ps(info)
1293 *
1294 reg178_min[info->
1295 clock_speed_index]
1296 >> 6)
1297 +
1298 4 *
1299 halfcycle_ps(info)
1300 + 2230);
1301 some_delay_3_half_cycles =
1302 min((some_delay_2_half_cycles +
1303 (frequency_11(info) * 2) * (28 -
1304 some_delay_2_half_cycles) /
1305 (frequency_11(info) * 2 -
1306 4 * (info->fsb_frequency))) >> 3, 7);
1307 }
1308 if (read_mchbar8(0x2ca9) & 1)
1309 some_delay_3_half_cycles = 3;
1310 for (channel = 0; channel < NUM_CHANNELS; channel++) {
1311 write_mchbar32(0x220 + (channel << 10),
1312 read_mchbar32(0x220 +
1313 (channel << 10)) | 0x18001117);
1314 write_mchbar32(0x224 + (channel << 10),
1315 (info->max_slots_used_in_channel - 1)
1316 |
1317 ((info->cas_latency - 5 -
1318 info->clock_speed_index) << 21)
1319 |
1320 ((info->max_slots_used_in_channel +
1321 info->cas_latency - cas_latency_shift -
1322 4) << 16)
1323 | ((info->cas_latency - cas_latency_shift - 4) <<
1324 26)
1325 |
1326 ((info->cas_latency - info->clock_speed_index +
1327 info->max_slots_used_in_channel - 6) << 8));
1328 write_mchbar32(0x228 + (channel << 10),
1329 info->max_slots_used_in_channel);
1330 write_mchbar8(0x239 + (channel << 10), 32);
1331 write_mchbar32(0x248 + (channel << 10),
1332 (high_multiplier << 24) |
1333 (some_delay_3_half_cycles << 25) | 0x840000);
1334 write_mchbar32(0x278 + (channel << 10), 0xc362042);
1335 write_mchbar32(0x27c + (channel << 10), 0x8b000062);
1336 write_mchbar32(0x24c + (channel << 10),
1337 ((! !info->
1338 clock_speed_index) << 17) | (((2 +
1339 info->
1340 clock_speed_index
1341 -
1342 (! !info->
1343 clock_speed_index)))
1344 << 12) | 0x10200);
1345
1346 write_mchbar8(0x267 + (channel << 10), 0x4);
1347 write_mchbar16(0x272 + (channel << 10), 0x155);
1348 write_mchbar32(0x2bc + (channel << 10),
1349 (read_mchbar32(0x2bc + (channel << 10)) &
1350 0xFF000000)
1351 | 0x707070);
1352
1353 write_500(info, channel,
1354 ((!info->populated_ranks[channel][1][1])
1355 | (!info->populated_ranks[channel][1][0] << 1)
1356 | (!info->populated_ranks[channel][0][1] << 2)
1357 | (!info->populated_ranks[channel][0][0] << 3)),
1358 0x4c9, 4, 1);
1359 }
1360
1361 write_mchbar8(0x2c4, ((1 + (info->clock_speed_index != 0)) << 6) | 0xC);
1362 {
1363 u8 freq_divisor = 2;
1364 if (info->fsb_frequency == frequency_11(info))
1365 freq_divisor = 3;
1366 else if (2 * info->fsb_frequency < 3 * (frequency_11(info) / 2))
1367 freq_divisor = 1;
1368 else
1369 freq_divisor = 2;
1370 write_mchbar32(0x2c0, (freq_divisor << 11) | 0x6009c400);
1371 }
1372
1373 if (info->board_lane_delay[3] <= 10) {
1374 if (info->board_lane_delay[3] <= 8)
1375 lane_3_delay = info->board_lane_delay[3];
1376 else
1377 lane_3_delay = 10;
1378 } else {
1379 lane_3_delay = 12;
1380 }
1381 cas_latency_derived = info->cas_latency - info->clock_speed_index + 2;
1382 if (info->clock_speed_index > 1)
1383 cas_latency_derived++;
1384 for (channel = 0; channel < NUM_CHANNELS; channel++) {
1385 write_mchbar32(0x240 + (channel << 10),
1386 ((info->clock_speed_index ==
1387 0) * 0x11000) | 0x1002100 | ((2 +
1388 info->
1389 clock_speed_index)
1390 << 4) | (info->
1391 cas_latency
1392 - 3));
1393 write_500(info, channel, (info->clock_speed_index << 1) | 1,
1394 0x609, 6, 1);
1395 write_500(info, channel,
1396 info->clock_speed_index + 2 * info->cas_latency - 7,
1397 0x601, 6, 1);
1398
1399 write_mchbar32(0x250 + (channel << 10),
1400 ((lane_3_delay + info->clock_speed_index +
1401 9) << 6)
1402 | (info->board_lane_delay[7] << 2) | (info->
1403 board_lane_delay
1404 [4] << 16)
1405 | (info->board_lane_delay[1] << 25) | (info->
1406 board_lane_delay
1407 [1] << 29)
1408 | 1);
1409 write_mchbar32(0x254 + (channel << 10),
1410 (info->
1411 board_lane_delay[1] >> 3) | ((info->
1412 board_lane_delay
1413 [8] +
1414 4 *
1415 info->
1416 use_ecc) << 6) |
1417 0x80 | (info->board_lane_delay[6] << 1) | (info->
1418 board_lane_delay
1419 [2] <<
1420 28) |
1421 (cas_latency_derived << 16) | 0x4700000);
1422 write_mchbar32(0x258 + (channel << 10),
1423 ((info->board_lane_delay[5] +
1424 info->clock_speed_index +
1425 9) << 12) | ((info->clock_speed_index -
1426 info->cas_latency + 12) << 8)
1427 | (info->board_lane_delay[2] << 17) | (info->
1428 board_lane_delay
1429 [4] << 24)
1430 | 0x47);
1431 write_mchbar32(0x25c + (channel << 10),
1432 (info->board_lane_delay[1] << 1) | (info->
1433 board_lane_delay
1434 [0] << 8) |
1435 0x1da50000);
1436 write_mchbar8(0x264 + (channel << 10), 0xff);
1437 write_mchbar8(0x5f8 + (channel << 10),
1438 (cas_latency_shift << 3) | info->use_ecc);
1439 }
1440
1441 program_modules_memory_map(info, 1);
1442
1443 write_mchbar16(0x610,
1444 (min(ns_to_cycles(info, some_delay_ns) / 2, 127) << 9)
1445 | (read_mchbar16(0x610) & 0x1C3) | 0x3C);
1446 write_mchbar16(0x612, read_mchbar16(0x612) | 0x100);
1447 write_mchbar16(0x214, read_mchbar16(0x214) | 0x3E00);
	for (i = 0; i < 8; i++) {
		pci_write_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 0x80 + 4 * i,
				   (info->total_memory_mb - 64) | !i | 2);
		pci_write_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 0xc0 + 4 * i, 0);
	}
}

#define DEFAULT_PCI_MMIO_SIZE 2048
#define HOST_BRIDGE PCI_DEVFN(0, 0)
1457
1458static unsigned int get_mmio_size(void)
1459{
1460 const struct device *dev;
1461 const struct northbridge_intel_nehalem_config *cfg = NULL;
1462
1463 dev = dev_find_slot(0, HOST_BRIDGE);
1464 if (dev)
1465 cfg = dev->chip_info;
1466
1467 /* If this is zero, it just means devicetree.cb didn't set it */
1468 if (!cfg || cfg->pci_mmio_size == 0)
1469 return DEFAULT_PCI_MMIO_SIZE;
1470 else
1471 return cfg->pci_mmio_size;
1472}
1473
#define BETTER_MEMORY_MAP 0

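/*
 * program_total_memory_map() below fills in the usual northbridge memory
 * map registers: TOM (top of memory), TOLUD (top of low usable DRAM below
 * the PCI MMIO hole), TOUUD and the REMAP window that moves the memory
 * displaced by the hole above 4 GiB, plus the IGD/GTT stolen-memory bases
 * and the TSEG base for SMM. The local variables here are in MiB; the
 * QuickPath 0x80/0xc0 registers seem to mirror the resulting map.
 */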
1476static void program_total_memory_map(struct raminfo *info)
1477{
1478 unsigned int TOM, TOLUD, TOUUD;
1479 unsigned int quickpath_reserved;
1480 unsigned int REMAPbase;
1481 unsigned int uma_base_igd;
1482 unsigned int uma_base_gtt;
Patrick Rudolph266a1f72016-06-09 18:13:34 +02001483 unsigned int mmio_size;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001484 int memory_remap;
1485 unsigned int memory_map[8];
1486 int i;
1487 unsigned int current_limit;
1488 unsigned int tseg_base;
1489 int uma_size_igd = 0, uma_size_gtt = 0;
1490
1491 memset(memory_map, 0, sizeof(memory_map));
1492
1493#if REAL
1494 if (info->uma_enabled) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001495 u16 t = pci_read_config16(NORTHBRIDGE, D0F0_GGC);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001496 gav(t);
1497 const int uma_sizes_gtt[16] =
1498 { 0, 1, 0, 2, 0, 0, 0, 0, 0, 2, 3, 4, 42, 42, 42, 42 };
1499 /* Igd memory */
1500 const int uma_sizes_igd[16] = {
1501 0, 0, 0, 0, 0, 32, 48, 64, 128, 256, 96, 160, 224, 352,
1502 256, 512
1503 };
1504
1505 uma_size_igd = uma_sizes_igd[(t >> 4) & 0xF];
1506 uma_size_gtt = uma_sizes_gtt[(t >> 8) & 0xF];
1507 }
1508#endif
1509
Patrick Rudolph266a1f72016-06-09 18:13:34 +02001510 mmio_size = get_mmio_size();
1511
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001512 TOM = info->total_memory_mb;
1513 if (TOM == 4096)
1514 TOM = 4032;
1515 TOUUD = ALIGN_DOWN(TOM - info->memory_reserved_for_heci_mb, 64);
Patrick Rudolph266a1f72016-06-09 18:13:34 +02001516 TOLUD = ALIGN_DOWN(min(4096 - mmio_size + ALIGN_UP(uma_size_igd + uma_size_gtt, 64)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001517 , TOUUD), 64);
1518 memory_remap = 0;
1519 if (TOUUD - TOLUD > 64) {
1520 memory_remap = 1;
1521 REMAPbase = max(4096, TOUUD);
1522 TOUUD = TOUUD - TOLUD + 4096;
1523 }
1524 if (TOUUD > 4096)
1525 memory_map[2] = TOUUD | 1;
1526 quickpath_reserved = 0;
1527
1528 {
1529 u32 t;
1530
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001531 gav(t = pci_read_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 0x68));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001532 if (t & 0x800)
1533 quickpath_reserved =
1534 (1 << find_lowest_bit_set32(t >> 20));
1535 }
1536 if (memory_remap)
1537 TOUUD -= quickpath_reserved;
1538
1539#if !REAL
1540 if (info->uma_enabled) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001541 u16 t = pci_read_config16(NORTHBRIDGE, D0F0_GGC);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001542 gav(t);
1543 const int uma_sizes_gtt[16] =
1544 { 0, 1, 0, 2, 0, 0, 0, 0, 0, 2, 3, 4, 42, 42, 42, 42 };
1545 /* Igd memory */
1546 const int uma_sizes_igd[16] = {
1547 0, 0, 0, 0, 0, 32, 48, 64, 128, 256, 96, 160, 224, 352,
1548 256, 512
1549 };
1550
1551 uma_size_igd = uma_sizes_igd[(t >> 4) & 0xF];
1552 uma_size_gtt = uma_sizes_gtt[(t >> 8) & 0xF];
1553 }
1554#endif
1555
1556 uma_base_igd = TOLUD - uma_size_igd;
1557 uma_base_gtt = uma_base_igd - uma_size_gtt;
1558 tseg_base = ALIGN_DOWN(uma_base_gtt, 64) - (CONFIG_SMM_TSEG_SIZE >> 20);
1559 if (!memory_remap)
1560 tseg_base -= quickpath_reserved;
1561 tseg_base = ALIGN_DOWN(tseg_base, 8);
1562
	pci_write_config16(NORTHBRIDGE, D0F0_TOLUD, TOLUD << 4);
	pci_write_config16(NORTHBRIDGE, D0F0_TOM, TOM >> 6);
	if (memory_remap) {
		pci_write_config16(NORTHBRIDGE, D0F0_REMAPBASE, REMAPbase >> 6);
		pci_write_config16(NORTHBRIDGE, D0F0_REMAPLIMIT, (TOUUD - 64) >> 6);
	}
	pci_write_config16(NORTHBRIDGE, D0F0_TOUUD, TOUUD);

	if (info->uma_enabled) {
		pci_write_config32(NORTHBRIDGE, D0F0_IGD_BASE, uma_base_igd << 20);
		pci_write_config32(NORTHBRIDGE, D0F0_GTT_BASE, uma_base_gtt << 20);
	}
	pci_write_config32(NORTHBRIDGE, TSEG, tseg_base << 20);

	current_limit = 0;
	memory_map[0] = ALIGN_DOWN(uma_base_gtt, 64) | 1;
	memory_map[1] = 4096;
	for (i = 0; i < ARRAY_SIZE(memory_map); i++) {
		current_limit = max(current_limit, memory_map[i] & ~1);
		pci_write_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 4 * i + 0x80,
				   (memory_map[i] & 1) | ALIGN_DOWN(current_limit -
								    1, 64) | 2);
		pci_write_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 4 * i + 0xc0, 0);
	}
}
1588
1589static void collect_system_info(struct raminfo *info)
1590{
1591 u32 capid0[3];
1592 int i;
1593 unsigned channel;
1594
1595 /* Wait for some bit, maybe TXT clear. */
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02001596 while (!(read8((u8 *)0xfed40000) & (1 << 7)));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001597
1598 if (!info->heci_bar)
1599 gav(info->heci_bar =
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001600 pci_read_config32(HECIDEV, HECIBAR) & 0xFFFFFFF8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001601 if (!info->memory_reserved_for_heci_mb) {
1602 /* Wait for ME to be ready */
1603 intel_early_me_init();
1604 info->memory_reserved_for_heci_mb = intel_early_me_uma_size();
1605 }
1606
1607 for (i = 0; i < 3; i++)
1608 gav(capid0[i] =
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001609 pci_read_config32(NORTHBRIDGE, D0F0_CAPID0 | (i << 2)));
1610 gav(info->revision = pci_read_config8(NORTHBRIDGE, PCI_REVISION_ID));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001611 info->max_supported_clock_speed_index = (~capid0[1] & 7);
1612
1613 if ((capid0[1] >> 11) & 1)
1614 info->uma_enabled = 0;
1615 else
1616 gav(info->uma_enabled =
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001617 pci_read_config8(NORTHBRIDGE, D0F0_DEVEN) & 8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001618 /* Unrecognised: [0000:fffd3d2d] 37f81.37f82 ! CPUID: eax: 00000001; ecx: 00000e00 => 00020655.00010800.029ae3ff.bfebfbff */
1619 info->silicon_revision = 0;
1620
1621 if (capid0[2] & 2) {
1622 info->silicon_revision = 0;
1623 info->max_supported_clock_speed_index = 2;
1624 for (channel = 0; channel < NUM_CHANNELS; channel++)
1625 if (info->populated_ranks[channel][0][0]
1626 && (info->spd[channel][0][MODULE_TYPE] & 0xf) ==
1627 3) {
1628 info->silicon_revision = 2;
1629 info->max_supported_clock_speed_index = 1;
1630 }
1631 } else {
1632 switch (((capid0[2] >> 18) & 1) + 2 * ((capid0[1] >> 3) & 1)) {
1633 case 1:
1634 case 2:
1635 info->silicon_revision = 3;
1636 break;
1637 case 3:
1638 info->silicon_revision = 0;
1639 break;
1640 case 0:
1641 info->silicon_revision = 2;
1642 break;
1643 }
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001644 switch (pci_read_config16(NORTHBRIDGE, PCI_DEVICE_ID)) {
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001645 case 0x40:
1646 info->silicon_revision = 0;
1647 break;
1648 case 0x48:
1649 info->silicon_revision = 1;
1650 break;
1651 }
1652 }
1653}
1654
1655static void write_training_data(struct raminfo *info)
1656{
1657 int tm, channel, slot, rank, lane;
1658 if (info->revision < 8)
1659 return;
1660
1661 for (tm = 0; tm < 4; tm++)
1662 for (channel = 0; channel < NUM_CHANNELS; channel++)
1663 for (slot = 0; slot < NUM_SLOTS; slot++)
1664 for (rank = 0; rank < NUM_RANKS; rank++)
1665 for (lane = 0; lane < 9; lane++)
1666 write_500(info, channel,
1667 info->
1668 cached_training->
1669 lane_timings[tm]
1670 [channel][slot][rank]
1671 [lane],
1672 get_timing_register_addr
1673 (lane, tm, slot,
1674 rank), 9, 0);
1675 write_1d0(info->cached_training->reg_178, 0x178, 7, 1);
1676 write_1d0(info->cached_training->reg_10b, 0x10b, 6, 1);
1677}
1678
1679static void dump_timings(struct raminfo *info)
1680{
1681#if REAL
1682 int channel, slot, rank, lane, i;
1683 printk(BIOS_DEBUG, "Timings:\n");
1684 FOR_POPULATED_RANKS {
1685 printk(BIOS_DEBUG, "channel %d, slot %d, rank %d\n", channel,
1686 slot, rank);
1687 for (lane = 0; lane < 9; lane++) {
1688 printk(BIOS_DEBUG, "lane %d: ", lane);
1689 for (i = 0; i < 4; i++) {
1690 printk(BIOS_DEBUG, "%x (%x) ",
1691 read_500(info, channel,
1692 get_timing_register_addr
1693 (lane, i, slot, rank),
1694 9),
1695 info->training.
1696 lane_timings[i][channel][slot][rank]
1697 [lane]);
1698 }
1699 printk(BIOS_DEBUG, "\n");
1700 }
1701 }
1702 printk(BIOS_DEBUG, "[178] = %x (%x)\n", read_1d0(0x178, 7),
1703 info->training.reg_178);
1704 printk(BIOS_DEBUG, "[10b] = %x (%x)\n", read_1d0(0x10b, 6),
1705 info->training.reg_10b);
1706#endif
1707}
1708
/* Read timings and other registers that need to be restored verbatim and
   put them into CBMEM.
 */
static void save_timings(struct raminfo *info)
{
	struct ram_training train;
	int channel, slot, rank, lane, i;

	train = info->training;
	FOR_POPULATED_RANKS for (lane = 0; lane < 9; lane++)
		for (i = 0; i < 4; i++)
			train.lane_timings[i][channel][slot][rank][lane] =
			    read_500(info, channel,
				     get_timing_register_addr(lane, i, slot,
							      rank), 9);
	train.reg_178 = read_1d0(0x178, 7);
	train.reg_10b = read_1d0(0x10b, 6);

	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		u32 reg32;
		reg32 = read_mchbar32((channel << 10) + 0x274);
		train.reg274265[channel][0] = reg32 >> 16;
		train.reg274265[channel][1] = reg32 & 0xffff;
		train.reg274265[channel][2] =
		    read_mchbar16((channel << 10) + 0x265) >> 8;
	}
	train.reg2ca9_bit0 = read_mchbar8(0x2ca9) & 1;
	train.reg_6dc = read_mchbar32(0x6dc);
	train.reg_6e8 = read_mchbar32(0x6e8);

	printk(BIOS_SPEW, "[6dc] = %x\n", train.reg_6dc);
	printk(BIOS_SPEW, "[6e8] = %x\n", train.reg_6e8);

	/* Save the MRC S3 restore data to cbmem */
	store_current_mrc_cache(&train, sizeof(train));
}
1744
1745#if REAL
1746static const struct ram_training *get_cached_training(void)
1747{
1748 struct mrc_data_container *cont;
1749 cont = find_current_mrc_cache();
1750 if (!cont)
1751 return 0;
1752 return (void *)cont->mrc_data;
1753}
1754#endif
1755
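/*
 * The helpers below talk to the ME through the HECI/MEI circular buffers.
 * As far as the register usage suggests, the host write window is at
 * HECIBAR + 0x00, the host CSR at + 0x04, the ME read window at + 0x08 and
 * the ME CSR at + 0x0c, with struct mei_csr (see me.h) describing buffer
 * depth and read/write pointers. Messages larger than the host buffer are
 * split into packets by send_heci_message().
 */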
/* FIXME: add timeout. */
static void wait_heci_ready(void)
{
	while (!(read32(DEFAULT_HECIBAR + 0xc) & 8));	// = 0x8000000c
	write32((DEFAULT_HECIBAR + 0x4),
		(read32(DEFAULT_HECIBAR + 0x4) & ~0x10) | 0xc);
}

/* FIXME: add timeout. */
static void wait_heci_cb_avail(int len)
{
	union {
		struct mei_csr csr;
		u32 raw;
	} csr;

	while (!(read32(DEFAULT_HECIBAR + 0xc) & 8));

	do
		csr.raw = read32(DEFAULT_HECIBAR + 0x4);
	while (len >
	       csr.csr.buffer_depth - (csr.csr.buffer_write_ptr -
				       csr.csr.buffer_read_ptr));
}

static void send_heci_packet(struct mei_header *head, u32 * payload)
{
	int len = (head->length + 3) / 4;
	int i;

	wait_heci_cb_avail(len + 1);

	/* FIXME: handle leftovers correctly. */
	write32(DEFAULT_HECIBAR + 0, *(u32 *) head);
	for (i = 0; i < len - 1; i++)
		write32(DEFAULT_HECIBAR + 0, payload[i]);

	write32(DEFAULT_HECIBAR + 0, payload[i] & ((1 << (8 * len)) - 1));
	write32(DEFAULT_HECIBAR + 0x4, read32(DEFAULT_HECIBAR + 0x4) | 0x4);
}
1796
1797static void
1798send_heci_message(u8 * msg, int len, u8 hostaddress, u8 clientaddress)
1799{
1800 struct mei_header head;
1801 int maxlen;
1802
1803 wait_heci_ready();
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001804 maxlen = (read32(DEFAULT_HECIBAR + 0x4) >> 24) * 4 - 4;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001805
1806 while (len) {
1807 int cur = len;
1808 if (cur > maxlen) {
1809 cur = maxlen;
1810 head.is_complete = 0;
1811 } else
1812 head.is_complete = 1;
1813 head.length = cur;
1814 head.reserved = 0;
1815 head.client_address = clientaddress;
1816 head.host_address = hostaddress;
1817 send_heci_packet(&head, (u32 *) msg);
1818 len -= cur;
1819 msg += cur;
1820 }
1821}
1822
1823/* FIXME: Add timeout. */
1824static int
1825recv_heci_packet(struct raminfo *info, struct mei_header *head, u32 * packet,
1826 u32 * packet_size)
1827{
1828 union {
1829 struct mei_csr csr;
1830 u32 raw;
1831 } csr;
1832 int i = 0;
1833
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001834 write32(DEFAULT_HECIBAR + 0x4, read32(DEFAULT_HECIBAR + 0x4) | 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001835 do {
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001836 csr.raw = read32(DEFAULT_HECIBAR + 0xc);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001837#if !REAL
1838 if (i++ > 346)
1839 return -1;
1840#endif
1841 }
1842 while (csr.csr.buffer_write_ptr == csr.csr.buffer_read_ptr);
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001843 *(u32 *) head = read32(DEFAULT_HECIBAR + 0x8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001844 if (!head->length) {
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001845 write32(DEFAULT_HECIBAR + 0x4,
1846 read32(DEFAULT_HECIBAR + 0x4) | 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001847 *packet_size = 0;
1848 return 0;
1849 }
1850 if (head->length + 4 > 4 * csr.csr.buffer_depth
1851 || head->length > *packet_size) {
1852 *packet_size = 0;
1853 return -1;
1854 }
1855
1856 do
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001857 csr.raw = read32(DEFAULT_HECIBAR + 0xc);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001858 while ((head->length + 3) >> 2 >
1859 csr.csr.buffer_write_ptr - csr.csr.buffer_read_ptr);
1860
1861 for (i = 0; i < (head->length + 3) >> 2; i++)
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001862		packet[i] = read32(DEFAULT_HECIBAR + 0x8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001863 *packet_size = head->length;
1864 if (!csr.csr.ready)
1865 *packet_size = 0;
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001866 write32(DEFAULT_HECIBAR + 0x4, read32(DEFAULT_HECIBAR + 0x4) | 4);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001867 return 0;
1868}
1869
1870/* FIXME: Add timeout. */
1871static int
1872recv_heci_message(struct raminfo *info, u32 * message, u32 * message_size)
1873{
1874 struct mei_header head;
1875 int current_position;
1876
1877 current_position = 0;
1878 while (1) {
1879 u32 current_size;
1880 current_size = *message_size - current_position;
1881 if (recv_heci_packet
1882 (info, &head, message + (current_position >> 2),
1883 &current_size) == -1)
1884 break;
1885 if (!current_size)
1886 break;
1887 current_position += current_size;
1888 if (head.is_complete) {
1889 *message_size = current_position;
1890 return 0;
1891 }
1892
1893 if (current_position >= *message_size)
1894 break;
1895 }
1896 *message_size = 0;
1897 return -1;
1898}
1899
1900static void send_heci_uma_message(struct raminfo *info)
1901{
1902 struct uma_reply {
1903 u8 group_id;
1904 u8 command;
1905 u8 reserved;
1906 u8 result;
1907 u8 field2;
1908 u8 unk3[0x48 - 4 - 1];
1909 } __attribute__ ((packed)) reply;
1910 struct uma_message {
1911 u8 group_id;
1912 u8 cmd;
1913 u8 reserved;
1914 u8 result;
1915 u32 c2;
1916 u64 heci_uma_addr;
1917 u32 memory_reserved_for_heci_mb;
1918 u16 c3;
1919 } __attribute__ ((packed)) msg = {
1920 0, MKHI_SET_UMA, 0, 0,
1921 0x82,
1922 info->heci_uma_addr, info->memory_reserved_for_heci_mb, 0};
1923 u32 reply_size;
1924
1925 send_heci_message((u8 *) & msg, sizeof(msg), 0, 7);
1926
1927 reply_size = sizeof(reply);
1928 if (recv_heci_message(info, (u32 *) & reply, &reply_size) == -1)
1929 return;
1930
1931 if (reply.command != (MKHI_SET_UMA | (1 << 7)))
1932 die("HECI init failed\n");
1933}
1934
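/* setup_heci_uma(): if the ME requests UMA memory (config register 0x44
   of the HECI device appears to carry the requested size in MiB in its
   low six bits), carve that much out just below TOM, poke a series of
   DMI/RCBA registers taken verbatim from vendor BIOS traces, and report
   the region to the ME with an MKHI_SET_UMA message. Afterwards the HECI
   BAR and command register are cleared. */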
1935static void setup_heci_uma(struct raminfo *info)
1936{
1937 u32 reg44;
1938
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001939 reg44 = pci_read_config32(HECIDEV, 0x44); // = 0x80010020
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001940 info->memory_reserved_for_heci_mb = 0;
1941 info->heci_uma_addr = 0;
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001942 if (!((reg44 & 0x10000) && !(pci_read_config32(HECIDEV, 0x40) & 0x20)))
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001943 return;
1944
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001945 info->heci_bar = pci_read_config32(HECIDEV, 0x10) & 0xFFFFFFF0;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001946 info->memory_reserved_for_heci_mb = reg44 & 0x3f;
1947 info->heci_uma_addr =
1948 ((u64)
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001949 ((((u64) pci_read_config16(NORTHBRIDGE, D0F0_TOM)) << 6) -
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001950 info->memory_reserved_for_heci_mb)) << 20;
1951
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001952 pci_read_config32(NORTHBRIDGE, DMIBAR);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001953 if (info->memory_reserved_for_heci_mb) {
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001954 write32(DEFAULT_DMIBAR + 0x14,
1955 read32(DEFAULT_DMIBAR + 0x14) & ~0x80);
1956 write32(DEFAULT_RCBA + 0x14,
1957 read32(DEFAULT_RCBA + 0x14) & ~0x80);
1958 write32(DEFAULT_DMIBAR + 0x20,
1959 read32(DEFAULT_DMIBAR + 0x20) & ~0x80);
1960 write32(DEFAULT_RCBA + 0x20,
1961 read32(DEFAULT_RCBA + 0x20) & ~0x80);
1962 write32(DEFAULT_DMIBAR + 0x2c,
1963 read32(DEFAULT_DMIBAR + 0x2c) & ~0x80);
1964 write32(DEFAULT_RCBA + 0x30,
1965 read32(DEFAULT_RCBA + 0x30) & ~0x80);
1966 write32(DEFAULT_DMIBAR + 0x38,
1967 read32(DEFAULT_DMIBAR + 0x38) & ~0x80);
1968 write32(DEFAULT_RCBA + 0x40,
1969 read32(DEFAULT_RCBA + 0x40) & ~0x80);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001970
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001971 write32(DEFAULT_RCBA + 0x40, 0x87000080); // OK
1972 write32(DEFAULT_DMIBAR + 0x38, 0x87000080); // OK
1973 while (read16(DEFAULT_RCBA + 0x46) & 2
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02001974 && read16(DEFAULT_DMIBAR + 0x3e) & 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001975 }
1976
1977 write_mchbar32(0x24, 0x10000 + info->memory_reserved_for_heci_mb);
1978
1979 send_heci_uma_message(info);
1980
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001981 pci_write_config32(HECIDEV, 0x10, 0x0);
1982 pci_write_config8(HECIDEV, 0x4, 0x0);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001983
1984}
1985
1986static int have_match_ranks(struct raminfo *info, int channel, int ranks)
1987{
1988 int ranks_in_channel;
1989 ranks_in_channel = info->populated_ranks[channel][0][0]
1990 + info->populated_ranks[channel][0][1]
1991 + info->populated_ranks[channel][1][0]
1992 + info->populated_ranks[channel][1][1];
1993
1994 /* empty channel */
1995 if (ranks_in_channel == 0)
1996 return 1;
1997
1998 if (ranks_in_channel != ranks)
1999 return 0;
2000 /* single slot */
2001 if (info->populated_ranks[channel][0][0] !=
2002 info->populated_ranks[channel][1][0])
2003 return 1;
2004 if (info->populated_ranks[channel][0][1] !=
2005 info->populated_ranks[channel][1][1])
2006 return 1;
2007 if (info->is_x16_module[channel][0] != info->is_x16_module[channel][1])
2008 return 0;
2009 if (info->density[channel][0] != info->density[channel][1])
2010 return 0;
2011 return 1;
2012}
2013
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002014static void read_4090(struct raminfo *info)
2015{
2016 int i, channel, slot, rank, lane;
2017 for (i = 0; i < 2; i++)
2018 for (slot = 0; slot < NUM_SLOTS; slot++)
2019 for (rank = 0; rank < NUM_RANKS; rank++)
2020 for (lane = 0; lane < 9; lane++)
2021 info->training.
2022 lane_timings[0][i][slot][rank][lane]
2023 = 32;
2024
2025 for (i = 1; i < 4; i++)
2026 for (channel = 0; channel < NUM_CHANNELS; channel++)
2027 for (slot = 0; slot < NUM_SLOTS; slot++)
2028 for (rank = 0; rank < NUM_RANKS; rank++)
2029 for (lane = 0; lane < 9; lane++) {
2030 info->training.
2031 lane_timings[i][channel]
2032 [slot][rank][lane] =
2033 read_500(info, channel,
2034 get_timing_register_addr
2035 (lane, i, slot,
2036 rank), 9)
2037 + (i == 1) * 11; // !!!!
2038 }
2039
2040}
2041
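/* get_etalon2() derives a deterministic 32-bit reference pattern from a
   test offset: the offset is split into small fields, one bit is looked
   up in invmask[] and, depending on that bit and on 'flip', a base value
   is inverted. write_testing() fills memory with this pattern and
   check_testing() compares readback against it; the exact construction
   is reverse engineered and its rationale is not documented. */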
2042static u32 get_etalon2(int flip, u32 addr)
2043{
2044 const u16 invmask[] = {
2045 0xaaaa, 0x6db6, 0x4924, 0xeeee, 0xcccc, 0x8888, 0x7bde, 0x739c,
2046 0x6318, 0x4210, 0xefbe, 0xcf3c, 0x8e38, 0x0c30, 0x0820
2047 };
2048 u32 ret;
2049 u32 comp4 = addr / 480;
2050 addr %= 480;
2051 u32 comp1 = addr & 0xf;
2052 u32 comp2 = (addr >> 4) & 1;
2053 u32 comp3 = addr >> 5;
2054
2055 if (comp4)
2056 ret = 0x1010101 << (comp4 - 1);
2057 else
2058 ret = 0;
2059 if (flip ^ (((invmask[comp3] >> comp1) ^ comp2) & 1))
2060 ret = ~ret;
2061
2062 return ret;
2063}
2064
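/* The memory test temporarily points variable MTRR 3 at the region under
   test: enable_cache() programs it as a write-protect range (reads are
   cacheable, writes go straight to DRAM), disable_cache() clears it
   again, and flush_cache() clflushes the range, presumably so stale
   cache lines cannot mask the real DRAM contents on the next pass. */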
2065static void disable_cache(void)
2066{
2067 msr_t msr = {.lo = 0, .hi = 0 };
2068
Alexandru Gagniuc86091f92015-09-30 20:23:09 -07002069 wrmsr(MTRR_PHYS_BASE(3), msr);
2070 wrmsr(MTRR_PHYS_MASK(3), msr);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002071}
2072
2073static void enable_cache(unsigned int base, unsigned int size)
2074{
2075 msr_t msr;
2076 msr.lo = base | MTRR_TYPE_WRPROT;
2077 msr.hi = 0;
Alexandru Gagniuc86091f92015-09-30 20:23:09 -07002078 wrmsr(MTRR_PHYS_BASE(3), msr);
2079 msr.lo = ((~(ALIGN_DOWN(size + 4096, 4096) - 1) | MTRR_DEF_TYPE_EN)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002080 & 0xffffffff);
2081 msr.hi = 0x0000000f;
Alexandru Gagniuc86091f92015-09-30 20:23:09 -07002082 wrmsr(MTRR_PHYS_MASK(3), msr);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002083}
2084
2085static void flush_cache(u32 start, u32 size)
2086{
2087 u32 end;
2088 u32 addr;
2089
2090 end = start + (ALIGN_DOWN(size + 4096, 4096));
2091 for (addr = start; addr < end; addr += 64)
2092 clflush(addr);
2093}
2094
2095static void clear_errors(void)
2096{
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03002097 pci_write_config8(NORTHBRIDGE, 0xc0, 0x01);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002098}
2099
2100static void write_testing(struct raminfo *info, int totalrank, int flip)
2101{
2102 int nwrites = 0;
2103 /* in 8-byte units. */
2104 u32 offset;
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002105 u8 *base;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002106
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002107 base = (u8 *)(totalrank << 28);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002108 for (offset = 0; offset < 9 * 480; offset += 2) {
2109 write32(base + offset * 8, get_etalon2(flip, offset));
2110 write32(base + offset * 8 + 4, get_etalon2(flip, offset));
2111 write32(base + offset * 8 + 8, get_etalon2(flip, offset + 1));
2112 write32(base + offset * 8 + 12, get_etalon2(flip, offset + 1));
2113 nwrites += 4;
2114 if (nwrites >= 320) {
2115 clear_errors();
2116 nwrites = 0;
2117 }
2118 }
2119}
2120
2121static u8 check_testing(struct raminfo *info, u8 total_rank, int flip)
2122{
2123 u8 failmask = 0;
2124 int i;
2125 int comp1, comp2, comp3;
2126 u32 failxor[2] = { 0, 0 };
2127
2128 enable_cache((total_rank << 28), 1728 * 5 * 4);
2129
2130 for (comp3 = 0; comp3 < 9 && failmask != 0xff; comp3++) {
2131 for (comp1 = 0; comp1 < 4; comp1++)
2132 for (comp2 = 0; comp2 < 60; comp2++) {
2133 u32 re[4];
2134 u32 curroffset =
2135 comp3 * 8 * 60 + 2 * comp1 + 8 * comp2;
2136 read128((total_rank << 28) | (curroffset << 3),
2137 (u64 *) re);
2138 failxor[0] |=
2139 get_etalon2(flip, curroffset) ^ re[0];
2140 failxor[1] |=
2141 get_etalon2(flip, curroffset) ^ re[1];
2142 failxor[0] |=
2143 get_etalon2(flip, curroffset | 1) ^ re[2];
2144 failxor[1] |=
2145 get_etalon2(flip, curroffset | 1) ^ re[3];
2146 }
2147 for (i = 0; i < 8; i++)
2148 if ((0xff << (8 * (i % 4))) & failxor[i / 4])
2149 failmask |= 1 << i;
2150 }
2151 disable_cache();
2152 flush_cache((total_rank << 28), 1728 * 5 * 4);
2153 return failmask;
2154}
2155
2156const u32 seed1[0x18] = {
2157 0x3a9d5ab5, 0x576cb65b, 0x555773b6, 0x2ab772ee,
2158 0x555556ee, 0x3a9d5ab5, 0x576cb65b, 0x555773b6,
2159 0x2ab772ee, 0x555556ee, 0x5155a555, 0x5155a555,
2160 0x5155a555, 0x5155a555, 0x3a9d5ab5, 0x576cb65b,
2161 0x555773b6, 0x2ab772ee, 0x555556ee, 0x55d6b4a5,
2162 0x366d6b3a, 0x2ae5ddbb, 0x3b9ddbb7, 0x55d6b4a5,
2163};
2164
2165static u32 get_seed2(int a, int b)
2166{
2167 const u32 seed2[5] = {
2168 0x55555555, 0x33333333, 0x2e555a55, 0x55555555,
2169 0x5b6db6db,
2170 };
2171 u32 r;
2172 r = seed2[(a + (a >= 10)) / 5];
2173 return b ? ~r : r;
2174}
2175
2176static int make_shift(int comp2, int comp5, int x)
2177{
2178 const u8 seed3[32] = {
2179 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2180 0x00, 0x00, 0x38, 0x1c, 0x3c, 0x18, 0x38, 0x38,
2181 0x38, 0x38, 0x38, 0x38, 0x0f, 0x0f, 0x0f, 0x0f,
2182 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f,
2183 };
2184
2185 return (comp2 - ((seed3[comp5] >> (x & 7)) & 1)) & 0x1f;
2186}
2187
2188static u32 get_etalon(int flip, u32 addr)
2189{
2190 u32 mask_byte = 0;
2191 int comp1 = (addr >> 1) & 1;
2192 int comp2 = (addr >> 3) & 0x1f;
2193 int comp3 = (addr >> 8) & 0xf;
2194 int comp4 = (addr >> 12) & 0xf;
2195 int comp5 = (addr >> 16) & 0x1f;
2196 u32 mask_bit = ~(0x10001 << comp3);
2197 u32 part1;
2198 u32 part2;
2199 int byte;
2200
2201 part2 =
2202 ((seed1[comp5] >>
2203 make_shift(comp2, comp5,
2204 (comp3 >> 3) | (comp1 << 2) | 2)) & 1) ^ flip;
2205 part1 =
2206 ((seed1[comp5] >>
2207 make_shift(comp2, comp5,
2208 (comp3 >> 3) | (comp1 << 2) | 0)) & 1) ^ flip;
2209
2210 for (byte = 0; byte < 4; byte++)
2211 if ((get_seed2(comp5, comp4) >>
2212 make_shift(comp2, comp5, (byte | (comp1 << 2)))) & 1)
2213 mask_byte |= 0xff << (8 * byte);
2214
2215 return (mask_bit & mask_byte) | (part1 << comp3) | (part2 <<
2216 (comp3 + 16));
2217}
2218
2219static void
2220write_testing_type2(struct raminfo *info, u8 totalrank, u8 region, u8 block,
2221 char flip)
2222{
2223 int i;
2224 for (i = 0; i < 2048; i++)
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002225 write32p((totalrank << 28) | (region << 25) | (block << 16) |
2226 (i << 2), get_etalon(flip, (block << 16) | (i << 2)));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002227}
2228
2229static u8
2230check_testing_type2(struct raminfo *info, u8 totalrank, u8 region, u8 block,
2231 char flip)
2232{
2233 u8 failmask = 0;
2234 u32 failxor[2];
2235 int i;
2236 int comp1, comp2, comp3;
2237
2238 failxor[0] = 0;
2239 failxor[1] = 0;
2240
2241 enable_cache(totalrank << 28, 134217728);
2242 for (comp3 = 0; comp3 < 2 && failmask != 0xff; comp3++) {
2243 for (comp1 = 0; comp1 < 16; comp1++)
2244 for (comp2 = 0; comp2 < 64; comp2++) {
2245 u32 addr =
2246 (totalrank << 28) | (region << 25) | (block
2247 << 16)
2248 | (comp3 << 12) | (comp2 << 6) | (comp1 <<
2249 2);
2250 failxor[comp1 & 1] |=
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002251 read32p(addr) ^ get_etalon(flip, addr);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002252 }
2253 for (i = 0; i < 8; i++)
2254 if ((0xff << (8 * (i % 4))) & failxor[i / 4])
2255 failmask |= 1 << i;
2256 }
2257 disable_cache();
2258 flush_cache((totalrank << 28) | (region << 25) | (block << 16), 16384);
2259 return failmask;
2260}
2261
2262static int check_bounded(unsigned short *vals, u16 bound)
2263{
2264 int i;
2265
2266 for (i = 0; i < 8; i++)
2267 if (vals[i] < bound)
2268 return 0;
2269 return 1;
2270}
2271
2272enum state {
2273 BEFORE_USABLE = 0, AT_USABLE = 1, AT_MARGIN = 2, COMPLETE = 3
2274};
2275
2276static int validate_state(enum state *in)
2277{
2278 int i;
2279 for (i = 0; i < 8; i++)
2280 if (in[i] != COMPLETE)
2281 return 0;
2282 return 1;
2283}
2284
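/* do_fsm() advances a small per-lane state machine over successive test
   values: a lane moves from BEFORE_USABLE to AT_USABLE on its first
   pass, to AT_MARGIN once it has passed 'margin' consecutive values
   (recording the lower edge in res_low), and to COMPLETE on the next
   failure or when 'uplimit' is reached (recording the upper edge in
   res_high). The result is a usable window [res_low, res_high] per lane. */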
2285static void
2286do_fsm(enum state *state, u16 * counter,
2287 u8 fail_mask, int margin, int uplimit,
2288 u8 * res_low, u8 * res_high, u8 val)
2289{
2290 int lane;
2291
2292 for (lane = 0; lane < 8; lane++) {
2293 int is_fail = (fail_mask >> lane) & 1;
2294 switch (state[lane]) {
2295 case BEFORE_USABLE:
2296 if (!is_fail) {
2297 counter[lane] = 1;
2298 state[lane] = AT_USABLE;
2299 break;
2300 }
2301 counter[lane] = 0;
2302 state[lane] = BEFORE_USABLE;
2303 break;
2304 case AT_USABLE:
2305 if (!is_fail) {
2306 ++counter[lane];
2307 if (counter[lane] >= margin) {
2308 state[lane] = AT_MARGIN;
2309 res_low[lane] = val - margin + 1;
2310 break;
2311 }
2312				state[lane] = AT_USABLE;
2313 break;
2314 }
2315 counter[lane] = 0;
2316 state[lane] = BEFORE_USABLE;
2317 break;
2318 case AT_MARGIN:
2319 if (is_fail) {
2320 state[lane] = COMPLETE;
2321 res_high[lane] = val - 1;
2322 } else {
2323 counter[lane]++;
2324 state[lane] = AT_MARGIN;
2325 if (val == uplimit) {
2326 state[lane] = COMPLETE;
2327 res_high[lane] = uplimit;
2328 }
2329 }
2330 break;
2331 case COMPLETE:
2332 break;
2333 }
2334 }
2335}
2336
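/* train_ram_at_178(): with register 0x178 fixed, sweep the 0x1b3/0x1a3
   offset over 0..0x2f, run the memory test and feed each pass/fail mask
   into do_fsm() to find every lane's usable window for this 0x178 value.
   On the refinement pass (!first_run) the recorded window edges are then
   re-verified with the type-2 test and nudged inwards until they hold
   for a few consecutive iterations. The register numbers are raw offsets
   from reverse engineering; their meaning is unknown. */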
2337static void
2338train_ram_at_178(struct raminfo *info, u8 channel, int slot, int rank,
2339 u8 total_rank, u8 reg_178, int first_run, int niter,
2340 timing_bounds_t * timings)
2341{
2342 int lane;
2343 enum state state[8];
2344 u16 count[8];
2345 u8 lower_usable[8];
2346 u8 upper_usable[8];
2347 unsigned short num_sucessfully_checked[8];
2348 u8 secondary_total_rank;
2349 u8 reg1b3;
2350
2351 if (info->populated_ranks_mask[1]) {
2352 if (channel == 1)
2353 secondary_total_rank =
2354 info->populated_ranks[1][0][0] +
2355 info->populated_ranks[1][0][1]
2356 + info->populated_ranks[1][1][0] +
2357 info->populated_ranks[1][1][1];
2358 else
2359 secondary_total_rank = 0;
2360 } else
2361 secondary_total_rank = total_rank;
2362
2363 {
2364 int i;
2365 for (i = 0; i < 8; i++)
2366 state[i] = BEFORE_USABLE;
2367 }
2368
2369 if (!first_run) {
2370 int is_all_ok = 1;
2371 for (lane = 0; lane < 8; lane++)
2372 if (timings[reg_178][channel][slot][rank][lane].
2373 smallest ==
2374 timings[reg_178][channel][slot][rank][lane].
2375 largest) {
2376 timings[reg_178][channel][slot][rank][lane].
2377 smallest = 0;
2378 timings[reg_178][channel][slot][rank][lane].
2379 largest = 0;
2380 is_all_ok = 0;
2381 }
2382 if (is_all_ok) {
2383 int i;
2384 for (i = 0; i < 8; i++)
2385 state[i] = COMPLETE;
2386 }
2387 }
2388
2389 for (reg1b3 = 0; reg1b3 < 0x30 && !validate_state(state); reg1b3++) {
2390 u8 failmask = 0;
2391 write_1d0(reg1b3 ^ 32, 0x1b3, 6, 1);
2392 write_1d0(reg1b3 ^ 32, 0x1a3, 6, 1);
2393 failmask = check_testing(info, total_rank, 0);
2394 write_mchbar32(0xfb0, read_mchbar32(0xfb0) | 0x00030000);
2395 do_fsm(state, count, failmask, 5, 47, lower_usable,
2396 upper_usable, reg1b3);
2397 }
2398
2399 if (reg1b3) {
2400 write_1d0(0, 0x1b3, 6, 1);
2401 write_1d0(0, 0x1a3, 6, 1);
2402 for (lane = 0; lane < 8; lane++) {
2403 if (state[lane] == COMPLETE) {
2404 timings[reg_178][channel][slot][rank][lane].
2405 smallest =
2406 lower_usable[lane] +
2407 (info->training.
2408 lane_timings[0][channel][slot][rank][lane]
2409 & 0x3F) - 32;
2410 timings[reg_178][channel][slot][rank][lane].
2411 largest =
2412 upper_usable[lane] +
2413 (info->training.
2414 lane_timings[0][channel][slot][rank][lane]
2415 & 0x3F) - 32;
2416 }
2417 }
2418 }
2419
2420 if (!first_run) {
2421 for (lane = 0; lane < 8; lane++)
2422 if (state[lane] == COMPLETE) {
2423 write_500(info, channel,
2424 timings[reg_178][channel][slot][rank]
2425 [lane].smallest,
2426 get_timing_register_addr(lane, 0,
2427 slot, rank),
2428 9, 1);
2429 write_500(info, channel,
2430 timings[reg_178][channel][slot][rank]
2431 [lane].smallest +
2432 info->training.
2433 lane_timings[1][channel][slot][rank]
2434 [lane]
2435 -
2436 info->training.
2437 lane_timings[0][channel][slot][rank]
2438 [lane], get_timing_register_addr(lane,
2439 1,
2440 slot,
2441 rank),
2442 9, 1);
2443 num_sucessfully_checked[lane] = 0;
2444 } else
2445 num_sucessfully_checked[lane] = -1;
2446
2447 do {
2448 u8 failmask = 0;
2449 int i;
2450 for (i = 0; i < niter; i++) {
2451 if (failmask == 0xFF)
2452 break;
2453 failmask |=
2454 check_testing_type2(info, total_rank, 2, i,
2455 0);
2456 failmask |=
2457 check_testing_type2(info, total_rank, 3, i,
2458 1);
2459 }
2460 write_mchbar32(0xfb0,
2461 read_mchbar32(0xfb0) | 0x00030000);
2462 for (lane = 0; lane < 8; lane++)
2463 if (num_sucessfully_checked[lane] != 0xffff) {
2464 if ((1 << lane) & failmask) {
2465 if (timings[reg_178][channel]
2466 [slot][rank][lane].
2467 largest <=
2468 timings[reg_178][channel]
2469 [slot][rank][lane].smallest)
2470 num_sucessfully_checked
2471 [lane] = -1;
2472 else {
2473 num_sucessfully_checked
2474 [lane] = 0;
2475 timings[reg_178]
2476 [channel][slot]
2477 [rank][lane].
2478 smallest++;
2479 write_500(info, channel,
2480 timings
2481 [reg_178]
2482 [channel]
2483 [slot][rank]
2484 [lane].
2485 smallest,
2486 get_timing_register_addr
2487 (lane, 0,
2488 slot, rank),
2489 9, 1);
2490 write_500(info, channel,
2491 timings
2492 [reg_178]
2493 [channel]
2494 [slot][rank]
2495 [lane].
2496 smallest +
2497 info->
2498 training.
2499 lane_timings
2500 [1][channel]
2501 [slot][rank]
2502 [lane]
2503 -
2504 info->
2505 training.
2506 lane_timings
2507 [0][channel]
2508 [slot][rank]
2509 [lane],
2510 get_timing_register_addr
2511 (lane, 1,
2512 slot, rank),
2513 9, 1);
2514 }
2515 } else
2516 num_sucessfully_checked[lane]++;
2517 }
2518 }
2519 while (!check_bounded(num_sucessfully_checked, 2));
2520
2521 for (lane = 0; lane < 8; lane++)
2522 if (state[lane] == COMPLETE) {
2523 write_500(info, channel,
2524 timings[reg_178][channel][slot][rank]
2525 [lane].largest,
2526 get_timing_register_addr(lane, 0,
2527 slot, rank),
2528 9, 1);
2529 write_500(info, channel,
2530 timings[reg_178][channel][slot][rank]
2531 [lane].largest +
2532 info->training.
2533 lane_timings[1][channel][slot][rank]
2534 [lane]
2535 -
2536 info->training.
2537 lane_timings[0][channel][slot][rank]
2538 [lane], get_timing_register_addr(lane,
2539 1,
2540 slot,
2541 rank),
2542 9, 1);
2543 num_sucessfully_checked[lane] = 0;
2544 } else
2545 num_sucessfully_checked[lane] = -1;
2546
2547 do {
2548 int failmask = 0;
2549 int i;
2550 for (i = 0; i < niter; i++) {
2551 if (failmask == 0xFF)
2552 break;
2553 failmask |=
2554 check_testing_type2(info, total_rank, 2, i,
2555 0);
2556 failmask |=
2557 check_testing_type2(info, total_rank, 3, i,
2558 1);
2559 }
2560
2561 write_mchbar32(0xfb0,
2562 read_mchbar32(0xfb0) | 0x00030000);
2563 for (lane = 0; lane < 8; lane++) {
2564 if (num_sucessfully_checked[lane] != 0xffff) {
2565 if ((1 << lane) & failmask) {
2566 if (timings[reg_178][channel]
2567 [slot][rank][lane].
2568 largest <=
2569 timings[reg_178][channel]
2570 [slot][rank][lane].
2571 smallest) {
2572 num_sucessfully_checked
2573 [lane] = -1;
2574 } else {
2575 num_sucessfully_checked
2576 [lane] = 0;
2577 timings[reg_178]
2578 [channel][slot]
2579 [rank][lane].
2580 largest--;
2581 write_500(info, channel,
2582 timings
2583 [reg_178]
2584 [channel]
2585 [slot][rank]
2586 [lane].
2587 largest,
2588 get_timing_register_addr
2589 (lane, 0,
2590 slot, rank),
2591 9, 1);
2592 write_500(info, channel,
2593 timings
2594 [reg_178]
2595 [channel]
2596 [slot][rank]
2597 [lane].
2598 largest +
2599 info->
2600 training.
2601 lane_timings
2602 [1][channel]
2603 [slot][rank]
2604 [lane]
2605 -
2606 info->
2607 training.
2608 lane_timings
2609 [0][channel]
2610 [slot][rank]
2611 [lane],
2612 get_timing_register_addr
2613 (lane, 1,
2614 slot, rank),
2615 9, 1);
2616 }
2617 } else
2618 num_sucessfully_checked[lane]++;
2619 }
2620 }
2621 }
2622 while (!check_bounded(num_sucessfully_checked, 3));
2623
2624 for (lane = 0; lane < 8; lane++) {
2625 write_500(info, channel,
2626 info->training.
2627 lane_timings[0][channel][slot][rank][lane],
2628 get_timing_register_addr(lane, 0, slot, rank),
2629 9, 1);
2630 write_500(info, channel,
2631 info->training.
2632 lane_timings[1][channel][slot][rank][lane],
2633 get_timing_register_addr(lane, 1, slot, rank),
2634 9, 1);
2635 if (timings[reg_178][channel][slot][rank][lane].
2636 largest <=
2637 timings[reg_178][channel][slot][rank][lane].
2638 smallest) {
2639 timings[reg_178][channel][slot][rank][lane].
2640 largest = 0;
2641 timings[reg_178][channel][slot][rank][lane].
2642 smallest = 0;
2643 }
2644 }
2645 }
2646}
2647
2648static void set_10b(struct raminfo *info, u8 val)
2649{
2650 int channel;
2651 int slot, rank;
2652 int lane;
2653
2654 if (read_1d0(0x10b, 6) == val)
2655 return;
2656
2657 write_1d0(val, 0x10b, 6, 1);
2658
2659 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 9; lane++) {
2660 u16 reg_500;
2661 reg_500 = read_500(info, channel,
2662 get_timing_register_addr(lane, 0, slot,
2663 rank), 9);
2664 if (val == 1) {
2665 if (lut16[info->clock_speed_index] <= reg_500)
2666 reg_500 -= lut16[info->clock_speed_index];
2667 else
2668 reg_500 = 0;
2669 } else {
2670 reg_500 += lut16[info->clock_speed_index];
2671 }
2672 write_500(info, channel, reg_500,
2673 get_timing_register_addr(lane, 0, slot, rank), 9, 1);
2674 }
2675}
2676
2677static void set_ecc(int onoff)
2678{
2679 int channel;
2680 for (channel = 0; channel < NUM_CHANNELS; channel++) {
2681 u8 t;
2682 t = read_mchbar8((channel << 10) + 0x5f8);
2683 if (onoff)
2684 t |= 1;
2685 else
2686 t &= ~1;
2687 write_mchbar8((channel << 10) + 0x5f8, t);
2688 }
2689}
2690
2691static void set_178(u8 val)
2692{
2693 if (val >= 31)
2694 val = val - 31;
2695 else
2696 val = 63 - val;
2697
2698 write_1d0(2 * val, 0x178, 7, 1);
2699}
2700
2701static void
2702write_500_timings_type(struct raminfo *info, int channel, int slot, int rank,
2703 int type)
2704{
2705 int lane;
2706
2707 for (lane = 0; lane < 8; lane++)
2708 write_500(info, channel,
2709 info->training.
2710 lane_timings[type][channel][slot][rank][lane],
2711 get_timing_register_addr(lane, type, slot, rank), 9,
2712 0);
2713}
2714
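/* try_timing_offsets(): bias timing 3 by +32, sweep the 0x1bb offset over
   0..63 while running the alternating-pattern test, and use do_fsm() to
   find the usable window per lane. Timing 3 is then centred in that
   window (with a small bias on some silicon revisions) and the window
   bounds are stored in the training data for the cached (S3) fast path. */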
2715static void
2716try_timing_offsets(struct raminfo *info, int channel,
2717 int slot, int rank, int totalrank)
2718{
2719 u16 count[8];
2720 enum state state[8];
2721 u8 lower_usable[8], upper_usable[8];
2722 int lane;
2723 int i;
2724 int flip = 1;
2725 int timing_offset;
2726
2727 for (i = 0; i < 8; i++)
2728 state[i] = BEFORE_USABLE;
2729
2730 memset(count, 0, sizeof(count));
2731
2732 for (lane = 0; lane < 8; lane++)
2733 write_500(info, channel,
2734 info->training.
2735 lane_timings[2][channel][slot][rank][lane] + 32,
2736 get_timing_register_addr(lane, 3, slot, rank), 9, 1);
2737
2738 for (timing_offset = 0; !validate_state(state) && timing_offset < 64;
2739 timing_offset++) {
2740 u8 failmask;
2741 write_1d0(timing_offset ^ 32, 0x1bb, 6, 1);
2742 failmask = 0;
2743 for (i = 0; i < 2 && failmask != 0xff; i++) {
2744 flip = !flip;
2745 write_testing(info, totalrank, flip);
2746 failmask |= check_testing(info, totalrank, flip);
2747 }
2748 do_fsm(state, count, failmask, 10, 63, lower_usable,
2749 upper_usable, timing_offset);
2750 }
2751 write_1d0(0, 0x1bb, 6, 1);
2752 dump_timings(info);
2753 if (!validate_state(state))
2754 die("Couldn't discover DRAM timings (1)\n");
2755
2756 for (lane = 0; lane < 8; lane++) {
2757 u8 bias = 0;
2758
2759 if (info->silicon_revision) {
2760 int usable_length;
2761
2762 usable_length = upper_usable[lane] - lower_usable[lane];
2763 if (usable_length >= 20) {
2764 bias = usable_length / 2 - 10;
2765 if (bias >= 2)
2766 bias = 2;
2767 }
2768 }
2769 write_500(info, channel,
2770 info->training.
2771 lane_timings[2][channel][slot][rank][lane] +
2772 (upper_usable[lane] + lower_usable[lane]) / 2 - bias,
2773 get_timing_register_addr(lane, 3, slot, rank), 9, 1);
2774 info->training.timing2_bounds[channel][slot][rank][lane][0] =
2775 info->training.lane_timings[2][channel][slot][rank][lane] +
2776 lower_usable[lane];
2777 info->training.timing2_bounds[channel][slot][rank][lane][1] =
2778 info->training.lane_timings[2][channel][slot][rank][lane] +
2779 upper_usable[lane];
2780 info->training.timing2_offset[channel][slot][rank][lane] =
2781 info->training.lane_timings[2][channel][slot][rank][lane];
2782 }
2783}
2784
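/* choose_training(): pick the final timing for one lane as a weighted
   average of the window centres measured at reg_178 = center - span,
   center and center + span. Windows narrower than 5 steps are ignored,
   the weights and span depend on silicon revision and rank population,
   and the result is pulled back if it would leave less than 10 steps of
   margin on either side of the centre window. */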
2785static u8
2786choose_training(struct raminfo *info, int channel, int slot, int rank,
2787 int lane, timing_bounds_t * timings, u8 center_178)
2788{
2789 u16 central_weight;
2790 u16 side_weight;
2791 unsigned int sum = 0, count = 0;
2792 u8 span;
2793 u8 lower_margin, upper_margin;
2794 u8 reg_178;
2795 u8 result;
2796
2797 span = 12;
2798 central_weight = 20;
2799 side_weight = 20;
2800 if (info->silicon_revision == 1 && channel == 1) {
2801 central_weight = 5;
2802 side_weight = 20;
2803 if ((info->
2804 populated_ranks_mask[1] ^ (info->
2805 populated_ranks_mask[1] >> 2)) &
2806 1)
2807 span = 18;
2808 }
2809 if ((info->populated_ranks_mask[0] & 5) == 5) {
2810 central_weight = 20;
2811 side_weight = 20;
2812 }
2813 if (info->clock_speed_index >= 2
2814 && (info->populated_ranks_mask[0] & 5) == 5 && slot == 1) {
2815 if (info->silicon_revision == 1) {
2816 switch (channel) {
2817 case 0:
2818 if (lane == 1) {
2819 central_weight = 10;
2820 side_weight = 20;
2821 }
2822 break;
2823 case 1:
2824 if (lane == 6) {
2825 side_weight = 5;
2826 central_weight = 20;
2827 }
2828 break;
2829 }
2830 }
2831 if (info->silicon_revision == 0 && channel == 0 && lane == 0) {
2832 side_weight = 5;
2833 central_weight = 20;
2834 }
2835 }
2836 for (reg_178 = center_178 - span; reg_178 <= center_178 + span;
2837 reg_178 += span) {
2838 u8 smallest;
2839 u8 largest;
2840 largest = timings[reg_178][channel][slot][rank][lane].largest;
2841 smallest = timings[reg_178][channel][slot][rank][lane].smallest;
2842 if (largest - smallest + 1 >= 5) {
2843 unsigned int weight;
2844 if (reg_178 == center_178)
2845 weight = central_weight;
2846 else
2847 weight = side_weight;
2848 sum += weight * (largest + smallest);
2849 count += weight;
2850 }
2851 }
2852 dump_timings(info);
2853 if (count == 0)
2854 die("Couldn't discover DRAM timings (2)\n");
2855 result = sum / (2 * count);
2856 lower_margin =
2857 result - timings[center_178][channel][slot][rank][lane].smallest;
2858 upper_margin =
2859 timings[center_178][channel][slot][rank][lane].largest - result;
2860 if (upper_margin < 10 && lower_margin > 10)
2861 result -= min(lower_margin - 10, 10 - upper_margin);
2862 if (upper_margin > 10 && lower_margin < 10)
2863 result += min(upper_margin - 10, 10 - lower_margin);
2864 return result;
2865}
2866
2867#define STANDARD_MIN_MARGIN 5
2868
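/* choose_reg178(): sweep reg178 over its per-frequency range, take the
   worst-case per-lane window width as the margin of each value, and
   return the margin-weighted average of all values that clear
   STANDARD_MIN_MARGIN. It also derives reg178_smallest/reg178_largest,
   conservative bounds that still clear a (progressively lowered)
   threshold; try_cached_training() replays those bounds on resume. */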
2869static u8 choose_reg178(struct raminfo *info, timing_bounds_t * timings)
2870{
2871 u16 margin[64];
2872 int lane, rank, slot, channel;
2873 u8 reg178;
2874 int count = 0, sum = 0;
2875
2876 for (reg178 = reg178_min[info->clock_speed_index];
2877 reg178 < reg178_max[info->clock_speed_index];
2878 reg178 += reg178_step[info->clock_speed_index]) {
2879 margin[reg178] = -1;
2880 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 8; lane++) {
2881 int curmargin =
2882 timings[reg178][channel][slot][rank][lane].largest -
2883 timings[reg178][channel][slot][rank][lane].
2884 smallest + 1;
2885 if (curmargin < margin[reg178])
2886 margin[reg178] = curmargin;
2887 }
2888 if (margin[reg178] >= STANDARD_MIN_MARGIN) {
2889 u16 weight;
2890 weight = margin[reg178] - STANDARD_MIN_MARGIN;
2891 sum += weight * reg178;
2892 count += weight;
2893 }
2894 }
2895 dump_timings(info);
2896 if (count == 0)
2897 die("Couldn't discover DRAM timings (3)\n");
2898
2899 u8 threshold;
2900
2901 for (threshold = 30; threshold >= 5; threshold--) {
2902 int usable_length = 0;
2903		int smallest_found = 0;
2904 for (reg178 = reg178_min[info->clock_speed_index];
2905 reg178 < reg178_max[info->clock_speed_index];
2906 reg178 += reg178_step[info->clock_speed_index])
2907 if (margin[reg178] >= threshold) {
2908 usable_length +=
2909 reg178_step[info->clock_speed_index];
2910 info->training.reg178_largest =
2911 reg178 -
2912 2 * reg178_step[info->clock_speed_index];
2913
2914				if (!smallest_found) {
2915					smallest_found = 1;
2916 info->training.reg178_smallest =
2917 reg178 +
2918 reg178_step[info->
2919 clock_speed_index];
2920 }
2921 }
2922 if (usable_length >= 0x21)
2923 break;
2924 }
2925
2926 return sum / count;
2927}
2928
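/* Fast path for S3 resume and warm reboots: check_cached_sanity() makes
   sure the timings cached in the MRC data are still plausible (within 24
   steps of the freshly estimated values), and try_cached_training() then
   replays the cached window bounds, verifying on every populated rank
   that the test passes on the bound and fails just outside of it. Any
   mismatch falls back to full retraining in do_ram_training(). */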
2929static int check_cached_sanity(struct raminfo *info)
2930{
2931 int lane;
2932 int slot, rank;
2933 int channel;
2934
2935 if (!info->cached_training)
2936 return 0;
2937
2938 for (channel = 0; channel < NUM_CHANNELS; channel++)
2939 for (slot = 0; slot < NUM_SLOTS; slot++)
2940 for (rank = 0; rank < NUM_RANKS; rank++)
2941 for (lane = 0; lane < 8 + info->use_ecc; lane++) {
2942 u16 cached_value, estimation_value;
2943 cached_value =
2944 info->cached_training->
2945 lane_timings[1][channel][slot][rank]
2946 [lane];
2947 if (cached_value >= 0x18
2948 && cached_value <= 0x1E7) {
2949 estimation_value =
2950 info->training.
2951 lane_timings[1][channel]
2952 [slot][rank][lane];
2953 if (estimation_value <
2954 cached_value - 24)
2955 return 0;
2956 if (estimation_value >
2957 cached_value + 24)
2958 return 0;
2959 }
2960 }
2961 return 1;
2962}
2963
2964static int try_cached_training(struct raminfo *info)
2965{
2966 u8 saved_243[2];
2967 u8 tm;
2968
2969 int channel, slot, rank, lane;
2970 int flip = 1;
2971 int i, j;
2972
2973 if (!check_cached_sanity(info))
2974 return 0;
2975
2976 info->training.reg178_center = info->cached_training->reg178_center;
2977 info->training.reg178_smallest = info->cached_training->reg178_smallest;
2978 info->training.reg178_largest = info->cached_training->reg178_largest;
2979 memcpy(&info->training.timing_bounds,
2980 &info->cached_training->timing_bounds,
2981 sizeof(info->training.timing_bounds));
2982 memcpy(&info->training.timing_offset,
2983 &info->cached_training->timing_offset,
2984 sizeof(info->training.timing_offset));
2985
2986 write_1d0(2, 0x142, 3, 1);
2987 saved_243[0] = read_mchbar8(0x243);
2988 saved_243[1] = read_mchbar8(0x643);
2989 write_mchbar8(0x243, saved_243[0] | 2);
2990 write_mchbar8(0x643, saved_243[1] | 2);
2991 set_ecc(0);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03002992 pci_write_config16(NORTHBRIDGE, 0xc8, 3);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002993 if (read_1d0(0x10b, 6) & 1)
2994 set_10b(info, 0);
2995 for (tm = 0; tm < 2; tm++) {
2996 int totalrank;
2997
2998 set_178(tm ? info->cached_training->reg178_largest : info->
2999 cached_training->reg178_smallest);
3000
3001 totalrank = 0;
3002		/* Check timing ranges. With i == 0 we check the smallest bound and
3003		   with i == 1 the largest one. With j == 0 we check that it still
3004		   works on the bound itself, whereas with j == 1 we check that it
3005		   fails just outside of the bound.
3006		 */
3007 FOR_POPULATED_RANKS_BACKWARDS {
3008 for (i = 0; i < 2; i++) {
3009 for (lane = 0; lane < 8; lane++) {
3010 write_500(info, channel,
3011 info->cached_training->
3012 timing2_bounds[channel][slot]
3013 [rank][lane][i],
3014 get_timing_register_addr(lane,
3015 3,
3016 slot,
3017 rank),
3018 9, 1);
3019
3020 if (!i)
3021 write_500(info, channel,
3022 info->
3023 cached_training->
3024 timing2_offset
3025 [channel][slot][rank]
3026 [lane],
3027 get_timing_register_addr
3028 (lane, 2, slot, rank),
3029 9, 1);
3030 write_500(info, channel,
3031 i ? info->cached_training->
3032 timing_bounds[tm][channel]
3033 [slot][rank][lane].
3034 largest : info->
3035 cached_training->
3036 timing_bounds[tm][channel]
3037 [slot][rank][lane].smallest,
3038 get_timing_register_addr(lane,
3039 0,
3040 slot,
3041 rank),
3042 9, 1);
3043 write_500(info, channel,
3044 info->cached_training->
3045 timing_offset[channel][slot]
3046 [rank][lane] +
3047 (i ? info->cached_training->
3048 timing_bounds[tm][channel]
3049 [slot][rank][lane].
3050 largest : info->
3051 cached_training->
3052 timing_bounds[tm][channel]
3053 [slot][rank][lane].
3054 smallest) - 64,
3055 get_timing_register_addr(lane,
3056 1,
3057 slot,
3058 rank),
3059 9, 1);
3060 }
3061 for (j = 0; j < 2; j++) {
3062 u8 failmask;
3063 u8 expected_failmask;
3064 char reg1b3;
3065
3066 reg1b3 = (j == 1) + 4;
3067 reg1b3 =
3068 j == i ? reg1b3 : (-reg1b3) & 0x3f;
3069 write_1d0(reg1b3, 0x1bb, 6, 1);
3070 write_1d0(reg1b3, 0x1b3, 6, 1);
3071 write_1d0(reg1b3, 0x1a3, 6, 1);
3072
3073 flip = !flip;
3074 write_testing(info, totalrank, flip);
3075 failmask =
3076 check_testing(info, totalrank,
3077 flip);
3078 expected_failmask =
3079 j == 0 ? 0x00 : 0xff;
3080 if (failmask != expected_failmask)
3081 goto fail;
3082 }
3083 }
3084 totalrank++;
3085 }
3086 }
3087
3088 set_178(info->cached_training->reg178_center);
3089 if (info->use_ecc)
3090 set_ecc(1);
3091 write_training_data(info);
3092	write_1d0(0, 0x142, 3, 1);
3093 info->training = *info->cached_training;
3094
3095 write_1d0(0, 0x1bb, 6, 1);
3096 write_1d0(0, 0x1b3, 6, 1);
3097 write_1d0(0, 0x1a3, 6, 1);
3098 write_mchbar8(0x243, saved_243[0]);
3099 write_mchbar8(0x643, saved_243[1]);
3100
3101 return 1;
3102
3103fail:
3104 FOR_POPULATED_RANKS {
3105 write_500_timings_type(info, channel, slot, rank, 1);
3106 write_500_timings_type(info, channel, slot, rank, 2);
3107 write_500_timings_type(info, channel, slot, rank, 3);
3108 }
3109
3110 write_1d0(0, 0x1bb, 6, 1);
3111 write_1d0(0, 0x1b3, 6, 1);
3112 write_1d0(0, 0x1a3, 6, 1);
3113 write_mchbar8(0x243, saved_243[0]);
3114 write_mchbar8(0x643, saved_243[1]);
3115
3116 return 0;
3117}
3118
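/* Full training: write the test patterns for every populated rank, sweep
   register 0x178 across its range collecting per-lane timing windows via
   train_ram_at_178(), pick a centre with choose_reg178(), refine around
   that centre, program the final per-lane timings with choose_training()
   and finally calibrate the timing-3 offsets in try_timing_offsets(). */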
3119static void do_ram_training(struct raminfo *info)
3120{
3121 u8 saved_243[2];
3122 int totalrank = 0;
3123 u8 reg_178;
3124 int niter;
3125
3126 timing_bounds_t timings[64];
3127 int lane, rank, slot, channel;
3128 u8 reg178_center;
3129
3130 write_1d0(2, 0x142, 3, 1);
3131 saved_243[0] = read_mchbar8(0x243);
3132 saved_243[1] = read_mchbar8(0x643);
3133 write_mchbar8(0x243, saved_243[0] | 2);
3134 write_mchbar8(0x643, saved_243[1] | 2);
3135 switch (info->clock_speed_index) {
3136 case 0:
3137 niter = 5;
3138 break;
3139 case 1:
3140 niter = 10;
3141 break;
3142 default:
3143 niter = 19;
3144 break;
3145 }
3146 set_ecc(0);
3147
3148 FOR_POPULATED_RANKS_BACKWARDS {
3149 int i;
3150
3151 write_500_timings_type(info, channel, slot, rank, 0);
3152
3153 write_testing(info, totalrank, 0);
3154 for (i = 0; i < niter; i++) {
3155 write_testing_type2(info, totalrank, 2, i, 0);
3156 write_testing_type2(info, totalrank, 3, i, 1);
3157 }
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003158 pci_write_config8(NORTHBRIDGE, 0xc0, 0x01);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003159 totalrank++;
3160 }
3161
3162 if (reg178_min[info->clock_speed_index] <
3163 reg178_max[info->clock_speed_index])
3164 memset(timings[reg178_min[info->clock_speed_index]], 0,
3165 sizeof(timings[0]) *
3166 (reg178_max[info->clock_speed_index] -
3167 reg178_min[info->clock_speed_index]));
3168 for (reg_178 = reg178_min[info->clock_speed_index];
3169 reg_178 < reg178_max[info->clock_speed_index];
3170 reg_178 += reg178_step[info->clock_speed_index]) {
3171 totalrank = 0;
3172 set_178(reg_178);
3173 for (channel = NUM_CHANNELS - 1; channel >= 0; channel--)
3174 for (slot = 0; slot < NUM_SLOTS; slot++)
3175 for (rank = 0; rank < NUM_RANKS; rank++) {
3176 memset(&timings[reg_178][channel][slot]
3177 [rank][0].smallest, 0, 16);
3178 if (info->
3179 populated_ranks[channel][slot]
3180 [rank]) {
3181 train_ram_at_178(info, channel,
3182 slot, rank,
3183 totalrank,
3184 reg_178, 1,
3185 niter,
3186 timings);
3187 totalrank++;
3188 }
3189 }
3190 }
3191
3192 reg178_center = choose_reg178(info, timings);
3193
3194 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 8; lane++) {
3195 info->training.timing_bounds[0][channel][slot][rank][lane].
3196 smallest =
3197 timings[info->training.
3198 reg178_smallest][channel][slot][rank][lane].
3199 smallest;
3200 info->training.timing_bounds[0][channel][slot][rank][lane].
3201 largest =
3202 timings[info->training.
3203 reg178_smallest][channel][slot][rank][lane].largest;
3204 info->training.timing_bounds[1][channel][slot][rank][lane].
3205 smallest =
3206 timings[info->training.
3207 reg178_largest][channel][slot][rank][lane].smallest;
3208 info->training.timing_bounds[1][channel][slot][rank][lane].
3209 largest =
3210 timings[info->training.
3211 reg178_largest][channel][slot][rank][lane].largest;
3212 info->training.timing_offset[channel][slot][rank][lane] =
3213 info->training.lane_timings[1][channel][slot][rank][lane]
3214 -
3215 info->training.lane_timings[0][channel][slot][rank][lane] +
3216 64;
3217 }
3218
3219 if (info->silicon_revision == 1
3220 && (info->
3221 populated_ranks_mask[1] ^ (info->
3222 populated_ranks_mask[1] >> 2)) & 1) {
3223 int ranks_after_channel1;
3224
3225 totalrank = 0;
3226 for (reg_178 = reg178_center - 18;
3227 reg_178 <= reg178_center + 18; reg_178 += 18) {
3228 totalrank = 0;
3229 set_178(reg_178);
3230 for (slot = 0; slot < NUM_SLOTS; slot++)
3231 for (rank = 0; rank < NUM_RANKS; rank++) {
3232 if (info->
3233 populated_ranks[1][slot][rank]) {
3234 train_ram_at_178(info, 1, slot,
3235 rank,
3236 totalrank,
3237 reg_178, 0,
3238 niter,
3239 timings);
3240 totalrank++;
3241 }
3242 }
3243 }
3244 ranks_after_channel1 = totalrank;
3245
3246 for (reg_178 = reg178_center - 12;
3247 reg_178 <= reg178_center + 12; reg_178 += 12) {
3248 totalrank = ranks_after_channel1;
3249 set_178(reg_178);
3250 for (slot = 0; slot < NUM_SLOTS; slot++)
3251 for (rank = 0; rank < NUM_RANKS; rank++)
3252 if (info->
3253 populated_ranks[0][slot][rank]) {
3254 train_ram_at_178(info, 0, slot,
3255 rank,
3256 totalrank,
3257 reg_178, 0,
3258 niter,
3259 timings);
3260 totalrank++;
3261 }
3262
3263 }
3264 } else {
3265 for (reg_178 = reg178_center - 12;
3266 reg_178 <= reg178_center + 12; reg_178 += 12) {
3267 totalrank = 0;
3268 set_178(reg_178);
3269 FOR_POPULATED_RANKS_BACKWARDS {
3270 train_ram_at_178(info, channel, slot, rank,
3271 totalrank, reg_178, 0, niter,
3272 timings);
3273 totalrank++;
3274 }
3275 }
3276 }
3277
3278 set_178(reg178_center);
3279 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 8; lane++) {
3280 u16 tm0;
3281
3282 tm0 =
3283 choose_training(info, channel, slot, rank, lane, timings,
3284 reg178_center);
3285 write_500(info, channel, tm0,
3286 get_timing_register_addr(lane, 0, slot, rank), 9, 1);
3287 write_500(info, channel,
3288 tm0 +
3289 info->training.
3290 lane_timings[1][channel][slot][rank][lane] -
3291 info->training.
3292 lane_timings[0][channel][slot][rank][lane],
3293 get_timing_register_addr(lane, 1, slot, rank), 9, 1);
3294 }
3295
3296 totalrank = 0;
3297 FOR_POPULATED_RANKS_BACKWARDS {
3298 try_timing_offsets(info, channel, slot, rank, totalrank);
3299 totalrank++;
3300 }
3301 write_mchbar8(0x243, saved_243[0]);
3302 write_mchbar8(0x643, saved_243[1]);
3303 write_1d0(0, 0x142, 3, 1);
3304 info->training.reg178_center = reg178_center;
3305}
3306
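/* Top-level training entry point: MCHBAR 0xfc4 is forced to 0xffff for
   the duration (purpose unknown, presumably masking error reporting),
   cached results are used when they pass verification, otherwise the
   full procedure runs, and the 0x10b adjustment is applied on silicon
   revisions 2 and 3 at the lower clock speeds. */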
3307static void ram_training(struct raminfo *info)
3308{
3309 u16 saved_fc4;
3310
3311 saved_fc4 = read_mchbar16(0xfc4);
3312 write_mchbar16(0xfc4, 0xffff);
3313
3314 if (info->revision >= 8)
3315 read_4090(info);
3316
3317 if (!try_cached_training(info))
3318 do_ram_training(info);
3319 if ((info->silicon_revision == 2 || info->silicon_revision == 3)
3320 && info->clock_speed_index < 2)
3321 set_10b(info, 1);
3322 write_mchbar16(0xfc4, saved_fc4);
3323}
3324
3325static unsigned gcd(unsigned a, unsigned b)
3326{
3327 unsigned t;
3328 if (a > b) {
3329 t = a;
3330 a = b;
3331 b = t;
3332 }
3333	/* invariant a <= b. */
3334 while (a) {
3335 t = b % a;
3336 b = a;
3337 a = t;
3338 }
3339 return b;
3340}
3341
3342static inline int div_roundup(int a, int b)
3343{
Edward O'Callaghan7116ac82014-07-08 01:53:24 +10003344 return CEIL_DIV(a, b);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003345}
3346
3347static unsigned lcm(unsigned a, unsigned b)
3348{
3349 return (a * b) / gcd(a, b);
3350}
3351
3352struct stru1 {
3353 u8 freqs_reversed;
3354 u8 freq_diff_reduced;
3355 u8 freq_min_reduced;
3356 u8 divisor_f4_to_fmax;
3357 u8 divisor_f3_to_fmax;
3358 u8 freq4_to_max_remainder;
3359 u8 freq3_to_2_remainder;
3360 u8 freq3_to_2_remaindera;
3361 u8 freq4_to_2_remainder;
3362 int divisor_f3_to_f1, divisor_f4_to_f2;
3363 int common_time_unit_ps;
3364 int freq_max_reduced;
3365};
3366
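/* compute_frequence_ratios(): reduce freq1/freq2 by their gcd, derive a
   common time unit of div_roundup(900000, lcm(freq1, freq2)) picoseconds,
   express the two delays in that unit and fill struct stru1 with the
   divisors and remainders that the 0x2dxx/0x6dx registers expect. The
   structure's field names are guesses from reverse engineering, not
   official names. */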
3367static void
3368compute_frequence_ratios(struct raminfo *info, u16 freq1, u16 freq2,
3369 int num_cycles_2, int num_cycles_1, int round_it,
3370 int add_freqs, struct stru1 *result)
3371{
3372 int g;
3373 int common_time_unit_ps;
3374 int freq1_reduced, freq2_reduced;
3375 int freq_min_reduced;
3376 int freq_max_reduced;
3377 int freq3, freq4;
3378
3379 g = gcd(freq1, freq2);
3380 freq1_reduced = freq1 / g;
3381 freq2_reduced = freq2 / g;
3382 freq_min_reduced = min(freq1_reduced, freq2_reduced);
3383 freq_max_reduced = max(freq1_reduced, freq2_reduced);
3384
3385 common_time_unit_ps = div_roundup(900000, lcm(freq1, freq2));
3386 freq3 = div_roundup(num_cycles_2, common_time_unit_ps) - 1;
3387 freq4 = div_roundup(num_cycles_1, common_time_unit_ps) - 1;
3388 if (add_freqs) {
3389 freq3 += freq2_reduced;
3390 freq4 += freq1_reduced;
3391 }
3392
3393 if (round_it) {
3394 result->freq3_to_2_remainder = 0;
3395 result->freq3_to_2_remaindera = 0;
3396 result->freq4_to_max_remainder = 0;
3397 result->divisor_f4_to_f2 = 0;
3398 result->divisor_f3_to_f1 = 0;
3399 } else {
3400 if (freq2_reduced < freq1_reduced) {
3401 result->freq3_to_2_remainder =
3402 result->freq3_to_2_remaindera =
3403 freq3 % freq1_reduced - freq1_reduced + 1;
3404 result->freq4_to_max_remainder =
3405 -(freq4 % freq1_reduced);
3406 result->divisor_f3_to_f1 = freq3 / freq1_reduced;
3407 result->divisor_f4_to_f2 =
3408 (freq4 -
3409 (freq1_reduced - freq2_reduced)) / freq2_reduced;
3410 result->freq4_to_2_remainder =
3411 -(char)((freq1_reduced - freq2_reduced) +
3412 ((u8) freq4 -
3413 (freq1_reduced -
3414 freq2_reduced)) % (u8) freq2_reduced);
3415 } else {
3416 if (freq2_reduced > freq1_reduced) {
3417 result->freq4_to_max_remainder =
3418 (freq4 % freq2_reduced) - freq2_reduced + 1;
3419 result->freq4_to_2_remainder =
3420 freq4 % freq_max_reduced -
3421 freq_max_reduced + 1;
3422 } else {
3423 result->freq4_to_max_remainder =
3424 -(freq4 % freq2_reduced);
3425 result->freq4_to_2_remainder =
3426 -(char)(freq4 % freq_max_reduced);
3427 }
3428 result->divisor_f4_to_f2 = freq4 / freq2_reduced;
3429 result->divisor_f3_to_f1 =
3430 (freq3 -
3431 (freq2_reduced - freq1_reduced)) / freq1_reduced;
3432 result->freq3_to_2_remainder = -(freq3 % freq2_reduced);
3433 result->freq3_to_2_remaindera =
3434 -(char)((freq_max_reduced - freq_min_reduced) +
3435 (freq3 -
3436 (freq_max_reduced -
3437 freq_min_reduced)) % freq1_reduced);
3438 }
3439 }
3440 result->divisor_f3_to_fmax = freq3 / freq_max_reduced;
3441 result->divisor_f4_to_fmax = freq4 / freq_max_reduced;
3442 if (round_it) {
3443 if (freq2_reduced > freq1_reduced) {
3444 if (freq3 % freq_max_reduced)
3445 result->divisor_f3_to_fmax++;
3446 }
3447 if (freq2_reduced < freq1_reduced) {
3448 if (freq4 % freq_max_reduced)
3449 result->divisor_f4_to_fmax++;
3450 }
3451 }
3452 result->freqs_reversed = (freq2_reduced < freq1_reduced);
3453 result->freq_diff_reduced = freq_max_reduced - freq_min_reduced;
3454 result->freq_min_reduced = freq_min_reduced;
3455 result->common_time_unit_ps = common_time_unit_ps;
3456 result->freq_max_reduced = freq_max_reduced;
3457}
3458
3459static void
3460set_2d5x_reg(struct raminfo *info, u16 reg, u16 freq1, u16 freq2,
3461 int num_cycles_2, int num_cycles_1, int num_cycles_3,
3462 int num_cycles_4, int reverse)
3463{
3464 struct stru1 vv;
3465 char multiplier;
3466
3467 compute_frequence_ratios(info, freq1, freq2, num_cycles_2, num_cycles_1,
3468 0, 1, &vv);
3469
3470 multiplier =
3471 div_roundup(max
3472 (div_roundup(num_cycles_2, vv.common_time_unit_ps) +
3473 div_roundup(num_cycles_3, vv.common_time_unit_ps),
3474 div_roundup(num_cycles_1,
3475 vv.common_time_unit_ps) +
3476 div_roundup(num_cycles_4, vv.common_time_unit_ps))
3477 + vv.freq_min_reduced - 1, vv.freq_max_reduced) - 1;
3478
3479 u32 y =
3480 (u8) ((vv.freq_max_reduced - vv.freq_min_reduced) +
3481 vv.freq_max_reduced * multiplier)
3482 | (vv.
3483 freqs_reversed << 8) | ((u8) (vv.freq_min_reduced *
3484 multiplier) << 16) | ((u8) (vv.
3485 freq_min_reduced
3486 *
3487 multiplier)
3488 << 24);
3489 u32 x =
3490 vv.freq3_to_2_remaindera | (vv.freq4_to_2_remainder << 8) | (vv.
3491 divisor_f3_to_f1
3492 << 16)
3493 | (vv.divisor_f4_to_f2 << 20) | (vv.freq_min_reduced << 24);
3494 if (reverse) {
3495 write_mchbar32(reg, y);
3496 write_mchbar32(reg + 4, x);
3497 } else {
3498 write_mchbar32(reg + 4, y);
3499 write_mchbar32(reg, x);
3500 }
3501}
3502
3503static void
3504set_6d_reg(struct raminfo *info, u16 reg, u16 freq1, u16 freq2,
3505 int num_cycles_1, int num_cycles_2, int num_cycles_3,
3506 int num_cycles_4)
3507{
3508 struct stru1 ratios1;
3509 struct stru1 ratios2;
3510
3511 compute_frequence_ratios(info, freq1, freq2, num_cycles_1, num_cycles_2,
3512 0, 1, &ratios2);
3513 compute_frequence_ratios(info, freq1, freq2, num_cycles_3, num_cycles_4,
3514 0, 1, &ratios1);
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003515 printk (BIOS_SPEW, "[%x] <= %x\n", reg,
3516 ratios1.freq4_to_max_remainder | (ratios2.
3517 freq4_to_max_remainder
3518 << 8)
3519 | (ratios1.divisor_f4_to_fmax << 16) | (ratios2.
3520 divisor_f4_to_fmax
3521 << 20));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003522 write_mchbar32(reg,
3523 ratios1.freq4_to_max_remainder | (ratios2.
3524 freq4_to_max_remainder
3525 << 8)
3526 | (ratios1.divisor_f4_to_fmax << 16) | (ratios2.
3527 divisor_f4_to_fmax
3528 << 20));
3529}
3530
3531static void
3532set_2dx8_reg(struct raminfo *info, u16 reg, u8 mode, u16 freq1, u16 freq2,
3533 int num_cycles_2, int num_cycles_1, int round_it, int add_freqs)
3534{
3535 struct stru1 ratios;
3536
3537 compute_frequence_ratios(info, freq1, freq2, num_cycles_2, num_cycles_1,
3538 round_it, add_freqs, &ratios);
3539 switch (mode) {
3540 case 0:
3541 write_mchbar32(reg + 4,
3542 ratios.freq_diff_reduced | (ratios.
3543 freqs_reversed <<
3544 8));
3545 write_mchbar32(reg,
3546 ratios.freq3_to_2_remainder | (ratios.
3547 freq4_to_max_remainder
3548 << 8)
3549 | (ratios.divisor_f3_to_fmax << 16) | (ratios.
3550 divisor_f4_to_fmax
3551 << 20) |
3552 (ratios.freq_min_reduced << 24));
3553 break;
3554
3555 case 1:
3556 write_mchbar32(reg,
3557 ratios.freq3_to_2_remainder | (ratios.
3558 divisor_f3_to_fmax
3559 << 16));
3560 break;
3561
3562 case 2:
3563 write_mchbar32(reg,
3564 ratios.freq3_to_2_remainder | (ratios.
3565 freq4_to_max_remainder
3566 << 8) | (ratios.
3567 divisor_f3_to_fmax
3568 << 16) |
3569 (ratios.divisor_f4_to_fmax << 20));
3570 break;
3571
3572 case 4:
3573 write_mchbar32(reg, (ratios.divisor_f3_to_fmax << 4)
3574 | (ratios.divisor_f4_to_fmax << 8) | (ratios.
3575 freqs_reversed
3576 << 12) |
3577 (ratios.freq_min_reduced << 16) | (ratios.
3578 freq_diff_reduced
3579 << 24));
3580 break;
3581 }
3582}
3583
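/* set_2dxx_series(): program the family of clock-crossing ratio registers
   at MCHBAR 0x2dxx/0x6dx from the FSB and memory frequencies. On S3
   resume, 0x6dc and 0x6e8 are restored verbatim from the cached training
   instead of being recomputed, presumably because they depend on the
   delay46/delay54 values that are only derived in set_274265() on a
   cold boot. */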
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003584static void set_2dxx_series(struct raminfo *info, int s3resume)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003585{
3586 set_2dx8_reg(info, 0x2d00, 0, 0x78, frequency_11(info) / 2, 1359, 1005,
3587 0, 1);
3588 set_2dx8_reg(info, 0x2d08, 0, 0x78, 0x78, 3273, 5033, 1, 1);
3589 set_2dx8_reg(info, 0x2d10, 0, 0x78, info->fsb_frequency, 1475, 1131, 0,
3590 1);
3591 set_2dx8_reg(info, 0x2d18, 0, 2 * info->fsb_frequency,
3592 frequency_11(info), 1231, 1524, 0, 1);
3593 set_2dx8_reg(info, 0x2d20, 0, 2 * info->fsb_frequency,
3594 frequency_11(info) / 2, 1278, 2008, 0, 1);
3595 set_2dx8_reg(info, 0x2d28, 0, info->fsb_frequency, frequency_11(info),
3596 1167, 1539, 0, 1);
3597 set_2dx8_reg(info, 0x2d30, 0, info->fsb_frequency,
3598 frequency_11(info) / 2, 1403, 1318, 0, 1);
3599 set_2dx8_reg(info, 0x2d38, 0, info->fsb_frequency, 0x78, 3460, 5363, 1,
3600 1);
3601 set_2dx8_reg(info, 0x2d40, 0, info->fsb_frequency, 0x3c, 2792, 5178, 1,
3602 1);
3603 set_2dx8_reg(info, 0x2d48, 0, 2 * info->fsb_frequency, 0x78, 2738, 4610,
3604 1, 1);
3605 set_2dx8_reg(info, 0x2d50, 0, info->fsb_frequency, 0x78, 2819, 5932, 1,
3606 1);
3607 set_2dx8_reg(info, 0x6d4, 1, info->fsb_frequency,
3608 frequency_11(info) / 2, 4000, 0, 0, 0);
3609 set_2dx8_reg(info, 0x6d8, 2, info->fsb_frequency,
3610 frequency_11(info) / 2, 4000, 4000, 0, 0);
3611
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003612 if (s3resume) {
3613 printk (BIOS_SPEW, "[6dc] <= %x\n", info->cached_training->reg_6dc);
3614 write_mchbar32(0x6dc, info->cached_training->reg_6dc);
3615 } else
3616 set_6d_reg(info, 0x6dc, 2 * info->fsb_frequency, frequency_11(info), 0,
3617 info->delay46_ps[0], 0,
3618 info->delay54_ps[0]);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003619 set_2dx8_reg(info, 0x6e0, 1, 2 * info->fsb_frequency,
3620 frequency_11(info), 2500, 0, 0, 0);
3621 set_2dx8_reg(info, 0x6e4, 1, 2 * info->fsb_frequency,
3622 frequency_11(info) / 2, 3500, 0, 0, 0);
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003623 if (s3resume) {
3624 printk (BIOS_SPEW, "[6e8] <= %x\n", info->cached_training->reg_6e8);
3625 write_mchbar32(0x6e8, info->cached_training->reg_6e8);
3626 } else
3627 set_6d_reg(info, 0x6e8, 2 * info->fsb_frequency, frequency_11(info), 0,
3628 info->delay46_ps[1], 0,
3629 info->delay54_ps[1]);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003630 set_2d5x_reg(info, 0x2d58, 0x78, 0x78, 864, 1195, 762, 786, 0);
3631 set_2d5x_reg(info, 0x2d60, 0x195, info->fsb_frequency, 1352, 725, 455,
3632 470, 0);
3633 set_2d5x_reg(info, 0x2d68, 0x195, 0x3c, 2707, 5632, 3277, 2207, 0);
3634 set_2d5x_reg(info, 0x2d70, 0x195, frequency_11(info) / 2, 1276, 758,
3635 454, 459, 0);
3636 set_2d5x_reg(info, 0x2d78, 0x195, 0x78, 1021, 799, 510, 513, 0);
3637 set_2d5x_reg(info, 0x2d80, info->fsb_frequency, 0xe1, 0, 2862, 2579,
3638 2588, 0);
3639 set_2d5x_reg(info, 0x2d88, info->fsb_frequency, 0xe1, 0, 2690, 2405,
3640 2405, 0);
3641 set_2d5x_reg(info, 0x2da0, 0x78, 0xe1, 0, 2560, 2264, 2251, 0);
3642 set_2d5x_reg(info, 0x2da8, 0x195, frequency_11(info), 1060, 775, 484,
3643 480, 0);
3644 set_2d5x_reg(info, 0x2db0, 0x195, 0x78, 4183, 6023, 2217, 2048, 0);
3645 write_mchbar32(0x2dbc, ((frequency_11(info) / 2) - 1) | 0xe00000);
3646 write_mchbar32(0x2db8, ((info->fsb_frequency - 1) << 16) | 0x77);
3647}
3648
3649static u16 get_max_timing(struct raminfo *info, int channel)
3650{
3651 int slot, rank, lane;
3652 u16 ret = 0;
3653
3654 if ((read_mchbar8(0x2ca8) >> 2) < 1)
3655 return 384;
3656
3657 if (info->revision < 8)
3658 return 256;
3659
3660 for (slot = 0; slot < NUM_SLOTS; slot++)
3661 for (rank = 0; rank < NUM_RANKS; rank++)
3662 if (info->populated_ranks[channel][slot][rank])
3663 for (lane = 0; lane < 8 + info->use_ecc; lane++)
3664 ret = max(ret, read_500(info, channel,
3665 get_timing_register_addr
3666 (lane, 0, slot,
3667 rank), 9));
3668 return ret;
3669}
3670
static void set_274265(struct raminfo *info)
{
	int delay_a_ps, delay_b_ps, delay_c_ps, delay_d_ps;
	int delay_e_ps, delay_e_cycles, delay_f_cycles;
	int delay_e_over_cycle_ps;
	int cycletime_ps;
	int channel;

	delay_a_ps = 4 * halfcycle_ps(info) + 6 * fsbcycle_ps(info);
	info->training.reg2ca9_bit0 = 0;
	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		cycletime_ps =
		    900000 / lcm(2 * info->fsb_frequency, frequency_11(info));
		delay_d_ps =
		    (halfcycle_ps(info) * get_max_timing(info, channel) >> 6)
		    - info->some_delay_3_ps_rounded + 200;
		if (!((info->silicon_revision == 0
		       || info->silicon_revision == 1)
		      && (info->revision >= 8)))
			delay_d_ps += halfcycle_ps(info) * 2;
		delay_d_ps +=
		    halfcycle_ps(info) * (!info->revision_flag_1 +
					  info->some_delay_2_halfcycles_ceil +
					  2 * info->some_delay_1_cycle_floor +
					  info->clock_speed_index +
					  2 * info->cas_latency - 7 + 11);
		delay_d_ps += info->revision >= 8 ? 2758 : 4428;

		write_mchbar32(0x140,
			       (read_mchbar32(0x140) & 0xfaffffff) | 0x2000000);
		write_mchbar32(0x138,
			       (read_mchbar32(0x138) & 0xfaffffff) | 0x2000000);
		if ((read_mchbar8(0x144) & 0x1f) > 0x13)
			delay_d_ps += 650;
		delay_c_ps = delay_d_ps + 1800;
		if (delay_c_ps <= delay_a_ps)
			delay_e_ps = 0;
		else
			delay_e_ps =
			    cycletime_ps * div_roundup(delay_c_ps - delay_a_ps,
						       cycletime_ps);

		delay_e_over_cycle_ps = delay_e_ps % (2 * halfcycle_ps(info));
		delay_e_cycles = delay_e_ps / (2 * halfcycle_ps(info));
		delay_f_cycles =
		    div_roundup(2500 - delay_e_over_cycle_ps,
				2 * halfcycle_ps(info));
		if (delay_f_cycles > delay_e_cycles) {
			info->delay46_ps[channel] = delay_e_ps;
			delay_e_cycles = 0;
		} else {
			info->delay46_ps[channel] =
			    delay_e_over_cycle_ps +
			    2 * halfcycle_ps(info) * delay_f_cycles;
			delay_e_cycles -= delay_f_cycles;
		}

		if (info->delay46_ps[channel] < 2500) {
			info->delay46_ps[channel] = 2500;
			info->training.reg2ca9_bit0 = 1;
		}
		delay_b_ps = halfcycle_ps(info) + delay_c_ps;
		if (delay_b_ps <= delay_a_ps)
			delay_b_ps = 0;
		else
			delay_b_ps -= delay_a_ps;
		info->delay54_ps[channel] =
		    cycletime_ps * div_roundup(delay_b_ps, cycletime_ps) -
		    2 * halfcycle_ps(info) * delay_e_cycles;
		if (info->delay54_ps[channel] < 2500)
			info->delay54_ps[channel] = 2500;
		info->training.reg274265[channel][0] = delay_e_cycles;
		if (delay_d_ps + 7 * halfcycle_ps(info) <=
		    24 * halfcycle_ps(info))
			info->training.reg274265[channel][1] = 0;
		else
			info->training.reg274265[channel][1] =
			    div_roundup(delay_d_ps + 7 * halfcycle_ps(info),
					4 * halfcycle_ps(info)) - 6;
		write_mchbar32((channel << 10) + 0x274,
			       info->training.reg274265[channel][1]
			       | (info->training.reg274265[channel][0] << 16));
		info->training.reg274265[channel][2] =
		    div_roundup(delay_c_ps + 3 * fsbcycle_ps(info),
				4 * halfcycle_ps(info)) + 1;
		write_mchbar16((channel << 10) + 0x265,
			       info->training.reg274265[channel][2] << 8);
	}
	if (info->training.reg2ca9_bit0)
		write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) | 1);
	else
		write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) & ~1);
}

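/* On S3 resume the values computed by set_274265() are not recalculated;
   the copies cached by the MRC-cache path are written back verbatim. */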
static void restore_274265(struct raminfo *info)
{
	int channel;

	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		write_mchbar32((channel << 10) + 0x274,
			       (info->cached_training->reg274265[channel][0] << 16)
			       | info->cached_training->reg274265[channel][1]);
		write_mchbar16((channel << 10) + 0x265,
			       info->cached_training->reg274265[channel][2] << 8);
	}
	if (info->cached_training->reg2ca9_bit0)
		write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) | 1);
	else
		write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) & ~1);
}

#if REAL
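/* Minimal DMI link bring-up.  The writes below were captured from the
   vendor BIOS; their exact meaning within DMIBAR is not documented. */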
static void dmi_setup(void)
{
	gav(read8(DEFAULT_DMIBAR + 0x254));
	write8(DEFAULT_DMIBAR + 0x254, 0x1);
	write16(DEFAULT_DMIBAR + 0x1b8, 0x18f2);
	read_mchbar16(0x48);
	write_mchbar16(0x48, 0x2);

	write32(DEFAULT_DMIBAR + 0xd68, read32(DEFAULT_DMIBAR + 0xd68) | 0x08000000);

	outl((gav(inl(DEFAULT_GPIOBASE | 0x38)) & ~0x140000) | 0x400000,
	     DEFAULT_GPIOBASE | 0x38);
	gav(inb(DEFAULT_GPIOBASE | 0xe));	// = 0xfdcaff6e
}
#endif

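/* Early chipset setup run before raminit proper: detect an interrupted
   previous memory init via MCHBAR 0x2ca8, bring up DMI, and program GGC
   with the graphics stolen-memory size selected by the "gfx_uma_size"
   CMOS option. */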
void chipset_init(const int s3resume)
{
	u8 x2ca8;
	u16 ggc;
	u8 gfxsize;

	x2ca8 = read_mchbar8(0x2ca8);
	if ((x2ca8 & 1) || (x2ca8 == 8 && !s3resume)) {
		printk(BIOS_DEBUG, "soft reset detected, rebooting properly\n");
		write_mchbar8(0x2ca8, 0);
		outb(0x6, 0xcf9);
#if REAL
		halt();
#else
		printf("CP5\n");
		exit(0);
#endif
	}
#if !REAL
	if (!s3resume) {
		pre_raminit_3(x2ca8);
	}
	pre_raminit_4a(x2ca8);
#endif

	dmi_setup();

	write_mchbar16(0x1170, 0xa880);
	write_mchbar8(0x11c1, 0x1);
	write_mchbar16(0x1170, 0xb880);
	read_mchbar8(0x1210);
	write_mchbar8(0x1210, 0x84);

	if (get_option(&gfxsize, "gfx_uma_size") != CB_SUCCESS) {
		/* 0 for 32MB */
		gfxsize = 0;
	}

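	/* GGC bits 7:4 select the graphics stolen-memory size; the option
	   value is offset by 5, so gfx_uma_size == 0 appears to pick the
	   32 MiB encoding, with larger values selecting larger apertures. */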
	ggc = 0xb00 | ((gfxsize + 5) << 4);

	pci_write_config16(NORTHBRIDGE, D0F0_GGC, ggc | 2);

	u16 deven;
	deven = pci_read_config16(NORTHBRIDGE, D0F0_DEVEN);	// = 0x3

	if (deven & 8) {
		write_mchbar8(0x2c30, 0x20);
		pci_read_config8(NORTHBRIDGE, 0x8);	// = 0x18
		write_mchbar16(0x2c30, read_mchbar16(0x2c30) | 0x200);
		write_mchbar16(0x2c32, 0x434);
		read_mchbar32(0x2c44);
		write_mchbar32(0x2c44, 0x1053687);
		pci_read_config8(GMA, 0x62);	// = 0x2
		pci_write_config8(GMA, 0x62, 0x2);
		read8(DEFAULT_RCBA + 0x2318);
		write8(DEFAULT_RCBA + 0x2318, 0x47);
		read8(DEFAULT_RCBA + 0x2320);
		write8(DEFAULT_RCBA + 0x2320, 0xfc);
	}

	read_mchbar32(0x30);
	write_mchbar32(0x30, 0x40);

	pci_write_config16(NORTHBRIDGE, D0F0_GGC, ggc);
	gav(read32(DEFAULT_RCBA + 0x3428));
	write32(DEFAULT_RCBA + 0x3428, 0x1d);
}

void raminit(const int s3resume, const u8 *spd_addrmap)
{
	unsigned channel, slot, lane, rank;
	int i;
	struct raminfo info;
	u8 x2ca8;
	u16 deven;
	int cbmem_wasnot_inited;

	x2ca8 = read_mchbar8(0x2ca8);
	deven = pci_read_config16(NORTHBRIDGE, D0F0_DEVEN);

	memset(&info, 0x5a, sizeof(info));

	info.last_500_command[0] = 0;
	info.last_500_command[1] = 0;

	info.fsb_frequency = 135 * 2;
	info.board_lane_delay[0] = 0x14;
	info.board_lane_delay[1] = 0x07;
	info.board_lane_delay[2] = 0x07;
	info.board_lane_delay[3] = 0x08;
	info.board_lane_delay[4] = 0x56;
	info.board_lane_delay[5] = 0x04;
	info.board_lane_delay[6] = 0x04;
	info.board_lane_delay[7] = 0x05;
	info.board_lane_delay[8] = 0x10;

	info.training.reg_178 = 0;
	info.training.reg_10b = 0;

	info.heci_bar = 0;
	info.memory_reserved_for_heci_mb = 0;

	/* before SPD */
	timestamp_add_now(101);

	if (!s3resume || REAL) {
		pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2);	// = 0x80

		collect_system_info(&info);

#if REAL
		/* Enable SMBUS. */
		enable_smbus();
#endif

		memset(&info.populated_ranks, 0, sizeof(info.populated_ranks));

		info.use_ecc = 1;
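		/* Probe each DIMM's SPD EEPROM over SMBus and cache only the
		   bytes the timing calculations below consume (module type,
		   density, ranks, CAS latencies, raw-card data).  Only
		   unbuffered DDR3 with at most two ranks is accepted. */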
		for (channel = 0; channel < NUM_CHANNELS; channel++)
			for (slot = 0; slot < NUM_SLOTS; slot++) {
				int v;
				int try;
				int addr;
				const u8 useful_addresses[] = {
					DEVICE_TYPE,
					MODULE_TYPE,
					DENSITY,
					RANKS_AND_DQ,
					MEMORY_BUS_WIDTH,
					TIMEBASE_DIVIDEND,
					TIMEBASE_DIVISOR,
					CYCLETIME,
					CAS_LATENCIES_LSB,
					CAS_LATENCIES_MSB,
					CAS_LATENCY_TIME,
					0x11, 0x12, 0x13, 0x14, 0x15,
					0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
					0x1c, 0x1d,
					THERMAL_AND_REFRESH,
					0x20,
					REFERENCE_RAW_CARD_USED,
					RANK1_ADDRESS_MAPPING,
					0x75, 0x76, 0x77, 0x78,
					0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e,
					0x7f, 0x80, 0x81, 0x82, 0x83, 0x84,
					0x85, 0x86, 0x87, 0x88,
					0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e,
					0x8f, 0x90, 0x91, 0x92, 0x93, 0x94,
					0x95
				};
				if (!spd_addrmap[2 * channel + slot])
					continue;
				for (try = 0; try < 5; try++) {
					v = smbus_read_byte(spd_addrmap[2 * channel + slot],
							    DEVICE_TYPE);
					if (v >= 0)
						break;
				}
				if (v < 0)
					continue;
				for (addr = 0;
				     addr < sizeof(useful_addresses) /
					    sizeof(useful_addresses[0]); addr++)
					gav(info.spd[channel][0][useful_addresses[addr]] =
					    smbus_read_byte(spd_addrmap[2 * channel + slot],
							    useful_addresses[addr]));
				if (info.spd[channel][0][DEVICE_TYPE] != 11)
					die("Only DDR3 is supported");

				v = info.spd[channel][0][RANKS_AND_DQ];
				info.populated_ranks[channel][0][0] = 1;
				info.populated_ranks[channel][0][1] = ((v >> 3) & 7);
				if (((v >> 3) & 7) > 1)
					die("At most 2 ranks are supported");
				if ((v & 7) == 0 || (v & 7) > 2)
					die("Only x8 and x16 modules are supported");
				if ((info.spd[channel][slot][MODULE_TYPE] & 0xF) != 2
				    && (info.spd[channel][slot][MODULE_TYPE] & 0xF) != 3)
					die("Registered memory is not supported");
				info.is_x16_module[channel][0] = (v & 7) - 1;
				info.density[channel][slot] =
				    info.spd[channel][slot][DENSITY] & 0xF;
				if (!(info.spd[channel][slot][MEMORY_BUS_WIDTH] & 0x18))
					info.use_ecc = 0;
			}

		gav(0x55);

		for (channel = 0; channel < NUM_CHANNELS; channel++) {
			int v = 0;
			for (slot = 0; slot < NUM_SLOTS; slot++)
				for (rank = 0; rank < NUM_RANKS; rank++)
					v |= info.populated_ranks[channel][slot][rank]
					    << (2 * slot + rank);
			info.populated_ranks_mask[channel] = v;
		}

		gav(0x55);

		gav(pci_read_config32(NORTHBRIDGE, D0F0_CAPID0 + 4));
	}

	/* after SPD */
	timestamp_add_now(102);

	write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) & 0xfc);
#if !REAL
	rdmsr (MTRR_PHYS_MASK (3));
#endif

	collect_system_info(&info);
	calculate_timings(&info);

#if !REAL
	pci_write_config8(NORTHBRIDGE, 0xdf, 0x82);
#endif

	if (!s3resume) {
		u8 reg8 = pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2);
		if (x2ca8 == 0 && (reg8 & 0x80)) {
			/* Don't enable S4-assertion stretch. Makes trouble on roda/rk9.
			   reg8 = pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa4);
			   pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa4, reg8 | 0x08);
			 */

			/* Clear bit7. */

			pci_write_config8(SOUTHBRIDGE, GEN_PMCON_2,
					  (reg8 & ~(1 << 7)));

			printk(BIOS_INFO,
			       "Interrupted RAM init, reset required.\n");
			outb(0x6, 0xcf9);
#if REAL
			halt();
#endif
		}
	}
#if !REAL
	gav(read_mchbar8(0x2ca8));	///!!!!
#endif

	if (!s3resume && x2ca8 == 0)
		pci_write_config8(SOUTHBRIDGE, GEN_PMCON_2,
				  pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2) | 0x80);

	compute_derived_timings(&info);

	if (x2ca8 == 0) {
		gav(read_mchbar8(0x164));
		write_mchbar8(0x164, 0x26);
		write_mchbar16(0x2c20, 0x10);
	}

	write_mchbar32(0x18b4, read_mchbar32(0x18b4) | 0x210000);	/* OK */
	write_mchbar32(0x1890, read_mchbar32(0x1890) | 0x2000000);	/* OK */
	write_mchbar32(0x18b4, read_mchbar32(0x18b4) | 0x8000);

	gav(pci_read_config32(PCI_DEV(0xff, 2, 1), 0x50));	// !!!!
	pci_write_config8(PCI_DEV(0xff, 2, 1), 0x54, 0x12);

	gav(read_mchbar16(0x2c10));	// !!!!
	write_mchbar16(0x2c10, 0x412);
	gav(read_mchbar16(0x2c10));	// !!!!
	write_mchbar16(0x2c12, read_mchbar16(0x2c12) | 0x100);	/* OK */

	gav(read_mchbar8(0x2ca8));	// !!!!
	write_mchbar32(0x1804,
		       (read_mchbar32(0x1804) & 0xfffffffc) | 0x8400080);

	pci_read_config32(PCI_DEV(0xff, 2, 1), 0x6c);	// !!!!
	pci_write_config32(PCI_DEV(0xff, 2, 1), 0x6c, 0x40a0a0);
	gav(read_mchbar32(0x1c04));	// !!!!
	gav(read_mchbar32(0x1804));	// !!!!

	if (x2ca8 == 0) {
		write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1);
	}

	write_mchbar32(0x18d8, 0x120000);
	write_mchbar32(0x18dc, 0x30a484a);
	pci_write_config32(PCI_DEV(0xff, 2, 1), 0xe0, 0x0);
	pci_write_config32(PCI_DEV(0xff, 2, 1), 0xf4, 0x9444a);
	write_mchbar32(0x18d8, 0x40000);
	write_mchbar32(0x18dc, 0xb000000);
	pci_write_config32(PCI_DEV(0xff, 2, 1), 0xe0, 0x60000);
	pci_write_config32(PCI_DEV(0xff, 2, 1), 0xf4, 0x0);
	write_mchbar32(0x18d8, 0x180000);
	write_mchbar32(0x18dc, 0xc0000142);
	pci_write_config32(PCI_DEV(0xff, 2, 1), 0xe0, 0x20000);
	pci_write_config32(PCI_DEV(0xff, 2, 1), 0xf4, 0x142);
	write_mchbar32(0x18d8, 0x1e0000);

	gav(read_mchbar32(0x18dc));	// !!!!
	write_mchbar32(0x18dc, 0x3);
	gav(read_mchbar32(0x18dc));	// !!!!

	if (x2ca8 == 0) {
		write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1);	// guess
	}

	write_mchbar32(0x188c, 0x20bc09);
	pci_write_config32(PCI_DEV(0xff, 2, 1), 0xd0, 0x40b0c09);
	write_mchbar32(0x1a10, 0x4200010e);
	write_mchbar32(0x18b8, read_mchbar32(0x18b8) | 0x200);
	gav(read_mchbar32(0x1918));	// !!!!
	write_mchbar32(0x1918, 0x332);

	gav(read_mchbar32(0x18b8));	// !!!!
	write_mchbar32(0x18b8, 0xe00);
	gav(read_mchbar32(0x182c));	// !!!!
	write_mchbar32(0x182c, 0x10202);
	gav(pci_read_config32(PCI_DEV(0xff, 2, 1), 0x94));	// !!!!
	pci_write_config32(PCI_DEV(0xff, 2, 1), 0x94, 0x10202);
	write_mchbar32(0x1a1c, read_mchbar32(0x1a1c) & 0x8fffffff);
	write_mchbar32(0x1a70, read_mchbar32(0x1a70) | 0x100000);

	write_mchbar32(0x18b4, read_mchbar32(0x18b4) & 0xffff7fff);
	gav(read_mchbar32(0x1a68));	// !!!!
	write_mchbar32(0x1a68, 0x343800);
	gav(read_mchbar32(0x1e68));	// !!!!
	gav(read_mchbar32(0x1a68));	// !!!!

	if (x2ca8 == 0) {
		write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1);	// guess
	}

	pci_read_config32(PCI_DEV(0xff, 2, 0), 0x048);	// !!!!
	pci_write_config32(PCI_DEV(0xff, 2, 0), 0x048, 0x140000);
	pci_read_config32(PCI_DEV(0xff, 2, 0), 0x058);	// !!!!
	pci_write_config32(PCI_DEV(0xff, 2, 0), 0x058, 0x64555);
	pci_read_config32(PCI_DEV(0xff, 2, 0), 0x058);	// !!!!
	pci_read_config32(PCI_DEV(0xff, 0, 0), 0xd0);	// !!!!
	pci_write_config32(PCI_DEV(0xff, 0, 0), 0xd0, 0x180);
	gav(read_mchbar32(0x1af0));	// !!!!
	gav(read_mchbar32(0x1af0));	// !!!!
	write_mchbar32(0x1af0, 0x1f020003);
	gav(read_mchbar32(0x1af0));	// !!!!

	if (x2ca8 == 0) {
		write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1);	// guess
	}

	gav(read_mchbar32(0x1890));	// !!!!
	write_mchbar32(0x1890, 0x80102);
	gav(read_mchbar32(0x18b4));	// !!!!
	write_mchbar32(0x18b4, 0x216000);
	write_mchbar32(0x18a4, 0x22222222);
	write_mchbar32(0x18a8, 0x22222222);
	write_mchbar32(0x18ac, 0x22222);

	udelay(1000);

	info.cached_training = get_cached_training();

	if (x2ca8 == 0) {
		int j;
		if (s3resume && info.cached_training) {
			restore_274265(&info);
			printk(BIOS_DEBUG, "reg2ca9_bit0 = %x\n",
			       info.cached_training->reg2ca9_bit0);
			for (i = 0; i < 2; i++)
				for (j = 0; j < 3; j++)
					printk(BIOS_DEBUG, "reg274265[%d][%d] = %x\n",
					       i, j, info.cached_training->reg274265[i][j]);
		} else {
			set_274265(&info);
			printk(BIOS_DEBUG, "reg2ca9_bit0 = %x\n",
			       info.training.reg2ca9_bit0);
			for (i = 0; i < 2; i++)
				for (j = 0; j < 3; j++)
					printk(BIOS_DEBUG, "reg274265[%d][%d] = %x\n",
					       i, j, info.training.reg274265[i][j]);
		}

		set_2dxx_series(&info, s3resume);

		if (!(deven & 8)) {
			read_mchbar32(0x2cb0);
			write_mchbar32(0x2cb0, 0x40);
		}

		udelay(1000);

		if (deven & 8) {
			write_mchbar32(0xff8, 0x1800 | read_mchbar32(0xff8));
			read_mchbar32(0x2cb0);
			write_mchbar32(0x2cb0, 0x00);
			pci_read_config8(PCI_DEV(0, 0x2, 0x0), 0x4c);
			pci_read_config8(PCI_DEV(0, 0x2, 0x0), 0x4c);
			pci_read_config8(PCI_DEV(0, 0x2, 0x0), 0x4e);

			read_mchbar8(0x1150);
			read_mchbar8(0x1151);
			read_mchbar8(0x1022);
			read_mchbar8(0x16d0);
			write_mchbar32(0x1300, 0x60606060);
			write_mchbar32(0x1304, 0x60606060);
			write_mchbar32(0x1308, 0x78797a7b);
			write_mchbar32(0x130c, 0x7c7d7e7f);
			write_mchbar32(0x1310, 0x60606060);
			write_mchbar32(0x1314, 0x60606060);
			write_mchbar32(0x1318, 0x60606060);
			write_mchbar32(0x131c, 0x60606060);
			write_mchbar32(0x1320, 0x50515253);
			write_mchbar32(0x1324, 0x54555657);
			write_mchbar32(0x1328, 0x58595a5b);
			write_mchbar32(0x132c, 0x5c5d5e5f);
			write_mchbar32(0x1330, 0x40414243);
			write_mchbar32(0x1334, 0x44454647);
			write_mchbar32(0x1338, 0x48494a4b);
			write_mchbar32(0x133c, 0x4c4d4e4f);
			write_mchbar32(0x1340, 0x30313233);
			write_mchbar32(0x1344, 0x34353637);
			write_mchbar32(0x1348, 0x38393a3b);
			write_mchbar32(0x134c, 0x3c3d3e3f);
			write_mchbar32(0x1350, 0x20212223);
			write_mchbar32(0x1354, 0x24252627);
			write_mchbar32(0x1358, 0x28292a2b);
			write_mchbar32(0x135c, 0x2c2d2e2f);
			write_mchbar32(0x1360, 0x10111213);
			write_mchbar32(0x1364, 0x14151617);
			write_mchbar32(0x1368, 0x18191a1b);
			write_mchbar32(0x136c, 0x1c1d1e1f);
			write_mchbar32(0x1370, 0x10203);
			write_mchbar32(0x1374, 0x4050607);
			write_mchbar32(0x1378, 0x8090a0b);
			write_mchbar32(0x137c, 0xc0d0e0f);
			write_mchbar8(0x11cc, 0x4e);
			write_mchbar32(0x1110, 0x73970404);
			write_mchbar32(0x1114, 0x72960404);
			write_mchbar32(0x1118, 0x6f950404);
			write_mchbar32(0x111c, 0x6d940404);
			write_mchbar32(0x1120, 0x6a930404);
			write_mchbar32(0x1124, 0x68a41404);
			write_mchbar32(0x1128, 0x66a21404);
			write_mchbar32(0x112c, 0x63a01404);
			write_mchbar32(0x1130, 0x609e1404);
			write_mchbar32(0x1134, 0x5f9c1404);
			write_mchbar32(0x1138, 0x5c961404);
			write_mchbar32(0x113c, 0x58a02404);
			write_mchbar32(0x1140, 0x54942404);
			write_mchbar32(0x1190, 0x900080a);
			write_mchbar16(0x11c0, 0xc40b);
			write_mchbar16(0x11c2, 0x303);
			write_mchbar16(0x11c4, 0x301);
			read_mchbar32(0x1190);
			write_mchbar32(0x1190, 0x8900080a);
			write_mchbar32(0x11b8, 0x70c3000);
			write_mchbar8(0x11ec, 0xa);
			write_mchbar16(0x1100, 0x800);
			read_mchbar32(0x11bc);
			write_mchbar32(0x11bc, 0x1e84800);
			write_mchbar16(0x11ca, 0xfa);
			write_mchbar32(0x11e4, 0x4e20);
			write_mchbar8(0x11bc, 0xf);
			write_mchbar16(0x11da, 0x19);
			write_mchbar16(0x11ba, 0x470c);
			write_mchbar32(0x1680, 0xe6ffe4ff);
			write_mchbar32(0x1684, 0xdeffdaff);
			write_mchbar32(0x1688, 0xd4ffd0ff);
			write_mchbar32(0x168c, 0xccffc6ff);
			write_mchbar32(0x1690, 0xc0ffbeff);
			write_mchbar32(0x1694, 0xb8ffb0ff);
			write_mchbar32(0x1698, 0xa8ff0000);
			write_mchbar32(0x169c, 0xc00);
			write_mchbar32(0x1290, 0x5000000);
		}

		write_mchbar32(0x124c, 0x15040d00);
		write_mchbar32(0x1250, 0x7f0000);
		write_mchbar32(0x1254, 0x1e220004);
		write_mchbar32(0x1258, 0x4000004);
		write_mchbar32(0x1278, 0x0);
		write_mchbar32(0x125c, 0x0);
		write_mchbar32(0x1260, 0x0);
		write_mchbar32(0x1264, 0x0);
		write_mchbar32(0x1268, 0x0);
		write_mchbar32(0x126c, 0x0);
		write_mchbar32(0x1270, 0x0);
		write_mchbar32(0x1274, 0x0);
	}

	if ((deven & 8) && x2ca8 == 0) {
		write_mchbar16(0x1214, 0x320);
		write_mchbar32(0x1600, 0x40000000);
		read_mchbar32(0x11f4);
		write_mchbar32(0x11f4, 0x10000000);
		read_mchbar16(0x1230);
		write_mchbar16(0x1230, 0x8000);
		write_mchbar32(0x1400, 0x13040020);
		write_mchbar32(0x1404, 0xe090120);
		write_mchbar32(0x1408, 0x5120220);
		write_mchbar32(0x140c, 0x5120330);
		write_mchbar32(0x1410, 0xe090220);
		write_mchbar32(0x1414, 0x1010001);
		write_mchbar32(0x1418, 0x1110000);
		write_mchbar32(0x141c, 0x9020020);
		write_mchbar32(0x1420, 0xd090220);
		write_mchbar32(0x1424, 0x2090220);
		write_mchbar32(0x1428, 0x2090330);
		write_mchbar32(0x142c, 0xd090220);
		write_mchbar32(0x1430, 0x1010001);
		write_mchbar32(0x1434, 0x1110000);
		write_mchbar32(0x1438, 0x11040020);
		write_mchbar32(0x143c, 0x4030220);
		write_mchbar32(0x1440, 0x1060220);
		write_mchbar32(0x1444, 0x1060330);
		write_mchbar32(0x1448, 0x4030220);
		write_mchbar32(0x144c, 0x1010001);
		write_mchbar32(0x1450, 0x1110000);
		write_mchbar32(0x1454, 0x4010020);
		write_mchbar32(0x1458, 0xb090220);
		write_mchbar32(0x145c, 0x1090220);
		write_mchbar32(0x1460, 0x1090330);
		write_mchbar32(0x1464, 0xb090220);
		write_mchbar32(0x1468, 0x1010001);
		write_mchbar32(0x146c, 0x1110000);
		write_mchbar32(0x1470, 0xf040020);
		write_mchbar32(0x1474, 0xa090220);
		write_mchbar32(0x1478, 0x1120220);
		write_mchbar32(0x147c, 0x1120330);
		write_mchbar32(0x1480, 0xa090220);
		write_mchbar32(0x1484, 0x1010001);
		write_mchbar32(0x1488, 0x1110000);
		write_mchbar32(0x148c, 0x7020020);
		write_mchbar32(0x1490, 0x1010220);
		write_mchbar32(0x1494, 0x10210);
		write_mchbar32(0x1498, 0x10320);
		write_mchbar32(0x149c, 0x1010220);
		write_mchbar32(0x14a0, 0x1010001);
		write_mchbar32(0x14a4, 0x1110000);
		write_mchbar32(0x14a8, 0xd040020);
		write_mchbar32(0x14ac, 0x8090220);
		write_mchbar32(0x14b0, 0x1111310);
		write_mchbar32(0x14b4, 0x1111420);
		write_mchbar32(0x14b8, 0x8090220);
		write_mchbar32(0x14bc, 0x1010001);
		write_mchbar32(0x14c0, 0x1110000);
		write_mchbar32(0x14c4, 0x3010020);
		write_mchbar32(0x14c8, 0x7090220);
		write_mchbar32(0x14cc, 0x1081310);
		write_mchbar32(0x14d0, 0x1081420);
		write_mchbar32(0x14d4, 0x7090220);
		write_mchbar32(0x14d8, 0x1010001);
		write_mchbar32(0x14dc, 0x1110000);
		write_mchbar32(0x14e0, 0xb040020);
		write_mchbar32(0x14e4, 0x2030220);
		write_mchbar32(0x14e8, 0x1051310);
		write_mchbar32(0x14ec, 0x1051420);
		write_mchbar32(0x14f0, 0x2030220);
		write_mchbar32(0x14f4, 0x1010001);
		write_mchbar32(0x14f8, 0x1110000);
		write_mchbar32(0x14fc, 0x5020020);
		write_mchbar32(0x1500, 0x5090220);
		write_mchbar32(0x1504, 0x2071310);
		write_mchbar32(0x1508, 0x2071420);
		write_mchbar32(0x150c, 0x5090220);
		write_mchbar32(0x1510, 0x1010001);
		write_mchbar32(0x1514, 0x1110000);
		write_mchbar32(0x1518, 0x7040120);
		write_mchbar32(0x151c, 0x2090220);
		write_mchbar32(0x1520, 0x70b1210);
		write_mchbar32(0x1524, 0x70b1310);
		write_mchbar32(0x1528, 0x2090220);
		write_mchbar32(0x152c, 0x1010001);
		write_mchbar32(0x1530, 0x1110000);
		write_mchbar32(0x1534, 0x1010110);
		write_mchbar32(0x1538, 0x1081310);
		write_mchbar32(0x153c, 0x5041200);
		write_mchbar32(0x1540, 0x5041310);
		write_mchbar32(0x1544, 0x1081310);
		write_mchbar32(0x1548, 0x1010001);
		write_mchbar32(0x154c, 0x1110000);
		write_mchbar32(0x1550, 0x1040120);
		write_mchbar32(0x1554, 0x4051210);
		write_mchbar32(0x1558, 0xd051200);
		write_mchbar32(0x155c, 0xd051200);
		write_mchbar32(0x1560, 0x4051210);
		write_mchbar32(0x1564, 0x1010001);
		write_mchbar32(0x1568, 0x1110000);
		write_mchbar16(0x1222, 0x220a);
		write_mchbar16(0x123c, 0x1fc0);
		write_mchbar16(0x1220, 0x1388);
	}

	read_mchbar32(0x2c80);	// !!!!
	write_mchbar32(0x2c80, 0x1053688);
	read_mchbar32(0x1c04);	// !!!!
	write_mchbar32(0x1804, 0x406080);

	read_mchbar8(0x2ca8);

	if (x2ca8 == 0) {
		write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) & ~3);
		write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) + 4);
		write_mchbar32(0x1af0, read_mchbar32(0x1af0) | 0x10);
#if REAL
		halt();
#else
		printf("CP5\n");
		exit(0);
#endif
	}

	write_mchbar8(0x2ca8, read_mchbar8(0x2ca8));
	read_mchbar32(0x2c80);	// !!!!
	write_mchbar32(0x2c80, 0x53688);
	pci_write_config32(PCI_DEV(0xff, 0, 0), 0x60, 0x20220);
	read_mchbar16(0x2c20);	// !!!!
	read_mchbar16(0x2c10);	// !!!!
	read_mchbar16(0x2c00);	// !!!!
	write_mchbar16(0x2c00, 0x8c0);
	udelay(1000);
	write_1d0(0, 0x33d, 0, 0);
	write_500(&info, 0, 0, 0xb61, 0, 0);
	write_500(&info, 1, 0, 0xb61, 0, 0);
	write_mchbar32(0x1a30, 0x0);
	write_mchbar32(0x1a34, 0x0);
	write_mchbar16(0x614,
		       0xb5b | (info.populated_ranks[1][0][0] * 0x404) |
		       (info.populated_ranks[0][0][0] * 0xa0));
	write_mchbar16(0x616, 0x26a);
	write_mchbar32(0x134, 0x856000);
	write_mchbar32(0x160, 0x5ffffff);
	read_mchbar32(0x114);	// !!!!
	write_mchbar32(0x114, 0xc2024440);
	read_mchbar32(0x118);	// !!!!
	write_mchbar32(0x118, 0x4);
	for (channel = 0; channel < NUM_CHANNELS; channel++)
		write_mchbar32(0x260 + (channel << 10),
			       0x30809ff |
			       ((info.populated_ranks_mask[channel] & 3) << 20));
	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		write_mchbar16(0x31c + (channel << 10), 0x101);
		write_mchbar16(0x360 + (channel << 10), 0x909);
		write_mchbar16(0x3a4 + (channel << 10), 0x101);
		write_mchbar16(0x3e8 + (channel << 10), 0x101);
		write_mchbar32(0x320 + (channel << 10), 0x29002900);
		write_mchbar32(0x324 + (channel << 10), 0x0);
		write_mchbar32(0x368 + (channel << 10), 0x32003200);
		write_mchbar16(0x352 + (channel << 10), 0x505);
		write_mchbar16(0x354 + (channel << 10), 0x3c3c);
		write_mchbar16(0x356 + (channel << 10), 0x1040);
		write_mchbar16(0x39a + (channel << 10), 0x73e4);
		write_mchbar16(0x3de + (channel << 10), 0x77ed);
		write_mchbar16(0x422 + (channel << 10), 0x1040);
	}

	write_1d0(0x4, 0x151, 4, 1);
	write_1d0(0, 0x142, 3, 1);
	rdmsr(0x1ac);	// !!!!
	write_500(&info, 1, 1, 0x6b3, 4, 1);
	write_500(&info, 1, 1, 0x6cf, 4, 1);

	rmw_1d0(0x21c, 0x38, 0, 6, 1);

	write_1d0(((!info.populated_ranks[1][0][0]) << 1) |
		  ((!info.populated_ranks[0][0][0]) << 0),
		  0x1d1, 3, 1);
	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		write_mchbar16(0x38e + (channel << 10), 0x5f5f);
		write_mchbar16(0x3d2 + (channel << 10), 0x5f5f);
	}

	set_334(0);

	program_base_timings(&info);

	write_mchbar8(0x5ff, read_mchbar8(0x5ff) | 0x80);	/* OK */

	write_1d0(0x2, 0x1d5, 2, 1);
	write_1d0(0x20, 0x166, 7, 1);
	write_1d0(0x0, 0xeb, 3, 1);
	write_1d0(0x0, 0xf3, 6, 1);

	for (channel = 0; channel < NUM_CHANNELS; channel++)
		for (lane = 0; lane < 9; lane++) {
			u16 addr = 0x125 + get_lane_offset(0, 0, lane);
			u8 a;
			a = read_500(&info, channel, addr, 6);	// = 0x20040080 //!!!!
			write_500(&info, channel, a, addr, 6, 1);
		}

	udelay(1000);

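	/* On S3 resume skip retraining: replay the cached per-lane timings
	   and the 0x178/0x10b training registers saved on the previous cold
	   boot.  Resume is impossible without cached data, so force a cold
	   reset in that case. */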
	if (s3resume) {
		if (info.cached_training == NULL) {
			u32 reg32;
			printk(BIOS_ERR,
			       "Couldn't find training data. Rebooting\n");
			reg32 = inl(DEFAULT_PMBASE + 0x04);
			outl(reg32 & ~(7 << 10), DEFAULT_PMBASE + 0x04);
			outb(0xe, 0xcf9);

#if REAL
			halt();
#else
			printf("CP5\n");
			exit(0);
#endif
		}
		int tm;
		info.training = *info.cached_training;
		for (tm = 0; tm < 4; tm++)
			for (channel = 0; channel < NUM_CHANNELS; channel++)
				for (slot = 0; slot < NUM_SLOTS; slot++)
					for (rank = 0; rank < NUM_RANKS; rank++)
						for (lane = 0; lane < 9; lane++)
							write_500(&info, channel,
								  info.training.lane_timings[tm][channel][slot][rank][lane],
								  get_timing_register_addr(lane, tm, slot, rank),
								  9, 0);
		write_1d0(info.cached_training->reg_178, 0x178, 7, 1);
		write_1d0(info.cached_training->reg_10b, 0x10b, 6, 1);
	}

	read_mchbar32(0x1f4);	// !!!!
	write_mchbar32(0x1f4, 0x20000);
	write_mchbar32(0x1f0, 0x1d000200);
	read_mchbar8(0x1f0);	// !!!!
	write_mchbar8(0x1f0, 0x1);
	read_mchbar8(0x1f0);	// !!!!

	program_board_delay(&info);

	write_mchbar8(0x5ff, 0x0);	/* OK */
	write_mchbar8(0x5ff, 0x80);	/* OK */
	write_mchbar8(0x5f4, 0x1);	/* OK */

	write_mchbar32(0x130, read_mchbar32(0x130) & 0xfffffffd);	// | 2 when ?
	while (read_mchbar32(0x130) & 1)
		;
	gav(read_1d0(0x14b, 7));	// = 0x81023100
	write_1d0(0x30, 0x14b, 7, 1);
	read_1d0(0xd6, 6);	// = 0xfa008080 // !!!!
	write_1d0(7, 0xd6, 6, 1);
	read_1d0(0x328, 6);	// = 0xfa018080 // !!!!
	write_1d0(7, 0x328, 6, 1);

	for (channel = 0; channel < NUM_CHANNELS; channel++)
		set_4cf(&info, channel,
			info.populated_ranks[channel][0][0] ? 8 : 0);

	read_1d0(0x116, 4);	// = 0x4040432 // !!!!
	write_1d0(2, 0x116, 4, 1);
	read_1d0(0xae, 6);	// = 0xe8088080 // !!!!
	write_1d0(0, 0xae, 6, 1);
	read_1d0(0x300, 4);	// = 0x48088080 // !!!!
	write_1d0(0, 0x300, 6, 1);
	read_mchbar16(0x356);	// !!!!
	write_mchbar16(0x356, 0x1040);
	read_mchbar16(0x756);	// !!!!
	write_mchbar16(0x756, 0x1040);
	write_mchbar32(0x140, read_mchbar32(0x140) & ~0x07000000);
	write_mchbar32(0x138, read_mchbar32(0x138) & ~0x07000000);
	write_mchbar32(0x130, 0x31111301);
	/* Wait until REG130b0 is 1. */
	while (read_mchbar32(0x130) & 1)
		;

	{
		u32 t;
		u8 val_a1;
		val_a1 = read_1d0(0xa1, 6);	// = 0x1cf4040 // !!!!
		t = read_1d0(0x2f3, 6);	// = 0x10a4040 // !!!!
		rmw_1d0(0x320, 0x07,
			(t & 4) | ((t & 8) >> 2) | ((t & 0x10) >> 4), 6, 1);
		rmw_1d0(0x14b, 0x78,
			((((val_a1 >> 2) & 4) | (val_a1 & 8)) >> 2) | (val_a1 & 4),
			7, 1);
		rmw_1d0(0xce, 0x38,
			((((val_a1 >> 2) & 4) | (val_a1 & 8)) >> 2) | (val_a1 & 4),
			6, 1);
	}

	for (channel = 0; channel < NUM_CHANNELS; channel++)
		set_4cf(&info, channel,
			info.populated_ranks[channel][0][0] ? 9 : 1);

	rmw_1d0(0x116, 0xe, 1, 4, 1);	// = 0x4040432 // !!!!
	read_mchbar32(0x144);	// !!!!
	write_1d0(2, 0xae, 6, 1);
	write_1d0(2, 0x300, 6, 1);
	write_1d0(2, 0x121, 3, 1);
	read_1d0(0xd6, 6);	// = 0xfa00c0c7 // !!!!
	write_1d0(4, 0xd6, 6, 1);
	read_1d0(0x328, 6);	// = 0xfa00c0c7 // !!!!
	write_1d0(4, 0x328, 6, 1);

	for (channel = 0; channel < NUM_CHANNELS; channel++)
		set_4cf(&info, channel,
			info.populated_ranks[channel][0][0] ? 9 : 0);

	write_mchbar32(0x130,
		       0x11111301 | (info.populated_ranks[1][0][0] << 30) |
		       (info.populated_ranks[0][0][0] << 29));
	while (read_mchbar8(0x130) & 1)
		;	// !!!!
	read_1d0(0xa1, 6);	// = 0x1cf4054 // !!!!
	read_1d0(0x2f3, 6);	// = 0x10a4054 // !!!!
	read_1d0(0x21c, 6);	// = 0xafa00c0 // !!!!
	write_1d0(0, 0x21c, 6, 1);
	read_1d0(0x14b, 7);	// = 0x810231b0 // !!!!
	write_1d0(0x35, 0x14b, 7, 1);

	for (channel = 0; channel < NUM_CHANNELS; channel++)
		set_4cf(&info, channel,
			info.populated_ranks[channel][0][0] ? 0xb : 0x2);

	set_334(1);

	write_mchbar8(0x1e8, 0x4);	/* OK */

	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		write_500(&info, channel,
			  0x3 & ~(info.populated_ranks_mask[channel]), 0x6b7, 2, 1);
		write_500(&info, channel, 0x3, 0x69b, 2, 1);
	}
	write_mchbar32(0x2d0, (read_mchbar32(0x2d0) & 0xff2c01ff) | 0x200000);	/* OK */
	write_mchbar16(0x6c0, 0x14a0);	/* OK */
	write_mchbar32(0x6d0, (read_mchbar32(0x6d0) & 0xff0080ff) | 0x8000);	/* OK */
	write_mchbar16(0x232, 0x8);
	write_mchbar32(0x234, (read_mchbar32(0x234) & 0xfffbfffb) | 0x40004);	/* 0x40004 or 0 depending on ? */
	write_mchbar32(0x34, (read_mchbar32(0x34) & 0xfffffffd) | 5);	/* OK */
	write_mchbar32(0x128, 0x2150d05);
	write_mchbar8(0x12c, 0x1f);	/* OK */
	write_mchbar8(0x12d, 0x56);	/* OK */
	write_mchbar8(0x12e, 0x31);
	write_mchbar8(0x12f, 0x0);	/* OK */
	write_mchbar8(0x271, 0x2);	/* OK */
	write_mchbar8(0x671, 0x2);	/* OK */
	write_mchbar8(0x1e8, 0x4);	/* OK */
	for (channel = 0; channel < NUM_CHANNELS; channel++)
		write_mchbar32(0x294 + (channel << 10),
			       (info.populated_ranks_mask[channel] & 3) << 16);
	write_mchbar32(0x134, (read_mchbar32(0x134) & 0xfc01ffff) | 0x10000);	/* OK */
	write_mchbar32(0x134, (read_mchbar32(0x134) & 0xfc85ffff) | 0x850000);	/* OK */
	for (channel = 0; channel < NUM_CHANNELS; channel++)
		write_mchbar32(0x260 + (channel << 10),
			       (read_mchbar32(0x260 + (channel << 10)) & ~0xf00000) |
			       0x8000000 |
			       ((info.populated_ranks_mask[channel] & 3) << 20));

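	/* The JEDEC MRS initialization sequence is only issued on a cold
	   boot; the per-rank JEDEC read below runs on every path. */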
	if (!s3resume)
		jedec_init(&info);

	int totalrank = 0;
	for (channel = 0; channel < NUM_CHANNELS; channel++)
		for (slot = 0; slot < NUM_SLOTS; slot++)
			for (rank = 0; rank < NUM_RANKS; rank++)
				if (info.populated_ranks[channel][slot][rank]) {
					jedec_read(&info, channel, slot, rank,
						   totalrank, 0xa, 0x400);
					totalrank++;
				}

	write_mchbar8(0x12c, 0x9f);

	read_mchbar8(0x271);	// 2 // !!!!
	write_mchbar8(0x271, 0xe);
	read_mchbar8(0x671);	// !!!!
	write_mchbar8(0x671, 0xe);

	if (!s3resume) {
		for (channel = 0; channel < NUM_CHANNELS; channel++) {
			write_mchbar32(0x294 + (channel << 10),
				       (info.populated_ranks_mask[channel] & 3) << 16);
			write_mchbar16(0x298 + (channel << 10),
				       info.populated_ranks[channel][0][0] |
				       (info.populated_ranks[channel][0][1] << 5));
			write_mchbar32(0x29c + (channel << 10), 0x77a);
		}
		read_mchbar32(0x2c0);	/// !!!
		write_mchbar32(0x2c0, 0x6009cc00);

		{
			u8 a, b;
			a = read_mchbar8(0x243);	// !!!!
			b = read_mchbar8(0x643);	// !!!!
			write_mchbar8(0x243, a | 2);
			write_mchbar8(0x643, b | 2);
		}

		write_1d0(7, 0x19b, 3, 1);
		write_1d0(7, 0x1c0, 3, 1);
		write_1d0(4, 0x1c6, 4, 1);
		write_1d0(4, 0x1cc, 4, 1);
		read_1d0(0x151, 4);	// = 0x408c6d74 // !!!!
		write_1d0(4, 0x151, 4, 1);
		write_mchbar32(0x584, 0xfffff);
		write_mchbar32(0x984, 0xfffff);

		for (channel = 0; channel < NUM_CHANNELS; channel++)
			for (slot = 0; slot < NUM_SLOTS; slot++)
				for (rank = 0; rank < NUM_RANKS; rank++)
					if (info.populated_ranks[channel][slot][rank])
						config_rank(&info, s3resume,
							    channel, slot, rank);

		write_mchbar8(0x243, 0x1);
		write_mchbar8(0x643, 0x1);
	}

	/* was == 1 but is common */
	pci_write_config16(NORTHBRIDGE, 0xc8, 3);
	write_26c(0, 0x820);
	write_26c(1, 0x820);
	write_mchbar32(0x130, read_mchbar32(0x130) | 2);
	/* end */

	if (s3resume) {
		for (channel = 0; channel < NUM_CHANNELS; channel++) {
			write_mchbar32(0x294 + (channel << 10),
				       (info.populated_ranks_mask[channel] & 3) << 16);
			write_mchbar16(0x298 + (channel << 10),
				       info.populated_ranks[channel][0][0] |
				       (info.populated_ranks[channel][0][1] << 5));
			write_mchbar32(0x29c + (channel << 10), 0x77a);
		}
		read_mchbar32(0x2c0);	/// !!!
		write_mchbar32(0x2c0, 0x6009cc00);
	}

	write_mchbar32(0xfa4, read_mchbar32(0xfa4) & ~0x01000002);
	write_mchbar32(0xfb0, 0x2000e019);

#if !REAL
	printf("CP16\n");
#endif

	/* Before training. */
	timestamp_add_now(103);

	if (!s3resume)
		ram_training(&info);

	/* After training. */
	timestamp_add_now(104);

	dump_timings(&info);

	program_modules_memory_map(&info, 0);
	program_total_memory_map(&info);

	if (info.non_interleaved_part_mb != 0 && info.interleaved_part_mb != 0)
		write_mchbar8(0x111, 0x20 | (0 << 2) | (1 << 6) | (0 << 7));
	else if (have_match_ranks(&info, 0, 4) && have_match_ranks(&info, 1, 4))
		write_mchbar8(0x111, 0x20 | (3 << 2) | (0 << 6) | (1 << 7));
	else if (have_match_ranks(&info, 0, 2) && have_match_ranks(&info, 1, 2))
		write_mchbar8(0x111, 0x20 | (3 << 2) | (0 << 6) | (0 << 7));
	else
		write_mchbar8(0x111, 0x20 | (3 << 2) | (1 << 6) | (0 << 7));

	write_mchbar32(0xfac, read_mchbar32(0xfac) & ~0x80000000);	// OK
	write_mchbar32(0xfb4, 0x4800);	// OK
	write_mchbar32(0xfb8, (info.revision < 8) ? 0x20 : 0x0);	// OK
	write_mchbar32(0xe94, 0x7ffff);	// OK
	write_mchbar32(0xfc0, 0x80002040);	// OK
	write_mchbar32(0xfc4, 0x701246);	// OK
	write_mchbar8(0xfc8, read_mchbar8(0xfc8) & ~0x70);	// OK
	write_mchbar32(0xe5c, 0x1000000 | read_mchbar32(0xe5c));	// OK
	write_mchbar32(0x1a70, (read_mchbar32(0x1a70) | 0x00200000) & ~0x00100000);	// OK
	write_mchbar32(0x50, 0x700b0);	// OK
	write_mchbar32(0x3c, 0x10);	// OK
	write_mchbar8(0x1aa8, (read_mchbar8(0x1aa8) & ~0x35) | 0xa);	// OK
	write_mchbar8(0xff4, read_mchbar8(0xff4) | 0x2);	// OK
	write_mchbar32(0xff8, (read_mchbar32(0xff8) & ~0xe008) | 0x1020);	// OK

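	/* Program the DMA-remapping (VT-d) unit base addresses and set what
	   appears to be an enable bit in each unit's register at offset
	   0xffc. */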
#if REAL
	write_mchbar32(0xd00, IOMMU_BASE2 | 1);
	write_mchbar32(0xd40, IOMMU_BASE1 | 1);
	write_mchbar32(0xdc0, IOMMU_BASE4 | 1);

	write32p(IOMMU_BASE1 | 0xffc, 0x80000000);
	write32p(IOMMU_BASE2 | 0xffc, 0xc0000000);
	write32p(IOMMU_BASE4 | 0xffc, 0x80000000);

#else
	{
		u32 eax;
		eax = read32p(0xffc + (read_mchbar32(0xd00) & ~1)) | 0x08000000;	// = 0xe911714b// OK
		write32p(0xffc + (read_mchbar32(0xd00) & ~1), eax);	// OK
		eax = read32p(0xffc + (read_mchbar32(0xdc0) & ~1)) | 0x40000000;	// = 0xe911714b// OK
		write32p(0xffc + (read_mchbar32(0xdc0) & ~1), eax);	// OK
	}
#endif

	{
		u32 eax;

		eax = info.fsb_frequency / 9;
		write_mchbar32(0xfcc, (read_mchbar32(0xfcc) & 0xfffc0000) | (eax * 0x280) | (eax * 0x5000) | eax | 0x40000);	// OK
		write_mchbar32(0x20, 0x33001);	//OK
	}

	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		write_mchbar32(0x220 + (channel << 10), read_mchbar32(0x220 + (channel << 10)) & ~0x7770);	//OK
		if (info.max_slots_used_in_channel == 1)
			write_mchbar16(0x237 + (channel << 10), (read_mchbar16(0x237 + (channel << 10)) | 0x0201));	//OK
		else
			write_mchbar16(0x237 + (channel << 10), (read_mchbar16(0x237 + (channel << 10)) & ~0x0201));	//OK

		write_mchbar8(0x241 + (channel << 10), read_mchbar8(0x241 + (channel << 10)) | 1);	// OK

		if (info.clock_speed_index <= 1
		    && (info.silicon_revision == 2
			|| info.silicon_revision == 3))
			write_mchbar32(0x248 + (channel << 10), (read_mchbar32(0x248 + (channel << 10)) | 0x00102000));	// OK
		else
			write_mchbar32(0x248 + (channel << 10), (read_mchbar32(0x248 + (channel << 10)) & ~0x00102000));	// OK
	}

	write_mchbar32(0x115, read_mchbar32(0x115) | 0x1000000);	// OK

	{
		u8 al;
		al = 0xd;
		if (!(info.silicon_revision == 0 || info.silicon_revision == 1))
			al += 2;
		al |= ((1 << (info.max_slots_used_in_channel - 1)) - 1) << 4;
		write_mchbar32(0x210, (al << 16) | 0x20);	// OK
	}

	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		write_mchbar32(0x288 + (channel << 10), 0x70605040);	// OK
		write_mchbar32(0x28c + (channel << 10), 0xfffec080);	// OK
		write_mchbar32(0x290 + (channel << 10), 0x282091c | ((info.max_slots_used_in_channel - 1) << 0x16));	// OK
	}
	u32 reg1c;
	pci_read_config32(NORTHBRIDGE, 0x40);	// = DEFAULT_EPBAR | 0x001 // OK
	reg1c = read32p(DEFAULT_EPBAR | 0x01c);	// = 0x8001 // OK
	pci_read_config32(NORTHBRIDGE, 0x40);	// = DEFAULT_EPBAR | 0x001 // OK
	write32p(DEFAULT_EPBAR | 0x01c, reg1c);	// OK
	read_mchbar8(0xe08);	// = 0x0
	pci_read_config32(NORTHBRIDGE, 0xe4);	// = 0x316126
	write_mchbar8(0x1210, read_mchbar8(0x1210) | 2);	// OK
	write_mchbar32(0x1200, 0x8800440);	// OK
	write_mchbar32(0x1204, 0x53ff0453);	// OK
	write_mchbar32(0x1208, 0x19002043);	// OK
	write_mchbar16(0x1214, 0x320);	// OK

	if (info.revision == 0x10 || info.revision == 0x11) {
		write_mchbar16(0x1214, 0x220);	// OK
		write_mchbar8(0x1210, read_mchbar8(0x1210) | 0x40);	// OK
	}

	write_mchbar8(0x1214, read_mchbar8(0x1214) | 0x4);	// OK
	write_mchbar8(0x120c, 0x1);	// OK
	write_mchbar8(0x1218, 0x3);	// OK
	write_mchbar8(0x121a, 0x3);	// OK
	write_mchbar8(0x121c, 0x3);	// OK
	write_mchbar16(0xc14, 0x0);	// OK
	write_mchbar16(0xc20, 0x0);	// OK
	write_mchbar32(0x1c, 0x0);	// OK

	/* revision dependent here. */

	write_mchbar16(0x1230, read_mchbar16(0x1230) | 0x1f07);	// OK

	if (info.uma_enabled)
		write_mchbar32(0x11f4, read_mchbar32(0x11f4) | 0x10000000);	// OK

	write_mchbar16(0x1230, read_mchbar16(0x1230) | 0x8000);	// OK
	write_mchbar8(0x1214, read_mchbar8(0x1214) | 1);	// OK

	u8 bl, ebpb;
	u16 reg_1020;

	reg_1020 = read_mchbar32(0x1020);	// = 0x6c733c // OK
	write_mchbar8(0x1070, 0x1);	// OK

	write_mchbar32(0x1000, 0x100);	// OK
	write_mchbar8(0x1007, 0x0);	// OK

	if (reg_1020 != 0) {
		write_mchbar16(0x1018, 0x0);	// OK
		bl = reg_1020 >> 8;
		ebpb = reg_1020 & 0xff;
	} else {
		ebpb = 0;
		bl = 8;
	}

	rdmsr(0x1a2);

	write_mchbar32(0x1014, 0xffffffff);	// OK

	write_mchbar32(0x1010, ((((ebpb + 0x7d) << 7) / bl) & 0xff) * (!!reg_1020));	// OK

	write_mchbar8(0x101c, 0xb8);	// OK

	write_mchbar8(0x123e, (read_mchbar8(0x123e) & 0xf) | 0x60);	// OK
	if (reg_1020 != 0) {
		write_mchbar32(0x123c, (read_mchbar32(0x123c) & ~0x00900000) | 0x600000);	// OK
		write_mchbar8(0x101c, 0xb8);	// OK
	}

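	/* setup_heci_uma() presumably reserves the memory requested by the
	   ME over HECI (info.memory_reserved_for_heci_mb) and reports the
	   resulting UMA layout back to it. */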
	setup_heci_uma(&info);

	if (info.uma_enabled) {
		u16 ax;
		write_mchbar32(0x11b0, read_mchbar32(0x11b0) | 0x4000);	// OK
		write_mchbar32(0x11b4, read_mchbar32(0x11b4) | 0x4000);	// OK
		write_mchbar16(0x1190, read_mchbar16(0x1190) | 0x4000);	// OK

		ax = read_mchbar16(0x1190) & 0xf00;	// = 0x480a // OK
		write_mchbar16(0x1170, ax | (read_mchbar16(0x1170) & 0x107f) | 0x4080);	// OK
		write_mchbar16(0x1170, read_mchbar16(0x1170) | 0x1000);	// OK
#if REAL
		udelay(1000);
#endif
		u16 ecx;
		for (ecx = 0xffff; ecx && (read_mchbar16(0x1170) & 0x1000); ecx--)
			;	// OK
		write_mchbar16(0x1190, read_mchbar16(0x1190) & ~0x4000);	// OK
	}

	pci_write_config8(SOUTHBRIDGE, GEN_PMCON_2,
			  pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2) & ~0x80);
	udelay(10000);
	write_mchbar16(0x2ca8, 0x8);

#if REAL
	udelay(1000);
	dump_timings(&info);
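	/* Bring up CBMEM and, on a cold boot, save the training results so
	   the MRC cache can replay them on the next S3 resume.  A resume
	   without an intact CBMEM area cannot succeed, so reset instead. */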
	cbmem_wasnot_inited = cbmem_recovery(s3resume);

	if (!s3resume)
		save_timings(&info);
	if (s3resume && cbmem_wasnot_inited) {
		u32 reg32;
		printk(BIOS_ERR, "Failed S3 resume.\n");
		ram_check(0x100000, 0x200000);

		/* Clear SLP_TYPE. */
		reg32 = inl(DEFAULT_PMBASE + 0x04);
		outl(reg32 & ~(7 << 10), DEFAULT_PMBASE + 0x04);

		/* Failed S3 resume, reset to come up cleanly */
		outb(0xe, 0xcf9);
		halt();
	}
#endif
}