/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2013 Vladimir Serbinenko.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

/* Please don't remove this. It's needed for debugging and reverse
 * engineering more nehalem variants in the future. */
#ifndef REAL
#define REAL 1
#endif

#if REAL
#include <stdlib.h>
#include <compiler.h>
#include <console/console.h>
#include <string.h>
#include <arch/io.h>
#include <cpu/x86/msr.h>
#include <cbmem.h>
#include <arch/cbfs.h>
#include <cbfs.h>
#include <ip_checksum.h>
#include <pc80/mc146818rtc.h>
#include <device/pci_def.h>
#include <device/device.h>
#include <arch/cpu.h>
#include <halt.h>
#include <spd.h>
#include "raminit.h"
#include "chip.h"
#include <timestamp.h>
#include <cpu/x86/mtrr.h>
#include <cpu/intel/speedstep.h>
#include <cpu/intel/turbo.h>
#include <mrc_cache.h>
#include <arch/early_variables.h>
#endif

#if !REAL
typedef unsigned char u8;
typedef unsigned short u16;
typedef unsigned int u32;
typedef u32 device_t;
#endif

#include "nehalem.h"

#include <southbridge/intel/ibexpeak/me.h>

#if REAL
#include <delay.h>
#endif

#define NORTHBRIDGE PCI_DEV(0, 0, 0)
#define SOUTHBRIDGE PCI_DEV(0, 0x1f, 0)
#define GMA PCI_DEV (0, 0x2, 0x0)
#define HECIDEV PCI_DEV(0, 0x16, 0)
#define HECIBAR 0x10

#define FOR_ALL_RANKS \
	for (channel = 0; channel < NUM_CHANNELS; channel++) \
		for (slot = 0; slot < NUM_SLOTS; slot++) \
			for (rank = 0; rank < NUM_RANKS; rank++)

#define FOR_POPULATED_RANKS \
	for (channel = 0; channel < NUM_CHANNELS; channel++) \
		for (slot = 0; slot < NUM_SLOTS; slot++) \
			for (rank = 0; rank < NUM_RANKS; rank++) \
				if (info->populated_ranks[channel][slot][rank])

#define FOR_POPULATED_RANKS_BACKWARDS \
	for (channel = NUM_CHANNELS - 1; channel >= 0; channel--) \
		for (slot = 0; slot < NUM_SLOTS; slot++) \
			for (rank = 0; rank < NUM_RANKS; rank++) \
				if (info->populated_ranks[channel][slot][rank])

/* [REG_178][CHANNEL][2 * SLOT + RANK][LANE] */
typedef struct {
	u8 smallest;
	u8 largest;
} timing_bounds_t[2][2][2][9];

#define MRC_CACHE_VERSION 1

struct ram_training {
	/* [TM][CHANNEL][SLOT][RANK][LANE] */
	u16 lane_timings[4][2][2][2][9];
	u16 reg_178;
	u16 reg_10b;

	u8 reg178_center;
	u8 reg178_smallest;
	u8 reg178_largest;
	timing_bounds_t timing_bounds[2];
	u16 timing_offset[2][2][2][9];
	u16 timing2_offset[2][2][2][9];
	u16 timing2_bounds[2][2][2][9][2];
	u8 reg274265[2][3];	/* [CHANNEL][REGISTER] */
	u8 reg2ca9_bit0;
	u32 reg_6dc;
	u32 reg_6e8;
};

#if !REAL
#include "raminit_fake.c"
#else

#include <lib.h>		/* Prototypes */

static inline void write_mchbar32(u32 addr, u32 val)
{
	MCHBAR32(addr) = val;
}

static inline void write_mchbar16(u32 addr, u16 val)
{
	MCHBAR16(addr) = val;
}

static inline void write_mchbar8(u32 addr, u8 val)
{
	MCHBAR8(addr) = val;
}


static inline u32 read_mchbar32(u32 addr)
{
	return MCHBAR32(addr);
}

static inline u16 read_mchbar16(u32 addr)
{
	return MCHBAR16(addr);
}

static inline u8 read_mchbar8(u32 addr)
{
	return MCHBAR8(addr);
}

static void clflush(u32 addr)
{
	asm volatile ("clflush (%0)"::"r" (addr));
}

typedef struct _u128 {
	u64 lo;
	u64 hi;
} u128;

static void read128(u32 addr, u64 * out)
{
	u128 ret;
	u128 stor;
	asm volatile ("movdqu %%xmm0, %0\n"
		      "movdqa (%2), %%xmm0\n"
		      "movdqu %%xmm0, %1\n"
		      "movdqu %0, %%xmm0":"+m" (stor), "=m"(ret):"r"(addr));
	out[0] = ret.lo;
	out[1] = ret.hi;
}

#endif

/* OK */
static void write_1d0(u32 val, u16 addr, int bits, int flag)
{
	write_mchbar32(0x1d0, 0);
	while (read_mchbar32(0x1d0) & 0x800000);
	write_mchbar32(0x1d4,
		       (val & ((1 << bits) - 1)) | (2 << bits) | (flag <<
								  bits));
	write_mchbar32(0x1d0, 0x40000000 | addr);
	while (read_mchbar32(0x1d0) & 0x800000);
}

/* OK */
static u16 read_1d0(u16 addr, int split)
{
	u32 val;
	write_mchbar32(0x1d0, 0);
	while (read_mchbar32(0x1d0) & 0x800000);
	write_mchbar32(0x1d0,
		       0x80000000 | (((read_mchbar8(0x246) >> 2) & 3) +
				     0x361 - addr));
	while (read_mchbar32(0x1d0) & 0x800000);
	val = read_mchbar32(0x1d8);
	write_1d0(0, 0x33d, 0, 0);
	write_1d0(0, 0x33d, 0, 0);
	val &= ((1 << split) - 1);
	// printk (BIOS_ERR, "R1D0C [%x] => %x\n", addr, val);
	return val;
}
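
/*
 * Added commentary (inferred from the code above, not from any public
 * Intel documentation): 0x1d0 appears to act as the command/busy register
 * of an indirect register window, with 0x1d4 as write data and 0x1d8 as
 * read data. Bit 23 of 0x1d0 looks like a busy flag, bit 30 triggers a
 * write to "addr" and bit 31 a read; read addresses are presumably
 * remapped via MCHBAR8(0x246). The per-channel 0x500/0x504/0x508 window
 * used further below seems to be the same mechanism at a channel offset.
 */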

static void write32p(uintptr_t addr, uint32_t val)
{
	write32((void *)addr, val);
}

static uint32_t read32p(uintptr_t addr)
{
	return read32((void *)addr);
}

static void sfence(void)
{
#if REAL
	asm volatile ("sfence");
#endif
}

static inline u16 get_lane_offset(int slot, int rank, int lane)
{
	return 0x124 * lane + ((lane & 4) ? 0x23e : 0) + 11 * rank + 22 * slot -
	    0x452 * (lane == 8);
}

static inline u16 get_timing_register_addr(int lane, int tm, int slot, int rank)
{
	const u16 offs[] = { 0x1d, 0xa8, 0xe6, 0x5c };
	return get_lane_offset(slot, rank, lane) + offs[(tm + 3) % 4];
}
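
/*
 * Added note: get_timing_register_addr() maps (lane, tm, slot, rank) onto
 * an offset inside the per-channel window accessed by read_500()/write_500().
 * Purely illustrative example, derived from the arithmetic above: lane 0,
 * tm 0, slot 0, rank 0 yields 0x5c (offs[3]), while lane 8 (the ECC lane)
 * has its 0x124 * 8 stride pulled back by the 0x452 correction term.
 */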

#if REAL
static u32 gav_real(int line, u32 in)
{
	// printk (BIOS_DEBUG, "%d: GAV: %x\n", line, in);
	return in;
}

#define gav(x) gav_real (__LINE__, (x))
#endif
struct raminfo {
	u16 clock_speed_index;	/* clock_speed (REAL, not DDR) / 133.(3) - 3 */
	u16 fsb_frequency;	/* in 1.(1)/2 MHz.  */
	u8 is_x16_module[2][2];	/* [CHANNEL][SLOT] */
	u8 density[2][2];	/* [CHANNEL][SLOT] */
	u8 populated_ranks[2][2][2];	/* [CHANNEL][SLOT][RANK] */
	int rank_start[2][2][2];
	u8 cas_latency;
	u8 board_lane_delay[9];
	u8 use_ecc;
	u8 revision;
	u8 max_supported_clock_speed_index;
	u8 uma_enabled;
	u8 spd[2][2][151];	/* [CHANNEL][SLOT][BYTE] */
	u8 silicon_revision;
	u8 populated_ranks_mask[2];
	u8 max_slots_used_in_channel;
	u8 mode4030[2];
	u16 avg4044[2];
	u16 max4048[2];
	unsigned total_memory_mb;
	unsigned interleaved_part_mb;
	unsigned non_interleaved_part_mb;

	u32 heci_bar;
	u64 heci_uma_addr;
	unsigned memory_reserved_for_heci_mb;

	struct ram_training training;
	u32 last_500_command[2];

	u32 delay46_ps[2];
	u32 delay54_ps[2];
	u8 revision_flag_1;
	u8 some_delay_1_cycle_floor;
	u8 some_delay_2_halfcycles_ceil;
	u8 some_delay_3_ps_rounded;

	const struct ram_training *cached_training;
};

/* Global allocation of timings_car */
timing_bounds_t timings_car[64] CAR_GLOBAL;

static void
write_500(struct raminfo *info, int channel, u32 val, u16 addr, int bits,
	  int flag);

/* OK */
static u16
read_500(struct raminfo *info, int channel, u16 addr, int split)
{
	u32 val;
	info->last_500_command[channel] = 0x80000000;
	write_mchbar32(0x500 + (channel << 10), 0);
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
	write_mchbar32(0x500 + (channel << 10),
		       0x80000000 |
		       (((read_mchbar8(0x246 + (channel << 10)) >> 2) &
			 3) + 0xb88 - addr));
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
	val = read_mchbar32(0x508 + (channel << 10));
	return val & ((1 << split) - 1);
}

/* OK */
static void
write_500(struct raminfo *info, int channel, u32 val, u16 addr, int bits,
	  int flag)
{
	if (info->last_500_command[channel] == 0x80000000) {
		info->last_500_command[channel] = 0x40000000;
		write_500(info, channel, 0, 0xb61, 0, 0);
	}
	write_mchbar32(0x500 + (channel << 10), 0);
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
	write_mchbar32(0x504 + (channel << 10),
		       (val & ((1 << bits) - 1)) | (2 << bits) | (flag <<
								  bits));
	write_mchbar32(0x500 + (channel << 10), 0x40000000 | addr);
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
}
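
/*
 * Added note: read_500()/write_500() look like the per-channel variant of
 * the read_1d0()/write_1d0() indirect window above, using 0x500/0x504/0x508
 * (plus channel << 10) instead of 0x1d0/0x1d4/0x1d8 and a 0xb88 read base
 * instead of 0x361. This is an observation from the code itself, not from
 * public documentation.
 */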

static int rw_test(int rank)
{
	const u32 mask = 0xf00fc33c;
	int ok = 0xff;
	int i;
	for (i = 0; i < 64; i++)
		write32p((rank << 28) | (i << 2), 0);
	sfence();
	for (i = 0; i < 64; i++)
		gav(read32p((rank << 28) | (i << 2)));
	sfence();
	for (i = 0; i < 32; i++) {
		u32 pat = (((mask >> i) & 1) ? 0xffffffff : 0);
		write32p((rank << 28) | (i << 3), pat);
		write32p((rank << 28) | (i << 3) | 4, pat);
	}
	sfence();
	for (i = 0; i < 32; i++) {
		u8 pat = (((mask >> i) & 1) ? 0xff : 0);
		int j;
		u32 val;
		gav(val = read32p((rank << 28) | (i << 3)));
		for (j = 0; j < 4; j++)
			if (((val >> (j * 8)) & 0xff) != pat)
				ok &= ~(1 << j);
		gav(val = read32p((rank << 28) | (i << 3) | 4));
		for (j = 0; j < 4; j++)
			if (((val >> (j * 8)) & 0xff) != pat)
				ok &= ~(16 << j);
	}
	sfence();
	for (i = 0; i < 64; i++)
		write32p((rank << 28) | (i << 2), 0);
	sfence();
	for (i = 0; i < 64; i++)
		gav(read32p((rank << 28) | (i << 2)));

	return ok;
}
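
/*
 * Added note: rw_test() appears to return a bitmask of byte lanes that read
 * back correctly for the given rank: bits 0-3 cover the lower dword of each
 * 8-byte pair and bits 4-7 the upper one, so 0xff means all eight byte
 * lanes passed. This is inferred from the masking above, not documented
 * behaviour.
 */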

static void
program_timings(struct raminfo *info, u16 base, int channel, int slot, int rank)
{
	int lane;
	for (lane = 0; lane < 8; lane++) {
		write_500(info, channel,
			  base +
			  info->training.
			  lane_timings[2][channel][slot][rank][lane],
			  get_timing_register_addr(lane, 2, slot, rank), 9, 0);
		write_500(info, channel,
			  base +
			  info->training.
			  lane_timings[3][channel][slot][rank][lane],
			  get_timing_register_addr(lane, 3, slot, rank), 9, 0);
	}
}

static void write_26c(int channel, u16 si)
{
	write_mchbar32(0x26c + (channel << 10), 0x03243f35);
	write_mchbar32(0x268 + (channel << 10), 0xcfc00000 | (si << 9));
	write_mchbar16(0x2b9 + (channel << 10), si);
}

static u32 get_580(int channel, u8 addr)
{
	u32 ret;
	gav(read_1d0(0x142, 3));
	write_mchbar8(0x5ff, 0x0);	/* OK */
	write_mchbar8(0x5ff, 0x80);	/* OK */
	write_mchbar32(0x580 + (channel << 10), 0x8493c012 | addr);
	write_mchbar8(0x580 + (channel << 10),
		      read_mchbar8(0x580 + (channel << 10)) | 1);
	while (!((ret = read_mchbar32(0x580 + (channel << 10))) & 0x10000));
	write_mchbar8(0x580 + (channel << 10),
		      read_mchbar8(0x580 + (channel << 10)) & ~1);
	return ret;
}

const int cached_config = 0;

#define NUM_CHANNELS 2
#define NUM_SLOTS 2
#define NUM_RANKS 2
#define RANK_SHIFT 28
#define CHANNEL_SHIFT 10

#include "raminit_tables.c"

static void seq9(struct raminfo *info, int channel, int slot, int rank)
{
	int i, lane;

	for (i = 0; i < 2; i++)
		for (lane = 0; lane < 8; lane++)
			write_500(info, channel,
				  info->training.lane_timings[i +
							      1][channel][slot]
				  [rank][lane], get_timing_register_addr(lane,
									 i + 1,
									 slot,
									 rank),
				  9, 0);

	write_1d0(1, 0x103, 6, 1);
	for (lane = 0; lane < 8; lane++)
		write_500(info, channel,
			  info->training.
			  lane_timings[0][channel][slot][rank][lane],
			  get_timing_register_addr(lane, 0, slot, rank), 9, 0);

	for (i = 0; i < 2; i++) {
		for (lane = 0; lane < 8; lane++)
			write_500(info, channel,
				  info->training.lane_timings[i +
							      1][channel][slot]
				  [rank][lane], get_timing_register_addr(lane,
									 i + 1,
									 slot,
									 rank),
				  9, 0);
		gav(get_580(channel, ((i + 1) << 2) | (rank << 5)));
	}

	gav(read_1d0(0x142, 3));	// = 0x10408118
	write_mchbar8(0x5ff, 0x0);	/* OK */
	write_mchbar8(0x5ff, 0x80);	/* OK */
	write_1d0(0x2, 0x142, 3, 1);
	for (lane = 0; lane < 8; lane++) {
		// printk (BIOS_ERR, "before: %x\n", info->training.lane_timings[2][channel][slot][rank][lane]);
		info->training.lane_timings[2][channel][slot][rank][lane] =
		    read_500(info, channel,
			     get_timing_register_addr(lane, 2, slot, rank), 9);
		//printk (BIOS_ERR, "after: %x\n", info->training.lane_timings[2][channel][slot][rank][lane]);
		info->training.lane_timings[3][channel][slot][rank][lane] =
		    info->training.lane_timings[2][channel][slot][rank][lane] +
		    0x20;
	}
}

static int count_ranks_in_channel(struct raminfo *info, int channel)
{
	int slot, rank;
	int res = 0;
	for (slot = 0; slot < NUM_SLOTS; slot++)
		for (rank = 0; rank < NUM_RANKS; rank++)
			res += info->populated_ranks[channel][slot][rank];
	return res;
}

static void
config_rank(struct raminfo *info, int s3resume, int channel, int slot, int rank)
{
	int add;

	write_1d0(0, 0x178, 7, 1);
	seq9(info, channel, slot, rank);
	program_timings(info, 0x80, channel, slot, rank);

	if (channel == 0)
		add = count_ranks_in_channel(info, 1);
	else
		add = 0;
	if (!s3resume)
		gav(rw_test(rank + add));
	program_timings(info, 0x00, channel, slot, rank);
	if (!s3resume)
		gav(rw_test(rank + add));
	if (!s3resume)
		gav(rw_test(rank + add));
	write_1d0(0, 0x142, 3, 1);
	write_1d0(0, 0x103, 6, 1);

	gav(get_580(channel, 0xc | (rank << 5)));
	gav(read_1d0(0x142, 3));

	write_mchbar8(0x5ff, 0x0);	/* OK */
	write_mchbar8(0x5ff, 0x80);	/* OK */
}

static void set_4cf(struct raminfo *info, int channel, u8 val)
{
	gav(read_500(info, channel, 0x4cf, 4));	// = 0xc2300cf9
	write_500(info, channel, val, 0x4cf, 4, 1);
	gav(read_500(info, channel, 0x659, 4));	// = 0x80300839
	write_500(info, channel, val, 0x659, 4, 1);
	gav(read_500(info, channel, 0x697, 4));	// = 0x80300839
	write_500(info, channel, val, 0x697, 4, 1);
}

static void set_334(int zero)
{
	int j, k, channel;
	const u32 val3[] = { 0x2a2b2a2b, 0x26272627, 0x2e2f2e2f, 0x2a2b };
	u32 vd8[2][16];

	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		for (j = 0; j < 4; j++) {
			u32 a = (j == 1) ? 0x29292929 : 0x31313131;
			u32 lmask = (j == 3) ? 0xffff : 0xffffffff;
			u16 c;
			if ((j == 0 || j == 3) && zero)
				c = 0;
			else if (j == 3)
				c = 0x5f;
			else
				c = 0x5f5f;

			for (k = 0; k < 2; k++) {
				write_mchbar32(0x138 + 8 * k,
					       (channel << 26) | (j << 24));
				gav(vd8[1][(channel << 3) | (j << 1) | k] =
				    read_mchbar32(0x138 + 8 * k));
				gav(vd8[0][(channel << 3) | (j << 1) | k] =
				    read_mchbar32(0x13c + 8 * k));
			}

			write_mchbar32(0x334 + (channel << 10) + (j * 0x44),
				       zero ? 0 : val3[j]);
			write_mchbar32(0x32c + (channel << 10) + (j * 0x44),
				       zero ? 0 : (0x18191819 & lmask));
			write_mchbar16(0x34a + (channel << 10) + (j * 0x44), c);
			write_mchbar32(0x33c + (channel << 10) + (j * 0x44),
				       zero ? 0 : (a & lmask));
			write_mchbar32(0x344 + (channel << 10) + (j * 0x44),
				       zero ? 0 : (a & lmask));
		}
	}

	write_mchbar32(0x130, read_mchbar32(0x130) | 1);	/* OK */
	while (read_mchbar8(0x130) & 1);	/* OK */
}

static void rmw_1d0(u16 addr, u32 and, u32 or, int split, int flag)
{
	u32 v;
	v = read_1d0(addr, split);
	write_1d0((v & and) | or, addr, split, flag);
}

static int find_highest_bit_set(u16 val)
{
	int i;
	for (i = 15; i >= 0; i--)
		if (val & (1 << i))
			return i;
	return -1;
}

static int find_lowest_bit_set32(u32 val)
{
	int i;
	for (i = 0; i < 32; i++)
		if (val & (1 << i))
			return i;
	return -1;
}

enum {
	DEVICE_TYPE = 2,
	MODULE_TYPE = 3,
	DENSITY = 4,
	RANKS_AND_DQ = 7,
	MEMORY_BUS_WIDTH = 8,
	TIMEBASE_DIVIDEND = 10,
	TIMEBASE_DIVISOR = 11,
	CYCLETIME = 12,

	CAS_LATENCIES_LSB = 14,
	CAS_LATENCIES_MSB = 15,
	CAS_LATENCY_TIME = 16,
	THERMAL_AND_REFRESH = 31,
	REFERENCE_RAW_CARD_USED = 62,
	RANK1_ADDRESS_MAPPING = 63
};

static void calculate_timings(struct raminfo *info)
{
	unsigned cycletime;
	unsigned cas_latency_time;
	unsigned supported_cas_latencies;
	unsigned channel, slot;
	unsigned clock_speed_index;
	unsigned min_cas_latency;
	unsigned cas_latency;
	unsigned max_clock_index;

	/* Find common CAS latency */
	supported_cas_latencies = 0x3fe;
	for (channel = 0; channel < NUM_CHANNELS; channel++)
		for (slot = 0; slot < NUM_SLOTS; slot++)
			if (info->populated_ranks[channel][slot][0])
				supported_cas_latencies &=
				    2 *
				    (info->
				     spd[channel][slot][CAS_LATENCIES_LSB] |
				     (info->
				      spd[channel][slot][CAS_LATENCIES_MSB] <<
				      8));

	max_clock_index = min(3, info->max_supported_clock_speed_index);

	cycletime = min_cycletime[max_clock_index];
	cas_latency_time = min_cas_latency_time[max_clock_index];

	for (channel = 0; channel < NUM_CHANNELS; channel++)
		for (slot = 0; slot < NUM_SLOTS; slot++)
			if (info->populated_ranks[channel][slot][0]) {
				unsigned timebase;
				timebase =
				    1000 *
				    info->
				    spd[channel][slot][TIMEBASE_DIVIDEND] /
				    info->spd[channel][slot][TIMEBASE_DIVISOR];
				cycletime =
				    max(cycletime,
					timebase *
					info->spd[channel][slot][CYCLETIME]);
				cas_latency_time =
				    max(cas_latency_time,
					timebase *
					info->
					spd[channel][slot][CAS_LATENCY_TIME]);
			}
	for (clock_speed_index = 0; clock_speed_index < 3; clock_speed_index++) {
		if (cycletime == min_cycletime[clock_speed_index])
			break;
		if (cycletime > min_cycletime[clock_speed_index]) {
			clock_speed_index--;
			cycletime = min_cycletime[clock_speed_index];
			break;
		}
	}
	min_cas_latency = CEIL_DIV(cas_latency_time, cycletime);
	cas_latency = 0;
	while (supported_cas_latencies) {
		cas_latency = find_highest_bit_set(supported_cas_latencies) + 3;
		if (cas_latency <= min_cas_latency)
			break;
		supported_cas_latencies &=
		    ~(1 << find_highest_bit_set(supported_cas_latencies));
	}

	if (cas_latency != min_cas_latency && clock_speed_index)
		clock_speed_index--;

	if (cas_latency * min_cycletime[clock_speed_index] > 20000)
		die("Couldn't configure DRAM");
	info->clock_speed_index = clock_speed_index;
	info->cas_latency = cas_latency;
}
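
/*
 * Added note with an illustrative example (not taken from any datasheet):
 * for a hypothetical DDR3-1333 DIMM with MTB = 1/8 ns, tCK(min) = 12 MTB
 * (1.5 ns) and tAA(min) = 108 MTB (13.5 ns), the loop above would settle
 * on the clock_speed_index for 666 MHz and CEIL_DIV() would give a minimum
 * CAS latency of 9, subject to the latencies advertised in SPD bytes 14/15.
 */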

static void program_base_timings(struct raminfo *info)
{
	unsigned channel;
	unsigned slot, rank, lane;
	unsigned extended_silicon_revision;
	int i;

	extended_silicon_revision = info->silicon_revision;
	if (info->silicon_revision == 0)
		for (channel = 0; channel < NUM_CHANNELS; channel++)
			for (slot = 0; slot < NUM_SLOTS; slot++)
				if ((info->
				     spd[channel][slot][MODULE_TYPE] & 0xF) ==
				    3)
					extended_silicon_revision = 4;

	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		for (slot = 0; slot < NUM_SLOTS; slot++)
			for (rank = 0; rank < NUM_RANKS; rank++) {
				int card_timing_2;
				if (!info->populated_ranks[channel][slot][rank])
					continue;

				for (lane = 0; lane < 9; lane++) {
					int tm_reg;
					int card_timing;

					card_timing = 0;
					if ((info->
					     spd[channel][slot][MODULE_TYPE] &
					     0xF) == 3) {
						int reference_card;
						reference_card =
						    info->
						    spd[channel][slot]
						    [REFERENCE_RAW_CARD_USED] &
						    0x1f;
						if (reference_card == 3)
							card_timing =
							    u16_ffd1188[0][lane]
							    [info->
							     clock_speed_index];
						if (reference_card == 5)
							card_timing =
							    u16_ffd1188[1][lane]
							    [info->
							     clock_speed_index];
					}

					info->training.
					    lane_timings[0][channel][slot][rank]
					    [lane] =
					    u8_FFFD1218[info->
							clock_speed_index];
					info->training.
					    lane_timings[1][channel][slot][rank]
					    [lane] = 256;

					for (tm_reg = 2; tm_reg < 4; tm_reg++)
						info->training.
						    lane_timings[tm_reg]
						    [channel][slot][rank][lane]
						    =
						    u8_FFFD1240[channel]
						    [extended_silicon_revision]
						    [lane][2 * slot +
							   rank][info->
								 clock_speed_index]
						    + info->max4048[channel]
						    +
						    u8_FFFD0C78[channel]
						    [extended_silicon_revision]
						    [info->
						     mode4030[channel]][slot]
						    [rank][info->
							   clock_speed_index]
						    + card_timing;
					for (tm_reg = 0; tm_reg < 4; tm_reg++)
						write_500(info, channel,
							  info->training.
							  lane_timings[tm_reg]
							  [channel][slot][rank]
							  [lane],
							  get_timing_register_addr
							  (lane, tm_reg, slot,
							   rank), 9, 0);
				}

				card_timing_2 = 0;
				if (!(extended_silicon_revision != 4
				      || (info->
					  populated_ranks_mask[channel] & 5) ==
				      5)) {
					if ((info->
					     spd[channel][slot]
					     [REFERENCE_RAW_CARD_USED] & 0x1F)
					    == 3)
						card_timing_2 =
						    u16_FFFE0EB8[0][info->
								    clock_speed_index];
					if ((info->
					     spd[channel][slot]
					     [REFERENCE_RAW_CARD_USED] & 0x1F)
					    == 5)
						card_timing_2 =
						    u16_FFFE0EB8[1][info->
								    clock_speed_index];
				}

				for (i = 0; i < 3; i++)
					write_500(info, channel,
						  (card_timing_2 +
						   info->max4048[channel]
						   +
						   u8_FFFD0EF8[channel]
						   [extended_silicon_revision]
						   [info->
						    mode4030[channel]][info->
								       clock_speed_index]),
						  u16_fffd0c50[i][slot][rank],
						  8, 1);
				write_500(info, channel,
					  (info->max4048[channel] +
					   u8_FFFD0C78[channel]
					   [extended_silicon_revision][info->
									mode4030
									[channel]]
					   [slot][rank][info->
							clock_speed_index]),
					  u16_fffd0c70[slot][rank], 7, 1);
			}
		if (!info->populated_ranks_mask[channel])
			continue;
		for (i = 0; i < 3; i++)
			write_500(info, channel,
				  (info->max4048[channel] +
				   info->avg4044[channel]
				   +
				   u8_FFFD17E0[channel]
				   [extended_silicon_revision][info->
								mode4030
								[channel]][info->
									   clock_speed_index]),
				  u16_fffd0c68[i], 8, 1);
	}
}

static unsigned int fsbcycle_ps(struct raminfo *info)
{
	return 900000 / info->fsb_frequency;
}

/* The time of DDR transfer in ps.  */
static unsigned int halfcycle_ps(struct raminfo *info)
{
	return 3750 / (info->clock_speed_index + 3);
}

/* The time of clock cycle in ps.  */
static unsigned int cycle_ps(struct raminfo *info)
{
	return 2 * halfcycle_ps(info);
}

/* Frequency in 1.(1)=10/9 MHz units. */
static unsigned frequency_11(struct raminfo *info)
{
	return (info->clock_speed_index + 3) * 120;
}

/* Frequency in 0.1 MHz units. */
static unsigned frequency_01(struct raminfo *info)
{
	return 100 * frequency_11(info) / 9;
}

static unsigned ps_to_halfcycles(struct raminfo *info, unsigned int ps)
{
	return (frequency_11(info) * 2) * ps / 900000;
}

static unsigned ns_to_cycles(struct raminfo *info, unsigned int ns)
{
	return (frequency_11(info)) * ns / 900;
}
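
/*
 * Added note: with clock_speed_index 0/1/2 the helpers above evaluate to
 * 1250/937/750 ps per DDR transfer (halfcycle_ps), i.e. roughly 400, 533
 * and 666 MHz memory clock, or DDR3-800/1066/1333. This is just the
 * arithmetic spelled out, not an additional claim about the hardware.
 */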

static void compute_derived_timings(struct raminfo *info)
{
	unsigned channel, slot, rank;
	int extended_silicon_revision;
	int some_delay_1_ps;
	int some_delay_2_ps;
	int some_delay_2_halfcycles_ceil;
	int some_delay_2_halfcycles_floor;
	int some_delay_3_ps;
	int some_delay_3_halfcycles;
	int some_delay_3_ps_rounded;
	int some_delay_1_cycle_ceil;
	int some_delay_1_cycle_floor;

	some_delay_3_halfcycles = 0;
	some_delay_3_ps_rounded = 0;
	extended_silicon_revision = info->silicon_revision;
	if (!info->silicon_revision)
		for (channel = 0; channel < NUM_CHANNELS; channel++)
			for (slot = 0; slot < NUM_SLOTS; slot++)
				if ((info->
				     spd[channel][slot][MODULE_TYPE] & 0xF) ==
				    3)
					extended_silicon_revision = 4;
	if (info->board_lane_delay[7] < 5)
		info->board_lane_delay[7] = 5;
	info->revision_flag_1 = 2;
	if (info->silicon_revision == 2 || info->silicon_revision == 3)
		info->revision_flag_1 = 0;
	if (info->revision < 16)
		info->revision_flag_1 = 0;

	if (info->revision < 8)
		info->revision_flag_1 = 0;
	if (info->revision >= 8 && (info->silicon_revision == 0
				    || info->silicon_revision == 1))
		some_delay_2_ps = 735;
	else
		some_delay_2_ps = 750;

	if (info->revision >= 0x10 && (info->silicon_revision == 0
				       || info->silicon_revision == 1))
		some_delay_1_ps = 3929;
	else
		some_delay_1_ps = 3490;

	some_delay_1_cycle_floor = some_delay_1_ps / cycle_ps(info);
	some_delay_1_cycle_ceil = some_delay_1_ps / cycle_ps(info);
	if (some_delay_1_ps % cycle_ps(info))
		some_delay_1_cycle_ceil++;
	else
		some_delay_1_cycle_floor--;
	info->some_delay_1_cycle_floor = some_delay_1_cycle_floor;
	if (info->revision_flag_1)
		some_delay_2_ps = halfcycle_ps(info) >> 6;
	some_delay_2_ps +=
	    max(some_delay_1_ps - 30,
		2 * halfcycle_ps(info) * (some_delay_1_cycle_ceil - 1) + 1000) +
	    375;
	some_delay_3_ps =
	    halfcycle_ps(info) - some_delay_2_ps % halfcycle_ps(info);
	if (info->revision_flag_1) {
		if (some_delay_3_ps < 150)
			some_delay_3_halfcycles = 0;
		else
			some_delay_3_halfcycles =
			    (some_delay_3_ps << 6) / halfcycle_ps(info);
		some_delay_3_ps_rounded =
		    halfcycle_ps(info) * some_delay_3_halfcycles >> 6;
	}
	some_delay_2_halfcycles_ceil =
	    (some_delay_2_ps + halfcycle_ps(info) - 1) / halfcycle_ps(info) -
	    2 * (some_delay_1_cycle_ceil - 1);
	if (info->revision_flag_1 && some_delay_3_ps < 150)
		some_delay_2_halfcycles_ceil++;
	some_delay_2_halfcycles_floor = some_delay_2_halfcycles_ceil;
	if (info->revision < 0x10)
		some_delay_2_halfcycles_floor =
		    some_delay_2_halfcycles_ceil - 1;
	if (!info->revision_flag_1)
		some_delay_2_halfcycles_floor++;
	info->some_delay_2_halfcycles_ceil = some_delay_2_halfcycles_ceil;
	info->some_delay_3_ps_rounded = some_delay_3_ps_rounded;
	if ((info->populated_ranks[0][0][0] && info->populated_ranks[0][1][0])
	    || (info->populated_ranks[1][0][0]
		&& info->populated_ranks[1][1][0]))
		info->max_slots_used_in_channel = 2;
	else
		info->max_slots_used_in_channel = 1;
	for (channel = 0; channel < 2; channel++)
		write_mchbar32(0x244 + (channel << 10),
			       ((info->revision < 8) ? 1 : 0x200)
			       | ((2 - info->max_slots_used_in_channel) << 17) |
			       (channel << 21) | (info->
						  some_delay_1_cycle_floor <<
						  18) | 0x9510);
	if (info->max_slots_used_in_channel == 1) {
		info->mode4030[0] = (count_ranks_in_channel(info, 0) == 2);
		info->mode4030[1] = (count_ranks_in_channel(info, 1) == 2);
	} else {
		info->mode4030[0] = ((count_ranks_in_channel(info, 0) == 1) || (count_ranks_in_channel(info, 0) == 2)) ? 2 : 3;	/* 2 if 1 or 2 ranks */
		info->mode4030[1] = ((count_ranks_in_channel(info, 1) == 1)
				     || (count_ranks_in_channel(info, 1) ==
					 2)) ? 2 : 3;
	}
	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		int max_of_unk;
		int min_of_unk_2;

		int i, count;
		int sum;

		if (!info->populated_ranks_mask[channel])
			continue;

		max_of_unk = 0;
		min_of_unk_2 = 32767;

		sum = 0;
		count = 0;
		for (i = 0; i < 3; i++) {
			int unk1;
			if (info->revision < 8)
				unk1 =
				    u8_FFFD1891[0][channel][info->
							    clock_speed_index]
				    [i];
			else if (!
				 (info->revision >= 0x10
				  || info->revision_flag_1))
				unk1 =
				    u8_FFFD1891[1][channel][info->
							    clock_speed_index]
				    [i];
			else
				unk1 = 0;
			for (slot = 0; slot < NUM_SLOTS; slot++)
				for (rank = 0; rank < NUM_RANKS; rank++) {
					int a = 0;
					int b = 0;

					if (!info->
					    populated_ranks[channel][slot]
					    [rank])
						continue;
					if (extended_silicon_revision == 4
					    && (info->
						populated_ranks_mask[channel] &
						5) != 5) {
						if ((info->
						     spd[channel][slot]
						     [REFERENCE_RAW_CARD_USED] &
						     0x1F) == 3) {
							a = u16_ffd1178[0]
							    [info->
							     clock_speed_index];
							b = u16_fe0eb8[0][info->
									  clock_speed_index];
						} else
						    if ((info->
							 spd[channel][slot]
							 [REFERENCE_RAW_CARD_USED]
							 & 0x1F) == 5) {
							a = u16_ffd1178[1]
							    [info->
							     clock_speed_index];
							b = u16_fe0eb8[1][info->
									  clock_speed_index];
						}
					}
					min_of_unk_2 = min(min_of_unk_2, a);
					min_of_unk_2 = min(min_of_unk_2, b);
					if (rank == 0) {
						sum += a;
						count++;
					}
					{
						int t;
						t = b +
						    u8_FFFD0EF8[channel]
						    [extended_silicon_revision]
						    [info->
						     mode4030[channel]][info->
									clock_speed_index];
						if (unk1 >= t)
							max_of_unk =
							    max(max_of_unk,
								unk1 - t);
					}
				}
			{
				int t =
				    u8_FFFD17E0[channel]
				    [extended_silicon_revision][info->
								 mode4030
								 [channel]]
				    [info->clock_speed_index] + min_of_unk_2;
				if (unk1 >= t)
					max_of_unk = max(max_of_unk, unk1 - t);
			}
		}

		info->avg4044[channel] = sum / count;
		info->max4048[channel] = max_of_unk;
	}
}

static void jedec_read(struct raminfo *info,
		       int channel, int slot, int rank,
		       int total_rank, u8 addr3, unsigned int value)
{
	/* Handle mirrored mapping.  */
	if ((rank & 1) && (info->spd[channel][slot][RANK1_ADDRESS_MAPPING] & 1))
		addr3 =
		    (addr3 & 0xCF) | ((addr3 & 0x10) << 1) | ((addr3 >> 1) &
							      0x10);
	write_mchbar8(0x271, addr3 | (read_mchbar8(0x271) & 0xC1));
	write_mchbar8(0x671, addr3 | (read_mchbar8(0x671) & 0xC1));

	/* Handle mirrored mapping.  */
	if ((rank & 1) && (info->spd[channel][slot][RANK1_ADDRESS_MAPPING] & 1))
		value =
		    (value & ~0x1f8) | ((value >> 1) & 0xa8) | ((value & 0xa8)
								 << 1);

	read32p((value << 3) | (total_rank << 28));

	write_mchbar8(0x271, (read_mchbar8(0x271) & 0xC3) | 2);
	write_mchbar8(0x671, (read_mchbar8(0x671) & 0xC3) | 2);

	read32p(total_rank << 28);
}
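
/*
 * Added note: jedec_read() appears to issue DDR3 load-mode (MRS) commands
 * by performing dummy reads while the address decoder is put into a special
 * mode via MCHBAR 0x271/0x671; the mode-register payload is encoded in the
 * read address ("value" << 3) and the rank in bits 28+. The two "mirrored
 * mapping" fixups swap the address/bank bits that are mirrored on rank 1 of
 * dual-rank DIMMs (SPD byte 63). This is reverse-engineered behaviour, not
 * something confirmed by public documentation.
 */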

enum {
	MR1_RZQ12 = 512,
	MR1_RZQ2 = 64,
	MR1_RZQ4 = 4,
	MR1_ODS34OHM = 2
};

enum {
	MR0_BT_INTERLEAVED = 8,
	MR0_DLL_RESET_ON = 256
};

enum {
	MR2_RTT_WR_DISABLED = 0,
	MR2_RZQ2 = 1 << 10
};

static void jedec_init(struct raminfo *info)
{
	int write_recovery;
	int channel, slot, rank;
	int total_rank;
	int dll_on;
	int self_refresh_temperature;
	int auto_self_refresh;

	auto_self_refresh = 1;
	self_refresh_temperature = 1;
	if (info->board_lane_delay[3] <= 10) {
		if (info->board_lane_delay[3] <= 8)
			write_recovery = info->board_lane_delay[3] - 4;
		else
			write_recovery = 5;
	} else {
		write_recovery = 6;
	}
	FOR_POPULATED_RANKS {
		auto_self_refresh &=
		    (info->spd[channel][slot][THERMAL_AND_REFRESH] >> 2) & 1;
		self_refresh_temperature &=
		    info->spd[channel][slot][THERMAL_AND_REFRESH] & 1;
	}
	if (auto_self_refresh == 1)
		self_refresh_temperature = 0;

	dll_on = ((info->silicon_revision != 2 && info->silicon_revision != 3)
		  || (info->populated_ranks[0][0][0]
		      && info->populated_ranks[0][1][0])
		  || (info->populated_ranks[1][0][0]
		      && info->populated_ranks[1][1][0]));

	total_rank = 0;

	for (channel = NUM_CHANNELS - 1; channel >= 0; channel--) {
		int rtt, rtt_wr = MR2_RTT_WR_DISABLED;
		int rzq_reg58e;

		if (info->silicon_revision == 2 || info->silicon_revision == 3) {
			rzq_reg58e = 64;
			rtt = MR1_RZQ2;
			if (info->clock_speed_index != 0) {
				rzq_reg58e = 4;
				if (info->populated_ranks_mask[channel] == 3)
					rtt = MR1_RZQ4;
			}
		} else {
			if ((info->populated_ranks_mask[channel] & 5) == 5) {
				rtt = MR1_RZQ12;
				rzq_reg58e = 64;
				rtt_wr = MR2_RZQ2;
			} else {
				rzq_reg58e = 4;
				rtt = MR1_RZQ4;
			}
		}

		write_mchbar16(0x588 + (channel << 10), 0x0);
		write_mchbar16(0x58a + (channel << 10), 0x4);
		write_mchbar16(0x58c + (channel << 10), rtt | MR1_ODS34OHM);
		write_mchbar16(0x58e + (channel << 10), rzq_reg58e | 0x82);
		write_mchbar16(0x590 + (channel << 10), 0x1282);

		for (slot = 0; slot < NUM_SLOTS; slot++)
			for (rank = 0; rank < NUM_RANKS; rank++)
				if (info->populated_ranks[channel][slot][rank]) {
					jedec_read(info, channel, slot, rank,
						   total_rank, 0x28,
						   rtt_wr | (info->
							     clock_speed_index
							     << 3)
						   | (auto_self_refresh << 6) |
						   (self_refresh_temperature <<
						    7));
					jedec_read(info, channel, slot, rank,
						   total_rank, 0x38, 0);
					jedec_read(info, channel, slot, rank,
						   total_rank, 0x18,
						   rtt | MR1_ODS34OHM);
					jedec_read(info, channel, slot, rank,
						   total_rank, 6,
						   (dll_on << 12) |
						   (write_recovery << 9)
						   | ((info->cas_latency - 4) <<
						      4) | MR0_BT_INTERLEAVED |
						   MR0_DLL_RESET_ON);
					total_rank++;
				}
	}
}
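
/*
 * Added note: per rank the sequence above issues MR2 (addr3 0x28: RTT_WR,
 * CWL, ASR/SRT), then MR3 (0x38), then MR1 (0x18: RTT_NOM, output drive
 * strength) and finally MR0 (addr3 6: write recovery, CAS latency, DLL
 * reset), which matches the JEDEC DDR3 initialization order MR2, MR3, MR1,
 * MR0. The addr3 values are assumed to select the bank-address bits driven
 * during the load-mode cycle.
 */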

static void program_modules_memory_map(struct raminfo *info, int pre_jedec)
{
	unsigned channel, slot, rank;
	unsigned int total_mb[2] = { 0, 0 };	/* total memory per channel in MB */
	unsigned int channel_0_non_interleaved;

	FOR_ALL_RANKS {
		if (info->populated_ranks[channel][slot][rank]) {
			total_mb[channel] +=
			    pre_jedec ? 256 : (256 << info->
					       density[channel][slot] >> info->
					       is_x16_module[channel][slot]);
			write_mchbar8(0x208 + rank + 2 * slot + (channel << 10),
				      (pre_jedec ? (1 | ((1 + 1) << 1))
				       : (info->
					  is_x16_module[channel][slot] |
					  ((info->density[channel][slot] +
					    1) << 1))) | 0x80);
		}
		write_mchbar16(0x200 + (channel << 10) + 4 * slot + 2 * rank,
			       total_mb[channel] >> 6);
	}

	info->total_memory_mb = total_mb[0] + total_mb[1];

	info->interleaved_part_mb =
	    pre_jedec ? 0 : 2 * min(total_mb[0], total_mb[1]);
	info->non_interleaved_part_mb =
	    total_mb[0] + total_mb[1] - info->interleaved_part_mb;
	channel_0_non_interleaved = total_mb[0] - info->interleaved_part_mb / 2;
	write_mchbar32(0x100,
		       channel_0_non_interleaved | (info->
						    non_interleaved_part_mb <<
						    16));
	if (!pre_jedec)
		write_mchbar16(0x104, info->interleaved_part_mb);
}

static void program_board_delay(struct raminfo *info)
{
	int cas_latency_shift;
	int some_delay_ns;
	int some_delay_3_half_cycles;

	unsigned channel, i;
	int high_multiplier;
	int lane_3_delay;
	int cas_latency_derived;

	high_multiplier = 0;
	some_delay_ns = 200;
	some_delay_3_half_cycles = 4;
	cas_latency_shift = info->silicon_revision == 0
	    || info->silicon_revision == 1 ? 1 : 0;
	if (info->revision < 8) {
		some_delay_ns = 600;
		cas_latency_shift = 0;
	}
	{
		int speed_bit;
		speed_bit =
		    ((info->clock_speed_index > 1
		      || (info->silicon_revision != 2
			  && info->silicon_revision != 3))) ^ (info->revision >=
							       0x10);
		write_500(info, 0, speed_bit | ((!info->use_ecc) << 1), 0x60e,
			  3, 1);
		write_500(info, 1, speed_bit | ((!info->use_ecc) << 1), 0x60e,
			  3, 1);
		if (info->revision >= 0x10 && info->clock_speed_index <= 1
		    && (info->silicon_revision == 2
			|| info->silicon_revision == 3))
			rmw_1d0(0x116, 5, 2, 4, 1);
	}
	write_mchbar32(0x120,
		       (1 << (info->max_slots_used_in_channel + 28)) |
		       0x188e7f9f);

	write_mchbar8(0x124,
		      info->board_lane_delay[4] +
		      ((frequency_01(info) + 999) / 1000));
	write_mchbar16(0x125, 0x1360);
	write_mchbar8(0x127, 0x40);
	if (info->fsb_frequency < frequency_11(info) / 2) {
		unsigned some_delay_2_half_cycles;
		high_multiplier = 1;
		some_delay_2_half_cycles = ps_to_halfcycles(info,
							    ((3 *
							      fsbcycle_ps(info))
							     >> 1) +
							    (halfcycle_ps(info)
							     *
							     reg178_min[info->
									clock_speed_index]
							     >> 6)
							    +
							    4 *
							    halfcycle_ps(info)
							    + 2230);
		some_delay_3_half_cycles =
		    min((some_delay_2_half_cycles +
			 (frequency_11(info) * 2) * (28 -
						     some_delay_2_half_cycles) /
			 (frequency_11(info) * 2 -
			  4 * (info->fsb_frequency))) >> 3, 7);
	}
	if (read_mchbar8(0x2ca9) & 1)
		some_delay_3_half_cycles = 3;
	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		write_mchbar32(0x220 + (channel << 10),
			       read_mchbar32(0x220 +
					     (channel << 10)) | 0x18001117);
		write_mchbar32(0x224 + (channel << 10),
			       (info->max_slots_used_in_channel - 1)
			       |
			       ((info->cas_latency - 5 -
				 info->clock_speed_index) << 21)
			       |
			       ((info->max_slots_used_in_channel +
				 info->cas_latency - cas_latency_shift -
				 4) << 16)
			       | ((info->cas_latency - cas_latency_shift - 4) <<
				  26)
			       |
			       ((info->cas_latency - info->clock_speed_index +
				 info->max_slots_used_in_channel - 6) << 8));
		write_mchbar32(0x228 + (channel << 10),
			       info->max_slots_used_in_channel);
		write_mchbar8(0x239 + (channel << 10), 32);
		write_mchbar32(0x248 + (channel << 10),
			       (high_multiplier << 24) |
			       (some_delay_3_half_cycles << 25) | 0x840000);
		write_mchbar32(0x278 + (channel << 10), 0xc362042);
		write_mchbar32(0x27c + (channel << 10), 0x8b000062);
		write_mchbar32(0x24c + (channel << 10),
			       ((!!info->
				 clock_speed_index) << 17) | (((2 +
								info->
								clock_speed_index
								-
								(!!info->
								 clock_speed_index)))
							      << 12) | 0x10200);

		write_mchbar8(0x267 + (channel << 10), 0x4);
		write_mchbar16(0x272 + (channel << 10), 0x155);
		write_mchbar32(0x2bc + (channel << 10),
			       (read_mchbar32(0x2bc + (channel << 10)) &
				0xFF000000)
			       | 0x707070);

		write_500(info, channel,
			  ((!info->populated_ranks[channel][1][1])
			   | (!info->populated_ranks[channel][1][0] << 1)
			   | (!info->populated_ranks[channel][0][1] << 2)
			   | (!info->populated_ranks[channel][0][0] << 3)),
			  0x4c9, 4, 1);
	}

	write_mchbar8(0x2c4, ((1 + (info->clock_speed_index != 0)) << 6) | 0xC);
	{
		u8 freq_divisor = 2;
		if (info->fsb_frequency == frequency_11(info))
			freq_divisor = 3;
		else if (2 * info->fsb_frequency < 3 * (frequency_11(info) / 2))
			freq_divisor = 1;
		else
			freq_divisor = 2;
		write_mchbar32(0x2c0, (freq_divisor << 11) | 0x6009c400);
	}

	if (info->board_lane_delay[3] <= 10) {
		if (info->board_lane_delay[3] <= 8)
			lane_3_delay = info->board_lane_delay[3];
		else
			lane_3_delay = 10;
	} else {
		lane_3_delay = 12;
	}
	cas_latency_derived = info->cas_latency - info->clock_speed_index + 2;
	if (info->clock_speed_index > 1)
		cas_latency_derived++;
	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		write_mchbar32(0x240 + (channel << 10),
			       ((info->clock_speed_index ==
				 0) * 0x11000) | 0x1002100 | ((2 +
							       info->
							       clock_speed_index)
							      << 4) | (info->
								       cas_latency
								       - 3));
		write_500(info, channel, (info->clock_speed_index << 1) | 1,
			  0x609, 6, 1);
		write_500(info, channel,
			  info->clock_speed_index + 2 * info->cas_latency - 7,
			  0x601, 6, 1);

		write_mchbar32(0x250 + (channel << 10),
			       ((lane_3_delay + info->clock_speed_index +
				 9) << 6)
			       | (info->board_lane_delay[7] << 2) | (info->
								     board_lane_delay
								     [4] << 16)
			       | (info->board_lane_delay[1] << 25) | (info->
								      board_lane_delay
								      [1] << 29)
			       | 1);
		write_mchbar32(0x254 + (channel << 10),
			       (info->
				board_lane_delay[1] >> 3) | ((info->
							      board_lane_delay
							      [8] +
							      4 *
							      info->
							      use_ecc) << 6) |
			       0x80 | (info->board_lane_delay[6] << 1) | (info->
									  board_lane_delay
									  [2] <<
									  28) |
			       (cas_latency_derived << 16) | 0x4700000);
		write_mchbar32(0x258 + (channel << 10),
			       ((info->board_lane_delay[5] +
				 info->clock_speed_index +
				 9) << 12) | ((info->clock_speed_index -
					       info->cas_latency + 12) << 8)
			       | (info->board_lane_delay[2] << 17) | (info->
								      board_lane_delay
								      [4] << 24)
			       | 0x47);
		write_mchbar32(0x25c + (channel << 10),
			       (info->board_lane_delay[1] << 1) | (info->
								   board_lane_delay
								   [0] << 8) |
			       0x1da50000);
		write_mchbar8(0x264 + (channel << 10), 0xff);
		write_mchbar8(0x5f8 + (channel << 10),
			      (cas_latency_shift << 3) | info->use_ecc);
	}

	program_modules_memory_map(info, 1);

	write_mchbar16(0x610,
		       (min(ns_to_cycles(info, some_delay_ns) / 2, 127) << 9)
		       | (read_mchbar16(0x610) & 0x1C3) | 0x3C);
	write_mchbar16(0x612, read_mchbar16(0x612) | 0x100);
	write_mchbar16(0x214, read_mchbar16(0x214) | 0x3E00);
	for (i = 0; i < 8; i++) {
		pci_write_config32(PCI_DEV (QUICKPATH_BUS, 0, 1), 0x80 + 4 * i,
				   (info->total_memory_mb - 64) | !i | 2);
		pci_write_config32(PCI_DEV (QUICKPATH_BUS, 0, 1), 0xc0 + 4 * i, 0);
	}
}

#define DEFAULT_PCI_MMIO_SIZE 2048
#define HOST_BRIDGE PCI_DEVFN(0, 0)

static unsigned int get_mmio_size(void)
{
	const struct device *dev;
	const struct northbridge_intel_nehalem_config *cfg = NULL;

	dev = dev_find_slot(0, HOST_BRIDGE);
	if (dev)
		cfg = dev->chip_info;

	/* If this is zero, it just means devicetree.cb didn't set it */
	if (!cfg || cfg->pci_mmio_size == 0)
		return DEFAULT_PCI_MMIO_SIZE;
	else
		return cfg->pci_mmio_size;
}
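
/*
 * Added note: pci_mmio_size is set by the mainboard's devicetree.cb, e.g.
 * something like
 *	chip northbridge/intel/nehalem
 *		register "pci_mmio_size" = "2048"
 * (illustrative snippet, not copied from any particular board). When the
 * option is absent, the 2048 MiB default above is used to size the space
 * kept below 4 GiB for PCI MMIO.
 */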
1480
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001481#define BETTER_MEMORY_MAP 0
1482
1483static void program_total_memory_map(struct raminfo *info)
1484{
1485 unsigned int TOM, TOLUD, TOUUD;
1486 unsigned int quickpath_reserved;
1487 unsigned int REMAPbase;
1488 unsigned int uma_base_igd;
1489 unsigned int uma_base_gtt;
Patrick Rudolph266a1f72016-06-09 18:13:34 +02001490 unsigned int mmio_size;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001491 int memory_remap;
1492 unsigned int memory_map[8];
1493 int i;
1494 unsigned int current_limit;
1495 unsigned int tseg_base;
1496 int uma_size_igd = 0, uma_size_gtt = 0;
1497
1498 memset(memory_map, 0, sizeof(memory_map));
1499
1500#if REAL
1501 if (info->uma_enabled) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001502 u16 t = pci_read_config16(NORTHBRIDGE, D0F0_GGC);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001503 gav(t);
1504 const int uma_sizes_gtt[16] =
1505 { 0, 1, 0, 2, 0, 0, 0, 0, 0, 2, 3, 4, 42, 42, 42, 42 };
1506 /* Igd memory */
1507 const int uma_sizes_igd[16] = {
1508 0, 0, 0, 0, 0, 32, 48, 64, 128, 256, 96, 160, 224, 352,
1509 256, 512
1510 };
1511
1512 uma_size_igd = uma_sizes_igd[(t >> 4) & 0xF];
1513 uma_size_gtt = uma_sizes_gtt[(t >> 8) & 0xF];
1514 }
1515#endif
1516
Patrick Rudolph266a1f72016-06-09 18:13:34 +02001517 mmio_size = get_mmio_size();
1518
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001519 TOM = info->total_memory_mb;
1520 if (TOM == 4096)
1521 TOM = 4032;
1522 TOUUD = ALIGN_DOWN(TOM - info->memory_reserved_for_heci_mb, 64);
Patrick Rudolph266a1f72016-06-09 18:13:34 +02001523 TOLUD = ALIGN_DOWN(min(4096 - mmio_size + ALIGN_UP(uma_size_igd + uma_size_gtt, 64)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001524 , TOUUD), 64);
1525 memory_remap = 0;
1526 if (TOUUD - TOLUD > 64) {
1527 memory_remap = 1;
1528 REMAPbase = max(4096, TOUUD);
1529 TOUUD = TOUUD - TOLUD + 4096;
1530 }
1531 if (TOUUD > 4096)
1532 memory_map[2] = TOUUD | 1;
1533 quickpath_reserved = 0;
1534
1535 {
1536 u32 t;
1537
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001538 gav(t = pci_read_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 0x68));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001539 if (t & 0x800)
1540 quickpath_reserved =
1541 (1 << find_lowest_bit_set32(t >> 20));
1542 }
1543 if (memory_remap)
1544 TOUUD -= quickpath_reserved;
1545
1546#if !REAL
1547 if (info->uma_enabled) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001548 u16 t = pci_read_config16(NORTHBRIDGE, D0F0_GGC);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001549 gav(t);
1550 const int uma_sizes_gtt[16] =
1551 { 0, 1, 0, 2, 0, 0, 0, 0, 0, 2, 3, 4, 42, 42, 42, 42 };
1552 /* Igd memory */
1553 const int uma_sizes_igd[16] = {
1554 0, 0, 0, 0, 0, 32, 48, 64, 128, 256, 96, 160, 224, 352,
1555 256, 512
1556 };
1557
1558 uma_size_igd = uma_sizes_igd[(t >> 4) & 0xF];
1559 uma_size_gtt = uma_sizes_gtt[(t >> 8) & 0xF];
1560 }
1561#endif
1562
1563 uma_base_igd = TOLUD - uma_size_igd;
1564 uma_base_gtt = uma_base_igd - uma_size_gtt;
1565 tseg_base = ALIGN_DOWN(uma_base_gtt, 64) - (CONFIG_SMM_TSEG_SIZE >> 20);
1566 if (!memory_remap)
1567 tseg_base -= quickpath_reserved;
1568 tseg_base = ALIGN_DOWN(tseg_base, 8);
1569
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001570 pci_write_config16(NORTHBRIDGE, D0F0_TOLUD, TOLUD << 4);
1571 pci_write_config16(NORTHBRIDGE, D0F0_TOM, TOM >> 6);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001572 if (memory_remap) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001573 pci_write_config16(NORTHBRIDGE, D0F0_REMAPBASE, REMAPbase >> 6);
1574 pci_write_config16(NORTHBRIDGE, D0F0_REMAPLIMIT, (TOUUD - 64) >> 6);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001575 }
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001576 pci_write_config16(NORTHBRIDGE, D0F0_TOUUD, TOUUD);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001577
1578 if (info->uma_enabled) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001579 pci_write_config32(NORTHBRIDGE, D0F0_IGD_BASE, uma_base_igd << 20);
1580 pci_write_config32(NORTHBRIDGE, D0F0_GTT_BASE, uma_base_gtt << 20);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001581 }
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001582 pci_write_config32(NORTHBRIDGE, TSEG, tseg_base << 20);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001583
1584 current_limit = 0;
1585 memory_map[0] = ALIGN_DOWN(uma_base_gtt, 64) | 1;
1586 memory_map[1] = 4096;
1587 for (i = 0; i < ARRAY_SIZE(memory_map); i++) {
1588 current_limit = max(current_limit, memory_map[i] & ~1);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001589 pci_write_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 4 * i + 0x80,
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001590 (memory_map[i] & 1) | ALIGN_DOWN(current_limit -
1591 1, 64) | 2);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001592 pci_write_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 4 * i + 0xc0, 0);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001593 }
1594}
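
/*
 * Added note with a rough worked example (numbers are illustrative only):
 * with 4096 MiB installed, a 2048 MiB PCI MMIO window and 64 MiB of
 * IGD+GTT stolen memory, TOM is clamped to 4032, TOLUD lands around
 * 2112 MiB, and the remainder of DRAM is remapped above 4 GiB between
 * REMAPbase and TOUUD, with TSEG placed just below the stolen graphics
 * memory. The actual values also depend on the ME UMA size and the
 * QuickPath reserved region read back from the hardware.
 */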

static void collect_system_info(struct raminfo *info)
{
	u32 capid0[3];
	int i;
	unsigned channel;

	/* Wait for some bit, maybe TXT clear. */
	while (!(read8((u8 *)0xfed40000) & (1 << 7)));

	if (!info->heci_bar)
		gav(info->heci_bar =
		    pci_read_config32(HECIDEV, HECIBAR) & 0xFFFFFFF8);
	if (!info->memory_reserved_for_heci_mb) {
		/* Wait for ME to be ready */
		intel_early_me_init();
		info->memory_reserved_for_heci_mb = intel_early_me_uma_size();
	}

	for (i = 0; i < 3; i++)
		gav(capid0[i] =
		    pci_read_config32(NORTHBRIDGE, D0F0_CAPID0 | (i << 2)));
	gav(info->revision = pci_read_config8(NORTHBRIDGE, PCI_REVISION_ID));
	info->max_supported_clock_speed_index = (~capid0[1] & 7);

	if ((capid0[1] >> 11) & 1)
		info->uma_enabled = 0;
	else
		gav(info->uma_enabled =
		    pci_read_config8(NORTHBRIDGE, D0F0_DEVEN) & 8);
	/* Unrecognised: [0000:fffd3d2d] 37f81.37f82 ! CPUID: eax: 00000001; ecx: 00000e00 => 00020655.00010800.029ae3ff.bfebfbff */
	info->silicon_revision = 0;

	if (capid0[2] & 2) {
		info->silicon_revision = 0;
		info->max_supported_clock_speed_index = 2;
		for (channel = 0; channel < NUM_CHANNELS; channel++)
			if (info->populated_ranks[channel][0][0]
			    && (info->spd[channel][0][MODULE_TYPE] & 0xf) ==
			    3) {
				info->silicon_revision = 2;
				info->max_supported_clock_speed_index = 1;
			}
	} else {
		switch (((capid0[2] >> 18) & 1) + 2 * ((capid0[1] >> 3) & 1)) {
		case 1:
		case 2:
			info->silicon_revision = 3;
			break;
		case 3:
			info->silicon_revision = 0;
			break;
		case 0:
			info->silicon_revision = 2;
			break;
		}
		switch (pci_read_config16(NORTHBRIDGE, PCI_DEVICE_ID)) {
		case 0x40:
			info->silicon_revision = 0;
			break;
		case 0x48:
			info->silicon_revision = 1;
			break;
		}
	}
}

static void write_training_data(struct raminfo *info)
{
	int tm, channel, slot, rank, lane;
	if (info->revision < 8)
		return;

	for (tm = 0; tm < 4; tm++)
		for (channel = 0; channel < NUM_CHANNELS; channel++)
			for (slot = 0; slot < NUM_SLOTS; slot++)
				for (rank = 0; rank < NUM_RANKS; rank++)
					for (lane = 0; lane < 9; lane++)
						write_500(info, channel,
							  info->
							  cached_training->
							  lane_timings[tm]
							  [channel][slot][rank]
							  [lane],
							  get_timing_register_addr
							  (lane, tm, slot,
							   rank), 9, 0);
	write_1d0(info->cached_training->reg_178, 0x178, 7, 1);
	write_1d0(info->cached_training->reg_10b, 0x10b, 6, 1);
}

static void dump_timings(struct raminfo *info)
{
#if REAL
	int channel, slot, rank, lane, i;
	printk(BIOS_DEBUG, "Timings:\n");
	FOR_POPULATED_RANKS {
		printk(BIOS_DEBUG, "channel %d, slot %d, rank %d\n", channel,
		       slot, rank);
		for (lane = 0; lane < 9; lane++) {
			printk(BIOS_DEBUG, "lane %d: ", lane);
			for (i = 0; i < 4; i++) {
				printk(BIOS_DEBUG, "%x (%x) ",
				       read_500(info, channel,
						get_timing_register_addr
						(lane, i, slot, rank),
						9),
				       info->training.
				       lane_timings[i][channel][slot][rank]
				       [lane]);
			}
			printk(BIOS_DEBUG, "\n");
		}
	}
	printk(BIOS_DEBUG, "[178] = %x (%x)\n", read_1d0(0x178, 7),
	       info->training.reg_178);
	printk(BIOS_DEBUG, "[10b] = %x (%x)\n", read_1d0(0x10b, 6),
	       info->training.reg_10b);
#endif
}

/* Read timings and other registers that need to be restored verbatim and
   put them to CBMEM.
 */
static void save_timings(struct raminfo *info)
{
	struct ram_training train;
	int channel, slot, rank, lane, i;

	train = info->training;
	FOR_POPULATED_RANKS for (lane = 0; lane < 9; lane++)
		for (i = 0; i < 4; i++)
			train.lane_timings[i][channel][slot][rank][lane] =
			    read_500(info, channel,
				     get_timing_register_addr(lane, i, slot,
							      rank), 9);
	train.reg_178 = read_1d0(0x178, 7);
	train.reg_10b = read_1d0(0x10b, 6);

	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		u32 reg32;
		reg32 = read_mchbar32 ((channel << 10) + 0x274);
		train.reg274265[channel][0] = reg32 >> 16;
		train.reg274265[channel][1] = reg32 & 0xffff;
		train.reg274265[channel][2] = read_mchbar16 ((channel << 10) + 0x265) >> 8;
	}
	train.reg2ca9_bit0 = read_mchbar8(0x2ca9) & 1;
	train.reg_6dc = read_mchbar32 (0x6dc);
	train.reg_6e8 = read_mchbar32 (0x6e8);

	printk (BIOS_SPEW, "[6dc] = %x\n", train.reg_6dc);
	printk (BIOS_SPEW, "[6e8] = %x\n", train.reg_6e8);

	/* Save the MRC S3 restore data to cbmem */
	mrc_cache_stash_data(MRC_TRAINING_DATA, MRC_CACHE_VERSION,
			     &train, sizeof(train));
}

#if REAL
static const struct ram_training *get_cached_training(void)
{
	struct region_device rdev;
	if (mrc_cache_get_current(MRC_TRAINING_DATA, MRC_CACHE_VERSION,
				  &rdev))
		return 0;
	return (void *)rdev_mmap_full(&rdev);
}
#endif

/* FIXME: add timeout. */
static void wait_heci_ready(void)
{
	while (!(read32(DEFAULT_HECIBAR + 0xc) & 8));	// = 0x8000000c
	write32((DEFAULT_HECIBAR + 0x4),
		(read32(DEFAULT_HECIBAR + 0x4) & ~0x10) | 0xc);
}

/* FIXME: add timeout. */
static void wait_heci_cb_avail(int len)
{
	union {
		struct mei_csr csr;
		u32 raw;
	} csr;

	while (!(read32(DEFAULT_HECIBAR + 0xc) & 8));

	do
		csr.raw = read32(DEFAULT_HECIBAR + 0x4);
	while (len >
	       csr.csr.buffer_depth - (csr.csr.buffer_write_ptr -
				       csr.csr.buffer_read_ptr));
}

static void send_heci_packet(struct mei_header *head, u32 * payload)
{
	int len = (head->length + 3) / 4;
	int i;

	wait_heci_cb_avail(len + 1);

	/* FIXME: handle leftovers correctly. */
	write32(DEFAULT_HECIBAR + 0, *(u32 *) head);
	for (i = 0; i < len - 1; i++)
		write32(DEFAULT_HECIBAR + 0, payload[i]);

	write32(DEFAULT_HECIBAR + 0, payload[i] & ((1 << (8 * len)) - 1));
	write32(DEFAULT_HECIBAR + 0x4, read32(DEFAULT_HECIBAR + 0x4) | 0x4);
}
1804
1805static void
1806send_heci_message(u8 * msg, int len, u8 hostaddress, u8 clientaddress)
1807{
1808 struct mei_header head;
1809 int maxlen;
1810
1811 wait_heci_ready();
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001812 maxlen = (read32(DEFAULT_HECIBAR + 0x4) >> 24) * 4 - 4;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001813
1814 while (len) {
1815 int cur = len;
1816 if (cur > maxlen) {
1817 cur = maxlen;
1818 head.is_complete = 0;
1819 } else
1820 head.is_complete = 1;
1821 head.length = cur;
1822 head.reserved = 0;
1823 head.client_address = clientaddress;
1824 head.host_address = hostaddress;
1825 send_heci_packet(&head, (u32 *) msg);
1826 len -= cur;
1827 msg += cur;
1828 }
1829}
1830
1831/* FIXME: Add timeout. */
1832static int
1833recv_heci_packet(struct raminfo *info, struct mei_header *head, u32 * packet,
1834 u32 * packet_size)
1835{
1836 union {
1837 struct mei_csr csr;
1838 u32 raw;
1839 } csr;
1840 int i = 0;
1841
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001842 write32(DEFAULT_HECIBAR + 0x4, read32(DEFAULT_HECIBAR + 0x4) | 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001843 do {
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001844 csr.raw = read32(DEFAULT_HECIBAR + 0xc);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001845#if !REAL
1846 if (i++ > 346)
1847 return -1;
1848#endif
1849 }
1850 while (csr.csr.buffer_write_ptr == csr.csr.buffer_read_ptr);
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001851 *(u32 *) head = read32(DEFAULT_HECIBAR + 0x8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001852 if (!head->length) {
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001853 write32(DEFAULT_HECIBAR + 0x4,
1854 read32(DEFAULT_HECIBAR + 0x4) | 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001855 *packet_size = 0;
1856 return 0;
1857 }
1858 if (head->length + 4 > 4 * csr.csr.buffer_depth
1859 || head->length > *packet_size) {
1860 *packet_size = 0;
1861 return -1;
1862 }
1863
1864 do
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001865 csr.raw = read32(DEFAULT_HECIBAR + 0xc);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001866 while ((head->length + 3) >> 2 >
1867 csr.csr.buffer_write_ptr - csr.csr.buffer_read_ptr);
1868
1869 for (i = 0; i < (head->length + 3) >> 2; i++)
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001870		packet[i] = read32(DEFAULT_HECIBAR + 0x8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001871 *packet_size = head->length;
1872 if (!csr.csr.ready)
1873 *packet_size = 0;
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001874 write32(DEFAULT_HECIBAR + 0x4, read32(DEFAULT_HECIBAR + 0x4) | 4);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001875 return 0;
1876}
1877
1878/* FIXME: Add timeout. */
1879static int
1880recv_heci_message(struct raminfo *info, u32 * message, u32 * message_size)
1881{
1882 struct mei_header head;
1883 int current_position;
1884
1885 current_position = 0;
1886 while (1) {
1887 u32 current_size;
1888 current_size = *message_size - current_position;
1889 if (recv_heci_packet
1890 (info, &head, message + (current_position >> 2),
1891 &current_size) == -1)
1892 break;
1893 if (!current_size)
1894 break;
1895 current_position += current_size;
1896 if (head.is_complete) {
1897 *message_size = current_position;
1898 return 0;
1899 }
1900
1901 if (current_position >= *message_size)
1902 break;
1903 }
1904 *message_size = 0;
1905 return -1;
1906}
1907
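/* The message below appears to be the MKHI "set UMA" request: group 0,
 * command MKHI_SET_UMA, sent from host address 0 to the ME's fixed MKHI
 * client at address 7. The ME seems to acknowledge by echoing the
 * command with bit 7 set, which is what the check further down relies
 * on. Field names such as c2, c3, field2 and unk3 are reverse
 * engineering placeholders, not documented names. */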
1908static void send_heci_uma_message(struct raminfo *info)
1909{
1910 struct uma_reply {
1911 u8 group_id;
1912 u8 command;
1913 u8 reserved;
1914 u8 result;
1915 u8 field2;
1916 u8 unk3[0x48 - 4 - 1];
Stefan Reinauer6a001132017-07-13 02:20:27 +02001917 } __packed reply;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001918 struct uma_message {
1919 u8 group_id;
1920 u8 cmd;
1921 u8 reserved;
1922 u8 result;
1923 u32 c2;
1924 u64 heci_uma_addr;
1925 u32 memory_reserved_for_heci_mb;
1926 u16 c3;
Stefan Reinauer6a001132017-07-13 02:20:27 +02001927 } __packed msg = {
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001928 0, MKHI_SET_UMA, 0, 0,
1929 0x82,
1930 info->heci_uma_addr, info->memory_reserved_for_heci_mb, 0};
1931 u32 reply_size;
1932
1933 send_heci_message((u8 *) & msg, sizeof(msg), 0, 7);
1934
1935 reply_size = sizeof(reply);
1936 if (recv_heci_message(info, (u32 *) & reply, &reply_size) == -1)
1937 return;
1938
1939 if (reply.command != (MKHI_SET_UMA | (1 << 7)))
1940 die("HECI init failed\n");
1941}
1942
1943static void setup_heci_uma(struct raminfo *info)
1944{
1945 u32 reg44;
1946
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001947 reg44 = pci_read_config32(HECIDEV, 0x44); // = 0x80010020
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001948 info->memory_reserved_for_heci_mb = 0;
1949 info->heci_uma_addr = 0;
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001950 if (!((reg44 & 0x10000) && !(pci_read_config32(HECIDEV, 0x40) & 0x20)))
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001951 return;
1952
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001953 info->heci_bar = pci_read_config32(HECIDEV, 0x10) & 0xFFFFFFF0;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001954 info->memory_reserved_for_heci_mb = reg44 & 0x3f;
1955 info->heci_uma_addr =
1956 ((u64)
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001957 ((((u64) pci_read_config16(NORTHBRIDGE, D0F0_TOM)) << 6) -
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001958 info->memory_reserved_for_heci_mb)) << 20;
1959
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001960 pci_read_config32(NORTHBRIDGE, DMIBAR);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001961 if (info->memory_reserved_for_heci_mb) {
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001962 write32(DEFAULT_DMIBAR + 0x14,
1963 read32(DEFAULT_DMIBAR + 0x14) & ~0x80);
1964 write32(DEFAULT_RCBA + 0x14,
1965 read32(DEFAULT_RCBA + 0x14) & ~0x80);
1966 write32(DEFAULT_DMIBAR + 0x20,
1967 read32(DEFAULT_DMIBAR + 0x20) & ~0x80);
1968 write32(DEFAULT_RCBA + 0x20,
1969 read32(DEFAULT_RCBA + 0x20) & ~0x80);
1970 write32(DEFAULT_DMIBAR + 0x2c,
1971 read32(DEFAULT_DMIBAR + 0x2c) & ~0x80);
1972 write32(DEFAULT_RCBA + 0x30,
1973 read32(DEFAULT_RCBA + 0x30) & ~0x80);
1974 write32(DEFAULT_DMIBAR + 0x38,
1975 read32(DEFAULT_DMIBAR + 0x38) & ~0x80);
1976 write32(DEFAULT_RCBA + 0x40,
1977 read32(DEFAULT_RCBA + 0x40) & ~0x80);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001978
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001979 write32(DEFAULT_RCBA + 0x40, 0x87000080); // OK
1980 write32(DEFAULT_DMIBAR + 0x38, 0x87000080); // OK
1981 while (read16(DEFAULT_RCBA + 0x46) & 2
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02001982 && read16(DEFAULT_DMIBAR + 0x3e) & 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001983 }
1984
1985 write_mchbar32(0x24, 0x10000 + info->memory_reserved_for_heci_mb);
1986
1987 send_heci_uma_message(info);
1988
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001989 pci_write_config32(HECIDEV, 0x10, 0x0);
1990 pci_write_config8(HECIDEV, 0x4, 0x0);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001991
1992}
1993
1994static int have_match_ranks(struct raminfo *info, int channel, int ranks)
1995{
1996 int ranks_in_channel;
1997 ranks_in_channel = info->populated_ranks[channel][0][0]
1998 + info->populated_ranks[channel][0][1]
1999 + info->populated_ranks[channel][1][0]
2000 + info->populated_ranks[channel][1][1];
2001
2002 /* empty channel */
2003 if (ranks_in_channel == 0)
2004 return 1;
2005
2006 if (ranks_in_channel != ranks)
2007 return 0;
2008 /* single slot */
2009 if (info->populated_ranks[channel][0][0] !=
2010 info->populated_ranks[channel][1][0])
2011 return 1;
2012 if (info->populated_ranks[channel][0][1] !=
2013 info->populated_ranks[channel][1][1])
2014 return 1;
2015 if (info->is_x16_module[channel][0] != info->is_x16_module[channel][1])
2016 return 0;
2017 if (info->density[channel][0] != info->density[channel][1])
2018 return 0;
2019 return 1;
2020}
2021
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002022static void read_4090(struct raminfo *info)
2023{
2024 int i, channel, slot, rank, lane;
2025 for (i = 0; i < 2; i++)
2026 for (slot = 0; slot < NUM_SLOTS; slot++)
2027 for (rank = 0; rank < NUM_RANKS; rank++)
2028 for (lane = 0; lane < 9; lane++)
2029 info->training.
2030 lane_timings[0][i][slot][rank][lane]
2031 = 32;
2032
2033 for (i = 1; i < 4; i++)
2034 for (channel = 0; channel < NUM_CHANNELS; channel++)
2035 for (slot = 0; slot < NUM_SLOTS; slot++)
2036 for (rank = 0; rank < NUM_RANKS; rank++)
2037 for (lane = 0; lane < 9; lane++) {
2038 info->training.
2039 lane_timings[i][channel]
2040 [slot][rank][lane] =
2041 read_500(info, channel,
2042 get_timing_register_addr
2043 (lane, i, slot,
2044 rank), 9)
2045 + (i == 1) * 11; // !!!!
2046 }
2047
2048}
2049
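/* Pattern generator for the first memory test. The address is split
 * into comp4 = addr / 480 and, within each 480-entry block, a bit index
 * (comp1, the low 4 bits), a polarity bit (comp2) and a row of invmask[]
 * (comp3). For comp4 = 1..8 the base pattern is 0x01010101 shifted left
 * by comp4 - 1, i.e. one walking bit position per byte lane, while
 * comp4 = 0 yields an all-zeros/all-ones background; invmask[] then
 * decides per address whether the pattern or its complement is used.
 * For example, get_etalon2(0, 500) decomposes into comp4 = 1, comp1 = 4,
 * comp2 = 1, comp3 = 0 and returns ~0x01010101 = 0xfefefefe. This
 * description is reconstructed from the code itself, not from any
 * reference. */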
2050static u32 get_etalon2(int flip, u32 addr)
2051{
2052 const u16 invmask[] = {
2053 0xaaaa, 0x6db6, 0x4924, 0xeeee, 0xcccc, 0x8888, 0x7bde, 0x739c,
2054 0x6318, 0x4210, 0xefbe, 0xcf3c, 0x8e38, 0x0c30, 0x0820
2055 };
2056 u32 ret;
2057 u32 comp4 = addr / 480;
2058 addr %= 480;
2059 u32 comp1 = addr & 0xf;
2060 u32 comp2 = (addr >> 4) & 1;
2061 u32 comp3 = addr >> 5;
2062
2063 if (comp4)
2064 ret = 0x1010101 << (comp4 - 1);
2065 else
2066 ret = 0;
2067 if (flip ^ (((invmask[comp3] >> comp1) ^ comp2) & 1))
2068 ret = ~ret;
2069
2070 return ret;
2071}
2072
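/* The memory tests below use variable MTRR 3 as a temporary
 * write-protect window over the region under test: reads get cached,
 * writes still go straight to DRAM. enable_cache() rounds the size up
 * to the next 4 KiB boundary for the mask (MTRR_DEF_TYPE_EN works here
 * only because it shares bit 11 with the mask valid bit), and
 * flush_cache() clflushes the window once a pass is done. This is an
 * inference from the code, not a documented requirement. */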
2073static void disable_cache(void)
2074{
2075 msr_t msr = {.lo = 0, .hi = 0 };
2076
Alexandru Gagniuc86091f92015-09-30 20:23:09 -07002077 wrmsr(MTRR_PHYS_BASE(3), msr);
2078 wrmsr(MTRR_PHYS_MASK(3), msr);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002079}
2080
2081static void enable_cache(unsigned int base, unsigned int size)
2082{
2083 msr_t msr;
2084 msr.lo = base | MTRR_TYPE_WRPROT;
2085 msr.hi = 0;
Alexandru Gagniuc86091f92015-09-30 20:23:09 -07002086 wrmsr(MTRR_PHYS_BASE(3), msr);
2087 msr.lo = ((~(ALIGN_DOWN(size + 4096, 4096) - 1) | MTRR_DEF_TYPE_EN)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002088 & 0xffffffff);
2089 msr.hi = 0x0000000f;
Alexandru Gagniuc86091f92015-09-30 20:23:09 -07002090 wrmsr(MTRR_PHYS_MASK(3), msr);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002091}
2092
2093static void flush_cache(u32 start, u32 size)
2094{
2095 u32 end;
2096 u32 addr;
2097
2098 end = start + (ALIGN_DOWN(size + 4096, 4096));
2099 for (addr = start; addr < end; addr += 64)
2100 clflush(addr);
2101}
2102
2103static void clear_errors(void)
2104{
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03002105 pci_write_config8(NORTHBRIDGE, 0xc0, 0x01);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002106}
2107
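/* First test pattern. write_testing() fills 9 * 480 eight-byte units in
 * the 256 MiB window of the rank under test with the etalon2 pattern
 * (each 64-bit unit repeats one 32-bit value) and pokes what looks like
 * an error/status register at NB config offset 0xc0 every 320 writes.
 * check_testing() reads the region back in 128-bit chunks through the
 * write-protect MTRR window and accumulates a per-byte XOR of expected
 * versus read data; the returned failmask has one bit per byte lane.
 * Again, this is how the flow reads, not documented behaviour. */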
2108static void write_testing(struct raminfo *info, int totalrank, int flip)
2109{
2110 int nwrites = 0;
2111 /* in 8-byte units. */
2112 u32 offset;
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002113 u8 *base;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002114
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002115 base = (u8 *)(totalrank << 28);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002116 for (offset = 0; offset < 9 * 480; offset += 2) {
2117 write32(base + offset * 8, get_etalon2(flip, offset));
2118 write32(base + offset * 8 + 4, get_etalon2(flip, offset));
2119 write32(base + offset * 8 + 8, get_etalon2(flip, offset + 1));
2120 write32(base + offset * 8 + 12, get_etalon2(flip, offset + 1));
2121 nwrites += 4;
2122 if (nwrites >= 320) {
2123 clear_errors();
2124 nwrites = 0;
2125 }
2126 }
2127}
2128
2129static u8 check_testing(struct raminfo *info, u8 total_rank, int flip)
2130{
2131 u8 failmask = 0;
2132 int i;
2133 int comp1, comp2, comp3;
2134 u32 failxor[2] = { 0, 0 };
2135
2136 enable_cache((total_rank << 28), 1728 * 5 * 4);
2137
2138 for (comp3 = 0; comp3 < 9 && failmask != 0xff; comp3++) {
2139 for (comp1 = 0; comp1 < 4; comp1++)
2140 for (comp2 = 0; comp2 < 60; comp2++) {
2141 u32 re[4];
2142 u32 curroffset =
2143 comp3 * 8 * 60 + 2 * comp1 + 8 * comp2;
2144 read128((total_rank << 28) | (curroffset << 3),
2145 (u64 *) re);
2146 failxor[0] |=
2147 get_etalon2(flip, curroffset) ^ re[0];
2148 failxor[1] |=
2149 get_etalon2(flip, curroffset) ^ re[1];
2150 failxor[0] |=
2151 get_etalon2(flip, curroffset | 1) ^ re[2];
2152 failxor[1] |=
2153 get_etalon2(flip, curroffset | 1) ^ re[3];
2154 }
2155 for (i = 0; i < 8; i++)
2156 if ((0xff << (8 * (i % 4))) & failxor[i / 4])
2157 failmask |= 1 << i;
2158 }
2159 disable_cache();
2160 flush_cache((total_rank << 28), 1728 * 5 * 4);
2161 return failmask;
2162}
2163
2164const u32 seed1[0x18] = {
2165 0x3a9d5ab5, 0x576cb65b, 0x555773b6, 0x2ab772ee,
2166 0x555556ee, 0x3a9d5ab5, 0x576cb65b, 0x555773b6,
2167 0x2ab772ee, 0x555556ee, 0x5155a555, 0x5155a555,
2168 0x5155a555, 0x5155a555, 0x3a9d5ab5, 0x576cb65b,
2169 0x555773b6, 0x2ab772ee, 0x555556ee, 0x55d6b4a5,
2170 0x366d6b3a, 0x2ae5ddbb, 0x3b9ddbb7, 0x55d6b4a5,
2171};
2172
2173static u32 get_seed2(int a, int b)
2174{
2175 const u32 seed2[5] = {
2176 0x55555555, 0x33333333, 0x2e555a55, 0x55555555,
2177 0x5b6db6db,
2178 };
2179 u32 r;
2180 r = seed2[(a + (a >= 10)) / 5];
2181 return b ? ~r : r;
2182}
2183
2184static int make_shift(int comp2, int comp5, int x)
2185{
2186 const u8 seed3[32] = {
2187 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2188 0x00, 0x00, 0x38, 0x1c, 0x3c, 0x18, 0x38, 0x38,
2189 0x38, 0x38, 0x38, 0x38, 0x0f, 0x0f, 0x0f, 0x0f,
2190 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f,
2191 };
2192
2193 return (comp2 - ((seed3[comp5] >> (x & 7)) & 1)) & 0x1f;
2194}
2195
2196static u32 get_etalon(int flip, u32 addr)
2197{
2198 u32 mask_byte = 0;
2199 int comp1 = (addr >> 1) & 1;
2200 int comp2 = (addr >> 3) & 0x1f;
2201 int comp3 = (addr >> 8) & 0xf;
2202 int comp4 = (addr >> 12) & 0xf;
2203 int comp5 = (addr >> 16) & 0x1f;
2204 u32 mask_bit = ~(0x10001 << comp3);
2205 u32 part1;
2206 u32 part2;
2207 int byte;
2208
2209 part2 =
2210 ((seed1[comp5] >>
2211 make_shift(comp2, comp5,
2212 (comp3 >> 3) | (comp1 << 2) | 2)) & 1) ^ flip;
2213 part1 =
2214 ((seed1[comp5] >>
2215 make_shift(comp2, comp5,
2216 (comp3 >> 3) | (comp1 << 2) | 0)) & 1) ^ flip;
2217
2218 for (byte = 0; byte < 4; byte++)
2219 if ((get_seed2(comp5, comp4) >>
2220 make_shift(comp2, comp5, (byte | (comp1 << 2)))) & 1)
2221 mask_byte |= 0xff << (8 * byte);
2222
2223 return (mask_bit & mask_byte) | (part1 << comp3) | (part2 <<
2224 (comp3 + 16));
2225}
2226
2227static void
2228write_testing_type2(struct raminfo *info, u8 totalrank, u8 region, u8 block,
2229 char flip)
2230{
2231 int i;
2232 for (i = 0; i < 2048; i++)
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002233 write32p((totalrank << 28) | (region << 25) | (block << 16) |
2234 (i << 2), get_etalon(flip, (block << 16) | (i << 2)));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002235}
2236
2237static u8
2238check_testing_type2(struct raminfo *info, u8 totalrank, u8 region, u8 block,
2239 char flip)
2240{
2241 u8 failmask = 0;
2242 u32 failxor[2];
2243 int i;
2244 int comp1, comp2, comp3;
2245
2246 failxor[0] = 0;
2247 failxor[1] = 0;
2248
2249 enable_cache(totalrank << 28, 134217728);
2250 for (comp3 = 0; comp3 < 2 && failmask != 0xff; comp3++) {
2251 for (comp1 = 0; comp1 < 16; comp1++)
2252 for (comp2 = 0; comp2 < 64; comp2++) {
2253 u32 addr =
2254 (totalrank << 28) | (region << 25) | (block
2255 << 16)
2256 | (comp3 << 12) | (comp2 << 6) | (comp1 <<
2257 2);
2258 failxor[comp1 & 1] |=
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002259 read32p(addr) ^ get_etalon(flip, addr);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002260 }
2261 for (i = 0; i < 8; i++)
2262 if ((0xff << (8 * (i % 4))) & failxor[i / 4])
2263 failmask |= 1 << i;
2264 }
2265 disable_cache();
2266 flush_cache((totalrank << 28) | (region << 25) | (block << 16), 16384);
2267 return failmask;
2268}
2269
2270static int check_bounded(unsigned short *vals, u16 bound)
2271{
2272 int i;
2273
2274 for (i = 0; i < 8; i++)
2275 if (vals[i] < bound)
2276 return 0;
2277 return 1;
2278}
2279
2280enum state {
2281 BEFORE_USABLE = 0, AT_USABLE = 1, AT_MARGIN = 2, COMPLETE = 3
2282};
2283
2284static int validate_state(enum state *in)
2285{
2286 int i;
2287 for (i = 0; i < 8; i++)
2288 if (in[i] != COMPLETE)
2289 return 0;
2290 return 1;
2291}
2292
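/* Per-lane window search. Each lane walks a small state machine while
 * the swept parameter "val" increases: it stays in BEFORE_USABLE while
 * the test fails, moves to AT_USABLE on the first pass, and after
 * "margin" consecutive passes records the lower edge
 * (res_low = val - margin + 1) and enters AT_MARGIN. The first failure
 * after that records the upper edge (res_high = val - 1) and completes
 * the lane; reaching "uplimit" while still passing completes it with
 * res_high = uplimit. For example, with margin = 5, passes starting at
 * val = 10 and the first failure at val = 30 give res_low = 10 and
 * res_high = 29. */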
2293static void
2294do_fsm(enum state *state, u16 * counter,
2295 u8 fail_mask, int margin, int uplimit,
2296 u8 * res_low, u8 * res_high, u8 val)
2297{
2298 int lane;
2299
2300 for (lane = 0; lane < 8; lane++) {
2301 int is_fail = (fail_mask >> lane) & 1;
2302 switch (state[lane]) {
2303 case BEFORE_USABLE:
2304 if (!is_fail) {
2305 counter[lane] = 1;
2306 state[lane] = AT_USABLE;
2307 break;
2308 }
2309 counter[lane] = 0;
2310 state[lane] = BEFORE_USABLE;
2311 break;
2312 case AT_USABLE:
2313 if (!is_fail) {
2314 ++counter[lane];
2315 if (counter[lane] >= margin) {
2316 state[lane] = AT_MARGIN;
2317 res_low[lane] = val - margin + 1;
2318 break;
2319 }
2320				state[lane] = AT_USABLE;
2321 break;
2322 }
2323 counter[lane] = 0;
2324 state[lane] = BEFORE_USABLE;
2325 break;
2326 case AT_MARGIN:
2327 if (is_fail) {
2328 state[lane] = COMPLETE;
2329 res_high[lane] = val - 1;
2330 } else {
2331 counter[lane]++;
2332 state[lane] = AT_MARGIN;
2333 if (val == uplimit) {
2334 state[lane] = COMPLETE;
2335 res_high[lane] = uplimit;
2336 }
2337 }
2338 break;
2339 case COMPLETE:
2340 break;
2341 }
2342 }
2343}
2344
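/* Core per-rank training step. For a given reg_178 setting this sweeps
 * a 6-bit value (XORed with 32) through registers 0x1b3/0x1a3, runs the
 * pattern test and feeds the per-lane failmask into do_fsm() with a
 * margin of 5, turning each lane's pass window into
 * timings[reg_178][...] bounds expressed relative to lane_timings[0]
 * minus 32. On non-first runs it additionally nudges the recorded
 * smallest/largest bounds until the type-2 pattern test passes
 * repeatedly on both edges. What 0x1b3/0x1a3 control exactly is not
 * known; this summary is inferred from the code. */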
2345static void
2346train_ram_at_178(struct raminfo *info, u8 channel, int slot, int rank,
2347 u8 total_rank, u8 reg_178, int first_run, int niter,
2348 timing_bounds_t * timings)
2349{
2350 int lane;
2351 enum state state[8];
2352 u16 count[8];
2353 u8 lower_usable[8];
2354 u8 upper_usable[8];
2355 unsigned short num_sucessfully_checked[8];
2356 u8 secondary_total_rank;
2357 u8 reg1b3;
2358
2359 if (info->populated_ranks_mask[1]) {
2360 if (channel == 1)
2361 secondary_total_rank =
2362 info->populated_ranks[1][0][0] +
2363 info->populated_ranks[1][0][1]
2364 + info->populated_ranks[1][1][0] +
2365 info->populated_ranks[1][1][1];
2366 else
2367 secondary_total_rank = 0;
2368 } else
2369 secondary_total_rank = total_rank;
2370
2371 {
2372 int i;
2373 for (i = 0; i < 8; i++)
2374 state[i] = BEFORE_USABLE;
2375 }
2376
2377 if (!first_run) {
2378 int is_all_ok = 1;
2379 for (lane = 0; lane < 8; lane++)
2380 if (timings[reg_178][channel][slot][rank][lane].
2381 smallest ==
2382 timings[reg_178][channel][slot][rank][lane].
2383 largest) {
2384 timings[reg_178][channel][slot][rank][lane].
2385 smallest = 0;
2386 timings[reg_178][channel][slot][rank][lane].
2387 largest = 0;
2388 is_all_ok = 0;
2389 }
2390 if (is_all_ok) {
2391 int i;
2392 for (i = 0; i < 8; i++)
2393 state[i] = COMPLETE;
2394 }
2395 }
2396
2397 for (reg1b3 = 0; reg1b3 < 0x30 && !validate_state(state); reg1b3++) {
2398 u8 failmask = 0;
2399 write_1d0(reg1b3 ^ 32, 0x1b3, 6, 1);
2400 write_1d0(reg1b3 ^ 32, 0x1a3, 6, 1);
2401 failmask = check_testing(info, total_rank, 0);
2402 write_mchbar32(0xfb0, read_mchbar32(0xfb0) | 0x00030000);
2403 do_fsm(state, count, failmask, 5, 47, lower_usable,
2404 upper_usable, reg1b3);
2405 }
2406
2407 if (reg1b3) {
2408 write_1d0(0, 0x1b3, 6, 1);
2409 write_1d0(0, 0x1a3, 6, 1);
2410 for (lane = 0; lane < 8; lane++) {
2411 if (state[lane] == COMPLETE) {
2412 timings[reg_178][channel][slot][rank][lane].
2413 smallest =
2414 lower_usable[lane] +
2415 (info->training.
2416 lane_timings[0][channel][slot][rank][lane]
2417 & 0x3F) - 32;
2418 timings[reg_178][channel][slot][rank][lane].
2419 largest =
2420 upper_usable[lane] +
2421 (info->training.
2422 lane_timings[0][channel][slot][rank][lane]
2423 & 0x3F) - 32;
2424 }
2425 }
2426 }
2427
2428 if (!first_run) {
2429 for (lane = 0; lane < 8; lane++)
2430 if (state[lane] == COMPLETE) {
2431 write_500(info, channel,
2432 timings[reg_178][channel][slot][rank]
2433 [lane].smallest,
2434 get_timing_register_addr(lane, 0,
2435 slot, rank),
2436 9, 1);
2437 write_500(info, channel,
2438 timings[reg_178][channel][slot][rank]
2439 [lane].smallest +
2440 info->training.
2441 lane_timings[1][channel][slot][rank]
2442 [lane]
2443 -
2444 info->training.
2445 lane_timings[0][channel][slot][rank]
2446 [lane], get_timing_register_addr(lane,
2447 1,
2448 slot,
2449 rank),
2450 9, 1);
2451 num_sucessfully_checked[lane] = 0;
2452 } else
2453 num_sucessfully_checked[lane] = -1;
2454
2455 do {
2456 u8 failmask = 0;
2457 int i;
2458 for (i = 0; i < niter; i++) {
2459 if (failmask == 0xFF)
2460 break;
2461 failmask |=
2462 check_testing_type2(info, total_rank, 2, i,
2463 0);
2464 failmask |=
2465 check_testing_type2(info, total_rank, 3, i,
2466 1);
2467 }
2468 write_mchbar32(0xfb0,
2469 read_mchbar32(0xfb0) | 0x00030000);
2470 for (lane = 0; lane < 8; lane++)
2471 if (num_sucessfully_checked[lane] != 0xffff) {
2472 if ((1 << lane) & failmask) {
2473 if (timings[reg_178][channel]
2474 [slot][rank][lane].
2475 largest <=
2476 timings[reg_178][channel]
2477 [slot][rank][lane].smallest)
2478 num_sucessfully_checked
2479 [lane] = -1;
2480 else {
2481 num_sucessfully_checked
2482 [lane] = 0;
2483 timings[reg_178]
2484 [channel][slot]
2485 [rank][lane].
2486 smallest++;
2487 write_500(info, channel,
2488 timings
2489 [reg_178]
2490 [channel]
2491 [slot][rank]
2492 [lane].
2493 smallest,
2494 get_timing_register_addr
2495 (lane, 0,
2496 slot, rank),
2497 9, 1);
2498 write_500(info, channel,
2499 timings
2500 [reg_178]
2501 [channel]
2502 [slot][rank]
2503 [lane].
2504 smallest +
2505 info->
2506 training.
2507 lane_timings
2508 [1][channel]
2509 [slot][rank]
2510 [lane]
2511 -
2512 info->
2513 training.
2514 lane_timings
2515 [0][channel]
2516 [slot][rank]
2517 [lane],
2518 get_timing_register_addr
2519 (lane, 1,
2520 slot, rank),
2521 9, 1);
2522 }
2523 } else
2524 num_sucessfully_checked[lane]++;
2525 }
2526 }
2527 while (!check_bounded(num_sucessfully_checked, 2));
2528
2529 for (lane = 0; lane < 8; lane++)
2530 if (state[lane] == COMPLETE) {
2531 write_500(info, channel,
2532 timings[reg_178][channel][slot][rank]
2533 [lane].largest,
2534 get_timing_register_addr(lane, 0,
2535 slot, rank),
2536 9, 1);
2537 write_500(info, channel,
2538 timings[reg_178][channel][slot][rank]
2539 [lane].largest +
2540 info->training.
2541 lane_timings[1][channel][slot][rank]
2542 [lane]
2543 -
2544 info->training.
2545 lane_timings[0][channel][slot][rank]
2546 [lane], get_timing_register_addr(lane,
2547 1,
2548 slot,
2549 rank),
2550 9, 1);
2551 num_sucessfully_checked[lane] = 0;
2552 } else
2553 num_sucessfully_checked[lane] = -1;
2554
2555 do {
2556 int failmask = 0;
2557 int i;
2558 for (i = 0; i < niter; i++) {
2559 if (failmask == 0xFF)
2560 break;
2561 failmask |=
2562 check_testing_type2(info, total_rank, 2, i,
2563 0);
2564 failmask |=
2565 check_testing_type2(info, total_rank, 3, i,
2566 1);
2567 }
2568
2569 write_mchbar32(0xfb0,
2570 read_mchbar32(0xfb0) | 0x00030000);
2571 for (lane = 0; lane < 8; lane++) {
2572 if (num_sucessfully_checked[lane] != 0xffff) {
2573 if ((1 << lane) & failmask) {
2574 if (timings[reg_178][channel]
2575 [slot][rank][lane].
2576 largest <=
2577 timings[reg_178][channel]
2578 [slot][rank][lane].
2579 smallest) {
2580 num_sucessfully_checked
2581 [lane] = -1;
2582 } else {
2583 num_sucessfully_checked
2584 [lane] = 0;
2585 timings[reg_178]
2586 [channel][slot]
2587 [rank][lane].
2588 largest--;
2589 write_500(info, channel,
2590 timings
2591 [reg_178]
2592 [channel]
2593 [slot][rank]
2594 [lane].
2595 largest,
2596 get_timing_register_addr
2597 (lane, 0,
2598 slot, rank),
2599 9, 1);
2600 write_500(info, channel,
2601 timings
2602 [reg_178]
2603 [channel]
2604 [slot][rank]
2605 [lane].
2606 largest +
2607 info->
2608 training.
2609 lane_timings
2610 [1][channel]
2611 [slot][rank]
2612 [lane]
2613 -
2614 info->
2615 training.
2616 lane_timings
2617 [0][channel]
2618 [slot][rank]
2619 [lane],
2620 get_timing_register_addr
2621 (lane, 1,
2622 slot, rank),
2623 9, 1);
2624 }
2625 } else
2626 num_sucessfully_checked[lane]++;
2627 }
2628 }
2629 }
2630 while (!check_bounded(num_sucessfully_checked, 3));
2631
2632 for (lane = 0; lane < 8; lane++) {
2633 write_500(info, channel,
2634 info->training.
2635 lane_timings[0][channel][slot][rank][lane],
2636 get_timing_register_addr(lane, 0, slot, rank),
2637 9, 1);
2638 write_500(info, channel,
2639 info->training.
2640 lane_timings[1][channel][slot][rank][lane],
2641 get_timing_register_addr(lane, 1, slot, rank),
2642 9, 1);
2643 if (timings[reg_178][channel][slot][rank][lane].
2644 largest <=
2645 timings[reg_178][channel][slot][rank][lane].
2646 smallest) {
2647 timings[reg_178][channel][slot][rank][lane].
2648 largest = 0;
2649 timings[reg_178][channel][slot][rank][lane].
2650 smallest = 0;
2651 }
2652 }
2653 }
2654}
2655
2656static void set_10b(struct raminfo *info, u8 val)
2657{
2658 int channel;
2659 int slot, rank;
2660 int lane;
2661
2662 if (read_1d0(0x10b, 6) == val)
2663 return;
2664
2665 write_1d0(val, 0x10b, 6, 1);
2666
2667 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 9; lane++) {
2668 u16 reg_500;
2669 reg_500 = read_500(info, channel,
2670 get_timing_register_addr(lane, 0, slot,
2671 rank), 9);
2672 if (val == 1) {
2673 if (lut16[info->clock_speed_index] <= reg_500)
2674 reg_500 -= lut16[info->clock_speed_index];
2675 else
2676 reg_500 = 0;
2677 } else {
2678 reg_500 += lut16[info->clock_speed_index];
2679 }
2680 write_500(info, channel, reg_500,
2681 get_timing_register_addr(lane, 0, slot, rank), 9, 1);
2682 }
2683}
2684
2685static void set_ecc(int onoff)
2686{
2687 int channel;
2688 for (channel = 0; channel < NUM_CHANNELS; channel++) {
2689 u8 t;
2690 t = read_mchbar8((channel << 10) + 0x5f8);
2691 if (onoff)
2692 t |= 1;
2693 else
2694 t &= ~1;
2695 write_mchbar8((channel << 10) + 0x5f8, t);
2696 }
2697}
2698
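/* The value programmed into 0x178 looks like a folded encoding of a
 * signed offset around 31: inputs >= 31 are written as 2 * (val - 31),
 * inputs below 31 as 2 * (63 - val), so set_178(31) writes 0 and
 * set_178(30) writes 66. Inferred from the arithmetic only. */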
2699static void set_178(u8 val)
2700{
2701 if (val >= 31)
2702 val = val - 31;
2703 else
2704 val = 63 - val;
2705
2706 write_1d0(2 * val, 0x178, 7, 1);
2707}
2708
2709static void
2710write_500_timings_type(struct raminfo *info, int channel, int slot, int rank,
2711 int type)
2712{
2713 int lane;
2714
2715 for (lane = 0; lane < 8; lane++)
2716 write_500(info, channel,
2717 info->training.
2718 lane_timings[type][channel][slot][rank][lane],
2719 get_timing_register_addr(lane, type, slot, rank), 9,
2720 0);
2721}
2722
2723static void
2724try_timing_offsets(struct raminfo *info, int channel,
2725 int slot, int rank, int totalrank)
2726{
2727 u16 count[8];
2728 enum state state[8];
2729 u8 lower_usable[8], upper_usable[8];
2730 int lane;
2731 int i;
2732 int flip = 1;
2733 int timing_offset;
2734
2735 for (i = 0; i < 8; i++)
2736 state[i] = BEFORE_USABLE;
2737
2738 memset(count, 0, sizeof(count));
2739
2740 for (lane = 0; lane < 8; lane++)
2741 write_500(info, channel,
2742 info->training.
2743 lane_timings[2][channel][slot][rank][lane] + 32,
2744 get_timing_register_addr(lane, 3, slot, rank), 9, 1);
2745
2746 for (timing_offset = 0; !validate_state(state) && timing_offset < 64;
2747 timing_offset++) {
2748 u8 failmask;
2749 write_1d0(timing_offset ^ 32, 0x1bb, 6, 1);
2750 failmask = 0;
2751 for (i = 0; i < 2 && failmask != 0xff; i++) {
2752 flip = !flip;
2753 write_testing(info, totalrank, flip);
2754 failmask |= check_testing(info, totalrank, flip);
2755 }
2756 do_fsm(state, count, failmask, 10, 63, lower_usable,
2757 upper_usable, timing_offset);
2758 }
2759 write_1d0(0, 0x1bb, 6, 1);
2760 dump_timings(info);
2761 if (!validate_state(state))
2762 die("Couldn't discover DRAM timings (1)\n");
2763
2764 for (lane = 0; lane < 8; lane++) {
2765 u8 bias = 0;
2766
2767 if (info->silicon_revision) {
2768 int usable_length;
2769
2770 usable_length = upper_usable[lane] - lower_usable[lane];
2771 if (usable_length >= 20) {
2772 bias = usable_length / 2 - 10;
2773 if (bias >= 2)
2774 bias = 2;
2775 }
2776 }
2777 write_500(info, channel,
2778 info->training.
2779 lane_timings[2][channel][slot][rank][lane] +
2780 (upper_usable[lane] + lower_usable[lane]) / 2 - bias,
2781 get_timing_register_addr(lane, 3, slot, rank), 9, 1);
2782 info->training.timing2_bounds[channel][slot][rank][lane][0] =
2783 info->training.lane_timings[2][channel][slot][rank][lane] +
2784 lower_usable[lane];
2785 info->training.timing2_bounds[channel][slot][rank][lane][1] =
2786 info->training.lane_timings[2][channel][slot][rank][lane] +
2787 upper_usable[lane];
2788 info->training.timing2_offset[channel][slot][rank][lane] =
2789 info->training.lane_timings[2][channel][slot][rank][lane];
2790 }
2791}
2792
2793static u8
2794choose_training(struct raminfo *info, int channel, int slot, int rank,
2795 int lane, timing_bounds_t * timings, u8 center_178)
2796{
2797 u16 central_weight;
2798 u16 side_weight;
2799 unsigned int sum = 0, count = 0;
2800 u8 span;
2801 u8 lower_margin, upper_margin;
2802 u8 reg_178;
2803 u8 result;
2804
2805 span = 12;
2806 central_weight = 20;
2807 side_weight = 20;
2808 if (info->silicon_revision == 1 && channel == 1) {
2809 central_weight = 5;
2810 side_weight = 20;
2811 if ((info->
2812 populated_ranks_mask[1] ^ (info->
2813 populated_ranks_mask[1] >> 2)) &
2814 1)
2815 span = 18;
2816 }
2817 if ((info->populated_ranks_mask[0] & 5) == 5) {
2818 central_weight = 20;
2819 side_weight = 20;
2820 }
2821 if (info->clock_speed_index >= 2
2822 && (info->populated_ranks_mask[0] & 5) == 5 && slot == 1) {
2823 if (info->silicon_revision == 1) {
2824 switch (channel) {
2825 case 0:
2826 if (lane == 1) {
2827 central_weight = 10;
2828 side_weight = 20;
2829 }
2830 break;
2831 case 1:
2832 if (lane == 6) {
2833 side_weight = 5;
2834 central_weight = 20;
2835 }
2836 break;
2837 }
2838 }
2839 if (info->silicon_revision == 0 && channel == 0 && lane == 0) {
2840 side_weight = 5;
2841 central_weight = 20;
2842 }
2843 }
2844 for (reg_178 = center_178 - span; reg_178 <= center_178 + span;
2845 reg_178 += span) {
2846 u8 smallest;
2847 u8 largest;
2848 largest = timings[reg_178][channel][slot][rank][lane].largest;
2849 smallest = timings[reg_178][channel][slot][rank][lane].smallest;
2850 if (largest - smallest + 1 >= 5) {
2851 unsigned int weight;
2852 if (reg_178 == center_178)
2853 weight = central_weight;
2854 else
2855 weight = side_weight;
2856 sum += weight * (largest + smallest);
2857 count += weight;
2858 }
2859 }
2860 dump_timings(info);
2861 if (count == 0)
2862 die("Couldn't discover DRAM timings (2)\n");
2863 result = sum / (2 * count);
2864 lower_margin =
2865 result - timings[center_178][channel][slot][rank][lane].smallest;
2866 upper_margin =
2867 timings[center_178][channel][slot][rank][lane].largest - result;
2868 if (upper_margin < 10 && lower_margin > 10)
2869 result -= min(lower_margin - 10, 10 - upper_margin);
2870 if (upper_margin > 10 && lower_margin < 10)
2871 result += min(upper_margin - 10, 10 - lower_margin);
2872 return result;
2873}
2874
2875#define STANDARD_MIN_MARGIN 5
2876
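/* reg178 selection. For every candidate setting the smallest per-lane
 * eye width becomes its margin; settings with a margin of at least
 * STANDARD_MIN_MARGIN contribute to a weighted average (weight =
 * margin - STANDARD_MIN_MARGIN) that is returned as the center. A
 * second pass then lowers a threshold from 30 down to 5 until the
 * qualifying settings span at least 0x21 worth of reg178 steps and
 * records the first/last of them (pulled inward by one or two steps) as
 * reg178_smallest/reg178_largest for later verification. This reading
 * of the code is not backed by documentation. */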
2877static u8 choose_reg178(struct raminfo *info, timing_bounds_t * timings)
2878{
2879 u16 margin[64];
2880 int lane, rank, slot, channel;
2881 u8 reg178;
2882 int count = 0, sum = 0;
2883
2884 for (reg178 = reg178_min[info->clock_speed_index];
2885 reg178 < reg178_max[info->clock_speed_index];
2886 reg178 += reg178_step[info->clock_speed_index]) {
2887 margin[reg178] = -1;
2888 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 8; lane++) {
2889 int curmargin =
2890 timings[reg178][channel][slot][rank][lane].largest -
2891 timings[reg178][channel][slot][rank][lane].
2892 smallest + 1;
2893 if (curmargin < margin[reg178])
2894 margin[reg178] = curmargin;
2895 }
2896 if (margin[reg178] >= STANDARD_MIN_MARGIN) {
2897 u16 weight;
2898 weight = margin[reg178] - STANDARD_MIN_MARGIN;
2899 sum += weight * reg178;
2900 count += weight;
2901 }
2902 }
2903 dump_timings(info);
2904 if (count == 0)
2905 die("Couldn't discover DRAM timings (3)\n");
2906
2907 u8 threshold;
2908
2909 for (threshold = 30; threshold >= 5; threshold--) {
2910 int usable_length = 0;
2911 int smallest_fount = 0;
2912 for (reg178 = reg178_min[info->clock_speed_index];
2913 reg178 < reg178_max[info->clock_speed_index];
2914 reg178 += reg178_step[info->clock_speed_index])
2915 if (margin[reg178] >= threshold) {
2916 usable_length +=
2917 reg178_step[info->clock_speed_index];
2918 info->training.reg178_largest =
2919 reg178 -
2920 2 * reg178_step[info->clock_speed_index];
2921
2922 if (!smallest_fount) {
2923 smallest_fount = 1;
2924 info->training.reg178_smallest =
2925 reg178 +
2926 reg178_step[info->
2927 clock_speed_index];
2928 }
2929 }
2930 if (usable_length >= 0x21)
2931 break;
2932 }
2933
2934 return sum / count;
2935}
2936
2937static int check_cached_sanity(struct raminfo *info)
2938{
2939 int lane;
2940 int slot, rank;
2941 int channel;
2942
2943 if (!info->cached_training)
2944 return 0;
2945
2946 for (channel = 0; channel < NUM_CHANNELS; channel++)
2947 for (slot = 0; slot < NUM_SLOTS; slot++)
2948 for (rank = 0; rank < NUM_RANKS; rank++)
2949 for (lane = 0; lane < 8 + info->use_ecc; lane++) {
2950 u16 cached_value, estimation_value;
2951 cached_value =
2952 info->cached_training->
2953 lane_timings[1][channel][slot][rank]
2954 [lane];
2955 if (cached_value >= 0x18
2956 && cached_value <= 0x1E7) {
2957 estimation_value =
2958 info->training.
2959 lane_timings[1][channel]
2960 [slot][rank][lane];
2961 if (estimation_value <
2962 cached_value - 24)
2963 return 0;
2964 if (estimation_value >
2965 cached_value + 24)
2966 return 0;
2967 }
2968 }
2969 return 1;
2970}
2971
2972static int try_cached_training(struct raminfo *info)
2973{
2974 u8 saved_243[2];
2975 u8 tm;
2976
2977 int channel, slot, rank, lane;
2978 int flip = 1;
2979 int i, j;
2980
2981 if (!check_cached_sanity(info))
2982 return 0;
2983
2984 info->training.reg178_center = info->cached_training->reg178_center;
2985 info->training.reg178_smallest = info->cached_training->reg178_smallest;
2986 info->training.reg178_largest = info->cached_training->reg178_largest;
2987 memcpy(&info->training.timing_bounds,
2988 &info->cached_training->timing_bounds,
2989 sizeof(info->training.timing_bounds));
2990 memcpy(&info->training.timing_offset,
2991 &info->cached_training->timing_offset,
2992 sizeof(info->training.timing_offset));
2993
2994 write_1d0(2, 0x142, 3, 1);
2995 saved_243[0] = read_mchbar8(0x243);
2996 saved_243[1] = read_mchbar8(0x643);
2997 write_mchbar8(0x243, saved_243[0] | 2);
2998 write_mchbar8(0x643, saved_243[1] | 2);
2999 set_ecc(0);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003000 pci_write_config16(NORTHBRIDGE, 0xc8, 3);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003001 if (read_1d0(0x10b, 6) & 1)
3002 set_10b(info, 0);
3003 for (tm = 0; tm < 2; tm++) {
3004 int totalrank;
3005
3006 set_178(tm ? info->cached_training->reg178_largest : info->
3007 cached_training->reg178_smallest);
3008
3009 totalrank = 0;
3010		/* Check timing ranges. With i == 0 we check the smallest bound
3011		   and with i == 1 the largest bound. With j == 0 we check that
3012		   the test still passes on the bound itself, whereas with j == 1
3013		   we check that it fails just outside of the bound.
3014		 */
3015 FOR_POPULATED_RANKS_BACKWARDS {
3016 for (i = 0; i < 2; i++) {
3017 for (lane = 0; lane < 8; lane++) {
3018 write_500(info, channel,
3019 info->cached_training->
3020 timing2_bounds[channel][slot]
3021 [rank][lane][i],
3022 get_timing_register_addr(lane,
3023 3,
3024 slot,
3025 rank),
3026 9, 1);
3027
3028 if (!i)
3029 write_500(info, channel,
3030 info->
3031 cached_training->
3032 timing2_offset
3033 [channel][slot][rank]
3034 [lane],
3035 get_timing_register_addr
3036 (lane, 2, slot, rank),
3037 9, 1);
3038 write_500(info, channel,
3039 i ? info->cached_training->
3040 timing_bounds[tm][channel]
3041 [slot][rank][lane].
3042 largest : info->
3043 cached_training->
3044 timing_bounds[tm][channel]
3045 [slot][rank][lane].smallest,
3046 get_timing_register_addr(lane,
3047 0,
3048 slot,
3049 rank),
3050 9, 1);
3051 write_500(info, channel,
3052 info->cached_training->
3053 timing_offset[channel][slot]
3054 [rank][lane] +
3055 (i ? info->cached_training->
3056 timing_bounds[tm][channel]
3057 [slot][rank][lane].
3058 largest : info->
3059 cached_training->
3060 timing_bounds[tm][channel]
3061 [slot][rank][lane].
3062 smallest) - 64,
3063 get_timing_register_addr(lane,
3064 1,
3065 slot,
3066 rank),
3067 9, 1);
3068 }
3069 for (j = 0; j < 2; j++) {
3070 u8 failmask;
3071 u8 expected_failmask;
3072 char reg1b3;
3073
3074 reg1b3 = (j == 1) + 4;
3075 reg1b3 =
3076 j == i ? reg1b3 : (-reg1b3) & 0x3f;
3077 write_1d0(reg1b3, 0x1bb, 6, 1);
3078 write_1d0(reg1b3, 0x1b3, 6, 1);
3079 write_1d0(reg1b3, 0x1a3, 6, 1);
3080
3081 flip = !flip;
3082 write_testing(info, totalrank, flip);
3083 failmask =
3084 check_testing(info, totalrank,
3085 flip);
3086 expected_failmask =
3087 j == 0 ? 0x00 : 0xff;
3088 if (failmask != expected_failmask)
3089 goto fail;
3090 }
3091 }
3092 totalrank++;
3093 }
3094 }
3095
3096 set_178(info->cached_training->reg178_center);
3097 if (info->use_ecc)
3098 set_ecc(1);
3099 write_training_data(info);
3100	write_1d0(0, 0x142, 3, 1);
3101 info->training = *info->cached_training;
3102
3103 write_1d0(0, 0x1bb, 6, 1);
3104 write_1d0(0, 0x1b3, 6, 1);
3105 write_1d0(0, 0x1a3, 6, 1);
3106 write_mchbar8(0x243, saved_243[0]);
3107 write_mchbar8(0x643, saved_243[1]);
3108
3109 return 1;
3110
3111fail:
3112 FOR_POPULATED_RANKS {
3113 write_500_timings_type(info, channel, slot, rank, 1);
3114 write_500_timings_type(info, channel, slot, rank, 2);
3115 write_500_timings_type(info, channel, slot, rank, 3);
3116 }
3117
3118 write_1d0(0, 0x1bb, 6, 1);
3119 write_1d0(0, 0x1b3, 6, 1);
3120 write_1d0(0, 0x1a3, 6, 1);
3121 write_mchbar8(0x243, saved_243[0]);
3122 write_mchbar8(0x643, saved_243[1]);
3123
3124 return 0;
3125}
3126
3127static void do_ram_training(struct raminfo *info)
3128{
3129 u8 saved_243[2];
3130 int totalrank = 0;
3131 u8 reg_178;
3132 int niter;
3133
Matthias Gazzaridfa51252018-05-19 00:44:20 +02003134 timing_bounds_t *timings = timings_car;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003135 int lane, rank, slot, channel;
3136 u8 reg178_center;
3137
3138 write_1d0(2, 0x142, 3, 1);
3139 saved_243[0] = read_mchbar8(0x243);
3140 saved_243[1] = read_mchbar8(0x643);
3141 write_mchbar8(0x243, saved_243[0] | 2);
3142 write_mchbar8(0x643, saved_243[1] | 2);
3143 switch (info->clock_speed_index) {
3144 case 0:
3145 niter = 5;
3146 break;
3147 case 1:
3148 niter = 10;
3149 break;
3150 default:
3151 niter = 19;
3152 break;
3153 }
3154 set_ecc(0);
3155
3156 FOR_POPULATED_RANKS_BACKWARDS {
3157 int i;
3158
3159 write_500_timings_type(info, channel, slot, rank, 0);
3160
3161 write_testing(info, totalrank, 0);
3162 for (i = 0; i < niter; i++) {
3163 write_testing_type2(info, totalrank, 2, i, 0);
3164 write_testing_type2(info, totalrank, 3, i, 1);
3165 }
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003166 pci_write_config8(NORTHBRIDGE, 0xc0, 0x01);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003167 totalrank++;
3168 }
3169
3170 if (reg178_min[info->clock_speed_index] <
3171 reg178_max[info->clock_speed_index])
3172 memset(timings[reg178_min[info->clock_speed_index]], 0,
3173 sizeof(timings[0]) *
3174 (reg178_max[info->clock_speed_index] -
3175 reg178_min[info->clock_speed_index]));
3176 for (reg_178 = reg178_min[info->clock_speed_index];
3177 reg_178 < reg178_max[info->clock_speed_index];
3178 reg_178 += reg178_step[info->clock_speed_index]) {
3179 totalrank = 0;
3180 set_178(reg_178);
3181 for (channel = NUM_CHANNELS - 1; channel >= 0; channel--)
3182 for (slot = 0; slot < NUM_SLOTS; slot++)
3183 for (rank = 0; rank < NUM_RANKS; rank++) {
3184 memset(&timings[reg_178][channel][slot]
3185 [rank][0].smallest, 0, 16);
3186 if (info->
3187 populated_ranks[channel][slot]
3188 [rank]) {
3189 train_ram_at_178(info, channel,
3190 slot, rank,
3191 totalrank,
3192 reg_178, 1,
3193 niter,
3194 timings);
3195 totalrank++;
3196 }
3197 }
3198 }
3199
3200 reg178_center = choose_reg178(info, timings);
3201
3202 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 8; lane++) {
3203 info->training.timing_bounds[0][channel][slot][rank][lane].
3204 smallest =
3205 timings[info->training.
3206 reg178_smallest][channel][slot][rank][lane].
3207 smallest;
3208 info->training.timing_bounds[0][channel][slot][rank][lane].
3209 largest =
3210 timings[info->training.
3211 reg178_smallest][channel][slot][rank][lane].largest;
3212 info->training.timing_bounds[1][channel][slot][rank][lane].
3213 smallest =
3214 timings[info->training.
3215 reg178_largest][channel][slot][rank][lane].smallest;
3216 info->training.timing_bounds[1][channel][slot][rank][lane].
3217 largest =
3218 timings[info->training.
3219 reg178_largest][channel][slot][rank][lane].largest;
3220 info->training.timing_offset[channel][slot][rank][lane] =
3221 info->training.lane_timings[1][channel][slot][rank][lane]
3222 -
3223 info->training.lane_timings[0][channel][slot][rank][lane] +
3224 64;
3225 }
3226
3227 if (info->silicon_revision == 1
3228 && (info->
3229 populated_ranks_mask[1] ^ (info->
3230 populated_ranks_mask[1] >> 2)) & 1) {
3231 int ranks_after_channel1;
3232
3233 totalrank = 0;
3234 for (reg_178 = reg178_center - 18;
3235 reg_178 <= reg178_center + 18; reg_178 += 18) {
3236 totalrank = 0;
3237 set_178(reg_178);
3238 for (slot = 0; slot < NUM_SLOTS; slot++)
3239 for (rank = 0; rank < NUM_RANKS; rank++) {
3240 if (info->
3241 populated_ranks[1][slot][rank]) {
3242 train_ram_at_178(info, 1, slot,
3243 rank,
3244 totalrank,
3245 reg_178, 0,
3246 niter,
3247 timings);
3248 totalrank++;
3249 }
3250 }
3251 }
3252 ranks_after_channel1 = totalrank;
3253
3254 for (reg_178 = reg178_center - 12;
3255 reg_178 <= reg178_center + 12; reg_178 += 12) {
3256 totalrank = ranks_after_channel1;
3257 set_178(reg_178);
3258 for (slot = 0; slot < NUM_SLOTS; slot++)
3259 for (rank = 0; rank < NUM_RANKS; rank++)
3260 if (info->
3261 populated_ranks[0][slot][rank]) {
3262 train_ram_at_178(info, 0, slot,
3263 rank,
3264 totalrank,
3265 reg_178, 0,
3266 niter,
3267 timings);
3268 totalrank++;
3269 }
3270
3271 }
3272 } else {
3273 for (reg_178 = reg178_center - 12;
3274 reg_178 <= reg178_center + 12; reg_178 += 12) {
3275 totalrank = 0;
3276 set_178(reg_178);
3277 FOR_POPULATED_RANKS_BACKWARDS {
3278 train_ram_at_178(info, channel, slot, rank,
3279 totalrank, reg_178, 0, niter,
3280 timings);
3281 totalrank++;
3282 }
3283 }
3284 }
3285
3286 set_178(reg178_center);
3287 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 8; lane++) {
3288 u16 tm0;
3289
3290 tm0 =
3291 choose_training(info, channel, slot, rank, lane, timings,
3292 reg178_center);
3293 write_500(info, channel, tm0,
3294 get_timing_register_addr(lane, 0, slot, rank), 9, 1);
3295 write_500(info, channel,
3296 tm0 +
3297 info->training.
3298 lane_timings[1][channel][slot][rank][lane] -
3299 info->training.
3300 lane_timings[0][channel][slot][rank][lane],
3301 get_timing_register_addr(lane, 1, slot, rank), 9, 1);
3302 }
3303
3304 totalrank = 0;
3305 FOR_POPULATED_RANKS_BACKWARDS {
3306 try_timing_offsets(info, channel, slot, rank, totalrank);
3307 totalrank++;
3308 }
3309 write_mchbar8(0x243, saved_243[0]);
3310 write_mchbar8(0x643, saved_243[1]);
3311 write_1d0(0, 0x142, 3, 1);
3312 info->training.reg178_center = reg178_center;
3313}
3314
3315static void ram_training(struct raminfo *info)
3316{
3317 u16 saved_fc4;
3318
3319 saved_fc4 = read_mchbar16(0xfc4);
3320 write_mchbar16(0xfc4, 0xffff);
3321
3322 if (info->revision >= 8)
3323 read_4090(info);
3324
3325 if (!try_cached_training(info))
3326 do_ram_training(info);
3327 if ((info->silicon_revision == 2 || info->silicon_revision == 3)
3328 && info->clock_speed_index < 2)
3329 set_10b(info, 1);
3330 write_mchbar16(0xfc4, saved_fc4);
3331}
3332
3333static unsigned gcd(unsigned a, unsigned b)
3334{
3335 unsigned t;
3336 if (a > b) {
3337 t = a;
3338 a = b;
3339 b = t;
3340 }
3341 /* invariant a < b. */
3342 while (a) {
3343 t = b % a;
3344 b = a;
3345 a = t;
3346 }
3347 return b;
3348}
3349
3350static inline int div_roundup(int a, int b)
3351{
Edward O'Callaghan7116ac82014-07-08 01:53:24 +10003352 return CEIL_DIV(a, b);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003353}
3354
3355static unsigned lcm(unsigned a, unsigned b)
3356{
3357 return (a * b) / gcd(a, b);
3358}
3359
3360struct stru1 {
3361 u8 freqs_reversed;
3362 u8 freq_diff_reduced;
3363 u8 freq_min_reduced;
3364 u8 divisor_f4_to_fmax;
3365 u8 divisor_f3_to_fmax;
3366 u8 freq4_to_max_remainder;
3367 u8 freq3_to_2_remainder;
3368 u8 freq3_to_2_remaindera;
3369 u8 freq4_to_2_remainder;
3370 int divisor_f3_to_f1, divisor_f4_to_f2;
3371 int common_time_unit_ps;
3372 int freq_max_reduced;
3373};
3374
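/* Helper for the 2dxx/6dx timing registers. compute_frequence_ratios()
 * reduces the two frequency codes by their gcd and derives a common
 * time unit of div_roundup(900000, lcm(freq1, freq2)); the constant
 * 900000 is taken at face value from the code. The delay arguments are
 * then converted into that unit and split into the divisor and
 * remainder terms relative to each clock that struct stru1 carries. As
 * a purely arithmetic example, freq1 = 120 and freq2 = 60 reduce to 2
 * and 1 with lcm 120, giving a common time unit of 7500. */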
3375static void
3376compute_frequence_ratios(struct raminfo *info, u16 freq1, u16 freq2,
3377 int num_cycles_2, int num_cycles_1, int round_it,
3378 int add_freqs, struct stru1 *result)
3379{
3380 int g;
3381 int common_time_unit_ps;
3382 int freq1_reduced, freq2_reduced;
3383 int freq_min_reduced;
3384 int freq_max_reduced;
3385 int freq3, freq4;
3386
3387 g = gcd(freq1, freq2);
3388 freq1_reduced = freq1 / g;
3389 freq2_reduced = freq2 / g;
3390 freq_min_reduced = min(freq1_reduced, freq2_reduced);
3391 freq_max_reduced = max(freq1_reduced, freq2_reduced);
3392
3393 common_time_unit_ps = div_roundup(900000, lcm(freq1, freq2));
3394 freq3 = div_roundup(num_cycles_2, common_time_unit_ps) - 1;
3395 freq4 = div_roundup(num_cycles_1, common_time_unit_ps) - 1;
3396 if (add_freqs) {
3397 freq3 += freq2_reduced;
3398 freq4 += freq1_reduced;
3399 }
3400
3401 if (round_it) {
3402 result->freq3_to_2_remainder = 0;
3403 result->freq3_to_2_remaindera = 0;
3404 result->freq4_to_max_remainder = 0;
3405 result->divisor_f4_to_f2 = 0;
3406 result->divisor_f3_to_f1 = 0;
3407 } else {
3408 if (freq2_reduced < freq1_reduced) {
3409 result->freq3_to_2_remainder =
3410 result->freq3_to_2_remaindera =
3411 freq3 % freq1_reduced - freq1_reduced + 1;
3412 result->freq4_to_max_remainder =
3413 -(freq4 % freq1_reduced);
3414 result->divisor_f3_to_f1 = freq3 / freq1_reduced;
3415 result->divisor_f4_to_f2 =
3416 (freq4 -
3417 (freq1_reduced - freq2_reduced)) / freq2_reduced;
3418 result->freq4_to_2_remainder =
3419 -(char)((freq1_reduced - freq2_reduced) +
3420 ((u8) freq4 -
3421 (freq1_reduced -
3422 freq2_reduced)) % (u8) freq2_reduced);
3423 } else {
3424 if (freq2_reduced > freq1_reduced) {
3425 result->freq4_to_max_remainder =
3426 (freq4 % freq2_reduced) - freq2_reduced + 1;
3427 result->freq4_to_2_remainder =
3428 freq4 % freq_max_reduced -
3429 freq_max_reduced + 1;
3430 } else {
3431 result->freq4_to_max_remainder =
3432 -(freq4 % freq2_reduced);
3433 result->freq4_to_2_remainder =
3434 -(char)(freq4 % freq_max_reduced);
3435 }
3436 result->divisor_f4_to_f2 = freq4 / freq2_reduced;
3437 result->divisor_f3_to_f1 =
3438 (freq3 -
3439 (freq2_reduced - freq1_reduced)) / freq1_reduced;
3440 result->freq3_to_2_remainder = -(freq3 % freq2_reduced);
3441 result->freq3_to_2_remaindera =
3442 -(char)((freq_max_reduced - freq_min_reduced) +
3443 (freq3 -
3444 (freq_max_reduced -
3445 freq_min_reduced)) % freq1_reduced);
3446 }
3447 }
3448 result->divisor_f3_to_fmax = freq3 / freq_max_reduced;
3449 result->divisor_f4_to_fmax = freq4 / freq_max_reduced;
3450 if (round_it) {
3451 if (freq2_reduced > freq1_reduced) {
3452 if (freq3 % freq_max_reduced)
3453 result->divisor_f3_to_fmax++;
3454 }
3455 if (freq2_reduced < freq1_reduced) {
3456 if (freq4 % freq_max_reduced)
3457 result->divisor_f4_to_fmax++;
3458 }
3459 }
3460 result->freqs_reversed = (freq2_reduced < freq1_reduced);
3461 result->freq_diff_reduced = freq_max_reduced - freq_min_reduced;
3462 result->freq_min_reduced = freq_min_reduced;
3463 result->common_time_unit_ps = common_time_unit_ps;
3464 result->freq_max_reduced = freq_max_reduced;
3465}
3466
3467static void
3468set_2d5x_reg(struct raminfo *info, u16 reg, u16 freq1, u16 freq2,
3469 int num_cycles_2, int num_cycles_1, int num_cycles_3,
3470 int num_cycles_4, int reverse)
3471{
3472 struct stru1 vv;
3473 char multiplier;
3474
3475 compute_frequence_ratios(info, freq1, freq2, num_cycles_2, num_cycles_1,
3476 0, 1, &vv);
3477
3478 multiplier =
3479 div_roundup(max
3480 (div_roundup(num_cycles_2, vv.common_time_unit_ps) +
3481 div_roundup(num_cycles_3, vv.common_time_unit_ps),
3482 div_roundup(num_cycles_1,
3483 vv.common_time_unit_ps) +
3484 div_roundup(num_cycles_4, vv.common_time_unit_ps))
3485 + vv.freq_min_reduced - 1, vv.freq_max_reduced) - 1;
3486
3487 u32 y =
3488 (u8) ((vv.freq_max_reduced - vv.freq_min_reduced) +
3489 vv.freq_max_reduced * multiplier)
3490 | (vv.
3491 freqs_reversed << 8) | ((u8) (vv.freq_min_reduced *
3492 multiplier) << 16) | ((u8) (vv.
3493 freq_min_reduced
3494 *
3495 multiplier)
3496 << 24);
3497 u32 x =
3498 vv.freq3_to_2_remaindera | (vv.freq4_to_2_remainder << 8) | (vv.
3499 divisor_f3_to_f1
3500 << 16)
3501 | (vv.divisor_f4_to_f2 << 20) | (vv.freq_min_reduced << 24);
3502 if (reverse) {
3503 write_mchbar32(reg, y);
3504 write_mchbar32(reg + 4, x);
3505 } else {
3506 write_mchbar32(reg + 4, y);
3507 write_mchbar32(reg, x);
3508 }
3509}
3510
3511static void
3512set_6d_reg(struct raminfo *info, u16 reg, u16 freq1, u16 freq2,
3513 int num_cycles_1, int num_cycles_2, int num_cycles_3,
3514 int num_cycles_4)
3515{
3516 struct stru1 ratios1;
3517 struct stru1 ratios2;
3518
3519 compute_frequence_ratios(info, freq1, freq2, num_cycles_1, num_cycles_2,
3520 0, 1, &ratios2);
3521 compute_frequence_ratios(info, freq1, freq2, num_cycles_3, num_cycles_4,
3522 0, 1, &ratios1);
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003523 printk (BIOS_SPEW, "[%x] <= %x\n", reg,
3524 ratios1.freq4_to_max_remainder | (ratios2.
3525 freq4_to_max_remainder
3526 << 8)
3527 | (ratios1.divisor_f4_to_fmax << 16) | (ratios2.
3528 divisor_f4_to_fmax
3529 << 20));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003530 write_mchbar32(reg,
3531 ratios1.freq4_to_max_remainder | (ratios2.
3532 freq4_to_max_remainder
3533 << 8)
3534 | (ratios1.divisor_f4_to_fmax << 16) | (ratios2.
3535 divisor_f4_to_fmax
3536 << 20));
3537}
3538
3539static void
3540set_2dx8_reg(struct raminfo *info, u16 reg, u8 mode, u16 freq1, u16 freq2,
3541 int num_cycles_2, int num_cycles_1, int round_it, int add_freqs)
3542{
3543 struct stru1 ratios;
3544
3545 compute_frequence_ratios(info, freq1, freq2, num_cycles_2, num_cycles_1,
3546 round_it, add_freqs, &ratios);
3547 switch (mode) {
3548 case 0:
3549 write_mchbar32(reg + 4,
3550 ratios.freq_diff_reduced | (ratios.
3551 freqs_reversed <<
3552 8));
3553 write_mchbar32(reg,
3554 ratios.freq3_to_2_remainder | (ratios.
3555 freq4_to_max_remainder
3556 << 8)
3557 | (ratios.divisor_f3_to_fmax << 16) | (ratios.
3558 divisor_f4_to_fmax
3559 << 20) |
3560 (ratios.freq_min_reduced << 24));
3561 break;
3562
3563 case 1:
3564 write_mchbar32(reg,
3565 ratios.freq3_to_2_remainder | (ratios.
3566 divisor_f3_to_fmax
3567 << 16));
3568 break;
3569
3570 case 2:
3571 write_mchbar32(reg,
3572 ratios.freq3_to_2_remainder | (ratios.
3573 freq4_to_max_remainder
3574 << 8) | (ratios.
3575 divisor_f3_to_fmax
3576 << 16) |
3577 (ratios.divisor_f4_to_fmax << 20));
3578 break;
3579
3580 case 4:
3581 write_mchbar32(reg, (ratios.divisor_f3_to_fmax << 4)
3582 | (ratios.divisor_f4_to_fmax << 8) | (ratios.
3583 freqs_reversed
3584 << 12) |
3585 (ratios.freq_min_reduced << 16) | (ratios.
3586 freq_diff_reduced
3587 << 24));
3588 break;
3589 }
3590}
3591
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003592static void set_2dxx_series(struct raminfo *info, int s3resume)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003593{
3594 set_2dx8_reg(info, 0x2d00, 0, 0x78, frequency_11(info) / 2, 1359, 1005,
3595 0, 1);
3596 set_2dx8_reg(info, 0x2d08, 0, 0x78, 0x78, 3273, 5033, 1, 1);
3597 set_2dx8_reg(info, 0x2d10, 0, 0x78, info->fsb_frequency, 1475, 1131, 0,
3598 1);
3599 set_2dx8_reg(info, 0x2d18, 0, 2 * info->fsb_frequency,
3600 frequency_11(info), 1231, 1524, 0, 1);
3601 set_2dx8_reg(info, 0x2d20, 0, 2 * info->fsb_frequency,
3602 frequency_11(info) / 2, 1278, 2008, 0, 1);
3603 set_2dx8_reg(info, 0x2d28, 0, info->fsb_frequency, frequency_11(info),
3604 1167, 1539, 0, 1);
3605 set_2dx8_reg(info, 0x2d30, 0, info->fsb_frequency,
3606 frequency_11(info) / 2, 1403, 1318, 0, 1);
3607 set_2dx8_reg(info, 0x2d38, 0, info->fsb_frequency, 0x78, 3460, 5363, 1,
3608 1);
3609 set_2dx8_reg(info, 0x2d40, 0, info->fsb_frequency, 0x3c, 2792, 5178, 1,
3610 1);
3611 set_2dx8_reg(info, 0x2d48, 0, 2 * info->fsb_frequency, 0x78, 2738, 4610,
3612 1, 1);
3613 set_2dx8_reg(info, 0x2d50, 0, info->fsb_frequency, 0x78, 2819, 5932, 1,
3614 1);
3615 set_2dx8_reg(info, 0x6d4, 1, info->fsb_frequency,
3616 frequency_11(info) / 2, 4000, 0, 0, 0);
3617 set_2dx8_reg(info, 0x6d8, 2, info->fsb_frequency,
3618 frequency_11(info) / 2, 4000, 4000, 0, 0);
3619
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003620 if (s3resume) {
3621 printk (BIOS_SPEW, "[6dc] <= %x\n", info->cached_training->reg_6dc);
3622 write_mchbar32(0x6dc, info->cached_training->reg_6dc);
3623 } else
3624 set_6d_reg(info, 0x6dc, 2 * info->fsb_frequency, frequency_11(info), 0,
3625 info->delay46_ps[0], 0,
3626 info->delay54_ps[0]);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003627 set_2dx8_reg(info, 0x6e0, 1, 2 * info->fsb_frequency,
3628 frequency_11(info), 2500, 0, 0, 0);
3629 set_2dx8_reg(info, 0x6e4, 1, 2 * info->fsb_frequency,
3630 frequency_11(info) / 2, 3500, 0, 0, 0);
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003631 if (s3resume) {
3632 printk (BIOS_SPEW, "[6e8] <= %x\n", info->cached_training->reg_6e8);
3633 write_mchbar32(0x6e8, info->cached_training->reg_6e8);
3634 } else
3635 set_6d_reg(info, 0x6e8, 2 * info->fsb_frequency, frequency_11(info), 0,
3636 info->delay46_ps[1], 0,
3637 info->delay54_ps[1]);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003638 set_2d5x_reg(info, 0x2d58, 0x78, 0x78, 864, 1195, 762, 786, 0);
3639 set_2d5x_reg(info, 0x2d60, 0x195, info->fsb_frequency, 1352, 725, 455,
3640 470, 0);
3641 set_2d5x_reg(info, 0x2d68, 0x195, 0x3c, 2707, 5632, 3277, 2207, 0);
3642 set_2d5x_reg(info, 0x2d70, 0x195, frequency_11(info) / 2, 1276, 758,
3643 454, 459, 0);
3644 set_2d5x_reg(info, 0x2d78, 0x195, 0x78, 1021, 799, 510, 513, 0);
3645 set_2d5x_reg(info, 0x2d80, info->fsb_frequency, 0xe1, 0, 2862, 2579,
3646 2588, 0);
3647 set_2d5x_reg(info, 0x2d88, info->fsb_frequency, 0xe1, 0, 2690, 2405,
3648 2405, 0);
3649 set_2d5x_reg(info, 0x2da0, 0x78, 0xe1, 0, 2560, 2264, 2251, 0);
3650 set_2d5x_reg(info, 0x2da8, 0x195, frequency_11(info), 1060, 775, 484,
3651 480, 0);
3652 set_2d5x_reg(info, 0x2db0, 0x195, 0x78, 4183, 6023, 2217, 2048, 0);
3653 write_mchbar32(0x2dbc, ((frequency_11(info) / 2) - 1) | 0xe00000);
3654 write_mchbar32(0x2db8, ((info->fsb_frequency - 1) << 16) | 0x77);
3655}
3656
3657static u16 get_max_timing(struct raminfo *info, int channel)
3658{
3659 int slot, rank, lane;
3660 u16 ret = 0;
3661
3662 if ((read_mchbar8(0x2ca8) >> 2) < 1)
3663 return 384;
3664
3665 if (info->revision < 8)
3666 return 256;
3667
3668 for (slot = 0; slot < NUM_SLOTS; slot++)
3669 for (rank = 0; rank < NUM_RANKS; rank++)
3670 if (info->populated_ranks[channel][slot][rank])
3671 for (lane = 0; lane < 8 + info->use_ecc; lane++)
3672 ret = max(ret, read_500(info, channel,
3673 get_timing_register_addr
3674 (lane, 0, slot,
3675 rank), 9));
3676 return ret;
3677}
3678
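/* Derives the per-channel command/receive delays. Working in
 * picoseconds (halfcycle_ps/fsbcycle_ps), this combines the largest
 * lane timing read back for the channel with several fixed offsets into
 * delay_c/delay_d, splits the result into whole cycles plus a remainder
 * (delay_e/delay_f), and stores the outcome as delay46_ps/delay54_ps
 * (later programmed through set_6d_reg() into 0x6dc/0x6e8) and as the
 * reg274265 values written to MCHBAR 0x274 and 0x265. Bit 0 of 0x2ca9
 * is set whenever delay46 had to be clamped to its 2500 ps minimum. The
 * magic offsets (200, 650, 1800, 2758/4428, ...) are kept as found;
 * their meaning is unknown. */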
3679static void set_274265(struct raminfo *info)
3680{
3681 int delay_a_ps, delay_b_ps, delay_c_ps, delay_d_ps;
3682 int delay_e_ps, delay_e_cycles, delay_f_cycles;
3683 int delay_e_over_cycle_ps;
3684 int cycletime_ps;
3685 int channel;
3686
3687 delay_a_ps = 4 * halfcycle_ps(info) + 6 * fsbcycle_ps(info);
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003688 info->training.reg2ca9_bit0 = 0;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003689 for (channel = 0; channel < NUM_CHANNELS; channel++) {
3690 cycletime_ps =
3691 900000 / lcm(2 * info->fsb_frequency, frequency_11(info));
3692 delay_d_ps =
3693 (halfcycle_ps(info) * get_max_timing(info, channel) >> 6)
3694 - info->some_delay_3_ps_rounded + 200;
3695 if (!
3696 ((info->silicon_revision == 0
3697 || info->silicon_revision == 1)
3698 && (info->revision >= 8)))
3699 delay_d_ps += halfcycle_ps(info) * 2;
3700 delay_d_ps +=
3701 halfcycle_ps(info) * (!info->revision_flag_1 +
3702 info->some_delay_2_halfcycles_ceil +
3703 2 * info->some_delay_1_cycle_floor +
3704 info->clock_speed_index +
3705 2 * info->cas_latency - 7 + 11);
3706 delay_d_ps += info->revision >= 8 ? 2758 : 4428;
3707
3708 write_mchbar32(0x140,
3709 (read_mchbar32(0x140) & 0xfaffffff) | 0x2000000);
3710 write_mchbar32(0x138,
3711 (read_mchbar32(0x138) & 0xfaffffff) | 0x2000000);
3712 if ((read_mchbar8(0x144) & 0x1f) > 0x13)
3713 delay_d_ps += 650;
3714 delay_c_ps = delay_d_ps + 1800;
3715 if (delay_c_ps <= delay_a_ps)
3716 delay_e_ps = 0;
3717 else
3718 delay_e_ps =
3719 cycletime_ps * div_roundup(delay_c_ps - delay_a_ps,
3720 cycletime_ps);
3721
3722 delay_e_over_cycle_ps = delay_e_ps % (2 * halfcycle_ps(info));
3723 delay_e_cycles = delay_e_ps / (2 * halfcycle_ps(info));
3724 delay_f_cycles =
3725 div_roundup(2500 - delay_e_over_cycle_ps,
3726 2 * halfcycle_ps(info));
3727 if (delay_f_cycles > delay_e_cycles) {
3728 info->delay46_ps[channel] = delay_e_ps;
3729 delay_e_cycles = 0;
3730 } else {
3731 info->delay46_ps[channel] =
3732 delay_e_over_cycle_ps +
3733 2 * halfcycle_ps(info) * delay_f_cycles;
3734 delay_e_cycles -= delay_f_cycles;
3735 }
3736
3737 if (info->delay46_ps[channel] < 2500) {
3738 info->delay46_ps[channel] = 2500;
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003739 info->training.reg2ca9_bit0 = 1;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003740 }
3741 delay_b_ps = halfcycle_ps(info) + delay_c_ps;
3742 if (delay_b_ps <= delay_a_ps)
3743 delay_b_ps = 0;
3744 else
3745 delay_b_ps -= delay_a_ps;
3746 info->delay54_ps[channel] =
3747 cycletime_ps * div_roundup(delay_b_ps,
3748 cycletime_ps) -
3749 2 * halfcycle_ps(info) * delay_e_cycles;
3750 if (info->delay54_ps[channel] < 2500)
3751 info->delay54_ps[channel] = 2500;
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003752 info->training.reg274265[channel][0] = delay_e_cycles;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003753 if (delay_d_ps + 7 * halfcycle_ps(info) <=
3754 24 * halfcycle_ps(info))
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003755 info->training.reg274265[channel][1] = 0;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003756 else
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003757 info->training.reg274265[channel][1] =
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003758 div_roundup(delay_d_ps + 7 * halfcycle_ps(info),
3759 4 * halfcycle_ps(info)) - 6;
3760 write_mchbar32((channel << 10) + 0x274,
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003761 info->training.reg274265[channel][1]
3762 | (info->training.reg274265[channel][0] << 16));
3763 info->training.reg274265[channel][2] =
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003764 div_roundup(delay_c_ps + 3 * fsbcycle_ps(info),
3765 4 * halfcycle_ps(info)) + 1;
3766 write_mchbar16((channel << 10) + 0x265,
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003767 info->training.reg274265[channel][2] << 8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003768 }
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003769 if (info->training.reg2ca9_bit0)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003770 write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) | 1);
3771 else
3772 write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) & ~1);
3773}
3774
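/*
 * S3 resume path: replay the cached 0x274/0x265 values and the 0x2ca9
 * bit 0 setting that set_274265() saved on the previous cold boot.
 */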
3775static void restore_274265(struct raminfo *info)
3776{
3777 int channel;
3778
3779 for (channel = 0; channel < NUM_CHANNELS; channel++) {
3780 write_mchbar32((channel << 10) + 0x274,
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003781 (info->cached_training->reg274265[channel][0] << 16)
3782 | info->cached_training->reg274265[channel][1]);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003783 write_mchbar16((channel << 10) + 0x265,
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003784 info->cached_training->reg274265[channel][2] << 8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003785 }
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003786 if (info->cached_training->reg2ca9_bit0)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003787 write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) | 1);
3788 else
3789 write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) & ~1);
3790}
3791
3792#if REAL
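/*
 * Early DMI link setup. The registers poked here (DMIBAR + 0x254/0x1b8/
 * 0xd68 plus the GPIO accesses) come straight from the reverse-engineered
 * vendor trace; their individual meaning is not known.
 */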
3793static void dmi_setup(void)
3794{
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08003795 gav(read8(DEFAULT_DMIBAR + 0x254));
3796 write8(DEFAULT_DMIBAR + 0x254, 0x1);
3797 write16(DEFAULT_DMIBAR + 0x1b8, 0x18f2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003798 read_mchbar16(0x48);
3799 write_mchbar16(0x48, 0x2);
3800
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08003801 write32(DEFAULT_DMIBAR + 0xd68, read32(DEFAULT_DMIBAR + 0xd68) | 0x08000000);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003802
3803 outl((gav(inl(DEFAULT_GPIOBASE | 0x38)) & ~0x140000) | 0x400000,
3804 DEFAULT_GPIOBASE | 0x38);
3805 gav(inb(DEFAULT_GPIOBASE | 0xe)); // = 0xfdcaff6e
3806}
3807#endif
3808
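/*
 * Pre-raminit chipset setup: force a cold reset if a previous RAM init
 * was interrupted, bring up DMI, program the graphics stolen memory size
 * into GGC from the "gfx_uma_size" CMOS option, and, when the internal
 * graphics device appears to be enabled in DEVEN (bit 3), replay a block
 * of display-related MCHBAR/RCBA writes recorded from the vendor BIOS.
 */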
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003809void chipset_init(const int s3resume)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003810{
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003811 u8 x2ca8;
Vladimir Serbinenko55391c42014-08-03 14:51:00 +02003812 u16 ggc;
3813 u8 gfxsize;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003814
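	/*
	 * MCHBAR 0x2ca8 appears to be used as a raminit progress/scratch
	 * byte: bit 0 set, or the value 8 without an S3 resume, means the
	 * previous boot died in the middle of RAM init, so trigger a full
	 * reset via port 0xcf9 and start over cleanly.
	 */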
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003815 x2ca8 = read_mchbar8(0x2ca8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003816 if ((x2ca8 & 1) || (x2ca8 == 8 && !s3resume)) {
3817 printk(BIOS_DEBUG, "soft reset detected, rebooting properly\n");
3818 write_mchbar8(0x2ca8, 0);
Vladimir Serbinenkoe1eef692014-02-19 22:08:51 +01003819 outb(0x6, 0xcf9);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003820#if REAL
Patrick Georgi546953c2014-11-29 10:38:17 +01003821 halt();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003822#else
3823 printf("CP5\n");
3824 exit(0);
3825#endif
3826 }
3827#if !REAL
3828 if (!s3resume) {
3829 pre_raminit_3(x2ca8);
3830 }
Vladimir Serbinenkof62669c2014-01-09 10:59:38 +01003831 pre_raminit_4a(x2ca8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003832#endif
3833
3834 dmi_setup();
3835
3836 write_mchbar16(0x1170, 0xa880);
3837 write_mchbar8(0x11c1, 0x1);
3838 write_mchbar16(0x1170, 0xb880);
3839 read_mchbar8(0x1210);
3840 write_mchbar8(0x1210, 0x84);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003841
Vladimir Serbinenko55391c42014-08-03 14:51:00 +02003842 if (get_option(&gfxsize, "gfx_uma_size") != CB_SUCCESS) {
3843 /* 0 for 32MB */
3844 gfxsize = 0;
3845 }
3846
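	/*
	 * GGC encoding used below (assumption, derived only from this code
	 * and the comment above): bits 7:4 hold (gfxsize + 5), so the
	 * default gfxsize = 0 gives ggc = 0xb00 | (5 << 4) = 0xb50, i.e.
	 * 32MB of stolen graphics memory. Larger CMOS values select the
	 * next entries of the chipset's GMS size table, which is not
	 * spelled out here.
	 */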
3847 ggc = 0xb00 | ((gfxsize + 5) << 4);
3848
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003849 pci_write_config16(NORTHBRIDGE, D0F0_GGC, ggc | 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003850
3851 u16 deven;
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003852 deven = pci_read_config16(NORTHBRIDGE, D0F0_DEVEN); // = 0x3
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003853
3854 if (deven & 8) {
3855 write_mchbar8(0x2c30, 0x20);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003856 pci_read_config8(NORTHBRIDGE, 0x8); // = 0x18
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003857 write_mchbar16(0x2c30, read_mchbar16(0x2c30) | 0x200);
3858 write_mchbar16(0x2c32, 0x434);
3859 read_mchbar32(0x2c44);
3860 write_mchbar32(0x2c44, 0x1053687);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003861 pci_read_config8(GMA, 0x62); // = 0x2
3862 pci_write_config8(GMA, 0x62, 0x2);
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08003863 read8(DEFAULT_RCBA + 0x2318);
3864 write8(DEFAULT_RCBA + 0x2318, 0x47);
3865 read8(DEFAULT_RCBA + 0x2320);
3866 write8(DEFAULT_RCBA + 0x2320, 0xfc);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003867 }
3868
3869 read_mchbar32(0x30);
3870 write_mchbar32(0x30, 0x40);
3871
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003872 pci_write_config16(NORTHBRIDGE, D0F0_GGC, ggc);
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08003873 gav(read32(DEFAULT_RCBA + 0x3428));
3874 write32(DEFAULT_RCBA + 0x3428, 0x1d);
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003875}
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003876
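/*
 * Main raminit flow (reverse-engineered): collect system and SPD data,
 * derive timings, replay the large register init sequence recorded from
 * the vendor BIOS, then either run full memory training (cold boot) or
 * restore the cached training data (S3 resume), and finally program the
 * memory map, the IOMMU bases and the HECI/ME UMA region before CBMEM
 * recovery.
 */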
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003877void raminit(const int s3resume, const u8 *spd_addrmap)
3878{
3879 unsigned channel, slot, lane, rank;
3880 int i;
3881 struct raminfo info;
3882 u8 x2ca8;
3883 u16 deven;
Vladimir Serbinenkob16f0922014-06-07 16:27:27 +02003884 int cbmem_wasnot_inited;
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003885
3886 x2ca8 = read_mchbar8(0x2ca8);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003887 deven = pci_read_config16(NORTHBRIDGE, D0F0_DEVEN);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003888
3889 memset(&info, 0x5a, sizeof(info));
3890
3891 info.last_500_command[0] = 0;
3892 info.last_500_command[1] = 0;
3893
3894 info.fsb_frequency = 135 * 2;
3895 info.board_lane_delay[0] = 0x14;
3896 info.board_lane_delay[1] = 0x07;
3897 info.board_lane_delay[2] = 0x07;
3898 info.board_lane_delay[3] = 0x08;
3899 info.board_lane_delay[4] = 0x56;
3900 info.board_lane_delay[5] = 0x04;
3901 info.board_lane_delay[6] = 0x04;
3902 info.board_lane_delay[7] = 0x05;
3903 info.board_lane_delay[8] = 0x10;
3904
3905 info.training.reg_178 = 0;
3906 info.training.reg_10b = 0;
3907
3908 info.heci_bar = 0;
3909 info.memory_reserved_for_heci_mb = 0;
3910
3911 /* before SPD */
3912 timestamp_add_now(101);
3913
3914 if (!s3resume || REAL) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003915 pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2); // = 0x80
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003916
3917 collect_system_info(&info);
3918
3919#if REAL
3920 /* Enable SMBUS. */
3921 enable_smbus();
3922#endif
3923
3924 memset(&info.populated_ranks, 0, sizeof(info.populated_ranks));
3925
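	/*
	 * Probe SPD on every slot listed in spd_addrmap: retry the
	 * DEVICE_TYPE byte up to five times, then cache only the bytes in
	 * useful_addresses[]. Anything that is not a DDR3 UDIMM/SO-DIMM
	 * with x8/x16 devices and at most two ranks is rejected, and
	 * use_ecc is cleared if any module lacks the extra ECC lane width.
	 * Note that the bytes are stored at spd[channel][0][...] regardless
	 * of the slot being probed; this appears to assume one DIMM per
	 * channel.
	 */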
3926 info.use_ecc = 1;
3927 for (channel = 0; channel < NUM_CHANNELS; channel++)
Vladimir Serbinenko2ab8ec72014-02-20 14:34:56 +01003928 for (slot = 0; slot < NUM_SLOTS; slot++) {
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003929 int v;
3930 int try;
3931 int addr;
3932 const u8 useful_addresses[] = {
3933 DEVICE_TYPE,
3934 MODULE_TYPE,
3935 DENSITY,
3936 RANKS_AND_DQ,
3937 MEMORY_BUS_WIDTH,
3938 TIMEBASE_DIVIDEND,
3939 TIMEBASE_DIVISOR,
3940 CYCLETIME,
3941 CAS_LATENCIES_LSB,
3942 CAS_LATENCIES_MSB,
3943 CAS_LATENCY_TIME,
3944 0x11, 0x12, 0x13, 0x14, 0x15,
3945 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
3946 0x1c, 0x1d,
3947 THERMAL_AND_REFRESH,
3948 0x20,
3949 REFERENCE_RAW_CARD_USED,
3950 RANK1_ADDRESS_MAPPING,
3951 0x75, 0x76, 0x77, 0x78,
3952 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e,
3953 0x7f, 0x80, 0x81, 0x82, 0x83, 0x84,
3954 0x85, 0x86, 0x87, 0x88,
3955 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e,
3956 0x8f, 0x90, 0x91, 0x92, 0x93, 0x94,
3957 0x95
3958 };
Vladimir Serbinenko902626c2014-02-16 17:22:26 +01003959 if (!spd_addrmap[2 * channel + slot])
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003960 continue;
3961 for (try = 0; try < 5; try++) {
Vladimir Serbinenko902626c2014-02-16 17:22:26 +01003962 v = smbus_read_byte(spd_addrmap[2 * channel + slot],
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003963 DEVICE_TYPE);
3964 if (v >= 0)
3965 break;
3966 }
3967 if (v < 0)
3968 continue;
3969 for (addr = 0;
3970 addr <
3971 sizeof(useful_addresses) /
3972 sizeof(useful_addresses[0]); addr++)
3973 gav(info.
3974 spd[channel][0][useful_addresses
3975 [addr]] =
Vladimir Serbinenko902626c2014-02-16 17:22:26 +01003976 smbus_read_byte(spd_addrmap[2 * channel + slot],
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003977 useful_addresses
3978 [addr]));
3979 if (info.spd[channel][0][DEVICE_TYPE] != 11)
3980 die("Only DDR3 is supported");
3981
3982 v = info.spd[channel][0][RANKS_AND_DQ];
3983 info.populated_ranks[channel][0][0] = 1;
3984 info.populated_ranks[channel][0][1] =
3985 ((v >> 3) & 7);
3986 if (((v >> 3) & 7) > 1)
3987 die("At most 2 ranks are supported");
3988 if ((v & 7) == 0 || (v & 7) > 2)
3989 die("Only x8 and x16 modules are supported");
3990 if ((info.
3991 spd[channel][slot][MODULE_TYPE] & 0xF) != 2
3992 && (info.
3993 spd[channel][slot][MODULE_TYPE] & 0xF)
3994 != 3)
3995 die("Registered memory is not supported");
3996 info.is_x16_module[channel][0] = (v & 7) - 1;
3997 info.density[channel][slot] =
3998 info.spd[channel][slot][DENSITY] & 0xF;
3999 if (!
4000 (info.
4001 spd[channel][slot][MEMORY_BUS_WIDTH] &
4002 0x18))
4003 info.use_ecc = 0;
4004 }
4005
4006 gav(0x55);
4007
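	/*
	 * Build a per-channel rank mask: bit (2 * slot + rank) is set for
	 * every populated rank. This mask is reused throughout the register
	 * programming below.
	 */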
4008 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4009 int v = 0;
4010 for (slot = 0; slot < NUM_SLOTS; slot++)
4011 for (rank = 0; rank < NUM_RANKS; rank++)
4012 v |= info.
4013 populated_ranks[channel][slot][rank]
4014 << (2 * slot + rank);
4015 info.populated_ranks_mask[channel] = v;
4016 }
4017
4018 gav(0x55);
4019
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004020 gav(pci_read_config32(NORTHBRIDGE, D0F0_CAPID0 + 4));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004021 }
4022
4023 /* after SPD */
4024 timestamp_add_now(102);
4025
4026 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) & 0xfc);
4027#if !REAL
Alexandru Gagniuc86091f92015-09-30 20:23:09 -07004028 rdmsr (MTRR_PHYS_MASK (3));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004029#endif
4030
4031 collect_system_info(&info);
4032 calculate_timings(&info);
4033
4034#if !REAL
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004035 pci_write_config8(NORTHBRIDGE, 0xdf, 0x82);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004036#endif
4037
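	/*
	 * GEN_PMCON_2 bit 7 is used as a "raminit in progress" marker: it
	 * is set a little further down, before the main init sequence, and
	 * cleared at the very end. If it is still set here on a cold boot,
	 * the previous attempt was interrupted, so reset the platform to
	 * start from a clean state.
	 */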
4038 if (!s3resume) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004039 u8 reg8 = pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004040 if (x2ca8 == 0 && (reg8 & 0x80)) {
4041 /* Don't enable S4-assertion stretch. Makes trouble on roda/rk9.
4042 reg8 = pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa4);
4043 pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa4, reg8 | 0x08);
4044 */
4045
4046 /* Clear bit7. */
4047
4048 pci_write_config8(SOUTHBRIDGE, GEN_PMCON_2,
4049 (reg8 & ~(1 << 7)));
4050
4051 printk(BIOS_INFO,
4052 "Interrupted RAM init, reset required.\n");
4053 outb(0x6, 0xcf9);
4054#if REAL
Patrick Georgi546953c2014-11-29 10:38:17 +01004055 halt();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004056#endif
4057 }
4058 }
4059#if !REAL
4060 gav(read_mchbar8(0x2ca8)); ///!!!!
4061#endif
4062
4063 if (!s3resume && x2ca8 == 0)
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004064 pci_write_config8(SOUTHBRIDGE, GEN_PMCON_2,
4065 pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2) | 0x80);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004066
4067 compute_derived_timings(&info);
4068
4069 if (x2ca8 == 0) {
4070 gav(read_mchbar8(0x164));
4071 write_mchbar8(0x164, 0x26);
4072 write_mchbar16(0x2c20, 0x10);
4073 }
4074
4075 write_mchbar32(0x18b4, read_mchbar32(0x18b4) | 0x210000); /* OK */
4076 write_mchbar32(0x1890, read_mchbar32(0x1890) | 0x2000000); /* OK */
4077 write_mchbar32(0x18b4, read_mchbar32(0x18b4) | 0x8000);
4078
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004079 gav(pci_read_config32(PCI_DEV(0xff, 2, 1), 0x50)); // !!!!
4080 pci_write_config8(PCI_DEV(0xff, 2, 1), 0x54, 0x12);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004081
4082 gav(read_mchbar16(0x2c10)); // !!!!
4083 write_mchbar16(0x2c10, 0x412);
4084 gav(read_mchbar16(0x2c10)); // !!!!
4085 write_mchbar16(0x2c12, read_mchbar16(0x2c12) | 0x100); /* OK */
4086
4087 gav(read_mchbar8(0x2ca8)); // !!!!
4088 write_mchbar32(0x1804,
4089 (read_mchbar32(0x1804) & 0xfffffffc) | 0x8400080);
4090
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004091 pci_read_config32(PCI_DEV(0xff, 2, 1), 0x6c); // !!!!
4092 pci_write_config32(PCI_DEV(0xff, 2, 1), 0x6c, 0x40a0a0);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004093 gav(read_mchbar32(0x1c04)); // !!!!
4094 gav(read_mchbar32(0x1804)); // !!!!
4095
4096 if (x2ca8 == 0) {
4097 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1);
4098 }
4099
4100 write_mchbar32(0x18d8, 0x120000);
4101 write_mchbar32(0x18dc, 0x30a484a);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004102 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xe0, 0x0);
4103 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xf4, 0x9444a);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004104 write_mchbar32(0x18d8, 0x40000);
4105 write_mchbar32(0x18dc, 0xb000000);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004106 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xe0, 0x60000);
4107 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xf4, 0x0);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004108 write_mchbar32(0x18d8, 0x180000);
4109 write_mchbar32(0x18dc, 0xc0000142);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004110 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xe0, 0x20000);
4111 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xf4, 0x142);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004112 write_mchbar32(0x18d8, 0x1e0000);
4113
4114 gav(read_mchbar32(0x18dc)); // !!!!
4115 write_mchbar32(0x18dc, 0x3);
4116 gav(read_mchbar32(0x18dc)); // !!!!
4117
4118 if (x2ca8 == 0) {
4119 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1); // guess
4120 }
4121
4122 write_mchbar32(0x188c, 0x20bc09);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004123 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xd0, 0x40b0c09);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004124 write_mchbar32(0x1a10, 0x4200010e);
4125 write_mchbar32(0x18b8, read_mchbar32(0x18b8) | 0x200);
4126 gav(read_mchbar32(0x1918)); // !!!!
4127 write_mchbar32(0x1918, 0x332);
4128
4129 gav(read_mchbar32(0x18b8)); // !!!!
4130 write_mchbar32(0x18b8, 0xe00);
4131 gav(read_mchbar32(0x182c)); // !!!!
4132 write_mchbar32(0x182c, 0x10202);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004133 gav(pci_read_config32(PCI_DEV(0xff, 2, 1), 0x94)); // !!!!
4134 pci_write_config32(PCI_DEV(0xff, 2, 1), 0x94, 0x10202);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004135 write_mchbar32(0x1a1c, read_mchbar32(0x1a1c) & 0x8fffffff);
4136 write_mchbar32(0x1a70, read_mchbar32(0x1a70) | 0x100000);
4137
4138 write_mchbar32(0x18b4, read_mchbar32(0x18b4) & 0xffff7fff);
4139 gav(read_mchbar32(0x1a68)); // !!!!
4140 write_mchbar32(0x1a68, 0x343800);
4141 gav(read_mchbar32(0x1e68)); // !!!!
4142 gav(read_mchbar32(0x1a68)); // !!!!
4143
4144 if (x2ca8 == 0) {
4145 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1); // guess
4146 }
4147
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004148 pci_read_config32(PCI_DEV(0xff, 2, 0), 0x048); // !!!!
4149 pci_write_config32(PCI_DEV(0xff, 2, 0), 0x048, 0x140000);
4150 pci_read_config32(PCI_DEV(0xff, 2, 0), 0x058); // !!!!
4151 pci_write_config32(PCI_DEV(0xff, 2, 0), 0x058, 0x64555);
4152 pci_read_config32(PCI_DEV(0xff, 2, 0), 0x058); // !!!!
4153 pci_read_config32(PCI_DEV (0xff, 0, 0), 0xd0); // !!!!
4154 pci_write_config32(PCI_DEV (0xff, 0, 0), 0xd0, 0x180);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004155 gav(read_mchbar32(0x1af0)); // !!!!
4156 gav(read_mchbar32(0x1af0)); // !!!!
4157 write_mchbar32(0x1af0, 0x1f020003);
4158 gav(read_mchbar32(0x1af0)); // !!!!
4159
Edward O'Callaghan42b716f2014-06-26 21:38:52 +10004160 if (x2ca8 == 0) {
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004161 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1); // guess
4162 }
4163
4164 gav(read_mchbar32(0x1890)); // !!!!
4165 write_mchbar32(0x1890, 0x80102);
4166 gav(read_mchbar32(0x18b4)); // !!!!
4167 write_mchbar32(0x18b4, 0x216000);
4168 write_mchbar32(0x18a4, 0x22222222);
4169 write_mchbar32(0x18a8, 0x22222222);
4170 write_mchbar32(0x18ac, 0x22222);
4171
4172 udelay(1000);
4173
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01004174 info.cached_training = get_cached_training();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004175
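	/*
	 * On S3 resume with valid cached training data (presumably read
	 * back from the MRC cache by get_cached_training()) the 0x274/0x265
	 * values are simply restored; on a cold boot, or when the cache is
	 * missing, they are recomputed by set_274265(). Either way the
	 * values are dumped for debugging before set_2dxx_series() runs.
	 */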
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01004176 if (x2ca8 == 0) {
4177 int j;
4178 if (s3resume && info.cached_training) {
4179 restore_274265(&info);
4180 printk(BIOS_DEBUG, "reg2ca9_bit0 = %x\n",
4181 info.cached_training->reg2ca9_bit0);
4182 for (i = 0; i < 2; i++)
4183 for (j = 0; j < 3; j++)
4184 printk(BIOS_DEBUG, "reg274265[%d][%d] = %x\n",
4185 i, j, info.cached_training->reg274265[i][j]);
4186 } else {
4187 set_274265(&info);
4188 printk(BIOS_DEBUG, "reg2ca9_bit0 = %x\n",
4189 info.training.reg2ca9_bit0);
4190 for (i = 0; i < 2; i++)
4191 for (j = 0; j < 3; j++)
4192 printk(BIOS_DEBUG, "reg274265[%d][%d] = %x\n",
4193 i, j, info.training.reg274265[i][j]);
4194 }
4195
4196 set_2dxx_series(&info, s3resume);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004197
4198 if (!(deven & 8)) {
4199 read_mchbar32(0x2cb0);
4200 write_mchbar32(0x2cb0, 0x40);
4201 }
4202
4203 udelay(1000);
4204
4205 if (deven & 8) {
4206 write_mchbar32(0xff8, 0x1800 | read_mchbar32(0xff8));
4207 read_mchbar32(0x2cb0);
4208 write_mchbar32(0x2cb0, 0x00);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004209 pci_read_config8(PCI_DEV (0, 0x2, 0x0), 0x4c);
4210 pci_read_config8(PCI_DEV (0, 0x2, 0x0), 0x4c);
4211 pci_read_config8(PCI_DEV (0, 0x2, 0x0), 0x4e);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004212
4213 read_mchbar8(0x1150);
4214 read_mchbar8(0x1151);
4215 read_mchbar8(0x1022);
4216 read_mchbar8(0x16d0);
4217 write_mchbar32(0x1300, 0x60606060);
4218 write_mchbar32(0x1304, 0x60606060);
4219 write_mchbar32(0x1308, 0x78797a7b);
4220 write_mchbar32(0x130c, 0x7c7d7e7f);
4221 write_mchbar32(0x1310, 0x60606060);
4222 write_mchbar32(0x1314, 0x60606060);
4223 write_mchbar32(0x1318, 0x60606060);
4224 write_mchbar32(0x131c, 0x60606060);
4225 write_mchbar32(0x1320, 0x50515253);
4226 write_mchbar32(0x1324, 0x54555657);
4227 write_mchbar32(0x1328, 0x58595a5b);
4228 write_mchbar32(0x132c, 0x5c5d5e5f);
4229 write_mchbar32(0x1330, 0x40414243);
4230 write_mchbar32(0x1334, 0x44454647);
4231 write_mchbar32(0x1338, 0x48494a4b);
4232 write_mchbar32(0x133c, 0x4c4d4e4f);
4233 write_mchbar32(0x1340, 0x30313233);
4234 write_mchbar32(0x1344, 0x34353637);
4235 write_mchbar32(0x1348, 0x38393a3b);
4236 write_mchbar32(0x134c, 0x3c3d3e3f);
4237 write_mchbar32(0x1350, 0x20212223);
4238 write_mchbar32(0x1354, 0x24252627);
4239 write_mchbar32(0x1358, 0x28292a2b);
4240 write_mchbar32(0x135c, 0x2c2d2e2f);
4241 write_mchbar32(0x1360, 0x10111213);
4242 write_mchbar32(0x1364, 0x14151617);
4243 write_mchbar32(0x1368, 0x18191a1b);
4244 write_mchbar32(0x136c, 0x1c1d1e1f);
4245 write_mchbar32(0x1370, 0x10203);
4246 write_mchbar32(0x1374, 0x4050607);
4247 write_mchbar32(0x1378, 0x8090a0b);
4248 write_mchbar32(0x137c, 0xc0d0e0f);
4249 write_mchbar8(0x11cc, 0x4e);
4250 write_mchbar32(0x1110, 0x73970404);
4251 write_mchbar32(0x1114, 0x72960404);
4252 write_mchbar32(0x1118, 0x6f950404);
4253 write_mchbar32(0x111c, 0x6d940404);
4254 write_mchbar32(0x1120, 0x6a930404);
4255 write_mchbar32(0x1124, 0x68a41404);
4256 write_mchbar32(0x1128, 0x66a21404);
4257 write_mchbar32(0x112c, 0x63a01404);
4258 write_mchbar32(0x1130, 0x609e1404);
4259 write_mchbar32(0x1134, 0x5f9c1404);
4260 write_mchbar32(0x1138, 0x5c961404);
4261 write_mchbar32(0x113c, 0x58a02404);
4262 write_mchbar32(0x1140, 0x54942404);
4263 write_mchbar32(0x1190, 0x900080a);
4264 write_mchbar16(0x11c0, 0xc40b);
4265 write_mchbar16(0x11c2, 0x303);
4266 write_mchbar16(0x11c4, 0x301);
4267 read_mchbar32(0x1190);
4268 write_mchbar32(0x1190, 0x8900080a);
4269 write_mchbar32(0x11b8, 0x70c3000);
4270 write_mchbar8(0x11ec, 0xa);
4271 write_mchbar16(0x1100, 0x800);
4272 read_mchbar32(0x11bc);
4273 write_mchbar32(0x11bc, 0x1e84800);
4274 write_mchbar16(0x11ca, 0xfa);
4275 write_mchbar32(0x11e4, 0x4e20);
4276 write_mchbar8(0x11bc, 0xf);
4277 write_mchbar16(0x11da, 0x19);
4278 write_mchbar16(0x11ba, 0x470c);
4279 write_mchbar32(0x1680, 0xe6ffe4ff);
4280 write_mchbar32(0x1684, 0xdeffdaff);
4281 write_mchbar32(0x1688, 0xd4ffd0ff);
4282 write_mchbar32(0x168c, 0xccffc6ff);
4283 write_mchbar32(0x1690, 0xc0ffbeff);
4284 write_mchbar32(0x1694, 0xb8ffb0ff);
4285 write_mchbar32(0x1698, 0xa8ff0000);
4286 write_mchbar32(0x169c, 0xc00);
4287 write_mchbar32(0x1290, 0x5000000);
4288 }
4289
4290 write_mchbar32(0x124c, 0x15040d00);
4291 write_mchbar32(0x1250, 0x7f0000);
4292 write_mchbar32(0x1254, 0x1e220004);
4293 write_mchbar32(0x1258, 0x4000004);
4294 write_mchbar32(0x1278, 0x0);
4295 write_mchbar32(0x125c, 0x0);
4296 write_mchbar32(0x1260, 0x0);
4297 write_mchbar32(0x1264, 0x0);
4298 write_mchbar32(0x1268, 0x0);
4299 write_mchbar32(0x126c, 0x0);
4300 write_mchbar32(0x1270, 0x0);
4301 write_mchbar32(0x1274, 0x0);
4302 }
4303
4304 if ((deven & 8) && x2ca8 == 0) {
4305 write_mchbar16(0x1214, 0x320);
4306 write_mchbar32(0x1600, 0x40000000);
4307 read_mchbar32(0x11f4);
4308 write_mchbar32(0x11f4, 0x10000000);
4309 read_mchbar16(0x1230);
4310 write_mchbar16(0x1230, 0x8000);
4311 write_mchbar32(0x1400, 0x13040020);
4312 write_mchbar32(0x1404, 0xe090120);
4313 write_mchbar32(0x1408, 0x5120220);
4314 write_mchbar32(0x140c, 0x5120330);
4315 write_mchbar32(0x1410, 0xe090220);
4316 write_mchbar32(0x1414, 0x1010001);
4317 write_mchbar32(0x1418, 0x1110000);
4318 write_mchbar32(0x141c, 0x9020020);
4319 write_mchbar32(0x1420, 0xd090220);
4320 write_mchbar32(0x1424, 0x2090220);
4321 write_mchbar32(0x1428, 0x2090330);
4322 write_mchbar32(0x142c, 0xd090220);
4323 write_mchbar32(0x1430, 0x1010001);
4324 write_mchbar32(0x1434, 0x1110000);
4325 write_mchbar32(0x1438, 0x11040020);
4326 write_mchbar32(0x143c, 0x4030220);
4327 write_mchbar32(0x1440, 0x1060220);
4328 write_mchbar32(0x1444, 0x1060330);
4329 write_mchbar32(0x1448, 0x4030220);
4330 write_mchbar32(0x144c, 0x1010001);
4331 write_mchbar32(0x1450, 0x1110000);
4332 write_mchbar32(0x1454, 0x4010020);
4333 write_mchbar32(0x1458, 0xb090220);
4334 write_mchbar32(0x145c, 0x1090220);
4335 write_mchbar32(0x1460, 0x1090330);
4336 write_mchbar32(0x1464, 0xb090220);
4337 write_mchbar32(0x1468, 0x1010001);
4338 write_mchbar32(0x146c, 0x1110000);
4339 write_mchbar32(0x1470, 0xf040020);
4340 write_mchbar32(0x1474, 0xa090220);
4341 write_mchbar32(0x1478, 0x1120220);
4342 write_mchbar32(0x147c, 0x1120330);
4343 write_mchbar32(0x1480, 0xa090220);
4344 write_mchbar32(0x1484, 0x1010001);
4345 write_mchbar32(0x1488, 0x1110000);
4346 write_mchbar32(0x148c, 0x7020020);
4347 write_mchbar32(0x1490, 0x1010220);
4348 write_mchbar32(0x1494, 0x10210);
4349 write_mchbar32(0x1498, 0x10320);
4350 write_mchbar32(0x149c, 0x1010220);
4351 write_mchbar32(0x14a0, 0x1010001);
4352 write_mchbar32(0x14a4, 0x1110000);
4353 write_mchbar32(0x14a8, 0xd040020);
4354 write_mchbar32(0x14ac, 0x8090220);
4355 write_mchbar32(0x14b0, 0x1111310);
4356 write_mchbar32(0x14b4, 0x1111420);
4357 write_mchbar32(0x14b8, 0x8090220);
4358 write_mchbar32(0x14bc, 0x1010001);
4359 write_mchbar32(0x14c0, 0x1110000);
4360 write_mchbar32(0x14c4, 0x3010020);
4361 write_mchbar32(0x14c8, 0x7090220);
4362 write_mchbar32(0x14cc, 0x1081310);
4363 write_mchbar32(0x14d0, 0x1081420);
4364 write_mchbar32(0x14d4, 0x7090220);
4365 write_mchbar32(0x14d8, 0x1010001);
4366 write_mchbar32(0x14dc, 0x1110000);
4367 write_mchbar32(0x14e0, 0xb040020);
4368 write_mchbar32(0x14e4, 0x2030220);
4369 write_mchbar32(0x14e8, 0x1051310);
4370 write_mchbar32(0x14ec, 0x1051420);
4371 write_mchbar32(0x14f0, 0x2030220);
4372 write_mchbar32(0x14f4, 0x1010001);
4373 write_mchbar32(0x14f8, 0x1110000);
4374 write_mchbar32(0x14fc, 0x5020020);
4375 write_mchbar32(0x1500, 0x5090220);
4376 write_mchbar32(0x1504, 0x2071310);
4377 write_mchbar32(0x1508, 0x2071420);
4378 write_mchbar32(0x150c, 0x5090220);
4379 write_mchbar32(0x1510, 0x1010001);
4380 write_mchbar32(0x1514, 0x1110000);
4381 write_mchbar32(0x1518, 0x7040120);
4382 write_mchbar32(0x151c, 0x2090220);
4383 write_mchbar32(0x1520, 0x70b1210);
4384 write_mchbar32(0x1524, 0x70b1310);
4385 write_mchbar32(0x1528, 0x2090220);
4386 write_mchbar32(0x152c, 0x1010001);
4387 write_mchbar32(0x1530, 0x1110000);
4388 write_mchbar32(0x1534, 0x1010110);
4389 write_mchbar32(0x1538, 0x1081310);
4390 write_mchbar32(0x153c, 0x5041200);
4391 write_mchbar32(0x1540, 0x5041310);
4392 write_mchbar32(0x1544, 0x1081310);
4393 write_mchbar32(0x1548, 0x1010001);
4394 write_mchbar32(0x154c, 0x1110000);
4395 write_mchbar32(0x1550, 0x1040120);
4396 write_mchbar32(0x1554, 0x4051210);
4397 write_mchbar32(0x1558, 0xd051200);
4398 write_mchbar32(0x155c, 0xd051200);
4399 write_mchbar32(0x1560, 0x4051210);
4400 write_mchbar32(0x1564, 0x1010001);
4401 write_mchbar32(0x1568, 0x1110000);
4402 write_mchbar16(0x1222, 0x220a);
4403 write_mchbar16(0x123c, 0x1fc0);
4404 write_mchbar16(0x1220, 0x1388);
4405 }
4406
4407 read_mchbar32(0x2c80); // !!!!
4408 write_mchbar32(0x2c80, 0x1053688);
4409 read_mchbar32(0x1c04); // !!!!
4410 write_mchbar32(0x1804, 0x406080);
4411
4412 read_mchbar8(0x2ca8);
4413
4414 if (x2ca8 == 0) {
4415 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) & ~3);
4416 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) + 4);
4417 write_mchbar32(0x1af0, read_mchbar32(0x1af0) | 0x10);
4418#if REAL
Patrick Georgi546953c2014-11-29 10:38:17 +01004419 halt();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004420#else
4421 printf("CP5\n");
4422 exit(0);
4423#endif
4424 }
4425
4426 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8));
4427 read_mchbar32(0x2c80); // !!!!
4428 write_mchbar32(0x2c80, 0x53688);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004429 pci_write_config32(PCI_DEV (0xff, 0, 0), 0x60, 0x20220);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004430 read_mchbar16(0x2c20); // !!!!
4431 read_mchbar16(0x2c10); // !!!!
4432 read_mchbar16(0x2c00); // !!!!
4433 write_mchbar16(0x2c00, 0x8c0);
4434 udelay(1000);
4435 write_1d0(0, 0x33d, 0, 0);
4436 write_500(&info, 0, 0, 0xb61, 0, 0);
4437 write_500(&info, 1, 0, 0xb61, 0, 0);
4438 write_mchbar32(0x1a30, 0x0);
4439 write_mchbar32(0x1a34, 0x0);
4440 write_mchbar16(0x614,
4441 0xb5b | (info.populated_ranks[1][0][0] *
4442 0x404) | (info.populated_ranks[0][0][0] *
4443 0xa0));
4444 write_mchbar16(0x616, 0x26a);
4445 write_mchbar32(0x134, 0x856000);
4446 write_mchbar32(0x160, 0x5ffffff);
4447 read_mchbar32(0x114); // !!!!
4448 write_mchbar32(0x114, 0xc2024440);
4449 read_mchbar32(0x118); // !!!!
4450 write_mchbar32(0x118, 0x4);
4451 for (channel = 0; channel < NUM_CHANNELS; channel++)
4452 write_mchbar32(0x260 + (channel << 10),
4453 0x30809ff |
4454 ((info.
4455 populated_ranks_mask[channel] & 3) << 20));
4456 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4457 write_mchbar16(0x31c + (channel << 10), 0x101);
4458 write_mchbar16(0x360 + (channel << 10), 0x909);
4459 write_mchbar16(0x3a4 + (channel << 10), 0x101);
4460 write_mchbar16(0x3e8 + (channel << 10), 0x101);
4461 write_mchbar32(0x320 + (channel << 10), 0x29002900);
4462 write_mchbar32(0x324 + (channel << 10), 0x0);
4463 write_mchbar32(0x368 + (channel << 10), 0x32003200);
4464 write_mchbar16(0x352 + (channel << 10), 0x505);
4465 write_mchbar16(0x354 + (channel << 10), 0x3c3c);
4466 write_mchbar16(0x356 + (channel << 10), 0x1040);
4467 write_mchbar16(0x39a + (channel << 10), 0x73e4);
4468 write_mchbar16(0x3de + (channel << 10), 0x77ed);
4469 write_mchbar16(0x422 + (channel << 10), 0x1040);
4470 }
4471
4472 write_1d0(0x4, 0x151, 4, 1);
4473 write_1d0(0, 0x142, 3, 1);
4474 rdmsr(0x1ac); // !!!!
4475 write_500(&info, 1, 1, 0x6b3, 4, 1);
4476 write_500(&info, 1, 1, 0x6cf, 4, 1);
4477
4478 rmw_1d0(0x21c, 0x38, 0, 6, 1);
4479
4480 write_1d0(((!info.populated_ranks[1][0][0]) << 1) | ((!info.
4481 populated_ranks[0]
4482 [0][0]) << 0),
4483 0x1d1, 3, 1);
4484 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4485 write_mchbar16(0x38e + (channel << 10), 0x5f5f);
4486 write_mchbar16(0x3d2 + (channel << 10), 0x5f5f);
4487 }
4488
4489 set_334(0);
4490
4491 program_base_timings(&info);
4492
4493 write_mchbar8(0x5ff, read_mchbar8(0x5ff) | 0x80); /* OK */
4494
4495 write_1d0(0x2, 0x1d5, 2, 1);
4496 write_1d0(0x20, 0x166, 7, 1);
4497 write_1d0(0x0, 0xeb, 3, 1);
4498 write_1d0(0x0, 0xf3, 6, 1);
4499
4500 for (channel = 0; channel < NUM_CHANNELS; channel++)
4501 for (lane = 0; lane < 9; lane++) {
4502 u16 addr = 0x125 + get_lane_offset(0, 0, lane);
4503 u8 a;
4504 a = read_500(&info, channel, addr, 6); // = 0x20040080 //!!!!
4505 write_500(&info, channel, a, addr, 6, 1);
4506 }
4507
4508 udelay(1000);
4509
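	/*
	 * S3 resume: if no cached training data survived, force a cold
	 * reset, presumably because retraining cannot be done without
	 * destroying the suspended memory contents. Otherwise replay every
	 * cached per-lane timing value (4 timing sets x channels x slots x
	 * ranks x 9 lanes) plus the cached 0x178 and 0x10b settings.
	 */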
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004510 if (s3resume) {
4511 if (info.cached_training == NULL) {
4512 u32 reg32;
4513 printk(BIOS_ERR,
4514 "Couldn't find training data. Rebooting\n");
4515 reg32 = inl(DEFAULT_PMBASE + 0x04);
4516 outl(reg32 & ~(7 << 10), DEFAULT_PMBASE + 0x04);
4517 outb(0xe, 0xcf9);
4518
4519#if REAL
Patrick Georgi546953c2014-11-29 10:38:17 +01004520 halt();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004521#else
4522 printf("CP5\n");
4523 exit(0);
4524#endif
4525 }
4526 int tm;
4527 info.training = *info.cached_training;
4528 for (tm = 0; tm < 4; tm++)
4529 for (channel = 0; channel < NUM_CHANNELS; channel++)
4530 for (slot = 0; slot < NUM_SLOTS; slot++)
4531 for (rank = 0; rank < NUM_RANKS; rank++)
4532 for (lane = 0; lane < 9; lane++)
4533 write_500(&info,
4534 channel,
4535 info.training.
4536 lane_timings
4537 [tm][channel]
4538 [slot][rank]
4539 [lane],
4540 get_timing_register_addr
4541 (lane, tm,
4542 slot, rank),
4543 9, 0);
4544 write_1d0(info.cached_training->reg_178, 0x178, 7, 1);
4545 write_1d0(info.cached_training->reg_10b, 0x10b, 6, 1);
4546 }
4547
4548 read_mchbar32(0x1f4); // !!!!
4549 write_mchbar32(0x1f4, 0x20000);
4550 write_mchbar32(0x1f0, 0x1d000200);
4551 read_mchbar8(0x1f0); // !!!!
4552 write_mchbar8(0x1f0, 0x1);
4553 read_mchbar8(0x1f0); // !!!!
4554
4555 program_board_delay(&info);
4556
4557 write_mchbar8(0x5ff, 0x0); /* OK */
4558 write_mchbar8(0x5ff, 0x80); /* OK */
4559 write_mchbar8(0x5f4, 0x1); /* OK */
4560
4561 write_mchbar32(0x130, read_mchbar32(0x130) & 0xfffffffd); // | 2 when ?
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02004562 while (read_mchbar32(0x130) & 1); /* wait for MCHBAR 0x130 bit 0 to clear */
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004563 gav(read_1d0(0x14b, 7)); // = 0x81023100
4564 write_1d0(0x30, 0x14b, 7, 1);
4565 read_1d0(0xd6, 6); // = 0xfa008080 // !!!!
4566 write_1d0(7, 0xd6, 6, 1);
4567 read_1d0(0x328, 6); // = 0xfa018080 // !!!!
4568 write_1d0(7, 0x328, 6, 1);
4569
4570 for (channel = 0; channel < NUM_CHANNELS; channel++)
4571 set_4cf(&info, channel,
4572 info.populated_ranks[channel][0][0] ? 8 : 0);
4573
4574 read_1d0(0x116, 4); // = 0x4040432 // !!!!
4575 write_1d0(2, 0x116, 4, 1);
4576 read_1d0(0xae, 6); // = 0xe8088080 // !!!!
4577 write_1d0(0, 0xae, 6, 1);
4578 read_1d0(0x300, 4); // = 0x48088080 // !!!!
4579 write_1d0(0, 0x300, 6, 1);
4580 read_mchbar16(0x356); // !!!!
4581 write_mchbar16(0x356, 0x1040);
4582 read_mchbar16(0x756); // !!!!
4583 write_mchbar16(0x756, 0x1040);
4584 write_mchbar32(0x140, read_mchbar32(0x140) & ~0x07000000);
4585 write_mchbar32(0x138, read_mchbar32(0x138) & ~0x07000000);
4586 write_mchbar32(0x130, 0x31111301);
Vladimir Serbinenko25fc5322014-12-07 13:05:44 +01004587 /* Wait until REG130b0 is 1. */
4588 while (read_mchbar32(0x130) & 1)
4589 ;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004590
4591 {
4592 u32 t;
4593 u8 val_a1;
4594 val_a1 = read_1d0(0xa1, 6); // = 0x1cf4040 // !!!!
4595 t = read_1d0(0x2f3, 6); // = 0x10a4040 // !!!!
4596 rmw_1d0(0x320, 0x07,
4597 (t & 4) | ((t & 8) >> 2) | ((t & 0x10) >> 4), 6, 1);
4598 rmw_1d0(0x14b, 0x78,
4599 ((((val_a1 >> 2) & 4) | (val_a1 & 8)) >> 2) | (val_a1 &
4600 4), 7,
4601 1);
4602 rmw_1d0(0xce, 0x38,
4603 ((((val_a1 >> 2) & 4) | (val_a1 & 8)) >> 2) | (val_a1 &
4604 4), 6,
4605 1);
4606 }
4607
4608 for (channel = 0; channel < NUM_CHANNELS; channel++)
4609 set_4cf(&info, channel,
4610 info.populated_ranks[channel][0][0] ? 9 : 1);
4611
4612 rmw_1d0(0x116, 0xe, 1, 4, 1); // = 0x4040432 // !!!!
4613 read_mchbar32(0x144); // !!!!
4614 write_1d0(2, 0xae, 6, 1);
4615 write_1d0(2, 0x300, 6, 1);
4616 write_1d0(2, 0x121, 3, 1);
4617 read_1d0(0xd6, 6); // = 0xfa00c0c7 // !!!!
4618 write_1d0(4, 0xd6, 6, 1);
4619 read_1d0(0x328, 6); // = 0xfa00c0c7 // !!!!
4620 write_1d0(4, 0x328, 6, 1);
4621
4622 for (channel = 0; channel < NUM_CHANNELS; channel++)
4623 set_4cf(&info, channel,
4624 info.populated_ranks[channel][0][0] ? 9 : 0);
4625
4626 write_mchbar32(0x130,
4627 0x11111301 | (info.
4628 populated_ranks[1][0][0] << 30) | (info.
4629 populated_ranks
4630 [0][0]
4631 [0] <<
4632 29));
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02004633 while (read_mchbar8(0x130) & 1); // !!!!
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004634 read_1d0(0xa1, 6); // = 0x1cf4054 // !!!!
4635 read_1d0(0x2f3, 6); // = 0x10a4054 // !!!!
4636 read_1d0(0x21c, 6); // = 0xafa00c0 // !!!!
4637 write_1d0(0, 0x21c, 6, 1);
4638 read_1d0(0x14b, 7); // = 0x810231b0 // !!!!
4639 write_1d0(0x35, 0x14b, 7, 1);
4640
4641 for (channel = 0; channel < NUM_CHANNELS; channel++)
4642 set_4cf(&info, channel,
4643 info.populated_ranks[channel][0][0] ? 0xb : 0x2);
4644
4645 set_334(1);
4646
4647 write_mchbar8(0x1e8, 0x4); /* OK */
4648
4649 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4650 write_500(&info, channel,
4651 0x3 & ~(info.populated_ranks_mask[channel]), 0x6b7, 2,
4652 1);
4653 write_500(&info, channel, 0x3, 0x69b, 2, 1);
4654 }
4655 write_mchbar32(0x2d0, (read_mchbar32(0x2d0) & 0xff2c01ff) | 0x200000); /* OK */
4656 write_mchbar16(0x6c0, 0x14a0); /* OK */
4657 write_mchbar32(0x6d0, (read_mchbar32(0x6d0) & 0xff0080ff) | 0x8000); /* OK */
4658 write_mchbar16(0x232, 0x8);
4659 write_mchbar32(0x234, (read_mchbar32(0x234) & 0xfffbfffb) | 0x40004); /* 0x40004 or 0 depending on ? */
4660 write_mchbar32(0x34, (read_mchbar32(0x34) & 0xfffffffd) | 5); /* OK */
4661 write_mchbar32(0x128, 0x2150d05);
4662 write_mchbar8(0x12c, 0x1f); /* OK */
4663 write_mchbar8(0x12d, 0x56); /* OK */
4664 write_mchbar8(0x12e, 0x31);
4665 write_mchbar8(0x12f, 0x0); /* OK */
4666 write_mchbar8(0x271, 0x2); /* OK */
4667 write_mchbar8(0x671, 0x2); /* OK */
4668 write_mchbar8(0x1e8, 0x4); /* OK */
4669 for (channel = 0; channel < NUM_CHANNELS; channel++)
4670 write_mchbar32(0x294 + (channel << 10),
4671 (info.populated_ranks_mask[channel] & 3) << 16);
4672 write_mchbar32(0x134, (read_mchbar32(0x134) & 0xfc01ffff) | 0x10000); /* OK */
4673 write_mchbar32(0x134, (read_mchbar32(0x134) & 0xfc85ffff) | 0x850000); /* OK */
4674 for (channel = 0; channel < NUM_CHANNELS; channel++)
4675 write_mchbar32(0x260 + (channel << 10),
4676 (read_mchbar32(0x260 + (channel << 10)) &
4677 ~0xf00000) | 0x8000000 | ((info.
4678 populated_ranks_mask
4679 [channel] & 3) <<
4680 20));
4681
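	/*
	 * Cold boot only: run the JEDEC DDR3 init sequence, then issue one
	 * read per populated rank (jedec_read) and configure each rank. On
	 * S3 resume the DIMMs are presumably still in self-refresh and must
	 * not be re-initialized.
	 */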
4682 if (!s3resume)
4683 jedec_init(&info);
4684
4685 int totalrank = 0;
4686 for (channel = 0; channel < NUM_CHANNELS; channel++)
4687 for (slot = 0; slot < NUM_SLOTS; slot++)
4688 for (rank = 0; rank < NUM_RANKS; rank++)
4689 if (info.populated_ranks[channel][slot][rank]) {
4690 jedec_read(&info, channel, slot, rank,
4691 totalrank, 0xa, 0x400);
4692 totalrank++;
4693 }
4694
4695 write_mchbar8(0x12c, 0x9f);
4696
4697 read_mchbar8(0x271); // 2 // !!!!
4698 write_mchbar8(0x271, 0xe);
4699 read_mchbar8(0x671); // !!!!
4700 write_mchbar8(0x671, 0xe);
4701
4702 if (!s3resume) {
4703 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4704 write_mchbar32(0x294 + (channel << 10),
4705 (info.
4706 populated_ranks_mask[channel] & 3) <<
4707 16);
4708 write_mchbar16(0x298 + (channel << 10),
4709 (info.
4710 populated_ranks[channel][0][0]) | (info.
4711 populated_ranks
4712 [channel]
4713 [0]
4714 [1]
4715 <<
4716 5));
4717 write_mchbar32(0x29c + (channel << 10), 0x77a);
4718 }
4719 read_mchbar32(0x2c0); /// !!!
4720 write_mchbar32(0x2c0, 0x6009cc00);
4721
4722 {
4723 u8 a, b;
4724 a = read_mchbar8(0x243); // !!!!
4725 b = read_mchbar8(0x643); // !!!!
4726 write_mchbar8(0x243, a | 2);
4727 write_mchbar8(0x643, b | 2);
4728 }
4729
4730 write_1d0(7, 0x19b, 3, 1);
4731 write_1d0(7, 0x1c0, 3, 1);
4732 write_1d0(4, 0x1c6, 4, 1);
4733 write_1d0(4, 0x1cc, 4, 1);
4734 read_1d0(0x151, 4); // = 0x408c6d74 // !!!!
4735 write_1d0(4, 0x151, 4, 1);
4736 write_mchbar32(0x584, 0xfffff);
4737 write_mchbar32(0x984, 0xfffff);
4738
4739 for (channel = 0; channel < NUM_CHANNELS; channel++)
4740 for (slot = 0; slot < NUM_SLOTS; slot++)
4741 for (rank = 0; rank < NUM_RANKS; rank++)
4742 if (info.
4743 populated_ranks[channel][slot]
4744 [rank])
4745 config_rank(&info, s3resume,
4746 channel, slot,
4747 rank);
4748
4749 write_mchbar8(0x243, 0x1);
4750 write_mchbar8(0x643, 0x1);
4751 }
4752
4753 /* was == 1 but is common */
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004754 pci_write_config16(NORTHBRIDGE, 0xc8, 3);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004755 write_26c(0, 0x820);
4756 write_26c(1, 0x820);
4757 write_mchbar32(0x130, read_mchbar32(0x130) | 2);
4758 /* end */
4759
4760 if (s3resume) {
4761 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4762 write_mchbar32(0x294 + (channel << 10),
4763 (info.
4764 populated_ranks_mask[channel] & 3) <<
4765 16);
4766 write_mchbar16(0x298 + (channel << 10),
4767 (info.
4768 populated_ranks[channel][0][0]) | (info.
4769 populated_ranks
4770 [channel]
4771 [0]
4772 [1]
4773 <<
4774 5));
4775 write_mchbar32(0x29c + (channel << 10), 0x77a);
4776 }
4777 read_mchbar32(0x2c0); /// !!!
4778 write_mchbar32(0x2c0, 0x6009cc00);
4779 }
4780
4781 write_mchbar32(0xfa4, read_mchbar32(0xfa4) & ~0x01000002);
4782 write_mchbar32(0xfb0, 0x2000e019);
4783
4784#if !REAL
4785 printf("CP16\n");
4786#endif
4787
4788 /* Before training. */
4789 timestamp_add_now(103);
4790
4791 if (!s3resume)
4792 ram_training(&info);
4793
4794 /* After training. */
Paul Menzel9e817bf2015-05-28 07:32:48 +02004795 timestamp_add_now(104);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004796
4797 dump_timings(&info);
4798
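	/*
	 * Program the memory map. MCHBAR 0x111 appears to select the
	 * channel interleaving mode: bits 7:6 and the field in bits 3:2 are
	 * chosen from whether the two channels have matching ranks
	 * (have_match_ranks) and whether both an interleaved and a
	 * non-interleaved region exist.
	 */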
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004799 program_modules_memory_map(&info, 0);
4800 program_total_memory_map(&info);
4801
4802 if (info.non_interleaved_part_mb != 0 && info.interleaved_part_mb != 0)
4803 write_mchbar8(0x111, 0x20 | (0 << 2) | (1 << 6) | (0 << 7));
4804 else if (have_match_ranks(&info, 0, 4) && have_match_ranks(&info, 1, 4))
4805 write_mchbar8(0x111, 0x20 | (3 << 2) | (0 << 6) | (1 << 7));
4806 else if (have_match_ranks(&info, 0, 2) && have_match_ranks(&info, 1, 2))
4807 write_mchbar8(0x111, 0x20 | (3 << 2) | (0 << 6) | (0 << 7));
4808 else
4809 write_mchbar8(0x111, 0x20 | (3 << 2) | (1 << 6) | (0 << 7));
4810
4811 write_mchbar32(0xfac, read_mchbar32(0xfac) & ~0x80000000); // OK
4812 write_mchbar32(0xfb4, 0x4800); // OK
4813 write_mchbar32(0xfb8, (info.revision < 8) ? 0x20 : 0x0); // OK
4814 write_mchbar32(0xe94, 0x7ffff); // OK
4815 write_mchbar32(0xfc0, 0x80002040); // OK
4816 write_mchbar32(0xfc4, 0x701246); // OK
4817 write_mchbar8(0xfc8, read_mchbar8(0xfc8) & ~0x70); // OK
4818 write_mchbar32(0xe5c, 0x1000000 | read_mchbar32(0xe5c)); // OK
4819 write_mchbar32(0x1a70, (read_mchbar32(0x1a70) | 0x00200000) & ~0x00100000); // OK
4820 write_mchbar32(0x50, 0x700b0); // OK
4821 write_mchbar32(0x3c, 0x10); // OK
4822 write_mchbar8(0x1aa8, (read_mchbar8(0x1aa8) & ~0x35) | 0xa); // OK
4823 write_mchbar8(0xff4, read_mchbar8(0xff4) | 0x2); // OK
4824 write_mchbar32(0xff8, (read_mchbar32(0xff8) & ~0xe008) | 0x1020); // OK
4825
4826#if REAL
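	/*
	 * Program what appear to be the VT-d / IOMMU base address registers
	 * (MCHBAR 0xd00/0xd40/0xdc0) and set a bit at offset 0xffc inside
	 * each unit. The IOMMU_BASEn constants are presumably defined in
	 * nehalem.h.
	 */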
4827 write_mchbar32(0xd00, IOMMU_BASE2 | 1);
4828 write_mchbar32(0xd40, IOMMU_BASE1 | 1);
4829 write_mchbar32(0xdc0, IOMMU_BASE4 | 1);
4830
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08004831 write32p(IOMMU_BASE1 | 0xffc, 0x80000000);
4832 write32p(IOMMU_BASE2 | 0xffc, 0xc0000000);
4833 write32p(IOMMU_BASE4 | 0xffc, 0x80000000);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004834
4835#else
4836 {
4837 u32 eax;
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08004838 eax = read32p(0xffc + (read_mchbar32(0xd00) & ~1)) | 0x08000000; // = 0xe911714b// OK
4839 write32p(0xffc + (read_mchbar32(0xd00) & ~1), eax); // OK
4840 eax = read32p(0xffc + (read_mchbar32(0xdc0) & ~1)) | 0x40000000; // = 0xe911714b// OK
4841 write32p(0xffc + (read_mchbar32(0xdc0) & ~1), eax); // OK
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004842 }
4843#endif
4844
4845 {
4846 u32 eax;
4847
4848 eax = info.fsb_frequency / 9;
4849 write_mchbar32(0xfcc, (read_mchbar32(0xfcc) & 0xfffc0000) | (eax * 0x280) | (eax * 0x5000) | eax | 0x40000); // OK
4850 write_mchbar32(0x20, 0x33001); //OK
4851 }
4852
4853 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4854 write_mchbar32(0x220 + (channel << 10), read_mchbar32(0x220 + (channel << 10)) & ~0x7770); //OK
4855 if (info.max_slots_used_in_channel == 1)
4856 write_mchbar16(0x237 + (channel << 10), (read_mchbar16(0x237 + (channel << 10)) | 0x0201)); //OK
4857 else
4858 write_mchbar16(0x237 + (channel << 10), (read_mchbar16(0x237 + (channel << 10)) & ~0x0201)); //OK
4859
4860 write_mchbar8(0x241 + (channel << 10), read_mchbar8(0x241 + (channel << 10)) | 1); // OK
4861
4862 if (info.clock_speed_index <= 1
4863 && (info.silicon_revision == 2
4864 || info.silicon_revision == 3))
4865 write_mchbar32(0x248 + (channel << 10), (read_mchbar32(0x248 + (channel << 10)) | 0x00102000)); // OK
4866 else
4867 write_mchbar32(0x248 + (channel << 10), (read_mchbar32(0x248 + (channel << 10)) & ~0x00102000)); // OK
4868 }
4869
4870 write_mchbar32(0x115, read_mchbar32(0x115) | 0x1000000); // OK
4871
4872 {
4873 u8 al;
4874 al = 0xd;
4875 if (!(info.silicon_revision == 0 || info.silicon_revision == 1))
4876 al += 2;
4877 al |= ((1 << (info.max_slots_used_in_channel - 1)) - 1) << 4;
4878 write_mchbar32(0x210, (al << 16) | 0x20); // OK
4879 }
4880
4881 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4882 write_mchbar32(0x288 + (channel << 10), 0x70605040); // OK
4883 write_mchbar32(0x28c + (channel << 10), 0xfffec080); // OK
4884 write_mchbar32(0x290 + (channel << 10), 0x282091c | ((info.max_slots_used_in_channel - 1) << 0x16)); // OK
4885 }
4886 u32 reg1c;
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004887 pci_read_config32(NORTHBRIDGE, 0x40); // = DEFAULT_EPBAR | 0x001 // OK
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08004888 reg1c = read32p(DEFAULT_EPBAR | 0x01c); // = 0x8001 // OK
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004889 pci_read_config32(NORTHBRIDGE, 0x40); // = DEFAULT_EPBAR | 0x001 // OK
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08004890 write32p(DEFAULT_EPBAR | 0x01c, reg1c); // OK
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004891 read_mchbar8(0xe08); // = 0x0
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004892 pci_read_config32(NORTHBRIDGE, 0xe4); // = 0x316126
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004893 write_mchbar8(0x1210, read_mchbar8(0x1210) | 2); // OK
4894 write_mchbar32(0x1200, 0x8800440); // OK
4895 write_mchbar32(0x1204, 0x53ff0453); // OK
4896 write_mchbar32(0x1208, 0x19002043); // OK
4897 write_mchbar16(0x1214, 0x320); // OK
4898
4899 if (info.revision == 0x10 || info.revision == 0x11) {
4900 write_mchbar16(0x1214, 0x220); // OK
4901 write_mchbar8(0x1210, read_mchbar8(0x1210) | 0x40); // OK
4902 }
4903
4904 write_mchbar8(0x1214, read_mchbar8(0x1214) | 0x4); // OK
4905 write_mchbar8(0x120c, 0x1); // OK
4906 write_mchbar8(0x1218, 0x3); // OK
4907 write_mchbar8(0x121a, 0x3); // OK
4908 write_mchbar8(0x121c, 0x3); // OK
4909 write_mchbar16(0xc14, 0x0); // OK
4910 write_mchbar16(0xc20, 0x0); // OK
4911 write_mchbar32(0x1c, 0x0); // OK
4912
4913 /* revision dependent here. */
4914
4915 write_mchbar16(0x1230, read_mchbar16(0x1230) | 0x1f07); // OK
4916
4917 if (info.uma_enabled)
4918 write_mchbar32(0x11f4, read_mchbar32(0x11f4) | 0x10000000); // OK
4919
4920 write_mchbar16(0x1230, read_mchbar16(0x1230) | 0x8000); // OK
4921 write_mchbar8(0x1214, read_mchbar8(0x1214) | 1); // OK
4922
4923 u8 bl, ebpb;
4924 u16 reg_1020;
4925
4926 reg_1020 = read_mchbar32(0x1020); // = 0x6c733c // OK
4927 write_mchbar8(0x1070, 0x1); // OK
4928
4929 write_mchbar32(0x1000, 0x100); // OK
4930 write_mchbar8(0x1007, 0x0); // OK
4931
4932 if (reg_1020 != 0) {
4933 write_mchbar16(0x1018, 0x0); // OK
4934 bl = reg_1020 >> 8;
4935 ebpb = reg_1020 & 0xff;
4936 } else {
4937 ebpb = 0;
4938 bl = 8;
4939 }
4940
4941 rdmsr(0x1a2);
4942
4943 write_mchbar32(0x1014, 0xffffffff); // OK
4944
4945 write_mchbar32(0x1010, ((((ebpb + 0x7d) << 7) / bl) & 0xff) * (!!reg_1020)); // OK
4946
4947 write_mchbar8(0x101c, 0xb8); // OK
4948
4949 write_mchbar8(0x123e, (read_mchbar8(0x123e) & 0xf) | 0x60); // OK
4950 if (reg_1020 != 0) {
4951 write_mchbar32(0x123c, (read_mchbar32(0x123c) & ~0x00900000) | 0x600000); // OK
4952 write_mchbar8(0x101c, 0xb8); // OK
4953 }
4954
4955 setup_heci_uma(&info);
4956
4957 if (info.uma_enabled) {
4958 u16 ax;
4959 write_mchbar32(0x11b0, read_mchbar32(0x11b0) | 0x4000); // OK
4960 write_mchbar32(0x11b4, read_mchbar32(0x11b4) | 0x4000); // OK
4961 write_mchbar16(0x1190, read_mchbar16(0x1190) | 0x4000); // OK
4962
4963 ax = read_mchbar16(0x1190) & 0xf00; // = 0x480a // OK
4964 write_mchbar16(0x1170, ax | (read_mchbar16(0x1170) & 0x107f) | 0x4080); // OK
4965 write_mchbar16(0x1170, read_mchbar16(0x1170) | 0x1000); // OK
4966#if REAL
4967 udelay(1000);
4968#endif
4969 u16 ecx;
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02004970 for (ecx = 0xffff; ecx && (read_mchbar16(0x1170) & 0x1000); ecx--); // OK
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004971 write_mchbar16(0x1190, read_mchbar16(0x1190) & ~0x4000); // OK
4972 }
4973
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004974 pci_write_config8(SOUTHBRIDGE, GEN_PMCON_2,
4975 pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2) & ~0x80);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004976 udelay(10000);
Vladimir Serbinenkoc7db28c2014-02-19 22:09:33 +01004977 write_mchbar16(0x2ca8, 0x8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004978
4979#if REAL
4980 udelay(1000);
4981 dump_timings(&info);
Vladimir Serbinenkob16f0922014-06-07 16:27:27 +02004982 cbmem_wasnot_inited = cbmem_recovery(s3resume);
4983
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004984 if (!s3resume)
4985 save_timings(&info);
Vladimir Serbinenkob16f0922014-06-07 16:27:27 +02004986 if (s3resume && cbmem_wasnot_inited) {
4987 u32 reg32;
4988 printk(BIOS_ERR, "Failed S3 resume.\n");
4989 ram_check(0x100000, 0x200000);
4990
4991 /* Clear SLP_TYPE. */
4992 reg32 = inl(DEFAULT_PMBASE + 0x04);
4993 outl(reg32 & ~(7 << 10), DEFAULT_PMBASE + 0x04);
4994
4995 /* Failed S3 resume, reset to come up cleanly */
4996 outb(0xe, 0xcf9);
Patrick Georgi546953c2014-11-29 10:38:17 +01004997 halt();
Vladimir Serbinenkob16f0922014-06-07 16:27:27 +02004998 }
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004999#endif
5000}