/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2013 Vladimir Serbinenko.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

/* Please don't remove this. It's needed for debugging and reverse
 * engineering more nehalem variants in the future. */
#ifndef REAL
#define REAL 1
#endif

#if REAL
#include <stdlib.h>
#include <compiler.h>
#include <console/console.h>
#include <string.h>
#include <arch/io.h>
#include <cpu/x86/msr.h>
#include <cbmem.h>
#include <arch/cbfs.h>
#include <cbfs.h>
#include <ip_checksum.h>
#include <pc80/mc146818rtc.h>
#include <device/pci_def.h>
#include <device/device.h>
#include <arch/cpu.h>
#include <halt.h>
#include <spd.h>
#include "raminit.h"
#include "chip.h"
#include <timestamp.h>
#include <cpu/x86/mtrr.h>
#include <cpu/intel/speedstep.h>
#include <cpu/intel/turbo.h>
#include <mrc_cache.h>
#endif

#if !REAL
typedef unsigned char u8;
typedef unsigned short u16;
typedef unsigned int u32;
typedef u32 device_t;
#endif

#include "nehalem.h"

#include <southbridge/intel/common/rcba.h>
#include "southbridge/intel/ibexpeak/me.h"

#if REAL
#include <delay.h>
#endif

#define NORTHBRIDGE PCI_DEV(0, 0, 0)
#define SOUTHBRIDGE PCI_DEV(0, 0x1f, 0)
#define GMA PCI_DEV(0, 0x2, 0x0)
#define HECIDEV PCI_DEV(0, 0x16, 0)
#define HECIBAR 0x10

#define FOR_ALL_RANKS \
	for (channel = 0; channel < NUM_CHANNELS; channel++) \
		for (slot = 0; slot < NUM_SLOTS; slot++) \
			for (rank = 0; rank < NUM_RANKS; rank++)

#define FOR_POPULATED_RANKS \
	for (channel = 0; channel < NUM_CHANNELS; channel++) \
		for (slot = 0; slot < NUM_SLOTS; slot++) \
			for (rank = 0; rank < NUM_RANKS; rank++) \
				if (info->populated_ranks[channel][slot][rank])

#define FOR_POPULATED_RANKS_BACKWARDS \
	for (channel = NUM_CHANNELS - 1; channel >= 0; channel--) \
		for (slot = 0; slot < NUM_SLOTS; slot++) \
			for (rank = 0; rank < NUM_RANKS; rank++) \
				if (info->populated_ranks[channel][slot][rank])

/* [REG_178][CHANNEL][2 * SLOT + RANK][LANE] */
typedef struct {
	u8 smallest;
	u8 largest;
} timing_bounds_t[2][2][2][9];

#define MRC_CACHE_VERSION 1

struct ram_training {
	/* [TM][CHANNEL][SLOT][RANK][LANE] */
	u16 lane_timings[4][2][2][2][9];
	u16 reg_178;
	u16 reg_10b;

	u8 reg178_center;
	u8 reg178_smallest;
	u8 reg178_largest;
	timing_bounds_t timing_bounds[2];
	u16 timing_offset[2][2][2][9];
	u16 timing2_offset[2][2][2][9];
	u16 timing2_bounds[2][2][2][9][2];
	u8 reg274265[2][3];	/* [CHANNEL][REGISTER] */
	u8 reg2ca9_bit0;
	u32 reg_6dc;
	u32 reg_6e8;
};

#if !REAL
#include "raminit_fake.c"
#else

#include <lib.h>		/* Prototypes */

static inline void write_mchbar32(u32 addr, u32 val)
{
	MCHBAR32(addr) = val;
}

static inline void write_mchbar16(u32 addr, u16 val)
{
	MCHBAR16(addr) = val;
}

static inline void write_mchbar8(u32 addr, u8 val)
{
	MCHBAR8(addr) = val;
}

static inline u32 read_mchbar32(u32 addr)
{
	return MCHBAR32(addr);
}

static inline u16 read_mchbar16(u32 addr)
{
	return MCHBAR16(addr);
}

static inline u8 read_mchbar8(u32 addr)
{
	return MCHBAR8(addr);
}

static void clflush(u32 addr)
{
	asm volatile ("clflush (%0)"::"r" (addr));
}

typedef struct _u128 {
	u64 lo;
	u64 hi;
} u128;

static void read128(u32 addr, u64 * out)
{
	u128 ret;
	u128 stor;
	asm volatile ("movdqu %%xmm0, %0\n"
		      "movdqa (%2), %%xmm0\n"
		      "movdqu %%xmm0, %1\n"
		      "movdqu %0, %%xmm0":"+m" (stor), "=m"(ret):"r"(addr));
	out[0] = ret.lo;
	out[1] = ret.hi;
}

#endif

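/*
 * The 0x1d0 block appears to be an indirect register window into the
 * undocumented, reverse-engineered DDR I/O logic: a command/address
 * register at MCHBAR+0x1d0 with what looks like a busy flag in bit 23,
 * a write-data register at +0x1d4 and a read-data register at +0x1d8.
 * write_1d0()/read_1d0() below only drive that handshake; the meaning of
 * the individual fields is not known.
 */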
/* OK */
static void write_1d0(u32 val, u16 addr, int bits, int flag)
{
	write_mchbar32(0x1d0, 0);
	while (read_mchbar32(0x1d0) & 0x800000);
	write_mchbar32(0x1d4,
		       (val & ((1 << bits) - 1)) | (2 << bits) | (flag << bits));
	write_mchbar32(0x1d0, 0x40000000 | addr);
	while (read_mchbar32(0x1d0) & 0x800000);
}

/* OK */
static u16 read_1d0(u16 addr, int split)
{
	u32 val;
	write_mchbar32(0x1d0, 0);
	while (read_mchbar32(0x1d0) & 0x800000);
	write_mchbar32(0x1d0,
		       0x80000000 | (((read_mchbar8(0x246) >> 2) & 3) +
				     0x361 - addr));
	while (read_mchbar32(0x1d0) & 0x800000);
	val = read_mchbar32(0x1d8);
	write_1d0(0, 0x33d, 0, 0);
	write_1d0(0, 0x33d, 0, 0);
	val &= ((1 << split) - 1);
	// printk (BIOS_ERR, "R1D0C [%x] => %x\n", addr, val);
	return val;
}

static void write32p(uintptr_t addr, uint32_t val)
{
	write32((void *)addr, val);
}

static uint32_t read32p(uintptr_t addr)
{
	return read32((void *)addr);
}

static void sfence(void)
{
#if REAL
	asm volatile ("sfence");
#endif
}

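/*
 * Each data lane of each slot/rank has a set of per-lane timing registers
 * behind the indirect windows. get_lane_offset() computes the base offset
 * of a lane (lane 8, presumably the ECC lane, is placed separately), and
 * get_timing_register_addr() adds a per-timing-register offset on top.
 * The constants were obtained by reverse engineering and are used as-is.
 */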
static inline u16 get_lane_offset(int slot, int rank, int lane)
{
	return 0x124 * lane + ((lane & 4) ? 0x23e : 0) + 11 * rank + 22 * slot -
	    0x452 * (lane == 8);
}

static inline u16 get_timing_register_addr(int lane, int tm, int slot, int rank)
{
	const u16 offs[] = { 0x1d, 0xa8, 0xe6, 0x5c };
	return get_lane_offset(slot, rank, lane) + offs[(tm + 3) % 4];
}

#if REAL
static u32 gav_real(int line, u32 in)
{
	// printk (BIOS_DEBUG, "%d: GAV: %x\n", line, in);
	return in;
}

#define gav(x) gav_real (__LINE__, (x))
#endif
struct raminfo {
	u16 clock_speed_index;	/* clock_speed (REAL, not DDR) / 133.(3) - 3 */
	u16 fsb_frequency;	/* in 1.(1)/2 MHz. */
	u8 is_x16_module[2][2];	/* [CHANNEL][SLOT] */
	u8 density[2][2];	/* [CHANNEL][SLOT] */
	u8 populated_ranks[2][2][2];	/* [CHANNEL][SLOT][RANK] */
	int rank_start[2][2][2];
	u8 cas_latency;
	u8 board_lane_delay[9];
	u8 use_ecc;
	u8 revision;
	u8 max_supported_clock_speed_index;
	u8 uma_enabled;
	u8 spd[2][2][151];	/* [CHANNEL][SLOT][BYTE] */
	u8 silicon_revision;
	u8 populated_ranks_mask[2];
	u8 max_slots_used_in_channel;
	u8 mode4030[2];
	u16 avg4044[2];
	u16 max4048[2];
	unsigned total_memory_mb;
	unsigned interleaved_part_mb;
	unsigned non_interleaved_part_mb;

	u32 heci_bar;
	u64 heci_uma_addr;
	unsigned memory_reserved_for_heci_mb;

	struct ram_training training;
	u32 last_500_command[2];

	u32 delay46_ps[2];
	u32 delay54_ps[2];
	u8 revision_flag_1;
	u8 some_delay_1_cycle_floor;
	u8 some_delay_2_halfcycles_ceil;
	u8 some_delay_3_ps_rounded;

	const struct ram_training *cached_training;
};

static void
write_500(struct raminfo *info, int channel, u32 val, u16 addr, int bits,
	  int flag);

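/*
 * The 0x500 block seems to be the per-channel counterpart of the 0x1d0
 * window above: command register at MCHBAR+0x500+(channel<<10), write
 * data at +0x504 and read data at +0x508, again with an apparent busy
 * flag in bit 23. last_500_command[] tracks whether the previous access
 * was a read (0x80000000) or a write (0x40000000) so that a dummy write
 * to register 0xb61 can be issued when switching direction.
 */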
/* OK */
static u16
read_500(struct raminfo *info, int channel, u16 addr, int split)
{
	u32 val;
	info->last_500_command[channel] = 0x80000000;
	write_mchbar32(0x500 + (channel << 10), 0);
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
	write_mchbar32(0x500 + (channel << 10),
		       0x80000000 |
		       (((read_mchbar8(0x246 + (channel << 10)) >> 2) &
			 3) + 0xb88 - addr));
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
	val = read_mchbar32(0x508 + (channel << 10));
	return val & ((1 << split) - 1);
}

/* OK */
static void
write_500(struct raminfo *info, int channel, u32 val, u16 addr, int bits,
	  int flag)
{
	if (info->last_500_command[channel] == 0x80000000) {
		info->last_500_command[channel] = 0x40000000;
		write_500(info, channel, 0, 0xb61, 0, 0);
	}
	write_mchbar32(0x500 + (channel << 10), 0);
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
	write_mchbar32(0x504 + (channel << 10),
		       (val & ((1 << bits) - 1)) | (2 << bits) | (flag << bits));
	write_mchbar32(0x500 + (channel << 10), 0x40000000 | addr);
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
}

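/*
 * Minimal read/write sanity check of one rank. Ranks appear to be mapped
 * at rank << 28 (256 MiB granularity) while the early memory map is in
 * place, so the test writes an alternating all-zeros/all-ones pattern
 * derived from the mask below into the first few cache lines, reads it
 * back and returns a bitmask of the byte lanes that matched (bits 0-3
 * for the even dword, bits 4-7 for the odd one).
 */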
static int rw_test(int rank)
{
	const u32 mask = 0xf00fc33c;
	int ok = 0xff;
	int i;
	for (i = 0; i < 64; i++)
		write32p((rank << 28) | (i << 2), 0);
	sfence();
	for (i = 0; i < 64; i++)
		gav(read32p((rank << 28) | (i << 2)));
	sfence();
	for (i = 0; i < 32; i++) {
		u32 pat = (((mask >> i) & 1) ? 0xffffffff : 0);
		write32p((rank << 28) | (i << 3), pat);
		write32p((rank << 28) | (i << 3) | 4, pat);
	}
	sfence();
	for (i = 0; i < 32; i++) {
		u8 pat = (((mask >> i) & 1) ? 0xff : 0);
		int j;
		u32 val;
		gav(val = read32p((rank << 28) | (i << 3)));
		for (j = 0; j < 4; j++)
			if (((val >> (j * 8)) & 0xff) != pat)
				ok &= ~(1 << j);
		gav(val = read32p((rank << 28) | (i << 3) | 4));
		for (j = 0; j < 4; j++)
			if (((val >> (j * 8)) & 0xff) != pat)
				ok &= ~(16 << j);
	}
	sfence();
	for (i = 0; i < 64; i++)
		write32p((rank << 28) | (i << 2), 0);
	sfence();
	for (i = 0; i < 64; i++)
		gav(read32p((rank << 28) | (i << 2)));

	return ok;
}

static void
program_timings(struct raminfo *info, u16 base, int channel, int slot, int rank)
{
	int lane;
	for (lane = 0; lane < 8; lane++) {
		write_500(info, channel,
			  base +
			  info->training.lane_timings[2][channel][slot][rank][lane],
			  get_timing_register_addr(lane, 2, slot, rank), 9, 0);
		write_500(info, channel,
			  base +
			  info->training.lane_timings[3][channel][slot][rank][lane],
			  get_timing_register_addr(lane, 3, slot, rank), 9, 0);
	}
}

static void write_26c(int channel, u16 si)
{
	write_mchbar32(0x26c + (channel << 10), 0x03243f35);
	write_mchbar32(0x268 + (channel << 10), 0xcfc00000 | (si << 9));
	write_mchbar16(0x2b9 + (channel << 10), si);
}

static u32 get_580(int channel, u8 addr)
{
	u32 ret;
	gav(read_1d0(0x142, 3));
	write_mchbar8(0x5ff, 0x0);	/* OK */
	write_mchbar8(0x5ff, 0x80);	/* OK */
	write_mchbar32(0x580 + (channel << 10), 0x8493c012 | addr);
	write_mchbar8(0x580 + (channel << 10),
		      read_mchbar8(0x580 + (channel << 10)) | 1);
	while (!((ret = read_mchbar32(0x580 + (channel << 10))) & 0x10000));
	write_mchbar8(0x580 + (channel << 10),
		      read_mchbar8(0x580 + (channel << 10)) & ~1);
	return ret;
}

const int cached_config = 0;

#define NUM_CHANNELS 2
#define NUM_SLOTS 2
#define NUM_RANKS 2
#define RANK_SHIFT 28
#define CHANNEL_SHIFT 10

#include "raminit_tables.c"

static void seq9(struct raminfo *info, int channel, int slot, int rank)
{
	int i, lane;

	for (i = 0; i < 2; i++)
		for (lane = 0; lane < 8; lane++)
			write_500(info, channel,
				  info->training.lane_timings[i + 1][channel][slot][rank][lane],
				  get_timing_register_addr(lane, i + 1, slot, rank),
				  9, 0);

	write_1d0(1, 0x103, 6, 1);
	for (lane = 0; lane < 8; lane++)
		write_500(info, channel,
			  info->training.lane_timings[0][channel][slot][rank][lane],
			  get_timing_register_addr(lane, 0, slot, rank), 9, 0);

	for (i = 0; i < 2; i++) {
		for (lane = 0; lane < 8; lane++)
			write_500(info, channel,
				  info->training.lane_timings[i + 1][channel][slot][rank][lane],
				  get_timing_register_addr(lane, i + 1, slot, rank),
				  9, 0);
		gav(get_580(channel, ((i + 1) << 2) | (rank << 5)));
	}

	gav(read_1d0(0x142, 3));	// = 0x10408118
	write_mchbar8(0x5ff, 0x0);	/* OK */
	write_mchbar8(0x5ff, 0x80);	/* OK */
	write_1d0(0x2, 0x142, 3, 1);
	for (lane = 0; lane < 8; lane++) {
		// printk (BIOS_ERR, "before: %x\n", info->training.lane_timings[2][channel][slot][rank][lane]);
		info->training.lane_timings[2][channel][slot][rank][lane] =
		    read_500(info, channel,
			     get_timing_register_addr(lane, 2, slot, rank), 9);
		// printk (BIOS_ERR, "after: %x\n", info->training.lane_timings[2][channel][slot][rank][lane]);
		info->training.lane_timings[3][channel][slot][rank][lane] =
		    info->training.lane_timings[2][channel][slot][rank][lane] +
		    0x20;
	}
}

static int count_ranks_in_channel(struct raminfo *info, int channel)
{
	int slot, rank;
	int res = 0;
	for (slot = 0; slot < NUM_SLOTS; slot++)
		for (rank = 0; rank < NUM_SLOTS; rank++)
			res += info->populated_ranks[channel][slot][rank];
	return res;
}

static void
config_rank(struct raminfo *info, int s3resume, int channel, int slot, int rank)
{
	int add;

	write_1d0(0, 0x178, 7, 1);
	seq9(info, channel, slot, rank);
	program_timings(info, 0x80, channel, slot, rank);

	if (channel == 0)
		add = count_ranks_in_channel(info, 1);
	else
		add = 0;
	if (!s3resume)
		gav(rw_test(rank + add));
	program_timings(info, 0x00, channel, slot, rank);
	if (!s3resume)
		gav(rw_test(rank + add));
	if (!s3resume)
		gav(rw_test(rank + add));
	write_1d0(0, 0x142, 3, 1);
	write_1d0(0, 0x103, 6, 1);

	gav(get_580(channel, 0xc | (rank << 5)));
	gav(read_1d0(0x142, 3));

	write_mchbar8(0x5ff, 0x0);	/* OK */
	write_mchbar8(0x5ff, 0x80);	/* OK */
}

static void set_4cf(struct raminfo *info, int channel, u8 val)
{
	gav(read_500(info, channel, 0x4cf, 4));	// = 0xc2300cf9
	write_500(info, channel, val, 0x4cf, 4, 1);
	gav(read_500(info, channel, 0x659, 4));	// = 0x80300839
	write_500(info, channel, val, 0x659, 4, 1);
	gav(read_500(info, channel, 0x697, 4));	// = 0x80300839
	write_500(info, channel, val, 0x697, 4, 1);
}

static void set_334(int zero)
{
	int j, k, channel;
	const u32 val3[] = { 0x2a2b2a2b, 0x26272627, 0x2e2f2e2f, 0x2a2b };
	u32 vd8[2][16];

	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		for (j = 0; j < 4; j++) {
			u32 a = (j == 1) ? 0x29292929 : 0x31313131;
			u32 lmask = (j == 3) ? 0xffff : 0xffffffff;
			u16 c;
			if ((j == 0 || j == 3) && zero)
				c = 0;
			else if (j == 3)
				c = 0x5f;
			else
				c = 0x5f5f;

			for (k = 0; k < 2; k++) {
				write_mchbar32(0x138 + 8 * k,
					       (channel << 26) | (j << 24));
				gav(vd8[1][(channel << 3) | (j << 1) | k] =
				    read_mchbar32(0x138 + 8 * k));
				gav(vd8[0][(channel << 3) | (j << 1) | k] =
				    read_mchbar32(0x13c + 8 * k));
			}

			write_mchbar32(0x334 + (channel << 10) + (j * 0x44),
				       zero ? 0 : val3[j]);
			write_mchbar32(0x32c + (channel << 10) + (j * 0x44),
				       zero ? 0 : (0x18191819 & lmask));
			write_mchbar16(0x34a + (channel << 10) + (j * 0x44), c);
			write_mchbar32(0x33c + (channel << 10) + (j * 0x44),
				       zero ? 0 : (a & lmask));
			write_mchbar32(0x344 + (channel << 10) + (j * 0x44),
				       zero ? 0 : (a & lmask));
		}
	}

	write_mchbar32(0x130, read_mchbar32(0x130) | 1);	/* OK */
	while (read_mchbar8(0x130) & 1);	/* OK */
}

static void rmw_1d0(u16 addr, u32 and, u32 or, int split, int flag)
{
	u32 v;
	v = read_1d0(addr, split);
	write_1d0((v & and) | or, addr, split, flag);
}

static int find_highest_bit_set(u16 val)
{
	int i;
	for (i = 15; i >= 0; i--)
		if (val & (1 << i))
			return i;
	return -1;
}

static int find_lowest_bit_set32(u32 val)
{
	int i;
	for (i = 0; i < 32; i++)
		if (val & (1 << i))
			return i;
	return -1;
}

enum {
	DEVICE_TYPE = 2,
	MODULE_TYPE = 3,
	DENSITY = 4,
	RANKS_AND_DQ = 7,
	MEMORY_BUS_WIDTH = 8,
	TIMEBASE_DIVIDEND = 10,
	TIMEBASE_DIVISOR = 11,
	CYCLETIME = 12,

	CAS_LATENCIES_LSB = 14,
	CAS_LATENCIES_MSB = 15,
	CAS_LATENCY_TIME = 16,
	THERMAL_AND_REFRESH = 31,
	REFERENCE_RAW_CARD_USED = 62,
	RANK1_ADDRESS_MAPPING = 63
};

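/*
 * Pick a common operating point from the SPDs: the fastest clock the
 * controller and all DIMMs support, and the lowest common CAS latency
 * usable at that clock. As a hypothetical example, a DDR3-1333 module
 * with a 1/8 ns medium timebase would report tCKmin = 12 MTB = 1500 ps
 * and tAAmin = 108 MTB = 13500 ps, giving CL = CEIL_DIV(13500, 1500) = 9
 * if the module advertises CL9 in its supported-CL bitmap.
 */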
static void calculate_timings(struct raminfo *info)
{
	unsigned cycletime;
	unsigned cas_latency_time;
	unsigned supported_cas_latencies;
	unsigned channel, slot;
	unsigned clock_speed_index;
	unsigned min_cas_latency;
	unsigned cas_latency;
	unsigned max_clock_index;

	/* Find common CAS latency */
	supported_cas_latencies = 0x3fe;
	for (channel = 0; channel < NUM_CHANNELS; channel++)
		for (slot = 0; slot < NUM_SLOTS; slot++)
			if (info->populated_ranks[channel][slot][0])
				supported_cas_latencies &=
				    2 *
				    (info->spd[channel][slot][CAS_LATENCIES_LSB] |
				     (info->spd[channel][slot][CAS_LATENCIES_MSB] << 8));

	max_clock_index = min(3, info->max_supported_clock_speed_index);

	cycletime = min_cycletime[max_clock_index];
	cas_latency_time = min_cas_latency_time[max_clock_index];

	for (channel = 0; channel < NUM_CHANNELS; channel++)
		for (slot = 0; slot < NUM_SLOTS; slot++)
			if (info->populated_ranks[channel][slot][0]) {
				unsigned timebase;
				timebase =
				    1000 *
				    info->spd[channel][slot][TIMEBASE_DIVIDEND] /
				    info->spd[channel][slot][TIMEBASE_DIVISOR];
				cycletime =
				    max(cycletime,
					timebase *
					info->spd[channel][slot][CYCLETIME]);
				cas_latency_time =
				    max(cas_latency_time,
					timebase *
					info->spd[channel][slot][CAS_LATENCY_TIME]);
			}
	for (clock_speed_index = 0; clock_speed_index < 3; clock_speed_index++) {
		if (cycletime == min_cycletime[clock_speed_index])
			break;
		if (cycletime > min_cycletime[clock_speed_index]) {
			clock_speed_index--;
			cycletime = min_cycletime[clock_speed_index];
			break;
		}
	}
	min_cas_latency = CEIL_DIV(cas_latency_time, cycletime);
	cas_latency = 0;
	while (supported_cas_latencies) {
		cas_latency = find_highest_bit_set(supported_cas_latencies) + 3;
		if (cas_latency <= min_cas_latency)
			break;
		supported_cas_latencies &=
		    ~(1 << find_highest_bit_set(supported_cas_latencies));
	}

	if (cas_latency != min_cas_latency && clock_speed_index)
		clock_speed_index--;

	if (cas_latency * min_cycletime[clock_speed_index] > 20000)
		die("Couldn't configure DRAM");
	info->clock_speed_index = clock_speed_index;
	info->cas_latency = cas_latency;
}

static void program_base_timings(struct raminfo *info)
{
	unsigned channel;
	unsigned slot, rank, lane;
	unsigned extended_silicon_revision;
	int i;

	extended_silicon_revision = info->silicon_revision;
	if (info->silicon_revision == 0)
		for (channel = 0; channel < NUM_CHANNELS; channel++)
			for (slot = 0; slot < NUM_SLOTS; slot++)
				if ((info->spd[channel][slot][MODULE_TYPE] & 0xF) == 3)
					extended_silicon_revision = 4;

	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		for (slot = 0; slot < NUM_SLOTS; slot++)
			for (rank = 0; rank < NUM_SLOTS; rank++) {
				int card_timing_2;
				if (!info->populated_ranks[channel][slot][rank])
					continue;

				for (lane = 0; lane < 9; lane++) {
					int tm_reg;
					int card_timing;

					card_timing = 0;
					if ((info->spd[channel][slot][MODULE_TYPE] & 0xF) == 3) {
						int reference_card;
						reference_card =
						    info->spd[channel][slot][REFERENCE_RAW_CARD_USED] & 0x1f;
						if (reference_card == 3)
							card_timing =
							    u16_ffd1188[0][lane][info->clock_speed_index];
						if (reference_card == 5)
							card_timing =
							    u16_ffd1188[1][lane][info->clock_speed_index];
					}

					info->training.lane_timings[0][channel][slot][rank][lane] =
					    u8_FFFD1218[info->clock_speed_index];
					info->training.lane_timings[1][channel][slot][rank][lane] = 256;

					for (tm_reg = 2; tm_reg < 4; tm_reg++)
						info->training.lane_timings[tm_reg][channel][slot][rank][lane] =
						    u8_FFFD1240[channel][extended_silicon_revision][lane][2 * slot + rank][info->clock_speed_index]
						    + info->max4048[channel]
						    + u8_FFFD0C78[channel][extended_silicon_revision][info->mode4030[channel]][slot][rank][info->clock_speed_index]
						    + card_timing;
					for (tm_reg = 0; tm_reg < 4; tm_reg++)
						write_500(info, channel,
							  info->training.lane_timings[tm_reg][channel][slot][rank][lane],
							  get_timing_register_addr(lane, tm_reg, slot, rank),
							  9, 0);
				}

				card_timing_2 = 0;
				if (!(extended_silicon_revision != 4
				      || (info->populated_ranks_mask[channel] & 5) == 5)) {
					if ((info->spd[channel][slot][REFERENCE_RAW_CARD_USED] & 0x1F) == 3)
						card_timing_2 =
						    u16_FFFE0EB8[0][info->clock_speed_index];
					if ((info->spd[channel][slot][REFERENCE_RAW_CARD_USED] & 0x1F) == 5)
						card_timing_2 =
						    u16_FFFE0EB8[1][info->clock_speed_index];
				}

				for (i = 0; i < 3; i++)
					write_500(info, channel,
						  (card_timing_2 +
						   info->max4048[channel]
						   + u8_FFFD0EF8[channel][extended_silicon_revision][info->mode4030[channel]][info->clock_speed_index]),
						  u16_fffd0c50[i][slot][rank],
						  8, 1);
				write_500(info, channel,
					  (info->max4048[channel] +
					   u8_FFFD0C78[channel][extended_silicon_revision][info->mode4030[channel]][slot][rank][info->clock_speed_index]),
					  u16_fffd0c70[slot][rank], 7, 1);
			}
		if (!info->populated_ranks_mask[channel])
			continue;
		for (i = 0; i < 3; i++)
			write_500(info, channel,
				  (info->max4048[channel] +
				   info->avg4044[channel]
				   + u8_FFFD17E0[channel][extended_silicon_revision][info->mode4030[channel]][info->clock_speed_index]),
				  u16_fffd0c68[i], 8, 1);
	}
}

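/*
 * Helpers for the fixed-point units used throughout this file.
 * clock_speed_index apparently encodes the memory clock as
 * (index + 3) * 133 MHz; e.g. index 0 gives a 1250 ps transfer time
 * (DDR3-800) and index 3 gives 625 ps (DDR3-1600).
 */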
static unsigned int fsbcycle_ps(struct raminfo *info)
{
	return 900000 / info->fsb_frequency;
}

/* The time of DDR transfer in ps. */
static unsigned int halfcycle_ps(struct raminfo *info)
{
	return 3750 / (info->clock_speed_index + 3);
}

/* The time of clock cycle in ps. */
static unsigned int cycle_ps(struct raminfo *info)
{
	return 2 * halfcycle_ps(info);
}

/* Frequency in 1.(1)=10/9 MHz units. */
static unsigned frequency_11(struct raminfo *info)
{
	return (info->clock_speed_index + 3) * 120;
}

/* Frequency in 0.1 MHz units. */
static unsigned frequency_01(struct raminfo *info)
{
	return 100 * frequency_11(info) / 9;
}

static unsigned ps_to_halfcycles(struct raminfo *info, unsigned int ps)
{
	return (frequency_11(info) * 2) * ps / 900000;
}

static unsigned ns_to_cycles(struct raminfo *info, unsigned int ns)
{
	return (frequency_11(info)) * ns / 900;
}

static void compute_derived_timings(struct raminfo *info)
{
	unsigned channel, slot, rank;
	int extended_silicon_revision;
	int some_delay_1_ps;
	int some_delay_2_ps;
	int some_delay_2_halfcycles_ceil;
	int some_delay_2_halfcycles_floor;
	int some_delay_3_ps;
	int some_delay_3_halfcycles;
	int some_delay_3_ps_rounded;
	int some_delay_1_cycle_ceil;
	int some_delay_1_cycle_floor;

	some_delay_3_halfcycles = 0;
	some_delay_3_ps_rounded = 0;
	extended_silicon_revision = info->silicon_revision;
	if (!info->silicon_revision)
		for (channel = 0; channel < NUM_CHANNELS; channel++)
			for (slot = 0; slot < NUM_SLOTS; slot++)
				if ((info->spd[channel][slot][MODULE_TYPE] & 0xF) == 3)
					extended_silicon_revision = 4;
	if (info->board_lane_delay[7] < 5)
		info->board_lane_delay[7] = 5;
	info->revision_flag_1 = 2;
	if (info->silicon_revision == 2 || info->silicon_revision == 3)
		info->revision_flag_1 = 0;
	if (info->revision < 16)
		info->revision_flag_1 = 0;

	if (info->revision < 8)
		info->revision_flag_1 = 0;
	if (info->revision >= 8 && (info->silicon_revision == 0
				    || info->silicon_revision == 1))
		some_delay_2_ps = 735;
	else
		some_delay_2_ps = 750;

	if (info->revision >= 0x10 && (info->silicon_revision == 0
				       || info->silicon_revision == 1))
		some_delay_1_ps = 3929;
	else
		some_delay_1_ps = 3490;

	some_delay_1_cycle_floor = some_delay_1_ps / cycle_ps(info);
	some_delay_1_cycle_ceil = some_delay_1_ps / cycle_ps(info);
	if (some_delay_1_ps % cycle_ps(info))
		some_delay_1_cycle_ceil++;
	else
		some_delay_1_cycle_floor--;
	info->some_delay_1_cycle_floor = some_delay_1_cycle_floor;
	if (info->revision_flag_1)
		some_delay_2_ps = halfcycle_ps(info) >> 6;
	some_delay_2_ps +=
	    max(some_delay_1_ps - 30,
		2 * halfcycle_ps(info) * (some_delay_1_cycle_ceil - 1) + 1000) +
	    375;
	some_delay_3_ps =
	    halfcycle_ps(info) - some_delay_2_ps % halfcycle_ps(info);
	if (info->revision_flag_1) {
		if (some_delay_3_ps < 150)
			some_delay_3_halfcycles = 0;
		else
			some_delay_3_halfcycles =
			    (some_delay_3_ps << 6) / halfcycle_ps(info);
		some_delay_3_ps_rounded =
		    halfcycle_ps(info) * some_delay_3_halfcycles >> 6;
	}
	some_delay_2_halfcycles_ceil =
	    (some_delay_2_ps + halfcycle_ps(info) - 1) / halfcycle_ps(info) -
	    2 * (some_delay_1_cycle_ceil - 1);
	if (info->revision_flag_1 && some_delay_3_ps < 150)
		some_delay_2_halfcycles_ceil++;
	some_delay_2_halfcycles_floor = some_delay_2_halfcycles_ceil;
	if (info->revision < 0x10)
		some_delay_2_halfcycles_floor =
		    some_delay_2_halfcycles_ceil - 1;
	if (!info->revision_flag_1)
		some_delay_2_halfcycles_floor++;
	info->some_delay_2_halfcycles_ceil = some_delay_2_halfcycles_ceil;
	info->some_delay_3_ps_rounded = some_delay_3_ps_rounded;
	if ((info->populated_ranks[0][0][0] && info->populated_ranks[0][1][0])
	    || (info->populated_ranks[1][0][0]
		&& info->populated_ranks[1][1][0]))
		info->max_slots_used_in_channel = 2;
	else
		info->max_slots_used_in_channel = 1;
	for (channel = 0; channel < 2; channel++)
		write_mchbar32(0x244 + (channel << 10),
			       ((info->revision < 8) ? 1 : 0x200)
			       | ((2 - info->max_slots_used_in_channel) << 17)
			       | (channel << 21)
			       | (info->some_delay_1_cycle_floor << 18)
			       | 0x9510);
	if (info->max_slots_used_in_channel == 1) {
		info->mode4030[0] = (count_ranks_in_channel(info, 0) == 2);
		info->mode4030[1] = (count_ranks_in_channel(info, 1) == 2);
	} else {
		info->mode4030[0] = ((count_ranks_in_channel(info, 0) == 1)
				     || (count_ranks_in_channel(info, 0) == 2)) ? 2 : 3;	/* 2 if 1 or 2 ranks */
		info->mode4030[1] = ((count_ranks_in_channel(info, 1) == 1)
				     || (count_ranks_in_channel(info, 1) == 2)) ? 2 : 3;
	}
	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		int max_of_unk;
		int min_of_unk_2;

		int i, count;
		int sum;

		if (!info->populated_ranks_mask[channel])
			continue;

		max_of_unk = 0;
		min_of_unk_2 = 32767;

		sum = 0;
		count = 0;
		for (i = 0; i < 3; i++) {
			int unk1;
			if (info->revision < 8)
				unk1 =
				    u8_FFFD1891[0][channel][info->clock_speed_index][i];
			else if (!(info->revision >= 0x10
				   || info->revision_flag_1))
				unk1 =
				    u8_FFFD1891[1][channel][info->clock_speed_index][i];
			else
				unk1 = 0;
			for (slot = 0; slot < NUM_SLOTS; slot++)
				for (rank = 0; rank < NUM_RANKS; rank++) {
					int a = 0;
					int b = 0;

					if (!info->populated_ranks[channel][slot][rank])
						continue;
					if (extended_silicon_revision == 4
					    && (info->populated_ranks_mask[channel] & 5) != 5) {
						if ((info->spd[channel][slot][REFERENCE_RAW_CARD_USED] & 0x1F) == 3) {
							a = u16_ffd1178[0][info->clock_speed_index];
							b = u16_fe0eb8[0][info->clock_speed_index];
						} else if ((info->spd[channel][slot][REFERENCE_RAW_CARD_USED] & 0x1F) == 5) {
							a = u16_ffd1178[1][info->clock_speed_index];
							b = u16_fe0eb8[1][info->clock_speed_index];
						}
					}
					min_of_unk_2 = min(min_of_unk_2, a);
					min_of_unk_2 = min(min_of_unk_2, b);
					if (rank == 0) {
						sum += a;
						count++;
					}
					{
						int t;
						t = b +
						    u8_FFFD0EF8[channel][extended_silicon_revision][info->mode4030[channel]][info->clock_speed_index];
						if (unk1 >= t)
							max_of_unk =
							    max(max_of_unk,
								unk1 - t);
					}
				}
			{
				int t =
				    u8_FFFD17E0[channel][extended_silicon_revision][info->mode4030[channel]][info->clock_speed_index]
				    + min_of_unk_2;
				if (unk1 >= t)
					max_of_unk = max(max_of_unk, unk1 - t);
			}
		}

		info->avg4044[channel] = sum / count;
		info->max4048[channel] = max_of_unk;
	}
}

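/*
 * jedec_read() appears to issue a single JEDEC mode-register-set command
 * by performing a dummy read: with the controller presumably in a
 * command pass-through mode, the register value is encoded in the low
 * address bits and the target rank in bits 28 and up. The swizzling
 * below implements the usual DDR3 rank-1 address mirroring when SPD
 * byte 63 requests it.
 */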
static void jedec_read(struct raminfo *info,
		       int channel, int slot, int rank,
		       int total_rank, u8 addr3, unsigned int value)
{
	/* Handle mirrored mapping. */
	if ((rank & 1) && (info->spd[channel][slot][RANK1_ADDRESS_MAPPING] & 1))
		addr3 =
		    (addr3 & 0xCF) | ((addr3 & 0x10) << 1) | ((addr3 >> 1) & 0x10);
	write_mchbar8(0x271, addr3 | (read_mchbar8(0x271) & 0xC1));
	write_mchbar8(0x671, addr3 | (read_mchbar8(0x671) & 0xC1));

	/* Handle mirrored mapping. */
	if ((rank & 1) && (info->spd[channel][slot][RANK1_ADDRESS_MAPPING] & 1))
		value =
		    (value & ~0x1f8) | ((value >> 1) & 0xa8) | ((value & 0xa8) << 1);

	read32p((value << 3) | (total_rank << 28));

	write_mchbar8(0x271, (read_mchbar8(0x271) & 0xC3) | 2);
	write_mchbar8(0x671, (read_mchbar8(0x671) & 0xC3) | 2);

	read32p(total_rank << 28);
}

enum {
	MR1_RZQ12 = 512,
	MR1_RZQ2 = 64,
	MR1_RZQ4 = 4,
	MR1_ODS34OHM = 2
};

enum {
	MR0_BT_INTERLEAVED = 8,
	MR0_DLL_RESET_ON = 256
};

enum {
	MR2_RTT_WR_DISABLED = 0,
	MR2_RZQ2 = 1 << 10
};

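/*
 * JEDEC initialization sequence: for every populated rank, program MR2
 * (dynamic ODT, CWL, self-refresh options), MR3, MR1 (Rtt_nom and output
 * drive strength) and finally MR0 (burst type, CAS latency, write
 * recovery, DLL reset). The addr3 values 0x28/0x38/0x18/6 passed to
 * jedec_read() presumably select the mode-register bank address bits.
 */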
static void jedec_init(struct raminfo *info)
{
	int write_recovery;
	int channel, slot, rank;
	int total_rank;
	int dll_on;
	int self_refresh_temperature;
	int auto_self_refresh;

	auto_self_refresh = 1;
	self_refresh_temperature = 1;
	if (info->board_lane_delay[3] <= 10) {
		if (info->board_lane_delay[3] <= 8)
			write_recovery = info->board_lane_delay[3] - 4;
		else
			write_recovery = 5;
	} else {
		write_recovery = 6;
	}
	FOR_POPULATED_RANKS {
		auto_self_refresh &=
		    (info->spd[channel][slot][THERMAL_AND_REFRESH] >> 2) & 1;
		self_refresh_temperature &=
		    info->spd[channel][slot][THERMAL_AND_REFRESH] & 1;
	}
	if (auto_self_refresh == 1)
		self_refresh_temperature = 0;

	dll_on = ((info->silicon_revision != 2 && info->silicon_revision != 3)
		  || (info->populated_ranks[0][0][0]
		      && info->populated_ranks[0][1][0])
		  || (info->populated_ranks[1][0][0]
		      && info->populated_ranks[1][1][0]));

	total_rank = 0;

	for (channel = NUM_CHANNELS - 1; channel >= 0; channel--) {
		int rtt, rtt_wr = MR2_RTT_WR_DISABLED;
		int rzq_reg58e;

		if (info->silicon_revision == 2 || info->silicon_revision == 3) {
			rzq_reg58e = 64;
			rtt = MR1_RZQ2;
			if (info->clock_speed_index != 0) {
				rzq_reg58e = 4;
				if (info->populated_ranks_mask[channel] == 3)
					rtt = MR1_RZQ4;
			}
		} else {
			if ((info->populated_ranks_mask[channel] & 5) == 5) {
				rtt = MR1_RZQ12;
				rzq_reg58e = 64;
				rtt_wr = MR2_RZQ2;
			} else {
				rzq_reg58e = 4;
				rtt = MR1_RZQ4;
			}
		}

		write_mchbar16(0x588 + (channel << 10), 0x0);
		write_mchbar16(0x58a + (channel << 10), 0x4);
		write_mchbar16(0x58c + (channel << 10), rtt | MR1_ODS34OHM);
		write_mchbar16(0x58e + (channel << 10), rzq_reg58e | 0x82);
		write_mchbar16(0x590 + (channel << 10), 0x1282);

		for (slot = 0; slot < NUM_SLOTS; slot++)
			for (rank = 0; rank < NUM_RANKS; rank++)
				if (info->populated_ranks[channel][slot][rank]) {
					jedec_read(info, channel, slot, rank,
						   total_rank, 0x28,
						   rtt_wr | (info->clock_speed_index << 3)
						   | (auto_self_refresh << 6)
						   | (self_refresh_temperature << 7));
					jedec_read(info, channel, slot, rank,
						   total_rank, 0x38, 0);
					jedec_read(info, channel, slot, rank,
						   total_rank, 0x18,
						   rtt | MR1_ODS34OHM);
					jedec_read(info, channel, slot, rank,
						   total_rank, 6,
						   (dll_on << 12) |
						   (write_recovery << 9)
						   | ((info->cas_latency - 4) << 4)
						   | MR0_BT_INTERLEAVED |
						   MR0_DLL_RESET_ON);
					total_rank++;
				}
	}
}

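/*
 * Program the per-rank size/attribute registers (the 0x200/0x208 blocks)
 * and the channel interleave split at 0x100/0x104. Rank size is derived
 * from SPD as 256 MiB << density, halved for x16 modules; in the
 * pre-JEDEC pass every populated rank is provisionally treated as 256 MiB.
 */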
static void program_modules_memory_map(struct raminfo *info, int pre_jedec)
{
	unsigned channel, slot, rank;
	unsigned int total_mb[2] = { 0, 0 };	/* total memory per channel in MB */
	unsigned int channel_0_non_interleaved;

	FOR_ALL_RANKS {
		if (info->populated_ranks[channel][slot][rank]) {
			total_mb[channel] +=
			    pre_jedec ? 256 : (256 << info->density[channel][slot]
					       >> info->is_x16_module[channel][slot]);
			write_mchbar8(0x208 + rank + 2 * slot + (channel << 10),
				      (pre_jedec ? (1 | ((1 + 1) << 1))
				       : (info->is_x16_module[channel][slot] |
					  ((info->density[channel][slot] + 1) << 1)))
				      | 0x80);
		}
		write_mchbar16(0x200 + (channel << 10) + 4 * slot + 2 * rank,
			       total_mb[channel] >> 6);
	}

	info->total_memory_mb = total_mb[0] + total_mb[1];

	info->interleaved_part_mb =
	    pre_jedec ? 0 : 2 * min(total_mb[0], total_mb[1]);
	info->non_interleaved_part_mb =
	    total_mb[0] + total_mb[1] - info->interleaved_part_mb;
	channel_0_non_interleaved = total_mb[0] - info->interleaved_part_mb / 2;
	write_mchbar32(0x100,
		       channel_0_non_interleaved | (info->non_interleaved_part_mb << 16));
	if (!pre_jedec)
		write_mchbar16(0x104, info->interleaved_part_mb);
}

static void program_board_delay(struct raminfo *info)
{
	int cas_latency_shift;
	int some_delay_ns;
	int some_delay_3_half_cycles;

	unsigned channel, i;
	int high_multiplier;
	int lane_3_delay;
	int cas_latency_derived;

	high_multiplier = 0;
	some_delay_ns = 200;
	some_delay_3_half_cycles = 4;
	cas_latency_shift = info->silicon_revision == 0
	    || info->silicon_revision == 1 ? 1 : 0;
	if (info->revision < 8) {
		some_delay_ns = 600;
		cas_latency_shift = 0;
	}
	{
		int speed_bit;
		speed_bit =
		    ((info->clock_speed_index > 1
		      || (info->silicon_revision != 2
			  && info->silicon_revision != 3))) ^ (info->revision >= 0x10);
		write_500(info, 0, speed_bit | ((!info->use_ecc) << 1), 0x60e,
			  3, 1);
		write_500(info, 1, speed_bit | ((!info->use_ecc) << 1), 0x60e,
			  3, 1);
		if (info->revision >= 0x10 && info->clock_speed_index <= 1
		    && (info->silicon_revision == 2
			|| info->silicon_revision == 3))
			rmw_1d0(0x116, 5, 2, 4, 1);
	}
	write_mchbar32(0x120,
		       (1 << (info->max_slots_used_in_channel + 28)) | 0x188e7f9f);

	write_mchbar8(0x124,
		      info->board_lane_delay[4] +
		      ((frequency_01(info) + 999) / 1000));
	write_mchbar16(0x125, 0x1360);
	write_mchbar8(0x127, 0x40);
	if (info->fsb_frequency < frequency_11(info) / 2) {
		unsigned some_delay_2_half_cycles;
		high_multiplier = 1;
		some_delay_2_half_cycles = ps_to_halfcycles(info,
							    ((3 * fsbcycle_ps(info)) >> 1)
							    + (halfcycle_ps(info) *
							       reg178_min[info->clock_speed_index] >> 6)
							    + 4 * halfcycle_ps(info)
							    + 2230);
		some_delay_3_half_cycles =
		    min((some_delay_2_half_cycles +
			 (frequency_11(info) * 2) * (28 - some_delay_2_half_cycles) /
			 (frequency_11(info) * 2 -
			  4 * (info->fsb_frequency))) >> 3, 7);
	}
	if (read_mchbar8(0x2ca9) & 1)
		some_delay_3_half_cycles = 3;
	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		write_mchbar32(0x220 + (channel << 10),
			       read_mchbar32(0x220 + (channel << 10)) | 0x18001117);
		write_mchbar32(0x224 + (channel << 10),
			       (info->max_slots_used_in_channel - 1)
			       | ((info->cas_latency - 5 -
				   info->clock_speed_index) << 21)
			       | ((info->max_slots_used_in_channel +
				   info->cas_latency - cas_latency_shift - 4) << 16)
			       | ((info->cas_latency - cas_latency_shift - 4) << 26)
			       | ((info->cas_latency - info->clock_speed_index +
				   info->max_slots_used_in_channel - 6) << 8));
		write_mchbar32(0x228 + (channel << 10),
			       info->max_slots_used_in_channel);
		write_mchbar8(0x239 + (channel << 10), 32);
		write_mchbar32(0x248 + (channel << 10),
			       (high_multiplier << 24) |
			       (some_delay_3_half_cycles << 25) | 0x840000);
		write_mchbar32(0x278 + (channel << 10), 0xc362042);
		write_mchbar32(0x27c + (channel << 10), 0x8b000062);
		write_mchbar32(0x24c + (channel << 10),
			       ((!!info->clock_speed_index) << 17)
			       | (((2 + info->clock_speed_index
				    - (!!info->clock_speed_index))) << 12)
			       | 0x10200);

		write_mchbar8(0x267 + (channel << 10), 0x4);
		write_mchbar16(0x272 + (channel << 10), 0x155);
		write_mchbar32(0x2bc + (channel << 10),
			       (read_mchbar32(0x2bc + (channel << 10)) & 0xFF000000)
			       | 0x707070);

		write_500(info, channel,
			  ((!info->populated_ranks[channel][1][1])
			   | (!info->populated_ranks[channel][1][0] << 1)
			   | (!info->populated_ranks[channel][0][1] << 2)
			   | (!info->populated_ranks[channel][0][0] << 3)),
			  0x4c9, 4, 1);
	}

	write_mchbar8(0x2c4, ((1 + (info->clock_speed_index != 0)) << 6) | 0xC);
	{
		u8 freq_divisor = 2;
		if (info->fsb_frequency == frequency_11(info))
			freq_divisor = 3;
		else if (2 * info->fsb_frequency < 3 * (frequency_11(info) / 2))
			freq_divisor = 1;
		else
			freq_divisor = 2;
		write_mchbar32(0x2c0, (freq_divisor << 11) | 0x6009c400);
	}

	if (info->board_lane_delay[3] <= 10) {
		if (info->board_lane_delay[3] <= 8)
			lane_3_delay = info->board_lane_delay[3];
		else
			lane_3_delay = 10;
	} else {
		lane_3_delay = 12;
	}
	cas_latency_derived = info->cas_latency - info->clock_speed_index + 2;
	if (info->clock_speed_index > 1)
		cas_latency_derived++;
	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		write_mchbar32(0x240 + (channel << 10),
			       ((info->clock_speed_index == 0) * 0x11000)
			       | 0x1002100
			       | ((2 + info->clock_speed_index) << 4)
			       | (info->cas_latency - 3));
		write_500(info, channel, (info->clock_speed_index << 1) | 1,
			  0x609, 6, 1);
		write_500(info, channel,
			  info->clock_speed_index + 2 * info->cas_latency - 7,
			  0x601, 6, 1);

		write_mchbar32(0x250 + (channel << 10),
			       ((lane_3_delay + info->clock_speed_index + 9) << 6)
			       | (info->board_lane_delay[7] << 2)
			       | (info->board_lane_delay[4] << 16)
			       | (info->board_lane_delay[1] << 25)
			       | (info->board_lane_delay[1] << 29)
			       | 1);
		write_mchbar32(0x254 + (channel << 10),
			       (info->board_lane_delay[1] >> 3)
			       | ((info->board_lane_delay[8] +
				   4 * info->use_ecc) << 6)
			       | 0x80
			       | (info->board_lane_delay[6] << 1)
			       | (info->board_lane_delay[2] << 28)
			       | (cas_latency_derived << 16) | 0x4700000);
		write_mchbar32(0x258 + (channel << 10),
			       ((info->board_lane_delay[5] +
				 info->clock_speed_index + 9) << 12)
			       | ((info->clock_speed_index -
				   info->cas_latency + 12) << 8)
			       | (info->board_lane_delay[2] << 17)
			       | (info->board_lane_delay[4] << 24)
			       | 0x47);
		write_mchbar32(0x25c + (channel << 10),
			       (info->board_lane_delay[1] << 1)
			       | (info->board_lane_delay[0] << 8)
			       | 0x1da50000);
		write_mchbar8(0x264 + (channel << 10), 0xff);
		write_mchbar8(0x5f8 + (channel << 10),
			      (cas_latency_shift << 3) | info->use_ecc);
	}

	program_modules_memory_map(info, 1);

	write_mchbar16(0x610,
		       (min(ns_to_cycles(info, some_delay_ns) / 2, 127) << 9)
		       | (read_mchbar16(0x610) & 0x1C3) | 0x3C);
	write_mchbar16(0x612, read_mchbar16(0x612) | 0x100);
	write_mchbar16(0x214, read_mchbar16(0x214) | 0x3E00);
	for (i = 0; i < 8; i++) {
		pci_write_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 0x80 + 4 * i,
				   (info->total_memory_mb - 64) | !i | 2);
		pci_write_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 0xc0 + 4 * i, 0);
	}
}

#define DEFAULT_PCI_MMIO_SIZE 2048
#define HOST_BRIDGE PCI_DEVFN(0, 0)

static unsigned int get_mmio_size(void)
{
	const struct device *dev;
	const struct northbridge_intel_nehalem_config *cfg = NULL;

	dev = dev_find_slot(0, HOST_BRIDGE);
	if (dev)
		cfg = dev->chip_info;

	/* If this is zero, it just means devicetree.cb didn't set it */
	if (!cfg || cfg->pci_mmio_size == 0)
		return DEFAULT_PCI_MMIO_SIZE;
	else
		return cfg->pci_mmio_size;
}

#define BETTER_MEMORY_MAP 0

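/*
 * Set up TOM/TOLUD/TOUUD, the remap window and the QuickPath decode
 * entries. As a purely hypothetical example (ignoring any QuickPath
 * reservation): with 4 GiB installed (TOM clamped to 4032 MiB), 16 MiB
 * reserved for the ME, the default 2 GiB MMIO window and 32 + 2 MiB of
 * IGD/GTT UMA, TOUUD starts at ALIGN_DOWN(4016, 64) = 3968 MiB, TOLUD
 * becomes 2112 MiB, and since more than 64 MiB would otherwise be lost
 * below 4 GiB the remainder is remapped above 4 GiB, moving TOUUD to
 * 3968 - 2112 + 4096 = 5952 MiB.
 */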
1480static void program_total_memory_map(struct raminfo *info)
1481{
1482 unsigned int TOM, TOLUD, TOUUD;
1483 unsigned int quickpath_reserved;
1484 unsigned int REMAPbase;
1485 unsigned int uma_base_igd;
1486 unsigned int uma_base_gtt;
Patrick Rudolph266a1f72016-06-09 18:13:34 +02001487 unsigned int mmio_size;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001488 int memory_remap;
1489 unsigned int memory_map[8];
1490 int i;
1491 unsigned int current_limit;
1492 unsigned int tseg_base;
1493 int uma_size_igd = 0, uma_size_gtt = 0;
1494
1495 memset(memory_map, 0, sizeof(memory_map));
1496
1497#if REAL
1498 if (info->uma_enabled) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001499 u16 t = pci_read_config16(NORTHBRIDGE, D0F0_GGC);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001500 gav(t);
1501 const int uma_sizes_gtt[16] =
1502 { 0, 1, 0, 2, 0, 0, 0, 0, 0, 2, 3, 4, 42, 42, 42, 42 };
1503 /* Igd memory */
1504 const int uma_sizes_igd[16] = {
1505 0, 0, 0, 0, 0, 32, 48, 64, 128, 256, 96, 160, 224, 352,
1506 256, 512
1507 };
1508
1509 uma_size_igd = uma_sizes_igd[(t >> 4) & 0xF];
1510 uma_size_gtt = uma_sizes_gtt[(t >> 8) & 0xF];
1511 }
1512#endif
1513
Patrick Rudolph266a1f72016-06-09 18:13:34 +02001514 mmio_size = get_mmio_size();
1515
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001516 TOM = info->total_memory_mb;
1517 if (TOM == 4096)
1518 TOM = 4032;
1519 TOUUD = ALIGN_DOWN(TOM - info->memory_reserved_for_heci_mb, 64);
Patrick Rudolph266a1f72016-06-09 18:13:34 +02001520 TOLUD = ALIGN_DOWN(min(4096 - mmio_size + ALIGN_UP(uma_size_igd + uma_size_gtt, 64)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001521 , TOUUD), 64);
1522 memory_remap = 0;
1523 if (TOUUD - TOLUD > 64) {
1524 memory_remap = 1;
1525 REMAPbase = max(4096, TOUUD);
1526 TOUUD = TOUUD - TOLUD + 4096;
1527 }
1528 if (TOUUD > 4096)
1529 memory_map[2] = TOUUD | 1;
1530 quickpath_reserved = 0;
1531
1532 {
1533 u32 t;
1534
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001535 gav(t = pci_read_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 0x68));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001536 if (t & 0x800)
1537 quickpath_reserved =
1538 (1 << find_lowest_bit_set32(t >> 20));
1539 }
1540 if (memory_remap)
1541 TOUUD -= quickpath_reserved;
1542
1543#if !REAL
1544 if (info->uma_enabled) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001545 u16 t = pci_read_config16(NORTHBRIDGE, D0F0_GGC);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001546 gav(t);
1547 const int uma_sizes_gtt[16] =
1548 { 0, 1, 0, 2, 0, 0, 0, 0, 0, 2, 3, 4, 42, 42, 42, 42 };
1549 /* Igd memory */
1550 const int uma_sizes_igd[16] = {
1551 0, 0, 0, 0, 0, 32, 48, 64, 128, 256, 96, 160, 224, 352,
1552 256, 512
1553 };
1554
1555 uma_size_igd = uma_sizes_igd[(t >> 4) & 0xF];
1556 uma_size_gtt = uma_sizes_gtt[(t >> 8) & 0xF];
1557 }
1558#endif
1559
1560 uma_base_igd = TOLUD - uma_size_igd;
1561 uma_base_gtt = uma_base_igd - uma_size_gtt;
1562 tseg_base = ALIGN_DOWN(uma_base_gtt, 64) - (CONFIG_SMM_TSEG_SIZE >> 20);
1563 if (!memory_remap)
1564 tseg_base -= quickpath_reserved;
1565 tseg_base = ALIGN_DOWN(tseg_base, 8);
1566
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001567 pci_write_config16(NORTHBRIDGE, D0F0_TOLUD, TOLUD << 4);
1568 pci_write_config16(NORTHBRIDGE, D0F0_TOM, TOM >> 6);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001569 if (memory_remap) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001570 pci_write_config16(NORTHBRIDGE, D0F0_REMAPBASE, REMAPbase >> 6);
1571 pci_write_config16(NORTHBRIDGE, D0F0_REMAPLIMIT, (TOUUD - 64) >> 6);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001572 }
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001573 pci_write_config16(NORTHBRIDGE, D0F0_TOUUD, TOUUD);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001574
1575 if (info->uma_enabled) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001576 pci_write_config32(NORTHBRIDGE, D0F0_IGD_BASE, uma_base_igd << 20);
1577 pci_write_config32(NORTHBRIDGE, D0F0_GTT_BASE, uma_base_gtt << 20);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001578 }
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001579 pci_write_config32(NORTHBRIDGE, TSEG, tseg_base << 20);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001580
1581 current_limit = 0;
1582 memory_map[0] = ALIGN_DOWN(uma_base_gtt, 64) | 1;
1583 memory_map[1] = 4096;
1584 for (i = 0; i < ARRAY_SIZE(memory_map); i++) {
1585 current_limit = max(current_limit, memory_map[i] & ~1);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001586 pci_write_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 4 * i + 0x80,
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001587 (memory_map[i] & 1) | ALIGN_DOWN(current_limit -
1588 1, 64) | 2);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001589 pci_write_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 4 * i + 0xc0, 0);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001590 }
1591}
1592
1593static void collect_system_info(struct raminfo *info)
1594{
1595 u32 capid0[3];
1596 int i;
1597 unsigned channel;
1598
1599 /* Wait for some bit, maybe TXT clear. */
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02001600 while (!(read8((u8 *)0xfed40000) & (1 << 7)));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001601
1602 if (!info->heci_bar)
1603 gav(info->heci_bar =
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001604 pci_read_config32(HECIDEV, HECIBAR) & 0xFFFFFFF8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001605 if (!info->memory_reserved_for_heci_mb) {
1606 /* Wait for ME to be ready */
1607 intel_early_me_init();
1608 info->memory_reserved_for_heci_mb = intel_early_me_uma_size();
1609 }
1610
1611 for (i = 0; i < 3; i++)
1612 gav(capid0[i] =
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001613 pci_read_config32(NORTHBRIDGE, D0F0_CAPID0 | (i << 2)));
1614 gav(info->revision = pci_read_config8(NORTHBRIDGE, PCI_REVISION_ID));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001615 info->max_supported_clock_speed_index = (~capid0[1] & 7);
1616
1617 if ((capid0[1] >> 11) & 1)
1618 info->uma_enabled = 0;
1619 else
1620 gav(info->uma_enabled =
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001621 pci_read_config8(NORTHBRIDGE, D0F0_DEVEN) & 8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001622 /* Unrecognised: [0000:fffd3d2d] 37f81.37f82 ! CPUID: eax: 00000001; ecx: 00000e00 => 00020655.00010800.029ae3ff.bfebfbff */
1623 info->silicon_revision = 0;
1624
1625 if (capid0[2] & 2) {
1626 info->silicon_revision = 0;
1627 info->max_supported_clock_speed_index = 2;
1628 for (channel = 0; channel < NUM_CHANNELS; channel++)
1629 if (info->populated_ranks[channel][0][0]
1630 && (info->spd[channel][0][MODULE_TYPE] & 0xf) ==
1631 3) {
1632 info->silicon_revision = 2;
1633 info->max_supported_clock_speed_index = 1;
1634 }
1635 } else {
1636 switch (((capid0[2] >> 18) & 1) + 2 * ((capid0[1] >> 3) & 1)) {
1637 case 1:
1638 case 2:
1639 info->silicon_revision = 3;
1640 break;
1641 case 3:
1642 info->silicon_revision = 0;
1643 break;
1644 case 0:
1645 info->silicon_revision = 2;
1646 break;
1647 }
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001648 switch (pci_read_config16(NORTHBRIDGE, PCI_DEVICE_ID)) {
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001649 case 0x40:
1650 info->silicon_revision = 0;
1651 break;
1652 case 0x48:
1653 info->silicon_revision = 1;
1654 break;
1655 }
1656 }
1657}
1658
1659static void write_training_data(struct raminfo *info)
1660{
1661 int tm, channel, slot, rank, lane;
1662 if (info->revision < 8)
1663 return;
1664
1665 for (tm = 0; tm < 4; tm++)
1666 for (channel = 0; channel < NUM_CHANNELS; channel++)
1667 for (slot = 0; slot < NUM_SLOTS; slot++)
1668 for (rank = 0; rank < NUM_RANKS; rank++)
1669 for (lane = 0; lane < 9; lane++)
1670 write_500(info, channel,
1671 info->
1672 cached_training->
1673 lane_timings[tm]
1674 [channel][slot][rank]
1675 [lane],
1676 get_timing_register_addr
1677 (lane, tm, slot,
1678 rank), 9, 0);
1679 write_1d0(info->cached_training->reg_178, 0x178, 7, 1);
1680 write_1d0(info->cached_training->reg_10b, 0x10b, 6, 1);
1681}
1682
1683static void dump_timings(struct raminfo *info)
1684{
1685#if REAL
1686 int channel, slot, rank, lane, i;
1687 printk(BIOS_DEBUG, "Timings:\n");
1688 FOR_POPULATED_RANKS {
1689 printk(BIOS_DEBUG, "channel %d, slot %d, rank %d\n", channel,
1690 slot, rank);
1691 for (lane = 0; lane < 9; lane++) {
1692 printk(BIOS_DEBUG, "lane %d: ", lane);
1693 for (i = 0; i < 4; i++) {
1694 printk(BIOS_DEBUG, "%x (%x) ",
1695 read_500(info, channel,
1696 get_timing_register_addr
1697 (lane, i, slot, rank),
1698 9),
1699 info->training.
1700 lane_timings[i][channel][slot][rank]
1701 [lane]);
1702 }
1703 printk(BIOS_DEBUG, "\n");
1704 }
1705 }
1706 printk(BIOS_DEBUG, "[178] = %x (%x)\n", read_1d0(0x178, 7),
1707 info->training.reg_178);
1708 printk(BIOS_DEBUG, "[10b] = %x (%x)\n", read_1d0(0x10b, 6),
1709 info->training.reg_10b);
1710#endif
1711}
1712
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01001713/* Read timings and other registers that need to be restored verbatim and
1714 put them to CBMEM.
1715 */
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001716static void save_timings(struct raminfo *info)
1717{
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001718 struct ram_training train;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001719 int channel, slot, rank, lane, i;
1720
1721 train = info->training;
1722 FOR_POPULATED_RANKS for (lane = 0; lane < 9; lane++)
1723 for (i = 0; i < 4; i++)
1724 train.lane_timings[i][channel][slot][rank][lane] =
1725 read_500(info, channel,
1726 get_timing_register_addr(lane, i, slot,
1727 rank), 9);
1728 train.reg_178 = read_1d0(0x178, 7);
1729 train.reg_10b = read_1d0(0x10b, 6);
1730
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01001731 for (channel = 0; channel < NUM_CHANNELS; channel++) {
1732 u32 reg32;
1733 reg32 = read_mchbar32 ((channel << 10) + 0x274);
1734 train.reg274265[channel][0] = reg32 >> 16;
1735 train.reg274265[channel][1] = reg32 & 0xffff;
1736 train.reg274265[channel][2] = read_mchbar16 ((channel << 10) + 0x265) >> 8;
1737 }
1738 train.reg2ca9_bit0 = read_mchbar8(0x2ca9) & 1;
1739 train.reg_6dc = read_mchbar32 (0x6dc);
1740 train.reg_6e8 = read_mchbar32 (0x6e8);
1741
1742 printk (BIOS_SPEW, "[6dc] = %x\n", train.reg_6dc);
1743 printk (BIOS_SPEW, "[6e8] = %x\n", train.reg_6e8);
1744
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001745 /* Save the MRC S3 restore data to cbmem */
Arthur Heymansdc71e252018-01-29 10:14:48 +01001746 mrc_cache_stash_data(MRC_TRAINING_DATA, MRC_CACHE_VERSION,
1747 &train, sizeof(train));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001748}
1749
1750#if REAL
1751static const struct ram_training *get_cached_training(void)
1752{
Arthur Heymansdc71e252018-01-29 10:14:48 +01001753 struct region_device rdev;
1754 if (mrc_cache_get_current(MRC_TRAINING_DATA, MRC_CACHE_VERSION,
1755 &rdev))
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001756 return 0;
Arthur Heymansdc71e252018-01-29 10:14:48 +01001757 return (void *)rdev_mmap_full(&rdev);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001758}
1759#endif
1760
1761/* FIXME: add timeout. */
1762static void wait_heci_ready(void)
1763{
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02001764 while (!(read32(DEFAULT_HECIBAR + 0xc) & 8)); // = 0x8000000c
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001765 write32((DEFAULT_HECIBAR + 0x4),
1766 (read32(DEFAULT_HECIBAR + 0x4) & ~0x10) | 0xc);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001767}
1768
1769/* FIXME: add timeout. */
1770static void wait_heci_cb_avail(int len)
1771{
1772 union {
1773 struct mei_csr csr;
1774 u32 raw;
1775 } csr;
1776
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02001777 while (!(read32(DEFAULT_HECIBAR + 0xc) & 8));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001778
1779 do
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001780 csr.raw = read32(DEFAULT_HECIBAR + 0x4);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001781 while (len >
1782 csr.csr.buffer_depth - (csr.csr.buffer_write_ptr -
1783 csr.csr.buffer_read_ptr));
1784}
1785
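/* Send a single HECI packet: wait for enough free slots in the host
 * circular buffer, write the MEI header followed by the payload dwords
 * through the write window at offset 0, then set what is presumably the
 * interrupt-generate bit in the host CSR to hand the packet to the ME. */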
1786static void send_heci_packet(struct mei_header *head, u32 * payload)
1787{
1788 int len = (head->length + 3) / 4;
1789 int i;
1790
1791 wait_heci_cb_avail(len + 1);
1792
1793 /* FIXME: handle leftovers correctly. */
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001794 write32(DEFAULT_HECIBAR + 0, *(u32 *) head);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001795 for (i = 0; i < len - 1; i++)
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001796 write32(DEFAULT_HECIBAR + 0, payload[i]);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001797
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001798 write32(DEFAULT_HECIBAR + 0, payload[i] & ((1 << (8 * len)) - 1));
1799 write32(DEFAULT_HECIBAR + 0x4, read32(DEFAULT_HECIBAR + 0x4) | 0x4);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001800}
1801
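/* Split a message into packets that fit into the host circular buffer
 * (its depth is taken from bits 31:24 of the host CSR); only the last
 * fragment is marked is_complete. */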
1802static void
1803send_heci_message(u8 * msg, int len, u8 hostaddress, u8 clientaddress)
1804{
1805 struct mei_header head;
1806 int maxlen;
1807
1808 wait_heci_ready();
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001809 maxlen = (read32(DEFAULT_HECIBAR + 0x4) >> 24) * 4 - 4;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001810
1811 while (len) {
1812 int cur = len;
1813 if (cur > maxlen) {
1814 cur = maxlen;
1815 head.is_complete = 0;
1816 } else
1817 head.is_complete = 1;
1818 head.length = cur;
1819 head.reserved = 0;
1820 head.client_address = clientaddress;
1821 head.host_address = hostaddress;
1822 send_heci_packet(&head, (u32 *) msg);
1823 len -= cur;
1824 msg += cur;
1825 }
1826}
1827
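/* Receive one HECI packet: wait until the ME circular buffer is non-empty,
 * read the MEI header from the read window at offset 0x8, sanity-check its
 * length against the buffer depth and the caller's buffer, then drain the
 * payload dwords and acknowledge via the host CSR. */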
1828/* FIXME: Add timeout. */
1829static int
1830recv_heci_packet(struct raminfo *info, struct mei_header *head, u32 * packet,
1831 u32 * packet_size)
1832{
1833 union {
1834 struct mei_csr csr;
1835 u32 raw;
1836 } csr;
1837 int i = 0;
1838
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001839 write32(DEFAULT_HECIBAR + 0x4, read32(DEFAULT_HECIBAR + 0x4) | 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001840 do {
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001841 csr.raw = read32(DEFAULT_HECIBAR + 0xc);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001842#if !REAL
1843 if (i++ > 346)
1844 return -1;
1845#endif
1846 }
1847 while (csr.csr.buffer_write_ptr == csr.csr.buffer_read_ptr);
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001848 *(u32 *) head = read32(DEFAULT_HECIBAR + 0x8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001849 if (!head->length) {
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001850 write32(DEFAULT_HECIBAR + 0x4,
1851 read32(DEFAULT_HECIBAR + 0x4) | 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001852 *packet_size = 0;
1853 return 0;
1854 }
1855 if (head->length + 4 > 4 * csr.csr.buffer_depth
1856 || head->length > *packet_size) {
1857 *packet_size = 0;
1858 return -1;
1859 }
1860
1861 do
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001862 csr.raw = read32(DEFAULT_HECIBAR + 0xc);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001863 while ((head->length + 3) >> 2 >
1864 csr.csr.buffer_write_ptr - csr.csr.buffer_read_ptr);
1865
1866 for (i = 0; i < (head->length + 3) >> 2; i++)
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001867		packet[i] = read32(DEFAULT_HECIBAR + 0x8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001868 *packet_size = head->length;
1869 if (!csr.csr.ready)
1870 *packet_size = 0;
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001871 write32(DEFAULT_HECIBAR + 0x4, read32(DEFAULT_HECIBAR + 0x4) | 4);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001872 return 0;
1873}
1874
1875/* FIXME: Add timeout. */
1876static int
1877recv_heci_message(struct raminfo *info, u32 * message, u32 * message_size)
1878{
1879 struct mei_header head;
1880 int current_position;
1881
1882 current_position = 0;
1883 while (1) {
1884 u32 current_size;
1885 current_size = *message_size - current_position;
1886 if (recv_heci_packet
1887 (info, &head, message + (current_position >> 2),
1888 &current_size) == -1)
1889 break;
1890 if (!current_size)
1891 break;
1892 current_position += current_size;
1893 if (head.is_complete) {
1894 *message_size = current_position;
1895 return 0;
1896 }
1897
1898 if (current_position >= *message_size)
1899 break;
1900 }
1901 *message_size = 0;
1902 return -1;
1903}
1904
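/* Tell the ME where its UMA memory lives: send an MKHI SET_UMA request
 * with the base address and size in MiB, then check that the reply
 * carries the response bit (bit 7) for the same command. */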
1905static void send_heci_uma_message(struct raminfo *info)
1906{
1907 struct uma_reply {
1908 u8 group_id;
1909 u8 command;
1910 u8 reserved;
1911 u8 result;
1912 u8 field2;
1913 u8 unk3[0x48 - 4 - 1];
Stefan Reinauer6a001132017-07-13 02:20:27 +02001914 } __packed reply;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001915 struct uma_message {
1916 u8 group_id;
1917 u8 cmd;
1918 u8 reserved;
1919 u8 result;
1920 u32 c2;
1921 u64 heci_uma_addr;
1922 u32 memory_reserved_for_heci_mb;
1923 u16 c3;
Stefan Reinauer6a001132017-07-13 02:20:27 +02001924 } __packed msg = {
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001925 0, MKHI_SET_UMA, 0, 0,
1926 0x82,
1927 info->heci_uma_addr, info->memory_reserved_for_heci_mb, 0};
1928 u32 reply_size;
1929
1930 send_heci_message((u8 *) & msg, sizeof(msg), 0, 7);
1931
1932 reply_size = sizeof(reply);
1933 if (recv_heci_message(info, (u32 *) & reply, &reply_size) == -1)
1934 return;
1935
1936 if (reply.command != (MKHI_SET_UMA | (1 << 7)))
1937 die("HECI init failed\n");
1938}
1939
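/* Reserve UMA memory for the ME: the amount in MiB comes from register
 * 0x44 of the HECI PCI device and is carved out just below TOM. The
 * DMI/RCBA link registers are (apparently) quiesced while the SET_UMA
 * message is sent, after which the HECI device is disabled by clearing
 * its BAR and PCI command register. */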
1940static void setup_heci_uma(struct raminfo *info)
1941{
1942 u32 reg44;
1943
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001944 reg44 = pci_read_config32(HECIDEV, 0x44); // = 0x80010020
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001945 info->memory_reserved_for_heci_mb = 0;
1946 info->heci_uma_addr = 0;
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001947 if (!((reg44 & 0x10000) && !(pci_read_config32(HECIDEV, 0x40) & 0x20)))
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001948 return;
1949
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001950 info->heci_bar = pci_read_config32(HECIDEV, 0x10) & 0xFFFFFFF0;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001951 info->memory_reserved_for_heci_mb = reg44 & 0x3f;
1952 info->heci_uma_addr =
1953 ((u64)
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001954 ((((u64) pci_read_config16(NORTHBRIDGE, D0F0_TOM)) << 6) -
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001955 info->memory_reserved_for_heci_mb)) << 20;
1956
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001957 pci_read_config32(NORTHBRIDGE, DMIBAR);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001958 if (info->memory_reserved_for_heci_mb) {
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001959 write32(DEFAULT_DMIBAR + 0x14,
1960 read32(DEFAULT_DMIBAR + 0x14) & ~0x80);
1961 write32(DEFAULT_RCBA + 0x14,
1962 read32(DEFAULT_RCBA + 0x14) & ~0x80);
1963 write32(DEFAULT_DMIBAR + 0x20,
1964 read32(DEFAULT_DMIBAR + 0x20) & ~0x80);
1965 write32(DEFAULT_RCBA + 0x20,
1966 read32(DEFAULT_RCBA + 0x20) & ~0x80);
1967 write32(DEFAULT_DMIBAR + 0x2c,
1968 read32(DEFAULT_DMIBAR + 0x2c) & ~0x80);
1969 write32(DEFAULT_RCBA + 0x30,
1970 read32(DEFAULT_RCBA + 0x30) & ~0x80);
1971 write32(DEFAULT_DMIBAR + 0x38,
1972 read32(DEFAULT_DMIBAR + 0x38) & ~0x80);
1973 write32(DEFAULT_RCBA + 0x40,
1974 read32(DEFAULT_RCBA + 0x40) & ~0x80);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001975
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001976 write32(DEFAULT_RCBA + 0x40, 0x87000080); // OK
1977 write32(DEFAULT_DMIBAR + 0x38, 0x87000080); // OK
1978 while (read16(DEFAULT_RCBA + 0x46) & 2
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02001979 && read16(DEFAULT_DMIBAR + 0x3e) & 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001980 }
1981
1982 write_mchbar32(0x24, 0x10000 + info->memory_reserved_for_heci_mb);
1983
1984 send_heci_uma_message(info);
1985
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001986 pci_write_config32(HECIDEV, 0x10, 0x0);
1987 pci_write_config8(HECIDEV, 0x4, 0x0);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001988
1989}
1990
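/* Check whether a channel is compatible with the given rank count: an
 * empty channel always matches; otherwise the populated rank count must
 * equal 'ranks', and when both slots are populated identically the two
 * modules must also agree in width (x16) and density. */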
1991static int have_match_ranks(struct raminfo *info, int channel, int ranks)
1992{
1993 int ranks_in_channel;
1994 ranks_in_channel = info->populated_ranks[channel][0][0]
1995 + info->populated_ranks[channel][0][1]
1996 + info->populated_ranks[channel][1][0]
1997 + info->populated_ranks[channel][1][1];
1998
1999 /* empty channel */
2000 if (ranks_in_channel == 0)
2001 return 1;
2002
2003 if (ranks_in_channel != ranks)
2004 return 0;
2005 /* single slot */
2006 if (info->populated_ranks[channel][0][0] !=
2007 info->populated_ranks[channel][1][0])
2008 return 1;
2009 if (info->populated_ranks[channel][0][1] !=
2010 info->populated_ranks[channel][1][1])
2011 return 1;
2012 if (info->is_x16_module[channel][0] != info->is_x16_module[channel][1])
2013 return 0;
2014 if (info->density[channel][0] != info->density[channel][1])
2015 return 0;
2016 return 1;
2017}
2018
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002019static void read_4090(struct raminfo *info)
2020{
2021 int i, channel, slot, rank, lane;
2022 for (i = 0; i < 2; i++)
2023 for (slot = 0; slot < NUM_SLOTS; slot++)
2024 for (rank = 0; rank < NUM_RANKS; rank++)
2025 for (lane = 0; lane < 9; lane++)
2026 info->training.
2027 lane_timings[0][i][slot][rank][lane]
2028 = 32;
2029
2030 for (i = 1; i < 4; i++)
2031 for (channel = 0; channel < NUM_CHANNELS; channel++)
2032 for (slot = 0; slot < NUM_SLOTS; slot++)
2033 for (rank = 0; rank < NUM_RANKS; rank++)
2034 for (lane = 0; lane < 9; lane++) {
2035 info->training.
2036 lane_timings[i][channel]
2037 [slot][rank][lane] =
2038 read_500(info, channel,
2039 get_timing_register_addr
2040 (lane, i, slot,
2041 rank), 9)
2042							    + (i == 1) * 11; /* type 1 apparently needs a +11 offset */
2043 }
2044
2045}
2046
2047static u32 get_etalon2(int flip, u32 addr)
2048{
2049 const u16 invmask[] = {
2050 0xaaaa, 0x6db6, 0x4924, 0xeeee, 0xcccc, 0x8888, 0x7bde, 0x739c,
2051 0x6318, 0x4210, 0xefbe, 0xcf3c, 0x8e38, 0x0c30, 0x0820
2052 };
2053 u32 ret;
2054 u32 comp4 = addr / 480;
2055 addr %= 480;
2056 u32 comp1 = addr & 0xf;
2057 u32 comp2 = (addr >> 4) & 1;
2058 u32 comp3 = addr >> 5;
2059
2060 if (comp4)
2061 ret = 0x1010101 << (comp4 - 1);
2062 else
2063 ret = 0;
2064 if (flip ^ (((invmask[comp3] >> comp1) ^ comp2) & 1))
2065 ret = ~ret;
2066
2067 return ret;
2068}
2069
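/* The memory tests below temporarily map the window under test with
 * variable MTRR 3 as write-protect (reads are cached, writes go straight
 * to DRAM); disable_cache() clears that MTRR again and flush_cache()
 * evicts the test window afterwards. */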
2070static void disable_cache(void)
2071{
2072 msr_t msr = {.lo = 0, .hi = 0 };
2073
Alexandru Gagniuc86091f92015-09-30 20:23:09 -07002074 wrmsr(MTRR_PHYS_BASE(3), msr);
2075 wrmsr(MTRR_PHYS_MASK(3), msr);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002076}
2077
2078static void enable_cache(unsigned int base, unsigned int size)
2079{
2080 msr_t msr;
2081 msr.lo = base | MTRR_TYPE_WRPROT;
2082 msr.hi = 0;
Alexandru Gagniuc86091f92015-09-30 20:23:09 -07002083 wrmsr(MTRR_PHYS_BASE(3), msr);
2084 msr.lo = ((~(ALIGN_DOWN(size + 4096, 4096) - 1) | MTRR_DEF_TYPE_EN)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002085 & 0xffffffff);
2086 msr.hi = 0x0000000f;
Alexandru Gagniuc86091f92015-09-30 20:23:09 -07002087 wrmsr(MTRR_PHYS_MASK(3), msr);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002088}
2089
2090static void flush_cache(u32 start, u32 size)
2091{
2092 u32 end;
2093 u32 addr;
2094
2095 end = start + (ALIGN_DOWN(size + 4096, 4096));
2096 for (addr = start; addr < end; addr += 64)
2097 clflush(addr);
2098}
2099
2100static void clear_errors(void)
2101{
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03002102 pci_write_config8(NORTHBRIDGE, 0xc0, 0x01);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002103}
2104
2105static void write_testing(struct raminfo *info, int totalrank, int flip)
2106{
2107 int nwrites = 0;
2108 /* in 8-byte units. */
2109 u32 offset;
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002110 u8 *base;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002111
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002112 base = (u8 *)(totalrank << 28);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002113 for (offset = 0; offset < 9 * 480; offset += 2) {
2114 write32(base + offset * 8, get_etalon2(flip, offset));
2115 write32(base + offset * 8 + 4, get_etalon2(flip, offset));
2116 write32(base + offset * 8 + 8, get_etalon2(flip, offset + 1));
2117 write32(base + offset * 8 + 12, get_etalon2(flip, offset + 1));
2118 nwrites += 4;
2119 if (nwrites >= 320) {
2120 clear_errors();
2121 nwrites = 0;
2122 }
2123 }
2124}
2125
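/* Read the pattern written by write_testing() back in 128-bit chunks,
 * XOR it against the expected etalon values and set a bit in the returned
 * failmask for every byte lane that saw at least one mismatch. */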
2126static u8 check_testing(struct raminfo *info, u8 total_rank, int flip)
2127{
2128 u8 failmask = 0;
2129 int i;
2130 int comp1, comp2, comp3;
2131 u32 failxor[2] = { 0, 0 };
2132
2133 enable_cache((total_rank << 28), 1728 * 5 * 4);
2134
2135 for (comp3 = 0; comp3 < 9 && failmask != 0xff; comp3++) {
2136 for (comp1 = 0; comp1 < 4; comp1++)
2137 for (comp2 = 0; comp2 < 60; comp2++) {
2138 u32 re[4];
2139 u32 curroffset =
2140 comp3 * 8 * 60 + 2 * comp1 + 8 * comp2;
2141 read128((total_rank << 28) | (curroffset << 3),
2142 (u64 *) re);
2143 failxor[0] |=
2144 get_etalon2(flip, curroffset) ^ re[0];
2145 failxor[1] |=
2146 get_etalon2(flip, curroffset) ^ re[1];
2147 failxor[0] |=
2148 get_etalon2(flip, curroffset | 1) ^ re[2];
2149 failxor[1] |=
2150 get_etalon2(flip, curroffset | 1) ^ re[3];
2151 }
2152 for (i = 0; i < 8; i++)
2153 if ((0xff << (8 * (i % 4))) & failxor[i / 4])
2154 failmask |= 1 << i;
2155 }
2156 disable_cache();
2157 flush_cache((total_rank << 28), 1728 * 5 * 4);
2158 return failmask;
2159}
2160
2161const u32 seed1[0x18] = {
2162 0x3a9d5ab5, 0x576cb65b, 0x555773b6, 0x2ab772ee,
2163 0x555556ee, 0x3a9d5ab5, 0x576cb65b, 0x555773b6,
2164 0x2ab772ee, 0x555556ee, 0x5155a555, 0x5155a555,
2165 0x5155a555, 0x5155a555, 0x3a9d5ab5, 0x576cb65b,
2166 0x555773b6, 0x2ab772ee, 0x555556ee, 0x55d6b4a5,
2167 0x366d6b3a, 0x2ae5ddbb, 0x3b9ddbb7, 0x55d6b4a5,
2168};
2169
2170static u32 get_seed2(int a, int b)
2171{
2172 const u32 seed2[5] = {
2173 0x55555555, 0x33333333, 0x2e555a55, 0x55555555,
2174 0x5b6db6db,
2175 };
2176 u32 r;
2177 r = seed2[(a + (a >= 10)) / 5];
2178 return b ? ~r : r;
2179}
2180
2181static int make_shift(int comp2, int comp5, int x)
2182{
2183 const u8 seed3[32] = {
2184 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2185 0x00, 0x00, 0x38, 0x1c, 0x3c, 0x18, 0x38, 0x38,
2186 0x38, 0x38, 0x38, 0x38, 0x0f, 0x0f, 0x0f, 0x0f,
2187 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f,
2188 };
2189
2190 return (comp2 - ((seed3[comp5] >> (x & 7)) & 1)) & 0x1f;
2191}
2192
2193static u32 get_etalon(int flip, u32 addr)
2194{
2195 u32 mask_byte = 0;
2196 int comp1 = (addr >> 1) & 1;
2197 int comp2 = (addr >> 3) & 0x1f;
2198 int comp3 = (addr >> 8) & 0xf;
2199 int comp4 = (addr >> 12) & 0xf;
2200 int comp5 = (addr >> 16) & 0x1f;
2201 u32 mask_bit = ~(0x10001 << comp3);
2202 u32 part1;
2203 u32 part2;
2204 int byte;
2205
2206 part2 =
2207 ((seed1[comp5] >>
2208 make_shift(comp2, comp5,
2209 (comp3 >> 3) | (comp1 << 2) | 2)) & 1) ^ flip;
2210 part1 =
2211 ((seed1[comp5] >>
2212 make_shift(comp2, comp5,
2213 (comp3 >> 3) | (comp1 << 2) | 0)) & 1) ^ flip;
2214
2215 for (byte = 0; byte < 4; byte++)
2216 if ((get_seed2(comp5, comp4) >>
2217 make_shift(comp2, comp5, (byte | (comp1 << 2)))) & 1)
2218 mask_byte |= 0xff << (8 * byte);
2219
2220 return (mask_bit & mask_byte) | (part1 << comp3) | (part2 <<
2221 (comp3 + 16));
2222}
2223
2224static void
2225write_testing_type2(struct raminfo *info, u8 totalrank, u8 region, u8 block,
2226 char flip)
2227{
2228 int i;
2229 for (i = 0; i < 2048; i++)
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002230 write32p((totalrank << 28) | (region << 25) | (block << 16) |
2231 (i << 2), get_etalon(flip, (block << 16) | (i << 2)));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002232}
2233
2234static u8
2235check_testing_type2(struct raminfo *info, u8 totalrank, u8 region, u8 block,
2236 char flip)
2237{
2238 u8 failmask = 0;
2239 u32 failxor[2];
2240 int i;
2241 int comp1, comp2, comp3;
2242
2243 failxor[0] = 0;
2244 failxor[1] = 0;
2245
2246 enable_cache(totalrank << 28, 134217728);
2247 for (comp3 = 0; comp3 < 2 && failmask != 0xff; comp3++) {
2248 for (comp1 = 0; comp1 < 16; comp1++)
2249 for (comp2 = 0; comp2 < 64; comp2++) {
2250 u32 addr =
2251 (totalrank << 28) | (region << 25) | (block
2252 << 16)
2253 | (comp3 << 12) | (comp2 << 6) | (comp1 <<
2254 2);
2255 failxor[comp1 & 1] |=
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002256 read32p(addr) ^ get_etalon(flip, addr);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002257 }
2258 for (i = 0; i < 8; i++)
2259 if ((0xff << (8 * (i % 4))) & failxor[i / 4])
2260 failmask |= 1 << i;
2261 }
2262 disable_cache();
2263 flush_cache((totalrank << 28) | (region << 25) | (block << 16), 16384);
2264 return failmask;
2265}
2266
2267static int check_bounded(unsigned short *vals, u16 bound)
2268{
2269 int i;
2270
2271 for (i = 0; i < 8; i++)
2272 if (vals[i] < bound)
2273 return 0;
2274 return 1;
2275}
2276
2277enum state {
2278 BEFORE_USABLE = 0, AT_USABLE = 1, AT_MARGIN = 2, COMPLETE = 3
2279};
2280
2281static int validate_state(enum state *in)
2282{
2283 int i;
2284 for (i = 0; i < 8; i++)
2285 if (in[i] != COMPLETE)
2286 return 0;
2287 return 1;
2288}
2289
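/* Per-lane state machine used by the timing sweeps: as 'val' increases,
 * each lane waits for at least 'margin' consecutive passes (recording the
 * start of the window in res_low), then keeps going until the first
 * failure or until 'uplimit' is reached, which ends the window in
 * res_high. */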
2290static void
2291do_fsm(enum state *state, u16 * counter,
2292 u8 fail_mask, int margin, int uplimit,
2293 u8 * res_low, u8 * res_high, u8 val)
2294{
2295 int lane;
2296
2297 for (lane = 0; lane < 8; lane++) {
2298 int is_fail = (fail_mask >> lane) & 1;
2299 switch (state[lane]) {
2300 case BEFORE_USABLE:
2301 if (!is_fail) {
2302 counter[lane] = 1;
2303 state[lane] = AT_USABLE;
2304 break;
2305 }
2306 counter[lane] = 0;
2307 state[lane] = BEFORE_USABLE;
2308 break;
2309 case AT_USABLE:
2310 if (!is_fail) {
2311 ++counter[lane];
2312 if (counter[lane] >= margin) {
2313 state[lane] = AT_MARGIN;
2314 res_low[lane] = val - margin + 1;
2315 break;
2316 }
2317				state[lane] = AT_USABLE;
2318 break;
2319 }
2320 counter[lane] = 0;
2321 state[lane] = BEFORE_USABLE;
2322 break;
2323 case AT_MARGIN:
2324 if (is_fail) {
2325 state[lane] = COMPLETE;
2326 res_high[lane] = val - 1;
2327 } else {
2328 counter[lane]++;
2329 state[lane] = AT_MARGIN;
2330 if (val == uplimit) {
2331 state[lane] = COMPLETE;
2332 res_high[lane] = uplimit;
2333 }
2334 }
2335 break;
2336 case COMPLETE:
2337 break;
2338 }
2339 }
2340}
2341
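/* For one rank and one reg_178 setting, sweep the 0x1b3/0x1a3 offset over
 * 0..0x2f and use do_fsm() to find the per-lane window of working timings.
 * On the first run only the bounds are recorded; on later runs the bounds
 * are re-verified (and narrowed or widened if needed) with the type-2
 * pattern tests before the original lane timings are restored. */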
2342static void
2343train_ram_at_178(struct raminfo *info, u8 channel, int slot, int rank,
2344 u8 total_rank, u8 reg_178, int first_run, int niter,
2345 timing_bounds_t * timings)
2346{
2347 int lane;
2348 enum state state[8];
2349 u16 count[8];
2350 u8 lower_usable[8];
2351 u8 upper_usable[8];
2352 unsigned short num_sucessfully_checked[8];
2353 u8 secondary_total_rank;
2354 u8 reg1b3;
2355
2356 if (info->populated_ranks_mask[1]) {
2357 if (channel == 1)
2358 secondary_total_rank =
2359 info->populated_ranks[1][0][0] +
2360 info->populated_ranks[1][0][1]
2361 + info->populated_ranks[1][1][0] +
2362 info->populated_ranks[1][1][1];
2363 else
2364 secondary_total_rank = 0;
2365 } else
2366 secondary_total_rank = total_rank;
2367
2368 {
2369 int i;
2370 for (i = 0; i < 8; i++)
2371 state[i] = BEFORE_USABLE;
2372 }
2373
2374 if (!first_run) {
2375 int is_all_ok = 1;
2376 for (lane = 0; lane < 8; lane++)
2377 if (timings[reg_178][channel][slot][rank][lane].
2378 smallest ==
2379 timings[reg_178][channel][slot][rank][lane].
2380 largest) {
2381 timings[reg_178][channel][slot][rank][lane].
2382 smallest = 0;
2383 timings[reg_178][channel][slot][rank][lane].
2384 largest = 0;
2385 is_all_ok = 0;
2386 }
2387 if (is_all_ok) {
2388 int i;
2389 for (i = 0; i < 8; i++)
2390 state[i] = COMPLETE;
2391 }
2392 }
2393
2394 for (reg1b3 = 0; reg1b3 < 0x30 && !validate_state(state); reg1b3++) {
2395 u8 failmask = 0;
2396 write_1d0(reg1b3 ^ 32, 0x1b3, 6, 1);
2397 write_1d0(reg1b3 ^ 32, 0x1a3, 6, 1);
2398 failmask = check_testing(info, total_rank, 0);
2399 write_mchbar32(0xfb0, read_mchbar32(0xfb0) | 0x00030000);
2400 do_fsm(state, count, failmask, 5, 47, lower_usable,
2401 upper_usable, reg1b3);
2402 }
2403
2404 if (reg1b3) {
2405 write_1d0(0, 0x1b3, 6, 1);
2406 write_1d0(0, 0x1a3, 6, 1);
2407 for (lane = 0; lane < 8; lane++) {
2408 if (state[lane] == COMPLETE) {
2409 timings[reg_178][channel][slot][rank][lane].
2410 smallest =
2411 lower_usable[lane] +
2412 (info->training.
2413 lane_timings[0][channel][slot][rank][lane]
2414 & 0x3F) - 32;
2415 timings[reg_178][channel][slot][rank][lane].
2416 largest =
2417 upper_usable[lane] +
2418 (info->training.
2419 lane_timings[0][channel][slot][rank][lane]
2420 & 0x3F) - 32;
2421 }
2422 }
2423 }
2424
2425 if (!first_run) {
2426 for (lane = 0; lane < 8; lane++)
2427 if (state[lane] == COMPLETE) {
2428 write_500(info, channel,
2429 timings[reg_178][channel][slot][rank]
2430 [lane].smallest,
2431 get_timing_register_addr(lane, 0,
2432 slot, rank),
2433 9, 1);
2434 write_500(info, channel,
2435 timings[reg_178][channel][slot][rank]
2436 [lane].smallest +
2437 info->training.
2438 lane_timings[1][channel][slot][rank]
2439 [lane]
2440 -
2441 info->training.
2442 lane_timings[0][channel][slot][rank]
2443 [lane], get_timing_register_addr(lane,
2444 1,
2445 slot,
2446 rank),
2447 9, 1);
2448 num_sucessfully_checked[lane] = 0;
2449 } else
2450 num_sucessfully_checked[lane] = -1;
2451
2452 do {
2453 u8 failmask = 0;
2454 int i;
2455 for (i = 0; i < niter; i++) {
2456 if (failmask == 0xFF)
2457 break;
2458 failmask |=
2459 check_testing_type2(info, total_rank, 2, i,
2460 0);
2461 failmask |=
2462 check_testing_type2(info, total_rank, 3, i,
2463 1);
2464 }
2465 write_mchbar32(0xfb0,
2466 read_mchbar32(0xfb0) | 0x00030000);
2467 for (lane = 0; lane < 8; lane++)
2468 if (num_sucessfully_checked[lane] != 0xffff) {
2469 if ((1 << lane) & failmask) {
2470 if (timings[reg_178][channel]
2471 [slot][rank][lane].
2472 largest <=
2473 timings[reg_178][channel]
2474 [slot][rank][lane].smallest)
2475 num_sucessfully_checked
2476 [lane] = -1;
2477 else {
2478 num_sucessfully_checked
2479 [lane] = 0;
2480 timings[reg_178]
2481 [channel][slot]
2482 [rank][lane].
2483 smallest++;
2484 write_500(info, channel,
2485 timings
2486 [reg_178]
2487 [channel]
2488 [slot][rank]
2489 [lane].
2490 smallest,
2491 get_timing_register_addr
2492 (lane, 0,
2493 slot, rank),
2494 9, 1);
2495 write_500(info, channel,
2496 timings
2497 [reg_178]
2498 [channel]
2499 [slot][rank]
2500 [lane].
2501 smallest +
2502 info->
2503 training.
2504 lane_timings
2505 [1][channel]
2506 [slot][rank]
2507 [lane]
2508 -
2509 info->
2510 training.
2511 lane_timings
2512 [0][channel]
2513 [slot][rank]
2514 [lane],
2515 get_timing_register_addr
2516 (lane, 1,
2517 slot, rank),
2518 9, 1);
2519 }
2520 } else
2521 num_sucessfully_checked[lane]++;
2522 }
2523 }
2524 while (!check_bounded(num_sucessfully_checked, 2));
2525
2526 for (lane = 0; lane < 8; lane++)
2527 if (state[lane] == COMPLETE) {
2528 write_500(info, channel,
2529 timings[reg_178][channel][slot][rank]
2530 [lane].largest,
2531 get_timing_register_addr(lane, 0,
2532 slot, rank),
2533 9, 1);
2534 write_500(info, channel,
2535 timings[reg_178][channel][slot][rank]
2536 [lane].largest +
2537 info->training.
2538 lane_timings[1][channel][slot][rank]
2539 [lane]
2540 -
2541 info->training.
2542 lane_timings[0][channel][slot][rank]
2543 [lane], get_timing_register_addr(lane,
2544 1,
2545 slot,
2546 rank),
2547 9, 1);
2548 num_sucessfully_checked[lane] = 0;
2549 } else
2550 num_sucessfully_checked[lane] = -1;
2551
2552 do {
2553 int failmask = 0;
2554 int i;
2555 for (i = 0; i < niter; i++) {
2556 if (failmask == 0xFF)
2557 break;
2558 failmask |=
2559 check_testing_type2(info, total_rank, 2, i,
2560 0);
2561 failmask |=
2562 check_testing_type2(info, total_rank, 3, i,
2563 1);
2564 }
2565
2566 write_mchbar32(0xfb0,
2567 read_mchbar32(0xfb0) | 0x00030000);
2568 for (lane = 0; lane < 8; lane++) {
2569 if (num_sucessfully_checked[lane] != 0xffff) {
2570 if ((1 << lane) & failmask) {
2571 if (timings[reg_178][channel]
2572 [slot][rank][lane].
2573 largest <=
2574 timings[reg_178][channel]
2575 [slot][rank][lane].
2576 smallest) {
2577 num_sucessfully_checked
2578 [lane] = -1;
2579 } else {
2580 num_sucessfully_checked
2581 [lane] = 0;
2582 timings[reg_178]
2583 [channel][slot]
2584 [rank][lane].
2585 largest--;
2586 write_500(info, channel,
2587 timings
2588 [reg_178]
2589 [channel]
2590 [slot][rank]
2591 [lane].
2592 largest,
2593 get_timing_register_addr
2594 (lane, 0,
2595 slot, rank),
2596 9, 1);
2597 write_500(info, channel,
2598 timings
2599 [reg_178]
2600 [channel]
2601 [slot][rank]
2602 [lane].
2603 largest +
2604 info->
2605 training.
2606 lane_timings
2607 [1][channel]
2608 [slot][rank]
2609 [lane]
2610 -
2611 info->
2612 training.
2613 lane_timings
2614 [0][channel]
2615 [slot][rank]
2616 [lane],
2617 get_timing_register_addr
2618 (lane, 1,
2619 slot, rank),
2620 9, 1);
2621 }
2622 } else
2623 num_sucessfully_checked[lane]++;
2624 }
2625 }
2626 }
2627 while (!check_bounded(num_sucessfully_checked, 3));
2628
2629 for (lane = 0; lane < 8; lane++) {
2630 write_500(info, channel,
2631 info->training.
2632 lane_timings[0][channel][slot][rank][lane],
2633 get_timing_register_addr(lane, 0, slot, rank),
2634 9, 1);
2635 write_500(info, channel,
2636 info->training.
2637 lane_timings[1][channel][slot][rank][lane],
2638 get_timing_register_addr(lane, 1, slot, rank),
2639 9, 1);
2640 if (timings[reg_178][channel][slot][rank][lane].
2641 largest <=
2642 timings[reg_178][channel][slot][rank][lane].
2643 smallest) {
2644 timings[reg_178][channel][slot][rank][lane].
2645 largest = 0;
2646 timings[reg_178][channel][slot][rank][lane].
2647 smallest = 0;
2648 }
2649 }
2650 }
2651}
2652
2653static void set_10b(struct raminfo *info, u8 val)
2654{
2655 int channel;
2656 int slot, rank;
2657 int lane;
2658
2659 if (read_1d0(0x10b, 6) == val)
2660 return;
2661
2662 write_1d0(val, 0x10b, 6, 1);
2663
2664 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 9; lane++) {
2665 u16 reg_500;
2666 reg_500 = read_500(info, channel,
2667 get_timing_register_addr(lane, 0, slot,
2668 rank), 9);
2669 if (val == 1) {
2670 if (lut16[info->clock_speed_index] <= reg_500)
2671 reg_500 -= lut16[info->clock_speed_index];
2672 else
2673 reg_500 = 0;
2674 } else {
2675 reg_500 += lut16[info->clock_speed_index];
2676 }
2677 write_500(info, channel, reg_500,
2678 get_timing_register_addr(lane, 0, slot, rank), 9, 1);
2679 }
2680}
2681
2682static void set_ecc(int onoff)
2683{
2684 int channel;
2685 for (channel = 0; channel < NUM_CHANNELS; channel++) {
2686 u8 t;
2687 t = read_mchbar8((channel << 10) + 0x5f8);
2688 if (onoff)
2689 t |= 1;
2690 else
2691 t &= ~1;
2692 write_mchbar8((channel << 10) + 0x5f8, t);
2693 }
2694}
2695
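/* Convert the logical reg_178 value into the 7-bit register encoding:
 * values >= 31 map to 2 * (val - 31), smaller values wrap around to
 * 2 * (63 - val) (e.g. 31 -> 0, 32 -> 2, 30 -> 66). */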
2696static void set_178(u8 val)
2697{
2698 if (val >= 31)
2699 val = val - 31;
2700 else
2701 val = 63 - val;
2702
2703 write_1d0(2 * val, 0x178, 7, 1);
2704}
2705
2706static void
2707write_500_timings_type(struct raminfo *info, int channel, int slot, int rank,
2708 int type)
2709{
2710 int lane;
2711
2712 for (lane = 0; lane < 8; lane++)
2713 write_500(info, channel,
2714 info->training.
2715 lane_timings[type][channel][slot][rank][lane],
2716 get_timing_register_addr(lane, type, slot, rank), 9,
2717 0);
2718}
2719
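/* Sweep the 0x1bb timing offset over 0..63 with alternating test patterns
 * and find the per-lane usable window (do_fsm() with a margin of 10). The
 * type-3 timing is then set to the middle of that window, biased slightly
 * on some silicon revisions, and the discovered bounds are saved for the
 * S3-resume sanity check in try_cached_training(). */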
2720static void
2721try_timing_offsets(struct raminfo *info, int channel,
2722 int slot, int rank, int totalrank)
2723{
2724 u16 count[8];
2725 enum state state[8];
2726 u8 lower_usable[8], upper_usable[8];
2727 int lane;
2728 int i;
2729 int flip = 1;
2730 int timing_offset;
2731
2732 for (i = 0; i < 8; i++)
2733 state[i] = BEFORE_USABLE;
2734
2735 memset(count, 0, sizeof(count));
2736
2737 for (lane = 0; lane < 8; lane++)
2738 write_500(info, channel,
2739 info->training.
2740 lane_timings[2][channel][slot][rank][lane] + 32,
2741 get_timing_register_addr(lane, 3, slot, rank), 9, 1);
2742
2743 for (timing_offset = 0; !validate_state(state) && timing_offset < 64;
2744 timing_offset++) {
2745 u8 failmask;
2746 write_1d0(timing_offset ^ 32, 0x1bb, 6, 1);
2747 failmask = 0;
2748 for (i = 0; i < 2 && failmask != 0xff; i++) {
2749 flip = !flip;
2750 write_testing(info, totalrank, flip);
2751 failmask |= check_testing(info, totalrank, flip);
2752 }
2753 do_fsm(state, count, failmask, 10, 63, lower_usable,
2754 upper_usable, timing_offset);
2755 }
2756 write_1d0(0, 0x1bb, 6, 1);
2757 dump_timings(info);
2758 if (!validate_state(state))
2759 die("Couldn't discover DRAM timings (1)\n");
2760
2761 for (lane = 0; lane < 8; lane++) {
2762 u8 bias = 0;
2763
2764 if (info->silicon_revision) {
2765 int usable_length;
2766
2767 usable_length = upper_usable[lane] - lower_usable[lane];
2768 if (usable_length >= 20) {
2769 bias = usable_length / 2 - 10;
2770 if (bias >= 2)
2771 bias = 2;
2772 }
2773 }
2774 write_500(info, channel,
2775 info->training.
2776 lane_timings[2][channel][slot][rank][lane] +
2777 (upper_usable[lane] + lower_usable[lane]) / 2 - bias,
2778 get_timing_register_addr(lane, 3, slot, rank), 9, 1);
2779 info->training.timing2_bounds[channel][slot][rank][lane][0] =
2780 info->training.lane_timings[2][channel][slot][rank][lane] +
2781 lower_usable[lane];
2782 info->training.timing2_bounds[channel][slot][rank][lane][1] =
2783 info->training.lane_timings[2][channel][slot][rank][lane] +
2784 upper_usable[lane];
2785 info->training.timing2_offset[channel][slot][rank][lane] =
2786 info->training.lane_timings[2][channel][slot][rank][lane];
2787 }
2788}
2789
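/* Pick the final type-0 timing for one lane: take a weighted average of
 * the window edges measured at center_178 and at center_178 +/- span
 * (windows narrower than 5 steps are ignored), then nudge the result so
 * that both margins around it stay at roughly 10 steps where possible. */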
2790static u8
2791choose_training(struct raminfo *info, int channel, int slot, int rank,
2792 int lane, timing_bounds_t * timings, u8 center_178)
2793{
2794 u16 central_weight;
2795 u16 side_weight;
2796 unsigned int sum = 0, count = 0;
2797 u8 span;
2798 u8 lower_margin, upper_margin;
2799 u8 reg_178;
2800 u8 result;
2801
2802 span = 12;
2803 central_weight = 20;
2804 side_weight = 20;
2805 if (info->silicon_revision == 1 && channel == 1) {
2806 central_weight = 5;
2807 side_weight = 20;
2808 if ((info->
2809 populated_ranks_mask[1] ^ (info->
2810 populated_ranks_mask[1] >> 2)) &
2811 1)
2812 span = 18;
2813 }
2814 if ((info->populated_ranks_mask[0] & 5) == 5) {
2815 central_weight = 20;
2816 side_weight = 20;
2817 }
2818 if (info->clock_speed_index >= 2
2819 && (info->populated_ranks_mask[0] & 5) == 5 && slot == 1) {
2820 if (info->silicon_revision == 1) {
2821 switch (channel) {
2822 case 0:
2823 if (lane == 1) {
2824 central_weight = 10;
2825 side_weight = 20;
2826 }
2827 break;
2828 case 1:
2829 if (lane == 6) {
2830 side_weight = 5;
2831 central_weight = 20;
2832 }
2833 break;
2834 }
2835 }
2836 if (info->silicon_revision == 0 && channel == 0 && lane == 0) {
2837 side_weight = 5;
2838 central_weight = 20;
2839 }
2840 }
2841 for (reg_178 = center_178 - span; reg_178 <= center_178 + span;
2842 reg_178 += span) {
2843 u8 smallest;
2844 u8 largest;
2845 largest = timings[reg_178][channel][slot][rank][lane].largest;
2846 smallest = timings[reg_178][channel][slot][rank][lane].smallest;
2847 if (largest - smallest + 1 >= 5) {
2848 unsigned int weight;
2849 if (reg_178 == center_178)
2850 weight = central_weight;
2851 else
2852 weight = side_weight;
2853 sum += weight * (largest + smallest);
2854 count += weight;
2855 }
2856 }
2857 dump_timings(info);
2858 if (count == 0)
2859 die("Couldn't discover DRAM timings (2)\n");
2860 result = sum / (2 * count);
2861 lower_margin =
2862 result - timings[center_178][channel][slot][rank][lane].smallest;
2863 upper_margin =
2864 timings[center_178][channel][slot][rank][lane].largest - result;
2865 if (upper_margin < 10 && lower_margin > 10)
2866 result -= min(lower_margin - 10, 10 - upper_margin);
2867 if (upper_margin > 10 && lower_margin < 10)
2868 result += min(upper_margin - 10, 10 - lower_margin);
2869 return result;
2870}
2871
2872#define STANDARD_MIN_MARGIN 5
2873
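/* Choose the operating point for register 0x178: for every candidate
 * setting compute the worst per-lane margin, return the margin-weighted
 * average as the center, and record the smallest/largest settings that
 * still clear a progressively relaxed margin threshold for later
 * verification. */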
2874static u8 choose_reg178(struct raminfo *info, timing_bounds_t * timings)
2875{
2876 u16 margin[64];
2877 int lane, rank, slot, channel;
2878 u8 reg178;
2879 int count = 0, sum = 0;
2880
2881 for (reg178 = reg178_min[info->clock_speed_index];
2882 reg178 < reg178_max[info->clock_speed_index];
2883 reg178 += reg178_step[info->clock_speed_index]) {
2884 margin[reg178] = -1;
2885 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 8; lane++) {
2886 int curmargin =
2887 timings[reg178][channel][slot][rank][lane].largest -
2888 timings[reg178][channel][slot][rank][lane].
2889 smallest + 1;
2890 if (curmargin < margin[reg178])
2891 margin[reg178] = curmargin;
2892 }
2893 if (margin[reg178] >= STANDARD_MIN_MARGIN) {
2894 u16 weight;
2895 weight = margin[reg178] - STANDARD_MIN_MARGIN;
2896 sum += weight * reg178;
2897 count += weight;
2898 }
2899 }
2900 dump_timings(info);
2901 if (count == 0)
2902 die("Couldn't discover DRAM timings (3)\n");
2903
2904 u8 threshold;
2905
2906 for (threshold = 30; threshold >= 5; threshold--) {
2907 int usable_length = 0;
2908		int smallest_found = 0;
2909 for (reg178 = reg178_min[info->clock_speed_index];
2910 reg178 < reg178_max[info->clock_speed_index];
2911 reg178 += reg178_step[info->clock_speed_index])
2912 if (margin[reg178] >= threshold) {
2913 usable_length +=
2914 reg178_step[info->clock_speed_index];
2915 info->training.reg178_largest =
2916 reg178 -
2917 2 * reg178_step[info->clock_speed_index];
2918
2919				if (!smallest_found) {
2920					smallest_found = 1;
2921 info->training.reg178_smallest =
2922 reg178 +
2923 reg178_step[info->
2924 clock_speed_index];
2925 }
2926 }
2927 if (usable_length >= 0x21)
2928 break;
2929 }
2930
2931 return sum / count;
2932}
2933
2934static int check_cached_sanity(struct raminfo *info)
2935{
2936 int lane;
2937 int slot, rank;
2938 int channel;
2939
2940 if (!info->cached_training)
2941 return 0;
2942
2943 for (channel = 0; channel < NUM_CHANNELS; channel++)
2944 for (slot = 0; slot < NUM_SLOTS; slot++)
2945 for (rank = 0; rank < NUM_RANKS; rank++)
2946 for (lane = 0; lane < 8 + info->use_ecc; lane++) {
2947 u16 cached_value, estimation_value;
2948 cached_value =
2949 info->cached_training->
2950 lane_timings[1][channel][slot][rank]
2951 [lane];
2952 if (cached_value >= 0x18
2953 && cached_value <= 0x1E7) {
2954 estimation_value =
2955 info->training.
2956 lane_timings[1][channel]
2957 [slot][rank][lane];
2958 if (estimation_value <
2959 cached_value - 24)
2960 return 0;
2961 if (estimation_value >
2962 cached_value + 24)
2963 return 0;
2964 }
2965 }
2966 return 1;
2967}
2968
2969static int try_cached_training(struct raminfo *info)
2970{
2971 u8 saved_243[2];
2972 u8 tm;
2973
2974 int channel, slot, rank, lane;
2975 int flip = 1;
2976 int i, j;
2977
2978 if (!check_cached_sanity(info))
2979 return 0;
2980
2981 info->training.reg178_center = info->cached_training->reg178_center;
2982 info->training.reg178_smallest = info->cached_training->reg178_smallest;
2983 info->training.reg178_largest = info->cached_training->reg178_largest;
2984 memcpy(&info->training.timing_bounds,
2985 &info->cached_training->timing_bounds,
2986 sizeof(info->training.timing_bounds));
2987 memcpy(&info->training.timing_offset,
2988 &info->cached_training->timing_offset,
2989 sizeof(info->training.timing_offset));
2990
2991 write_1d0(2, 0x142, 3, 1);
2992 saved_243[0] = read_mchbar8(0x243);
2993 saved_243[1] = read_mchbar8(0x643);
2994 write_mchbar8(0x243, saved_243[0] | 2);
2995 write_mchbar8(0x643, saved_243[1] | 2);
2996 set_ecc(0);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03002997 pci_write_config16(NORTHBRIDGE, 0xc8, 3);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002998 if (read_1d0(0x10b, 6) & 1)
2999 set_10b(info, 0);
3000 for (tm = 0; tm < 2; tm++) {
3001 int totalrank;
3002
3003 set_178(tm ? info->cached_training->reg178_largest : info->
3004 cached_training->reg178_smallest);
3005
3006 totalrank = 0;
3007		/* Check timing ranges. With i == 0 we check the smallest bound and
3008		   with i == 1 the largest bound. With j == 0 we check that it still
3009		   works on the bound, whereas with j == 1 we check that it fails
3010		   just outside of the bound.
3011		 */
3012 FOR_POPULATED_RANKS_BACKWARDS {
3013 for (i = 0; i < 2; i++) {
3014 for (lane = 0; lane < 8; lane++) {
3015 write_500(info, channel,
3016 info->cached_training->
3017 timing2_bounds[channel][slot]
3018 [rank][lane][i],
3019 get_timing_register_addr(lane,
3020 3,
3021 slot,
3022 rank),
3023 9, 1);
3024
3025 if (!i)
3026 write_500(info, channel,
3027 info->
3028 cached_training->
3029 timing2_offset
3030 [channel][slot][rank]
3031 [lane],
3032 get_timing_register_addr
3033 (lane, 2, slot, rank),
3034 9, 1);
3035 write_500(info, channel,
3036 i ? info->cached_training->
3037 timing_bounds[tm][channel]
3038 [slot][rank][lane].
3039 largest : info->
3040 cached_training->
3041 timing_bounds[tm][channel]
3042 [slot][rank][lane].smallest,
3043 get_timing_register_addr(lane,
3044 0,
3045 slot,
3046 rank),
3047 9, 1);
3048 write_500(info, channel,
3049 info->cached_training->
3050 timing_offset[channel][slot]
3051 [rank][lane] +
3052 (i ? info->cached_training->
3053 timing_bounds[tm][channel]
3054 [slot][rank][lane].
3055 largest : info->
3056 cached_training->
3057 timing_bounds[tm][channel]
3058 [slot][rank][lane].
3059 smallest) - 64,
3060 get_timing_register_addr(lane,
3061 1,
3062 slot,
3063 rank),
3064 9, 1);
3065 }
3066 for (j = 0; j < 2; j++) {
3067 u8 failmask;
3068 u8 expected_failmask;
3069 char reg1b3;
3070
3071 reg1b3 = (j == 1) + 4;
3072 reg1b3 =
3073 j == i ? reg1b3 : (-reg1b3) & 0x3f;
3074 write_1d0(reg1b3, 0x1bb, 6, 1);
3075 write_1d0(reg1b3, 0x1b3, 6, 1);
3076 write_1d0(reg1b3, 0x1a3, 6, 1);
3077
3078 flip = !flip;
3079 write_testing(info, totalrank, flip);
3080 failmask =
3081 check_testing(info, totalrank,
3082 flip);
3083 expected_failmask =
3084 j == 0 ? 0x00 : 0xff;
3085 if (failmask != expected_failmask)
3086 goto fail;
3087 }
3088 }
3089 totalrank++;
3090 }
3091 }
3092
3093 set_178(info->cached_training->reg178_center);
3094 if (info->use_ecc)
3095 set_ecc(1);
3096 write_training_data(info);
3097	write_1d0(0, 0x142, 3, 1);
3098 info->training = *info->cached_training;
3099
3100 write_1d0(0, 0x1bb, 6, 1);
3101 write_1d0(0, 0x1b3, 6, 1);
3102 write_1d0(0, 0x1a3, 6, 1);
3103 write_mchbar8(0x243, saved_243[0]);
3104 write_mchbar8(0x643, saved_243[1]);
3105
3106 return 1;
3107
3108fail:
3109 FOR_POPULATED_RANKS {
3110 write_500_timings_type(info, channel, slot, rank, 1);
3111 write_500_timings_type(info, channel, slot, rank, 2);
3112 write_500_timings_type(info, channel, slot, rank, 3);
3113 }
3114
3115 write_1d0(0, 0x1bb, 6, 1);
3116 write_1d0(0, 0x1b3, 6, 1);
3117 write_1d0(0, 0x1a3, 6, 1);
3118 write_mchbar8(0x243, saved_243[0]);
3119 write_mchbar8(0x643, saved_243[1]);
3120
3121 return 0;
3122}
3123
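/* Full training: sweep reg_178 over its whole range and characterize every
 * populated rank, pick the center with choose_reg178(), re-train around
 * that center, program the final per-lane timings via choose_training()
 * and finally run try_timing_offsets() for the type-3 timings. */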
3124static void do_ram_training(struct raminfo *info)
3125{
3126 u8 saved_243[2];
3127 int totalrank = 0;
3128 u8 reg_178;
3129 int niter;
3130
3131 timing_bounds_t timings[64];
3132 int lane, rank, slot, channel;
3133 u8 reg178_center;
3134
3135 write_1d0(2, 0x142, 3, 1);
3136 saved_243[0] = read_mchbar8(0x243);
3137 saved_243[1] = read_mchbar8(0x643);
3138 write_mchbar8(0x243, saved_243[0] | 2);
3139 write_mchbar8(0x643, saved_243[1] | 2);
3140 switch (info->clock_speed_index) {
3141 case 0:
3142 niter = 5;
3143 break;
3144 case 1:
3145 niter = 10;
3146 break;
3147 default:
3148 niter = 19;
3149 break;
3150 }
3151 set_ecc(0);
3152
3153 FOR_POPULATED_RANKS_BACKWARDS {
3154 int i;
3155
3156 write_500_timings_type(info, channel, slot, rank, 0);
3157
3158 write_testing(info, totalrank, 0);
3159 for (i = 0; i < niter; i++) {
3160 write_testing_type2(info, totalrank, 2, i, 0);
3161 write_testing_type2(info, totalrank, 3, i, 1);
3162 }
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003163 pci_write_config8(NORTHBRIDGE, 0xc0, 0x01);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003164 totalrank++;
3165 }
3166
3167 if (reg178_min[info->clock_speed_index] <
3168 reg178_max[info->clock_speed_index])
3169 memset(timings[reg178_min[info->clock_speed_index]], 0,
3170 sizeof(timings[0]) *
3171 (reg178_max[info->clock_speed_index] -
3172 reg178_min[info->clock_speed_index]));
3173 for (reg_178 = reg178_min[info->clock_speed_index];
3174 reg_178 < reg178_max[info->clock_speed_index];
3175 reg_178 += reg178_step[info->clock_speed_index]) {
3176 totalrank = 0;
3177 set_178(reg_178);
3178 for (channel = NUM_CHANNELS - 1; channel >= 0; channel--)
3179 for (slot = 0; slot < NUM_SLOTS; slot++)
3180 for (rank = 0; rank < NUM_RANKS; rank++) {
3181 memset(&timings[reg_178][channel][slot]
3182 [rank][0].smallest, 0, 16);
3183 if (info->
3184 populated_ranks[channel][slot]
3185 [rank]) {
3186 train_ram_at_178(info, channel,
3187 slot, rank,
3188 totalrank,
3189 reg_178, 1,
3190 niter,
3191 timings);
3192 totalrank++;
3193 }
3194 }
3195 }
3196
3197 reg178_center = choose_reg178(info, timings);
3198
3199 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 8; lane++) {
3200 info->training.timing_bounds[0][channel][slot][rank][lane].
3201 smallest =
3202 timings[info->training.
3203 reg178_smallest][channel][slot][rank][lane].
3204 smallest;
3205 info->training.timing_bounds[0][channel][slot][rank][lane].
3206 largest =
3207 timings[info->training.
3208 reg178_smallest][channel][slot][rank][lane].largest;
3209 info->training.timing_bounds[1][channel][slot][rank][lane].
3210 smallest =
3211 timings[info->training.
3212 reg178_largest][channel][slot][rank][lane].smallest;
3213 info->training.timing_bounds[1][channel][slot][rank][lane].
3214 largest =
3215 timings[info->training.
3216 reg178_largest][channel][slot][rank][lane].largest;
3217 info->training.timing_offset[channel][slot][rank][lane] =
3218 info->training.lane_timings[1][channel][slot][rank][lane]
3219 -
3220 info->training.lane_timings[0][channel][slot][rank][lane] +
3221 64;
3222 }
3223
3224 if (info->silicon_revision == 1
3225 && (info->
3226 populated_ranks_mask[1] ^ (info->
3227 populated_ranks_mask[1] >> 2)) & 1) {
3228 int ranks_after_channel1;
3229
3230 totalrank = 0;
3231 for (reg_178 = reg178_center - 18;
3232 reg_178 <= reg178_center + 18; reg_178 += 18) {
3233 totalrank = 0;
3234 set_178(reg_178);
3235 for (slot = 0; slot < NUM_SLOTS; slot++)
3236 for (rank = 0; rank < NUM_RANKS; rank++) {
3237 if (info->
3238 populated_ranks[1][slot][rank]) {
3239 train_ram_at_178(info, 1, slot,
3240 rank,
3241 totalrank,
3242 reg_178, 0,
3243 niter,
3244 timings);
3245 totalrank++;
3246 }
3247 }
3248 }
3249 ranks_after_channel1 = totalrank;
3250
3251 for (reg_178 = reg178_center - 12;
3252 reg_178 <= reg178_center + 12; reg_178 += 12) {
3253 totalrank = ranks_after_channel1;
3254 set_178(reg_178);
3255 for (slot = 0; slot < NUM_SLOTS; slot++)
3256 for (rank = 0; rank < NUM_RANKS; rank++)
3257 if (info->
3258 populated_ranks[0][slot][rank]) {
3259 train_ram_at_178(info, 0, slot,
3260 rank,
3261 totalrank,
3262 reg_178, 0,
3263 niter,
3264 timings);
3265 totalrank++;
3266 }
3267
3268 }
3269 } else {
3270 for (reg_178 = reg178_center - 12;
3271 reg_178 <= reg178_center + 12; reg_178 += 12) {
3272 totalrank = 0;
3273 set_178(reg_178);
3274 FOR_POPULATED_RANKS_BACKWARDS {
3275 train_ram_at_178(info, channel, slot, rank,
3276 totalrank, reg_178, 0, niter,
3277 timings);
3278 totalrank++;
3279 }
3280 }
3281 }
3282
3283 set_178(reg178_center);
3284 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 8; lane++) {
3285 u16 tm0;
3286
3287 tm0 =
3288 choose_training(info, channel, slot, rank, lane, timings,
3289 reg178_center);
3290 write_500(info, channel, tm0,
3291 get_timing_register_addr(lane, 0, slot, rank), 9, 1);
3292 write_500(info, channel,
3293 tm0 +
3294 info->training.
3295 lane_timings[1][channel][slot][rank][lane] -
3296 info->training.
3297 lane_timings[0][channel][slot][rank][lane],
3298 get_timing_register_addr(lane, 1, slot, rank), 9, 1);
3299 }
3300
3301 totalrank = 0;
3302 FOR_POPULATED_RANKS_BACKWARDS {
3303 try_timing_offsets(info, channel, slot, rank, totalrank);
3304 totalrank++;
3305 }
3306 write_mchbar8(0x243, saved_243[0]);
3307 write_mchbar8(0x643, saved_243[1]);
3308 write_1d0(0, 0x142, 3, 1);
3309 info->training.reg178_center = reg178_center;
3310}
3311
3312static void ram_training(struct raminfo *info)
3313{
3314 u16 saved_fc4;
3315
3316 saved_fc4 = read_mchbar16(0xfc4);
3317 write_mchbar16(0xfc4, 0xffff);
3318
3319 if (info->revision >= 8)
3320 read_4090(info);
3321
3322 if (!try_cached_training(info))
3323 do_ram_training(info);
3324 if ((info->silicon_revision == 2 || info->silicon_revision == 3)
3325 && info->clock_speed_index < 2)
3326 set_10b(info, 1);
3327 write_mchbar16(0xfc4, saved_fc4);
3328}
3329
3330static unsigned gcd(unsigned a, unsigned b)
3331{
3332 unsigned t;
3333 if (a > b) {
3334 t = a;
3335 a = b;
3336 b = t;
3337 }
3338	/* invariant: a <= b. */
3339 while (a) {
3340 t = b % a;
3341 b = a;
3342 a = t;
3343 }
3344 return b;
3345}
3346
3347static inline int div_roundup(int a, int b)
3348{
Edward O'Callaghan7116ac82014-07-08 01:53:24 +10003349 return CEIL_DIV(a, b);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003350}
3351
3352static unsigned lcm(unsigned a, unsigned b)
3353{
3354 return (a * b) / gcd(a, b);
3355}
3356
3357struct stru1 {
3358 u8 freqs_reversed;
3359 u8 freq_diff_reduced;
3360 u8 freq_min_reduced;
3361 u8 divisor_f4_to_fmax;
3362 u8 divisor_f3_to_fmax;
3363 u8 freq4_to_max_remainder;
3364 u8 freq3_to_2_remainder;
3365 u8 freq3_to_2_remaindera;
3366 u8 freq4_to_2_remainder;
3367 int divisor_f3_to_f1, divisor_f4_to_f2;
3368 int common_time_unit_ps;
3369 int freq_max_reduced;
3370};
3371
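/* Reduce the two clock frequencies by their GCD and derive the
 * divisor/remainder pairs that the 0x2dxx/0x6xx registers seem to expect
 * for translating cycle counts between the two clock domains. The exact
 * register semantics are reverse-engineered, hence the generic field
 * names in struct stru1. */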
3372static void
3373compute_frequence_ratios(struct raminfo *info, u16 freq1, u16 freq2,
3374 int num_cycles_2, int num_cycles_1, int round_it,
3375 int add_freqs, struct stru1 *result)
3376{
3377 int g;
3378 int common_time_unit_ps;
3379 int freq1_reduced, freq2_reduced;
3380 int freq_min_reduced;
3381 int freq_max_reduced;
3382 int freq3, freq4;
3383
3384 g = gcd(freq1, freq2);
3385 freq1_reduced = freq1 / g;
3386 freq2_reduced = freq2 / g;
3387 freq_min_reduced = min(freq1_reduced, freq2_reduced);
3388 freq_max_reduced = max(freq1_reduced, freq2_reduced);
3389
3390 common_time_unit_ps = div_roundup(900000, lcm(freq1, freq2));
3391 freq3 = div_roundup(num_cycles_2, common_time_unit_ps) - 1;
3392 freq4 = div_roundup(num_cycles_1, common_time_unit_ps) - 1;
3393 if (add_freqs) {
3394 freq3 += freq2_reduced;
3395 freq4 += freq1_reduced;
3396 }
3397
3398 if (round_it) {
3399 result->freq3_to_2_remainder = 0;
3400 result->freq3_to_2_remaindera = 0;
3401 result->freq4_to_max_remainder = 0;
3402 result->divisor_f4_to_f2 = 0;
3403 result->divisor_f3_to_f1 = 0;
3404 } else {
3405 if (freq2_reduced < freq1_reduced) {
3406 result->freq3_to_2_remainder =
3407 result->freq3_to_2_remaindera =
3408 freq3 % freq1_reduced - freq1_reduced + 1;
3409 result->freq4_to_max_remainder =
3410 -(freq4 % freq1_reduced);
3411 result->divisor_f3_to_f1 = freq3 / freq1_reduced;
3412 result->divisor_f4_to_f2 =
3413 (freq4 -
3414 (freq1_reduced - freq2_reduced)) / freq2_reduced;
3415 result->freq4_to_2_remainder =
3416 -(char)((freq1_reduced - freq2_reduced) +
3417 ((u8) freq4 -
3418 (freq1_reduced -
3419 freq2_reduced)) % (u8) freq2_reduced);
3420 } else {
3421 if (freq2_reduced > freq1_reduced) {
3422 result->freq4_to_max_remainder =
3423 (freq4 % freq2_reduced) - freq2_reduced + 1;
3424 result->freq4_to_2_remainder =
3425 freq4 % freq_max_reduced -
3426 freq_max_reduced + 1;
3427 } else {
3428 result->freq4_to_max_remainder =
3429 -(freq4 % freq2_reduced);
3430 result->freq4_to_2_remainder =
3431 -(char)(freq4 % freq_max_reduced);
3432 }
3433 result->divisor_f4_to_f2 = freq4 / freq2_reduced;
3434 result->divisor_f3_to_f1 =
3435 (freq3 -
3436 (freq2_reduced - freq1_reduced)) / freq1_reduced;
3437 result->freq3_to_2_remainder = -(freq3 % freq2_reduced);
3438 result->freq3_to_2_remaindera =
3439 -(char)((freq_max_reduced - freq_min_reduced) +
3440 (freq3 -
3441 (freq_max_reduced -
3442 freq_min_reduced)) % freq1_reduced);
3443 }
3444 }
3445 result->divisor_f3_to_fmax = freq3 / freq_max_reduced;
3446 result->divisor_f4_to_fmax = freq4 / freq_max_reduced;
3447 if (round_it) {
3448 if (freq2_reduced > freq1_reduced) {
3449 if (freq3 % freq_max_reduced)
3450 result->divisor_f3_to_fmax++;
3451 }
3452 if (freq2_reduced < freq1_reduced) {
3453 if (freq4 % freq_max_reduced)
3454 result->divisor_f4_to_fmax++;
3455 }
3456 }
3457 result->freqs_reversed = (freq2_reduced < freq1_reduced);
3458 result->freq_diff_reduced = freq_max_reduced - freq_min_reduced;
3459 result->freq_min_reduced = freq_min_reduced;
3460 result->common_time_unit_ps = common_time_unit_ps;
3461 result->freq_max_reduced = freq_max_reduced;
3462}
3463
3464static void
3465set_2d5x_reg(struct raminfo *info, u16 reg, u16 freq1, u16 freq2,
3466 int num_cycles_2, int num_cycles_1, int num_cycles_3,
3467 int num_cycles_4, int reverse)
3468{
3469 struct stru1 vv;
3470 char multiplier;
3471
3472 compute_frequence_ratios(info, freq1, freq2, num_cycles_2, num_cycles_1,
3473 0, 1, &vv);
3474
3475 multiplier =
3476 div_roundup(max
3477 (div_roundup(num_cycles_2, vv.common_time_unit_ps) +
3478 div_roundup(num_cycles_3, vv.common_time_unit_ps),
3479 div_roundup(num_cycles_1,
3480 vv.common_time_unit_ps) +
3481 div_roundup(num_cycles_4, vv.common_time_unit_ps))
3482 + vv.freq_min_reduced - 1, vv.freq_max_reduced) - 1;
3483
3484 u32 y =
3485 (u8) ((vv.freq_max_reduced - vv.freq_min_reduced) +
3486 vv.freq_max_reduced * multiplier)
3487 | (vv.
3488 freqs_reversed << 8) | ((u8) (vv.freq_min_reduced *
3489 multiplier) << 16) | ((u8) (vv.
3490 freq_min_reduced
3491 *
3492 multiplier)
3493 << 24);
3494 u32 x =
3495 vv.freq3_to_2_remaindera | (vv.freq4_to_2_remainder << 8) | (vv.
3496 divisor_f3_to_f1
3497 << 16)
3498 | (vv.divisor_f4_to_f2 << 20) | (vv.freq_min_reduced << 24);
3499 if (reverse) {
3500 write_mchbar32(reg, y);
3501 write_mchbar32(reg + 4, x);
3502 } else {
3503 write_mchbar32(reg + 4, y);
3504 write_mchbar32(reg, x);
3505 }
3506}
3507
3508static void
3509set_6d_reg(struct raminfo *info, u16 reg, u16 freq1, u16 freq2,
3510 int num_cycles_1, int num_cycles_2, int num_cycles_3,
3511 int num_cycles_4)
3512{
3513 struct stru1 ratios1;
3514 struct stru1 ratios2;
3515
3516 compute_frequence_ratios(info, freq1, freq2, num_cycles_1, num_cycles_2,
3517 0, 1, &ratios2);
3518 compute_frequence_ratios(info, freq1, freq2, num_cycles_3, num_cycles_4,
3519 0, 1, &ratios1);
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003520 printk (BIOS_SPEW, "[%x] <= %x\n", reg,
3521 ratios1.freq4_to_max_remainder | (ratios2.
3522 freq4_to_max_remainder
3523 << 8)
3524 | (ratios1.divisor_f4_to_fmax << 16) | (ratios2.
3525 divisor_f4_to_fmax
3526 << 20));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003527 write_mchbar32(reg,
3528 ratios1.freq4_to_max_remainder | (ratios2.
3529 freq4_to_max_remainder
3530 << 8)
3531 | (ratios1.divisor_f4_to_fmax << 16) | (ratios2.
3532 divisor_f4_to_fmax
3533 << 20));
3534}
3535
3536static void
3537set_2dx8_reg(struct raminfo *info, u16 reg, u8 mode, u16 freq1, u16 freq2,
3538 int num_cycles_2, int num_cycles_1, int round_it, int add_freqs)
3539{
3540 struct stru1 ratios;
3541
3542 compute_frequence_ratios(info, freq1, freq2, num_cycles_2, num_cycles_1,
3543 round_it, add_freqs, &ratios);
3544 switch (mode) {
3545 case 0:
3546 write_mchbar32(reg + 4,
3547 ratios.freq_diff_reduced | (ratios.
3548 freqs_reversed <<
3549 8));
3550 write_mchbar32(reg,
3551 ratios.freq3_to_2_remainder | (ratios.
3552 freq4_to_max_remainder
3553 << 8)
3554 | (ratios.divisor_f3_to_fmax << 16) | (ratios.
3555 divisor_f4_to_fmax
3556 << 20) |
3557 (ratios.freq_min_reduced << 24));
3558 break;
3559
3560 case 1:
3561 write_mchbar32(reg,
3562 ratios.freq3_to_2_remainder | (ratios.
3563 divisor_f3_to_fmax
3564 << 16));
3565 break;
3566
3567 case 2:
3568 write_mchbar32(reg,
3569 ratios.freq3_to_2_remainder | (ratios.
3570 freq4_to_max_remainder
3571 << 8) | (ratios.
3572 divisor_f3_to_fmax
3573 << 16) |
3574 (ratios.divisor_f4_to_fmax << 20));
3575 break;
3576
3577 case 4:
3578 write_mchbar32(reg, (ratios.divisor_f3_to_fmax << 4)
3579 | (ratios.divisor_f4_to_fmax << 8) | (ratios.
3580 freqs_reversed
3581 << 12) |
3582 (ratios.freq_min_reduced << 16) | (ratios.
3583 freq_diff_reduced
3584 << 24));
3585 break;
3586 }
3587}
3588
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003589static void set_2dxx_series(struct raminfo *info, int s3resume)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003590{
3591 set_2dx8_reg(info, 0x2d00, 0, 0x78, frequency_11(info) / 2, 1359, 1005,
3592 0, 1);
3593 set_2dx8_reg(info, 0x2d08, 0, 0x78, 0x78, 3273, 5033, 1, 1);
3594 set_2dx8_reg(info, 0x2d10, 0, 0x78, info->fsb_frequency, 1475, 1131, 0,
3595 1);
3596 set_2dx8_reg(info, 0x2d18, 0, 2 * info->fsb_frequency,
3597 frequency_11(info), 1231, 1524, 0, 1);
3598 set_2dx8_reg(info, 0x2d20, 0, 2 * info->fsb_frequency,
3599 frequency_11(info) / 2, 1278, 2008, 0, 1);
3600 set_2dx8_reg(info, 0x2d28, 0, info->fsb_frequency, frequency_11(info),
3601 1167, 1539, 0, 1);
3602 set_2dx8_reg(info, 0x2d30, 0, info->fsb_frequency,
3603 frequency_11(info) / 2, 1403, 1318, 0, 1);
3604 set_2dx8_reg(info, 0x2d38, 0, info->fsb_frequency, 0x78, 3460, 5363, 1,
3605 1);
3606 set_2dx8_reg(info, 0x2d40, 0, info->fsb_frequency, 0x3c, 2792, 5178, 1,
3607 1);
3608 set_2dx8_reg(info, 0x2d48, 0, 2 * info->fsb_frequency, 0x78, 2738, 4610,
3609 1, 1);
3610 set_2dx8_reg(info, 0x2d50, 0, info->fsb_frequency, 0x78, 2819, 5932, 1,
3611 1);
3612 set_2dx8_reg(info, 0x6d4, 1, info->fsb_frequency,
3613 frequency_11(info) / 2, 4000, 0, 0, 0);
3614 set_2dx8_reg(info, 0x6d8, 2, info->fsb_frequency,
3615 frequency_11(info) / 2, 4000, 4000, 0, 0);
3616
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003617 if (s3resume) {
3618 printk (BIOS_SPEW, "[6dc] <= %x\n", info->cached_training->reg_6dc);
3619 write_mchbar32(0x6dc, info->cached_training->reg_6dc);
3620 } else
3621 set_6d_reg(info, 0x6dc, 2 * info->fsb_frequency, frequency_11(info), 0,
3622 info->delay46_ps[0], 0,
3623 info->delay54_ps[0]);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003624 set_2dx8_reg(info, 0x6e0, 1, 2 * info->fsb_frequency,
3625 frequency_11(info), 2500, 0, 0, 0);
3626 set_2dx8_reg(info, 0x6e4, 1, 2 * info->fsb_frequency,
3627 frequency_11(info) / 2, 3500, 0, 0, 0);
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003628 if (s3resume) {
3629 printk (BIOS_SPEW, "[6e8] <= %x\n", info->cached_training->reg_6e8);
3630 write_mchbar32(0x6e8, info->cached_training->reg_6e8);
3631 } else
3632 set_6d_reg(info, 0x6e8, 2 * info->fsb_frequency, frequency_11(info), 0,
3633 info->delay46_ps[1], 0,
3634 info->delay54_ps[1]);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003635 set_2d5x_reg(info, 0x2d58, 0x78, 0x78, 864, 1195, 762, 786, 0);
3636 set_2d5x_reg(info, 0x2d60, 0x195, info->fsb_frequency, 1352, 725, 455,
3637 470, 0);
3638 set_2d5x_reg(info, 0x2d68, 0x195, 0x3c, 2707, 5632, 3277, 2207, 0);
3639 set_2d5x_reg(info, 0x2d70, 0x195, frequency_11(info) / 2, 1276, 758,
3640 454, 459, 0);
3641 set_2d5x_reg(info, 0x2d78, 0x195, 0x78, 1021, 799, 510, 513, 0);
3642 set_2d5x_reg(info, 0x2d80, info->fsb_frequency, 0xe1, 0, 2862, 2579,
3643 2588, 0);
3644 set_2d5x_reg(info, 0x2d88, info->fsb_frequency, 0xe1, 0, 2690, 2405,
3645 2405, 0);
3646 set_2d5x_reg(info, 0x2da0, 0x78, 0xe1, 0, 2560, 2264, 2251, 0);
3647 set_2d5x_reg(info, 0x2da8, 0x195, frequency_11(info), 1060, 775, 484,
3648 480, 0);
3649 set_2d5x_reg(info, 0x2db0, 0x195, 0x78, 4183, 6023, 2217, 2048, 0);
3650 write_mchbar32(0x2dbc, ((frequency_11(info) / 2) - 1) | 0xe00000);
3651 write_mchbar32(0x2db8, ((info->fsb_frequency - 1) << 16) | 0x77);
3652}
3653
3654static u16 get_max_timing(struct raminfo *info, int channel)
3655{
3656 int slot, rank, lane;
3657 u16 ret = 0;
3658
3659 if ((read_mchbar8(0x2ca8) >> 2) < 1)
3660 return 384;
3661
3662 if (info->revision < 8)
3663 return 256;
3664
3665 for (slot = 0; slot < NUM_SLOTS; slot++)
3666 for (rank = 0; rank < NUM_RANKS; rank++)
3667 if (info->populated_ranks[channel][slot][rank])
3668 for (lane = 0; lane < 8 + info->use_ecc; lane++)
3669 ret = max(ret, read_500(info, channel,
3670 get_timing_register_addr
3671 (lane, 0, slot,
3672 rank), 9));
3673 return ret;
3674}
3675
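/* Compute the delay values for MCHBAR registers 0x274/0x265 and the
 * delay46/delay54 picosecond values from the cycle times and the largest
 * programmed lane timing. The results are stored in the training data so
 * that restore_274265() can replay them verbatim on S3 resume. */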
3676static void set_274265(struct raminfo *info)
3677{
3678 int delay_a_ps, delay_b_ps, delay_c_ps, delay_d_ps;
3679 int delay_e_ps, delay_e_cycles, delay_f_cycles;
3680 int delay_e_over_cycle_ps;
3681 int cycletime_ps;
3682 int channel;
3683
3684 delay_a_ps = 4 * halfcycle_ps(info) + 6 * fsbcycle_ps(info);
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003685 info->training.reg2ca9_bit0 = 0;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003686 for (channel = 0; channel < NUM_CHANNELS; channel++) {
3687 cycletime_ps =
3688 900000 / lcm(2 * info->fsb_frequency, frequency_11(info));
3689 delay_d_ps =
3690 (halfcycle_ps(info) * get_max_timing(info, channel) >> 6)
3691 - info->some_delay_3_ps_rounded + 200;
3692 if (!
3693 ((info->silicon_revision == 0
3694 || info->silicon_revision == 1)
3695 && (info->revision >= 8)))
3696 delay_d_ps += halfcycle_ps(info) * 2;
3697 delay_d_ps +=
3698 halfcycle_ps(info) * (!info->revision_flag_1 +
3699 info->some_delay_2_halfcycles_ceil +
3700 2 * info->some_delay_1_cycle_floor +
3701 info->clock_speed_index +
3702 2 * info->cas_latency - 7 + 11);
3703 delay_d_ps += info->revision >= 8 ? 2758 : 4428;
3704
3705 write_mchbar32(0x140,
3706 (read_mchbar32(0x140) & 0xfaffffff) | 0x2000000);
3707 write_mchbar32(0x138,
3708 (read_mchbar32(0x138) & 0xfaffffff) | 0x2000000);
3709 if ((read_mchbar8(0x144) & 0x1f) > 0x13)
3710 delay_d_ps += 650;
3711 delay_c_ps = delay_d_ps + 1800;
3712 if (delay_c_ps <= delay_a_ps)
3713 delay_e_ps = 0;
3714 else
3715 delay_e_ps =
3716 cycletime_ps * div_roundup(delay_c_ps - delay_a_ps,
3717 cycletime_ps);
3718
3719 delay_e_over_cycle_ps = delay_e_ps % (2 * halfcycle_ps(info));
3720 delay_e_cycles = delay_e_ps / (2 * halfcycle_ps(info));
3721 delay_f_cycles =
3722 div_roundup(2500 - delay_e_over_cycle_ps,
3723 2 * halfcycle_ps(info));
3724 if (delay_f_cycles > delay_e_cycles) {
3725 info->delay46_ps[channel] = delay_e_ps;
3726 delay_e_cycles = 0;
3727 } else {
3728 info->delay46_ps[channel] =
3729 delay_e_over_cycle_ps +
3730 2 * halfcycle_ps(info) * delay_f_cycles;
3731 delay_e_cycles -= delay_f_cycles;
3732 }
3733
3734 if (info->delay46_ps[channel] < 2500) {
3735 info->delay46_ps[channel] = 2500;
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003736 info->training.reg2ca9_bit0 = 1;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003737 }
3738 delay_b_ps = halfcycle_ps(info) + delay_c_ps;
3739 if (delay_b_ps <= delay_a_ps)
3740 delay_b_ps = 0;
3741 else
3742 delay_b_ps -= delay_a_ps;
3743 info->delay54_ps[channel] =
3744 cycletime_ps * div_roundup(delay_b_ps,
3745 cycletime_ps) -
3746 2 * halfcycle_ps(info) * delay_e_cycles;
3747 if (info->delay54_ps[channel] < 2500)
3748 info->delay54_ps[channel] = 2500;
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003749 info->training.reg274265[channel][0] = delay_e_cycles;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003750 if (delay_d_ps + 7 * halfcycle_ps(info) <=
3751 24 * halfcycle_ps(info))
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003752 info->training.reg274265[channel][1] = 0;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003753 else
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003754 info->training.reg274265[channel][1] =
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003755 div_roundup(delay_d_ps + 7 * halfcycle_ps(info),
3756 4 * halfcycle_ps(info)) - 6;
3757 write_mchbar32((channel << 10) + 0x274,
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003758 info->training.reg274265[channel][1]
3759 | (info->training.reg274265[channel][0] << 16));
3760 info->training.reg274265[channel][2] =
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003761 div_roundup(delay_c_ps + 3 * fsbcycle_ps(info),
3762 4 * halfcycle_ps(info)) + 1;
3763 write_mchbar16((channel << 10) + 0x265,
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003764 info->training.reg274265[channel][2] << 8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003765 }
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003766 if (info->training.reg2ca9_bit0)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003767 write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) | 1);
3768 else
3769 write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) & ~1);
3770}
3771
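/*
 * S3 resume path: replay the 0x274/0x265 values and the 0x2ca9 bit that
 * set_274265() computed and cached on the previous cold boot.
 */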
3772static void restore_274265(struct raminfo *info)
3773{
3774 int channel;
3775
3776 for (channel = 0; channel < NUM_CHANNELS; channel++) {
3777 write_mchbar32((channel << 10) + 0x274,
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003778 (info->cached_training->reg274265[channel][0] << 16)
3779 | info->cached_training->reg274265[channel][1]);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003780 write_mchbar16((channel << 10) + 0x265,
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003781 info->cached_training->reg274265[channel][2] << 8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003782 }
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003783 if (info->cached_training->reg2ca9_bit0)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003784 write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) | 1);
3785 else
3786 write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) & ~1);
3787}
3788
3789#if REAL
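/* Minimal DMI link setup; register meanings are unknown, the values were
   taken from vendor BIOS traces. */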
3790static void dmi_setup(void)
3791{
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08003792 gav(read8(DEFAULT_DMIBAR + 0x254));
3793 write8(DEFAULT_DMIBAR + 0x254, 0x1);
3794 write16(DEFAULT_DMIBAR + 0x1b8, 0x18f2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003795 read_mchbar16(0x48);
3796 write_mchbar16(0x48, 0x2);
3797
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08003798 write32(DEFAULT_DMIBAR + 0xd68, read32(DEFAULT_DMIBAR + 0xd68) | 0x08000000);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003799
3800 outl((gav(inl(DEFAULT_GPIOBASE | 0x38)) & ~0x140000) | 0x400000,
3801 DEFAULT_GPIOBASE | 0x38);
3802 gav(inb(DEFAULT_GPIOBASE | 0xe)); // = 0xfdcaff6e
3803}
3804#endif
3805
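/*
 * Early chipset init, called before raminit(): recover from an interrupted
 * memory init by issuing a full reset, set up DMI, and program GGC with the
 * graphics stolen memory size taken from the gfx_uma_size CMOS option.
 */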
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003806void chipset_init(const int s3resume)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003807{
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003808 u8 x2ca8;
Vladimir Serbinenko55391c42014-08-03 14:51:00 +02003809 u16 ggc;
3810 u8 gfxsize;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003811
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003812 x2ca8 = read_mchbar8(0x2ca8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003813 if ((x2ca8 & 1) || (x2ca8 == 8 && !s3resume)) {
3814 printk(BIOS_DEBUG, "soft reset detected, rebooting properly\n");
3815 write_mchbar8(0x2ca8, 0);
Vladimir Serbinenkoe1eef692014-02-19 22:08:51 +01003816 outb(0x6, 0xcf9);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003817#if REAL
Patrick Georgi546953c2014-11-29 10:38:17 +01003818 halt();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003819#else
3820 printf("CP5\n");
3821 exit(0);
3822#endif
3823 }
3824#if !REAL
3825 if (!s3resume) {
3826 pre_raminit_3(x2ca8);
3827 }
Vladimir Serbinenkof62669c2014-01-09 10:59:38 +01003828 pre_raminit_4a(x2ca8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003829#endif
3830
3831 dmi_setup();
3832
3833 write_mchbar16(0x1170, 0xa880);
3834 write_mchbar8(0x11c1, 0x1);
3835 write_mchbar16(0x1170, 0xb880);
3836 read_mchbar8(0x1210);
3837 write_mchbar8(0x1210, 0x84);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003838
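	/* Graphics stolen memory size: the gfx_uma_size CMOS value is biased
	   by 5 to form the GMS field of GGC (0 selects 32 MiB). */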
Vladimir Serbinenko55391c42014-08-03 14:51:00 +02003839 if (get_option(&gfxsize, "gfx_uma_size") != CB_SUCCESS) {
3840 /* 0 for 32MB */
3841 gfxsize = 0;
3842 }
3843
3844 ggc = 0xb00 | ((gfxsize + 5) << 4);
3845
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003846 pci_write_config16(NORTHBRIDGE, D0F0_GGC, ggc | 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003847
3848 u16 deven;
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003849 deven = pci_read_config16(NORTHBRIDGE, D0F0_DEVEN); // = 0x3
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003850
3851 if (deven & 8) {
3852 write_mchbar8(0x2c30, 0x20);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003853 pci_read_config8(NORTHBRIDGE, 0x8); // = 0x18
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003854 write_mchbar16(0x2c30, read_mchbar16(0x2c30) | 0x200);
3855 write_mchbar16(0x2c32, 0x434);
3856 read_mchbar32(0x2c44);
3857 write_mchbar32(0x2c44, 0x1053687);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003858 pci_read_config8(GMA, 0x62); // = 0x2
3859 pci_write_config8(GMA, 0x62, 0x2);
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08003860 read8(DEFAULT_RCBA + 0x2318);
3861 write8(DEFAULT_RCBA + 0x2318, 0x47);
3862 read8(DEFAULT_RCBA + 0x2320);
3863 write8(DEFAULT_RCBA + 0x2320, 0xfc);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003864 }
3865
3866 read_mchbar32(0x30);
3867 write_mchbar32(0x30, 0x40);
3868
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003869 pci_write_config16(NORTHBRIDGE, D0F0_GGC, ggc);
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08003870 gav(read32(DEFAULT_RCBA + 0x3428));
3871 write32(DEFAULT_RCBA + 0x3428, 0x1d);
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003872}
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003873
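/*
 * Main memory init entry point: read SPDs, compute timings, bring up the
 * memory controller, run (or, on S3 resume, restore) training, program the
 * memory map and HECI/ME UMA, and finally recover or create cbmem.
 */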
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003874void raminit(const int s3resume, const u8 *spd_addrmap)
3875{
3876 unsigned channel, slot, lane, rank;
3877 int i;
3878 struct raminfo info;
3879 u8 x2ca8;
3880 u16 deven;
Vladimir Serbinenkob16f0922014-06-07 16:27:27 +02003881 int cbmem_wasnot_inited;
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003882
3883 x2ca8 = read_mchbar8(0x2ca8);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003884 deven = pci_read_config16(NORTHBRIDGE, D0F0_DEVEN);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003885
3886 memset(&info, 0x5a, sizeof(info));
3887
3888 info.last_500_command[0] = 0;
3889 info.last_500_command[1] = 0;
3890
3891 info.fsb_frequency = 135 * 2;
3892 info.board_lane_delay[0] = 0x14;
3893 info.board_lane_delay[1] = 0x07;
3894 info.board_lane_delay[2] = 0x07;
3895 info.board_lane_delay[3] = 0x08;
3896 info.board_lane_delay[4] = 0x56;
3897 info.board_lane_delay[5] = 0x04;
3898 info.board_lane_delay[6] = 0x04;
3899 info.board_lane_delay[7] = 0x05;
3900 info.board_lane_delay[8] = 0x10;
3901
3902 info.training.reg_178 = 0;
3903 info.training.reg_10b = 0;
3904
3905 info.heci_bar = 0;
3906 info.memory_reserved_for_heci_mb = 0;
3907
3908 /* before SPD */
3909 timestamp_add_now(101);
3910
3911 if (!s3resume || REAL) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003912 pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2); // = 0x80
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003913
3914 collect_system_info(&info);
3915
3916#if REAL
3917 /* Enable SMBUS. */
3918 enable_smbus();
3919#endif
3920
3921 memset(&info.populated_ranks, 0, sizeof(info.populated_ranks));
3922
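		/* Read the interesting SPD bytes over SMBus for every
		   populated slot. Only unbuffered DDR3 with at most two ranks
		   and x8/x16 devices is supported; ECC is used only if every
		   module advertises ECC data width. */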
3923 info.use_ecc = 1;
3924 for (channel = 0; channel < NUM_CHANNELS; channel++)
Vladimir Serbinenko2ab8ec72014-02-20 14:34:56 +01003925 for (slot = 0; slot < NUM_SLOTS; slot++) {
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003926 int v;
3927 int try;
3928 int addr;
3929 const u8 useful_addresses[] = {
3930 DEVICE_TYPE,
3931 MODULE_TYPE,
3932 DENSITY,
3933 RANKS_AND_DQ,
3934 MEMORY_BUS_WIDTH,
3935 TIMEBASE_DIVIDEND,
3936 TIMEBASE_DIVISOR,
3937 CYCLETIME,
3938 CAS_LATENCIES_LSB,
3939 CAS_LATENCIES_MSB,
3940 CAS_LATENCY_TIME,
3941 0x11, 0x12, 0x13, 0x14, 0x15,
3942 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
3943 0x1c, 0x1d,
3944 THERMAL_AND_REFRESH,
3945 0x20,
3946 REFERENCE_RAW_CARD_USED,
3947 RANK1_ADDRESS_MAPPING,
3948 0x75, 0x76, 0x77, 0x78,
3949 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e,
3950 0x7f, 0x80, 0x81, 0x82, 0x83, 0x84,
3951 0x85, 0x86, 0x87, 0x88,
3952 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e,
3953 0x8f, 0x90, 0x91, 0x92, 0x93, 0x94,
3954 0x95
3955 };
Vladimir Serbinenko902626c2014-02-16 17:22:26 +01003956 if (!spd_addrmap[2 * channel + slot])
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003957 continue;
3958 for (try = 0; try < 5; try++) {
Vladimir Serbinenko902626c2014-02-16 17:22:26 +01003959 v = smbus_read_byte(spd_addrmap[2 * channel + slot],
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003960 DEVICE_TYPE);
3961 if (v >= 0)
3962 break;
3963 }
3964 if (v < 0)
3965 continue;
			for (addr = 0;
			     addr < sizeof(useful_addresses) / sizeof(useful_addresses[0]);
			     addr++)
				gav(info.spd[channel][0][useful_addresses[addr]] =
				    smbus_read_byte(spd_addrmap[2 * channel + slot],
						    useful_addresses[addr]));
3976 if (info.spd[channel][0][DEVICE_TYPE] != 11)
3977 die("Only DDR3 is supported");
3978
3979 v = info.spd[channel][0][RANKS_AND_DQ];
3980 info.populated_ranks[channel][0][0] = 1;
3981 info.populated_ranks[channel][0][1] =
3982 ((v >> 3) & 7);
3983 if (((v >> 3) & 7) > 1)
3984 die("At most 2 ranks are supported");
3985 if ((v & 7) == 0 || (v & 7) > 2)
3986 die("Only x8 and x16 modules are supported");
			if ((info.spd[channel][slot][MODULE_TYPE] & 0xF) != 2
			    && (info.spd[channel][slot][MODULE_TYPE] & 0xF) != 3)
				die("Registered memory is not supported");
3993 info.is_x16_module[channel][0] = (v & 7) - 1;
3994 info.density[channel][slot] =
3995 info.spd[channel][slot][DENSITY] & 0xF;
			if (!(info.spd[channel][slot][MEMORY_BUS_WIDTH] & 0x18))
				info.use_ecc = 0;
4001 }
4002
4003 gav(0x55);
4004
4005 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4006 int v = 0;
4007 for (slot = 0; slot < NUM_SLOTS; slot++)
4008 for (rank = 0; rank < NUM_RANKS; rank++)
				v |= info.populated_ranks[channel][slot][rank]
				    << (2 * slot + rank);
4012 info.populated_ranks_mask[channel] = v;
4013 }
4014
4015 gav(0x55);
4016
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004017 gav(pci_read_config32(NORTHBRIDGE, D0F0_CAPID0 + 4));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004018 }
4019
4020 /* after SPD */
4021 timestamp_add_now(102);
4022
4023 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) & 0xfc);
4024#if !REAL
Alexandru Gagniuc86091f92015-09-30 20:23:09 -07004025 rdmsr (MTRR_PHYS_MASK (3));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004026#endif
4027
4028 collect_system_info(&info);
4029 calculate_timings(&info);
4030
4031#if !REAL
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004032 pci_write_config8(NORTHBRIDGE, 0xdf, 0x82);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004033#endif
4034
4035 if (!s3resume) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004036 u8 reg8 = pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004037 if (x2ca8 == 0 && (reg8 & 0x80)) {
4038 /* Don't enable S4-assertion stretch. Makes trouble on roda/rk9.
4039 reg8 = pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa4);
4040 pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa4, reg8 | 0x08);
4041 */
4042
4043 /* Clear bit7. */
4044
4045 pci_write_config8(SOUTHBRIDGE, GEN_PMCON_2,
4046 (reg8 & ~(1 << 7)));
4047
4048 printk(BIOS_INFO,
4049 "Interrupted RAM init, reset required.\n");
4050 outb(0x6, 0xcf9);
4051#if REAL
Patrick Georgi546953c2014-11-29 10:38:17 +01004052 halt();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004053#endif
4054 }
4055 }
4056#if !REAL
4057 gav(read_mchbar8(0x2ca8)); ///!!!!
4058#endif
4059
4060 if (!s3resume && x2ca8 == 0)
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004061 pci_write_config8(SOUTHBRIDGE, GEN_PMCON_2,
4062 pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2) | 0x80);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004063
4064 compute_derived_timings(&info);
4065
4066 if (x2ca8 == 0) {
4067 gav(read_mchbar8(0x164));
4068 write_mchbar8(0x164, 0x26);
4069 write_mchbar16(0x2c20, 0x10);
4070 }
4071
4072 write_mchbar32(0x18b4, read_mchbar32(0x18b4) | 0x210000); /* OK */
4073 write_mchbar32(0x1890, read_mchbar32(0x1890) | 0x2000000); /* OK */
4074 write_mchbar32(0x18b4, read_mchbar32(0x18b4) | 0x8000);
4075
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004076 gav(pci_read_config32(PCI_DEV(0xff, 2, 1), 0x50)); // !!!!
4077 pci_write_config8(PCI_DEV(0xff, 2, 1), 0x54, 0x12);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004078
4079 gav(read_mchbar16(0x2c10)); // !!!!
4080 write_mchbar16(0x2c10, 0x412);
4081 gav(read_mchbar16(0x2c10)); // !!!!
4082 write_mchbar16(0x2c12, read_mchbar16(0x2c12) | 0x100); /* OK */
4083
4084 gav(read_mchbar8(0x2ca8)); // !!!!
4085 write_mchbar32(0x1804,
4086 (read_mchbar32(0x1804) & 0xfffffffc) | 0x8400080);
4087
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004088 pci_read_config32(PCI_DEV(0xff, 2, 1), 0x6c); // !!!!
4089 pci_write_config32(PCI_DEV(0xff, 2, 1), 0x6c, 0x40a0a0);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004090 gav(read_mchbar32(0x1c04)); // !!!!
4091 gav(read_mchbar32(0x1804)); // !!!!
4092
4093 if (x2ca8 == 0) {
4094 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1);
4095 }
4096
4097 write_mchbar32(0x18d8, 0x120000);
4098 write_mchbar32(0x18dc, 0x30a484a);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004099 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xe0, 0x0);
4100 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xf4, 0x9444a);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004101 write_mchbar32(0x18d8, 0x40000);
4102 write_mchbar32(0x18dc, 0xb000000);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004103 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xe0, 0x60000);
4104 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xf4, 0x0);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004105 write_mchbar32(0x18d8, 0x180000);
4106 write_mchbar32(0x18dc, 0xc0000142);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004107 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xe0, 0x20000);
4108 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xf4, 0x142);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004109 write_mchbar32(0x18d8, 0x1e0000);
4110
4111 gav(read_mchbar32(0x18dc)); // !!!!
4112 write_mchbar32(0x18dc, 0x3);
4113 gav(read_mchbar32(0x18dc)); // !!!!
4114
4115 if (x2ca8 == 0) {
4116 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1); // guess
4117 }
4118
4119 write_mchbar32(0x188c, 0x20bc09);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004120 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xd0, 0x40b0c09);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004121 write_mchbar32(0x1a10, 0x4200010e);
4122 write_mchbar32(0x18b8, read_mchbar32(0x18b8) | 0x200);
4123 gav(read_mchbar32(0x1918)); // !!!!
4124 write_mchbar32(0x1918, 0x332);
4125
4126 gav(read_mchbar32(0x18b8)); // !!!!
4127 write_mchbar32(0x18b8, 0xe00);
4128 gav(read_mchbar32(0x182c)); // !!!!
4129 write_mchbar32(0x182c, 0x10202);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004130 gav(pci_read_config32(PCI_DEV(0xff, 2, 1), 0x94)); // !!!!
4131 pci_write_config32(PCI_DEV(0xff, 2, 1), 0x94, 0x10202);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004132 write_mchbar32(0x1a1c, read_mchbar32(0x1a1c) & 0x8fffffff);
4133 write_mchbar32(0x1a70, read_mchbar32(0x1a70) | 0x100000);
4134
4135 write_mchbar32(0x18b4, read_mchbar32(0x18b4) & 0xffff7fff);
4136 gav(read_mchbar32(0x1a68)); // !!!!
4137 write_mchbar32(0x1a68, 0x343800);
4138 gav(read_mchbar32(0x1e68)); // !!!!
4139 gav(read_mchbar32(0x1a68)); // !!!!
4140
4141 if (x2ca8 == 0) {
4142 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1); // guess
4143 }
4144
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004145 pci_read_config32(PCI_DEV(0xff, 2, 0), 0x048); // !!!!
4146 pci_write_config32(PCI_DEV(0xff, 2, 0), 0x048, 0x140000);
4147 pci_read_config32(PCI_DEV(0xff, 2, 0), 0x058); // !!!!
4148 pci_write_config32(PCI_DEV(0xff, 2, 0), 0x058, 0x64555);
4149 pci_read_config32(PCI_DEV(0xff, 2, 0), 0x058); // !!!!
4150 pci_read_config32(PCI_DEV (0xff, 0, 0), 0xd0); // !!!!
4151 pci_write_config32(PCI_DEV (0xff, 0, 0), 0xd0, 0x180);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004152 gav(read_mchbar32(0x1af0)); // !!!!
4153 gav(read_mchbar32(0x1af0)); // !!!!
4154 write_mchbar32(0x1af0, 0x1f020003);
4155 gav(read_mchbar32(0x1af0)); // !!!!
4156
Edward O'Callaghan42b716f2014-06-26 21:38:52 +10004157 if (x2ca8 == 0) {
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004158 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1); // guess
4159 }
4160
4161 gav(read_mchbar32(0x1890)); // !!!!
4162 write_mchbar32(0x1890, 0x80102);
4163 gav(read_mchbar32(0x18b4)); // !!!!
4164 write_mchbar32(0x18b4, 0x216000);
4165 write_mchbar32(0x18a4, 0x22222222);
4166 write_mchbar32(0x18a8, 0x22222222);
4167 write_mchbar32(0x18ac, 0x22222);
4168
4169 udelay(1000);
4170
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01004171 info.cached_training = get_cached_training();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004172
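	/* 0x2ca8 appears to track raminit progress across the reset the
	   controller requests mid-init; only on a fresh pass (x2ca8 == 0) do
	   we (re)compute the 274/265 delays and the 2dxx register series,
	   replaying the cached values on S3 resume. */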
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01004173 if (x2ca8 == 0) {
4174 int j;
4175 if (s3resume && info.cached_training) {
4176 restore_274265(&info);
4177 printk(BIOS_DEBUG, "reg2ca9_bit0 = %x\n",
4178 info.cached_training->reg2ca9_bit0);
4179 for (i = 0; i < 2; i++)
4180 for (j = 0; j < 3; j++)
4181 printk(BIOS_DEBUG, "reg274265[%d][%d] = %x\n",
4182 i, j, info.cached_training->reg274265[i][j]);
4183 } else {
4184 set_274265(&info);
4185 printk(BIOS_DEBUG, "reg2ca9_bit0 = %x\n",
4186 info.training.reg2ca9_bit0);
4187 for (i = 0; i < 2; i++)
4188 for (j = 0; j < 3; j++)
4189 printk(BIOS_DEBUG, "reg274265[%d][%d] = %x\n",
4190 i, j, info.training.reg274265[i][j]);
4191 }
4192
4193 set_2dxx_series(&info, s3resume);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004194
4195 if (!(deven & 8)) {
4196 read_mchbar32(0x2cb0);
4197 write_mchbar32(0x2cb0, 0x40);
4198 }
4199
4200 udelay(1000);
4201
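		/* DEVEN bit 3 seems to correspond to the integrated graphics
		   device; when it is enabled, program the (undocumented)
		   display-related register tables below. */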
4202 if (deven & 8) {
4203 write_mchbar32(0xff8, 0x1800 | read_mchbar32(0xff8));
4204 read_mchbar32(0x2cb0);
4205 write_mchbar32(0x2cb0, 0x00);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004206 pci_read_config8(PCI_DEV (0, 0x2, 0x0), 0x4c);
4207 pci_read_config8(PCI_DEV (0, 0x2, 0x0), 0x4c);
4208 pci_read_config8(PCI_DEV (0, 0x2, 0x0), 0x4e);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004209
4210 read_mchbar8(0x1150);
4211 read_mchbar8(0x1151);
4212 read_mchbar8(0x1022);
4213 read_mchbar8(0x16d0);
4214 write_mchbar32(0x1300, 0x60606060);
4215 write_mchbar32(0x1304, 0x60606060);
4216 write_mchbar32(0x1308, 0x78797a7b);
4217 write_mchbar32(0x130c, 0x7c7d7e7f);
4218 write_mchbar32(0x1310, 0x60606060);
4219 write_mchbar32(0x1314, 0x60606060);
4220 write_mchbar32(0x1318, 0x60606060);
4221 write_mchbar32(0x131c, 0x60606060);
4222 write_mchbar32(0x1320, 0x50515253);
4223 write_mchbar32(0x1324, 0x54555657);
4224 write_mchbar32(0x1328, 0x58595a5b);
4225 write_mchbar32(0x132c, 0x5c5d5e5f);
4226 write_mchbar32(0x1330, 0x40414243);
4227 write_mchbar32(0x1334, 0x44454647);
4228 write_mchbar32(0x1338, 0x48494a4b);
4229 write_mchbar32(0x133c, 0x4c4d4e4f);
4230 write_mchbar32(0x1340, 0x30313233);
4231 write_mchbar32(0x1344, 0x34353637);
4232 write_mchbar32(0x1348, 0x38393a3b);
4233 write_mchbar32(0x134c, 0x3c3d3e3f);
4234 write_mchbar32(0x1350, 0x20212223);
4235 write_mchbar32(0x1354, 0x24252627);
4236 write_mchbar32(0x1358, 0x28292a2b);
4237 write_mchbar32(0x135c, 0x2c2d2e2f);
4238 write_mchbar32(0x1360, 0x10111213);
4239 write_mchbar32(0x1364, 0x14151617);
4240 write_mchbar32(0x1368, 0x18191a1b);
4241 write_mchbar32(0x136c, 0x1c1d1e1f);
4242 write_mchbar32(0x1370, 0x10203);
4243 write_mchbar32(0x1374, 0x4050607);
4244 write_mchbar32(0x1378, 0x8090a0b);
4245 write_mchbar32(0x137c, 0xc0d0e0f);
4246 write_mchbar8(0x11cc, 0x4e);
4247 write_mchbar32(0x1110, 0x73970404);
4248 write_mchbar32(0x1114, 0x72960404);
4249 write_mchbar32(0x1118, 0x6f950404);
4250 write_mchbar32(0x111c, 0x6d940404);
4251 write_mchbar32(0x1120, 0x6a930404);
4252 write_mchbar32(0x1124, 0x68a41404);
4253 write_mchbar32(0x1128, 0x66a21404);
4254 write_mchbar32(0x112c, 0x63a01404);
4255 write_mchbar32(0x1130, 0x609e1404);
4256 write_mchbar32(0x1134, 0x5f9c1404);
4257 write_mchbar32(0x1138, 0x5c961404);
4258 write_mchbar32(0x113c, 0x58a02404);
4259 write_mchbar32(0x1140, 0x54942404);
4260 write_mchbar32(0x1190, 0x900080a);
4261 write_mchbar16(0x11c0, 0xc40b);
4262 write_mchbar16(0x11c2, 0x303);
4263 write_mchbar16(0x11c4, 0x301);
4264 read_mchbar32(0x1190);
4265 write_mchbar32(0x1190, 0x8900080a);
4266 write_mchbar32(0x11b8, 0x70c3000);
4267 write_mchbar8(0x11ec, 0xa);
4268 write_mchbar16(0x1100, 0x800);
4269 read_mchbar32(0x11bc);
4270 write_mchbar32(0x11bc, 0x1e84800);
4271 write_mchbar16(0x11ca, 0xfa);
4272 write_mchbar32(0x11e4, 0x4e20);
4273 write_mchbar8(0x11bc, 0xf);
4274 write_mchbar16(0x11da, 0x19);
4275 write_mchbar16(0x11ba, 0x470c);
4276 write_mchbar32(0x1680, 0xe6ffe4ff);
4277 write_mchbar32(0x1684, 0xdeffdaff);
4278 write_mchbar32(0x1688, 0xd4ffd0ff);
4279 write_mchbar32(0x168c, 0xccffc6ff);
4280 write_mchbar32(0x1690, 0xc0ffbeff);
4281 write_mchbar32(0x1694, 0xb8ffb0ff);
4282 write_mchbar32(0x1698, 0xa8ff0000);
4283 write_mchbar32(0x169c, 0xc00);
4284 write_mchbar32(0x1290, 0x5000000);
4285 }
4286
4287 write_mchbar32(0x124c, 0x15040d00);
4288 write_mchbar32(0x1250, 0x7f0000);
4289 write_mchbar32(0x1254, 0x1e220004);
4290 write_mchbar32(0x1258, 0x4000004);
4291 write_mchbar32(0x1278, 0x0);
4292 write_mchbar32(0x125c, 0x0);
4293 write_mchbar32(0x1260, 0x0);
4294 write_mchbar32(0x1264, 0x0);
4295 write_mchbar32(0x1268, 0x0);
4296 write_mchbar32(0x126c, 0x0);
4297 write_mchbar32(0x1270, 0x0);
4298 write_mchbar32(0x1274, 0x0);
4299 }
4300
4301 if ((deven & 8) && x2ca8 == 0) {
4302 write_mchbar16(0x1214, 0x320);
4303 write_mchbar32(0x1600, 0x40000000);
4304 read_mchbar32(0x11f4);
4305 write_mchbar32(0x11f4, 0x10000000);
4306 read_mchbar16(0x1230);
4307 write_mchbar16(0x1230, 0x8000);
4308 write_mchbar32(0x1400, 0x13040020);
4309 write_mchbar32(0x1404, 0xe090120);
4310 write_mchbar32(0x1408, 0x5120220);
4311 write_mchbar32(0x140c, 0x5120330);
4312 write_mchbar32(0x1410, 0xe090220);
4313 write_mchbar32(0x1414, 0x1010001);
4314 write_mchbar32(0x1418, 0x1110000);
4315 write_mchbar32(0x141c, 0x9020020);
4316 write_mchbar32(0x1420, 0xd090220);
4317 write_mchbar32(0x1424, 0x2090220);
4318 write_mchbar32(0x1428, 0x2090330);
4319 write_mchbar32(0x142c, 0xd090220);
4320 write_mchbar32(0x1430, 0x1010001);
4321 write_mchbar32(0x1434, 0x1110000);
4322 write_mchbar32(0x1438, 0x11040020);
4323 write_mchbar32(0x143c, 0x4030220);
4324 write_mchbar32(0x1440, 0x1060220);
4325 write_mchbar32(0x1444, 0x1060330);
4326 write_mchbar32(0x1448, 0x4030220);
4327 write_mchbar32(0x144c, 0x1010001);
4328 write_mchbar32(0x1450, 0x1110000);
4329 write_mchbar32(0x1454, 0x4010020);
4330 write_mchbar32(0x1458, 0xb090220);
4331 write_mchbar32(0x145c, 0x1090220);
4332 write_mchbar32(0x1460, 0x1090330);
4333 write_mchbar32(0x1464, 0xb090220);
4334 write_mchbar32(0x1468, 0x1010001);
4335 write_mchbar32(0x146c, 0x1110000);
4336 write_mchbar32(0x1470, 0xf040020);
4337 write_mchbar32(0x1474, 0xa090220);
4338 write_mchbar32(0x1478, 0x1120220);
4339 write_mchbar32(0x147c, 0x1120330);
4340 write_mchbar32(0x1480, 0xa090220);
4341 write_mchbar32(0x1484, 0x1010001);
4342 write_mchbar32(0x1488, 0x1110000);
4343 write_mchbar32(0x148c, 0x7020020);
4344 write_mchbar32(0x1490, 0x1010220);
4345 write_mchbar32(0x1494, 0x10210);
4346 write_mchbar32(0x1498, 0x10320);
4347 write_mchbar32(0x149c, 0x1010220);
4348 write_mchbar32(0x14a0, 0x1010001);
4349 write_mchbar32(0x14a4, 0x1110000);
4350 write_mchbar32(0x14a8, 0xd040020);
4351 write_mchbar32(0x14ac, 0x8090220);
4352 write_mchbar32(0x14b0, 0x1111310);
4353 write_mchbar32(0x14b4, 0x1111420);
4354 write_mchbar32(0x14b8, 0x8090220);
4355 write_mchbar32(0x14bc, 0x1010001);
4356 write_mchbar32(0x14c0, 0x1110000);
4357 write_mchbar32(0x14c4, 0x3010020);
4358 write_mchbar32(0x14c8, 0x7090220);
4359 write_mchbar32(0x14cc, 0x1081310);
4360 write_mchbar32(0x14d0, 0x1081420);
4361 write_mchbar32(0x14d4, 0x7090220);
4362 write_mchbar32(0x14d8, 0x1010001);
4363 write_mchbar32(0x14dc, 0x1110000);
4364 write_mchbar32(0x14e0, 0xb040020);
4365 write_mchbar32(0x14e4, 0x2030220);
4366 write_mchbar32(0x14e8, 0x1051310);
4367 write_mchbar32(0x14ec, 0x1051420);
4368 write_mchbar32(0x14f0, 0x2030220);
4369 write_mchbar32(0x14f4, 0x1010001);
4370 write_mchbar32(0x14f8, 0x1110000);
4371 write_mchbar32(0x14fc, 0x5020020);
4372 write_mchbar32(0x1500, 0x5090220);
4373 write_mchbar32(0x1504, 0x2071310);
4374 write_mchbar32(0x1508, 0x2071420);
4375 write_mchbar32(0x150c, 0x5090220);
4376 write_mchbar32(0x1510, 0x1010001);
4377 write_mchbar32(0x1514, 0x1110000);
4378 write_mchbar32(0x1518, 0x7040120);
4379 write_mchbar32(0x151c, 0x2090220);
4380 write_mchbar32(0x1520, 0x70b1210);
4381 write_mchbar32(0x1524, 0x70b1310);
4382 write_mchbar32(0x1528, 0x2090220);
4383 write_mchbar32(0x152c, 0x1010001);
4384 write_mchbar32(0x1530, 0x1110000);
4385 write_mchbar32(0x1534, 0x1010110);
4386 write_mchbar32(0x1538, 0x1081310);
4387 write_mchbar32(0x153c, 0x5041200);
4388 write_mchbar32(0x1540, 0x5041310);
4389 write_mchbar32(0x1544, 0x1081310);
4390 write_mchbar32(0x1548, 0x1010001);
4391 write_mchbar32(0x154c, 0x1110000);
4392 write_mchbar32(0x1550, 0x1040120);
4393 write_mchbar32(0x1554, 0x4051210);
4394 write_mchbar32(0x1558, 0xd051200);
4395 write_mchbar32(0x155c, 0xd051200);
4396 write_mchbar32(0x1560, 0x4051210);
4397 write_mchbar32(0x1564, 0x1010001);
4398 write_mchbar32(0x1568, 0x1110000);
4399 write_mchbar16(0x1222, 0x220a);
4400 write_mchbar16(0x123c, 0x1fc0);
4401 write_mchbar16(0x1220, 0x1388);
4402 }
4403
4404 read_mchbar32(0x2c80); // !!!!
4405 write_mchbar32(0x2c80, 0x1053688);
4406 read_mchbar32(0x1c04); // !!!!
4407 write_mchbar32(0x1804, 0x406080);
4408
4409 read_mchbar8(0x2ca8);
4410
4411 if (x2ca8 == 0) {
4412 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) & ~3);
4413 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) + 4);
4414 write_mchbar32(0x1af0, read_mchbar32(0x1af0) | 0x10);
4415#if REAL
Patrick Georgi546953c2014-11-29 10:38:17 +01004416 halt();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004417#else
4418 printf("CP5\n");
4419 exit(0);
4420#endif
4421 }
4422
4423 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8));
4424 read_mchbar32(0x2c80); // !!!!
4425 write_mchbar32(0x2c80, 0x53688);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004426 pci_write_config32(PCI_DEV (0xff, 0, 0), 0x60, 0x20220);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004427 read_mchbar16(0x2c20); // !!!!
4428 read_mchbar16(0x2c10); // !!!!
4429 read_mchbar16(0x2c00); // !!!!
4430 write_mchbar16(0x2c00, 0x8c0);
4431 udelay(1000);
4432 write_1d0(0, 0x33d, 0, 0);
4433 write_500(&info, 0, 0, 0xb61, 0, 0);
4434 write_500(&info, 1, 0, 0xb61, 0, 0);
4435 write_mchbar32(0x1a30, 0x0);
4436 write_mchbar32(0x1a34, 0x0);
4437 write_mchbar16(0x614,
4438 0xb5b | (info.populated_ranks[1][0][0] *
4439 0x404) | (info.populated_ranks[0][0][0] *
4440 0xa0));
4441 write_mchbar16(0x616, 0x26a);
4442 write_mchbar32(0x134, 0x856000);
4443 write_mchbar32(0x160, 0x5ffffff);
4444 read_mchbar32(0x114); // !!!!
4445 write_mchbar32(0x114, 0xc2024440);
4446 read_mchbar32(0x118); // !!!!
4447 write_mchbar32(0x118, 0x4);
4448 for (channel = 0; channel < NUM_CHANNELS; channel++)
		write_mchbar32(0x260 + (channel << 10), 0x30809ff |
			       ((info.populated_ranks_mask[channel] & 3) << 20));
4453 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4454 write_mchbar16(0x31c + (channel << 10), 0x101);
4455 write_mchbar16(0x360 + (channel << 10), 0x909);
4456 write_mchbar16(0x3a4 + (channel << 10), 0x101);
4457 write_mchbar16(0x3e8 + (channel << 10), 0x101);
4458 write_mchbar32(0x320 + (channel << 10), 0x29002900);
4459 write_mchbar32(0x324 + (channel << 10), 0x0);
4460 write_mchbar32(0x368 + (channel << 10), 0x32003200);
4461 write_mchbar16(0x352 + (channel << 10), 0x505);
4462 write_mchbar16(0x354 + (channel << 10), 0x3c3c);
4463 write_mchbar16(0x356 + (channel << 10), 0x1040);
4464 write_mchbar16(0x39a + (channel << 10), 0x73e4);
4465 write_mchbar16(0x3de + (channel << 10), 0x77ed);
4466 write_mchbar16(0x422 + (channel << 10), 0x1040);
4467 }
4468
4469 write_1d0(0x4, 0x151, 4, 1);
4470 write_1d0(0, 0x142, 3, 1);
4471 rdmsr(0x1ac); // !!!!
4472 write_500(&info, 1, 1, 0x6b3, 4, 1);
4473 write_500(&info, 1, 1, 0x6cf, 4, 1);
4474
4475 rmw_1d0(0x21c, 0x38, 0, 6, 1);
4476
	write_1d0(((!info.populated_ranks[1][0][0]) << 1)
		  | ((!info.populated_ranks[0][0][0]) << 0),
		  0x1d1, 3, 1);
4481 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4482 write_mchbar16(0x38e + (channel << 10), 0x5f5f);
4483 write_mchbar16(0x3d2 + (channel << 10), 0x5f5f);
4484 }
4485
4486 set_334(0);
4487
4488 program_base_timings(&info);
4489
4490 write_mchbar8(0x5ff, read_mchbar8(0x5ff) | 0x80); /* OK */
4491
4492 write_1d0(0x2, 0x1d5, 2, 1);
4493 write_1d0(0x20, 0x166, 7, 1);
4494 write_1d0(0x0, 0xeb, 3, 1);
4495 write_1d0(0x0, 0xf3, 6, 1);
4496
4497 for (channel = 0; channel < NUM_CHANNELS; channel++)
4498 for (lane = 0; lane < 9; lane++) {
4499 u16 addr = 0x125 + get_lane_offset(0, 0, lane);
4500 u8 a;
4501 a = read_500(&info, channel, addr, 6); // = 0x20040080 //!!!!
4502 write_500(&info, channel, a, addr, 6, 1);
4503 }
4504
4505 udelay(1000);
4506
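	/* On S3 resume skip retraining: push the cached per-lane timings and
	   the 0x178/0x10b values straight back into the controller. If the
	   cache is gone, the resume cannot succeed, so force a reboot. */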
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004507 if (s3resume) {
4508 if (info.cached_training == NULL) {
4509 u32 reg32;
4510 printk(BIOS_ERR,
4511 "Couldn't find training data. Rebooting\n");
4512 reg32 = inl(DEFAULT_PMBASE + 0x04);
4513 outl(reg32 & ~(7 << 10), DEFAULT_PMBASE + 0x04);
4514 outb(0xe, 0xcf9);
4515
4516#if REAL
Patrick Georgi546953c2014-11-29 10:38:17 +01004517 halt();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004518#else
4519 printf("CP5\n");
4520 exit(0);
4521#endif
4522 }
4523 int tm;
4524 info.training = *info.cached_training;
4525 for (tm = 0; tm < 4; tm++)
4526 for (channel = 0; channel < NUM_CHANNELS; channel++)
4527 for (slot = 0; slot < NUM_SLOTS; slot++)
4528 for (rank = 0; rank < NUM_RANKS; rank++)
4529 for (lane = 0; lane < 9; lane++)
						write_500(&info, channel,
							  info.training.lane_timings
							  [tm][channel][slot][rank][lane],
							  get_timing_register_addr
							  (lane, tm, slot, rank),
							  9, 0);
4541 write_1d0(info.cached_training->reg_178, 0x178, 7, 1);
4542 write_1d0(info.cached_training->reg_10b, 0x10b, 6, 1);
4543 }
4544
4545 read_mchbar32(0x1f4); // !!!!
4546 write_mchbar32(0x1f4, 0x20000);
4547 write_mchbar32(0x1f0, 0x1d000200);
4548 read_mchbar8(0x1f0); // !!!!
4549 write_mchbar8(0x1f0, 0x1);
4550 read_mchbar8(0x1f0); // !!!!
4551
4552 program_board_delay(&info);
4553
4554 write_mchbar8(0x5ff, 0x0); /* OK */
4555 write_mchbar8(0x5ff, 0x80); /* OK */
4556 write_mchbar8(0x5f4, 0x1); /* OK */
4557
4558 write_mchbar32(0x130, read_mchbar32(0x130) & 0xfffffffd); // | 2 when ?
	while (read_mchbar32(0x130) & 1)
		;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004560 gav(read_1d0(0x14b, 7)); // = 0x81023100
4561 write_1d0(0x30, 0x14b, 7, 1);
4562 read_1d0(0xd6, 6); // = 0xfa008080 // !!!!
4563 write_1d0(7, 0xd6, 6, 1);
4564 read_1d0(0x328, 6); // = 0xfa018080 // !!!!
4565 write_1d0(7, 0x328, 6, 1);
4566
4567 for (channel = 0; channel < NUM_CHANNELS; channel++)
4568 set_4cf(&info, channel,
4569 info.populated_ranks[channel][0][0] ? 8 : 0);
4570
4571 read_1d0(0x116, 4); // = 0x4040432 // !!!!
4572 write_1d0(2, 0x116, 4, 1);
4573 read_1d0(0xae, 6); // = 0xe8088080 // !!!!
4574 write_1d0(0, 0xae, 6, 1);
4575 read_1d0(0x300, 4); // = 0x48088080 // !!!!
4576 write_1d0(0, 0x300, 6, 1);
4577 read_mchbar16(0x356); // !!!!
4578 write_mchbar16(0x356, 0x1040);
4579 read_mchbar16(0x756); // !!!!
4580 write_mchbar16(0x756, 0x1040);
4581 write_mchbar32(0x140, read_mchbar32(0x140) & ~0x07000000);
4582 write_mchbar32(0x138, read_mchbar32(0x138) & ~0x07000000);
4583 write_mchbar32(0x130, 0x31111301);
Vladimir Serbinenko25fc5322014-12-07 13:05:44 +01004584 /* Wait until REG130b0 is 1. */
4585 while (read_mchbar32(0x130) & 1)
4586 ;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004587
4588 {
4589 u32 t;
4590 u8 val_a1;
4591 val_a1 = read_1d0(0xa1, 6); // = 0x1cf4040 // !!!!
4592 t = read_1d0(0x2f3, 6); // = 0x10a4040 // !!!!
4593 rmw_1d0(0x320, 0x07,
4594 (t & 4) | ((t & 8) >> 2) | ((t & 0x10) >> 4), 6, 1);
4595 rmw_1d0(0x14b, 0x78,
4596 ((((val_a1 >> 2) & 4) | (val_a1 & 8)) >> 2) | (val_a1 &
4597 4), 7,
4598 1);
4599 rmw_1d0(0xce, 0x38,
4600 ((((val_a1 >> 2) & 4) | (val_a1 & 8)) >> 2) | (val_a1 &
4601 4), 6,
4602 1);
4603 }
4604
4605 for (channel = 0; channel < NUM_CHANNELS; channel++)
4606 set_4cf(&info, channel,
4607 info.populated_ranks[channel][0][0] ? 9 : 1);
4608
4609 rmw_1d0(0x116, 0xe, 1, 4, 1); // = 0x4040432 // !!!!
4610 read_mchbar32(0x144); // !!!!
4611 write_1d0(2, 0xae, 6, 1);
4612 write_1d0(2, 0x300, 6, 1);
4613 write_1d0(2, 0x121, 3, 1);
4614 read_1d0(0xd6, 6); // = 0xfa00c0c7 // !!!!
4615 write_1d0(4, 0xd6, 6, 1);
4616 read_1d0(0x328, 6); // = 0xfa00c0c7 // !!!!
4617 write_1d0(4, 0x328, 6, 1);
4618
4619 for (channel = 0; channel < NUM_CHANNELS; channel++)
4620 set_4cf(&info, channel,
4621 info.populated_ranks[channel][0][0] ? 9 : 0);
4622
	write_mchbar32(0x130, 0x11111301
		       | (info.populated_ranks[1][0][0] << 30)
		       | (info.populated_ranks[0][0][0] << 29));
	while (read_mchbar8(0x130) & 1) // !!!!
		;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004631 read_1d0(0xa1, 6); // = 0x1cf4054 // !!!!
4632 read_1d0(0x2f3, 6); // = 0x10a4054 // !!!!
4633 read_1d0(0x21c, 6); // = 0xafa00c0 // !!!!
4634 write_1d0(0, 0x21c, 6, 1);
4635 read_1d0(0x14b, 7); // = 0x810231b0 // !!!!
4636 write_1d0(0x35, 0x14b, 7, 1);
4637
4638 for (channel = 0; channel < NUM_CHANNELS; channel++)
4639 set_4cf(&info, channel,
4640 info.populated_ranks[channel][0][0] ? 0xb : 0x2);
4641
4642 set_334(1);
4643
4644 write_mchbar8(0x1e8, 0x4); /* OK */
4645
4646 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4647 write_500(&info, channel,
4648 0x3 & ~(info.populated_ranks_mask[channel]), 0x6b7, 2,
4649 1);
4650 write_500(&info, channel, 0x3, 0x69b, 2, 1);
4651 }
4652 write_mchbar32(0x2d0, (read_mchbar32(0x2d0) & 0xff2c01ff) | 0x200000); /* OK */
4653 write_mchbar16(0x6c0, 0x14a0); /* OK */
4654 write_mchbar32(0x6d0, (read_mchbar32(0x6d0) & 0xff0080ff) | 0x8000); /* OK */
4655 write_mchbar16(0x232, 0x8);
4656 write_mchbar32(0x234, (read_mchbar32(0x234) & 0xfffbfffb) | 0x40004); /* 0x40004 or 0 depending on ? */
4657 write_mchbar32(0x34, (read_mchbar32(0x34) & 0xfffffffd) | 5); /* OK */
4658 write_mchbar32(0x128, 0x2150d05);
4659 write_mchbar8(0x12c, 0x1f); /* OK */
4660 write_mchbar8(0x12d, 0x56); /* OK */
4661 write_mchbar8(0x12e, 0x31);
4662 write_mchbar8(0x12f, 0x0); /* OK */
4663 write_mchbar8(0x271, 0x2); /* OK */
4664 write_mchbar8(0x671, 0x2); /* OK */
4665 write_mchbar8(0x1e8, 0x4); /* OK */
4666 for (channel = 0; channel < NUM_CHANNELS; channel++)
4667 write_mchbar32(0x294 + (channel << 10),
4668 (info.populated_ranks_mask[channel] & 3) << 16);
4669 write_mchbar32(0x134, (read_mchbar32(0x134) & 0xfc01ffff) | 0x10000); /* OK */
4670 write_mchbar32(0x134, (read_mchbar32(0x134) & 0xfc85ffff) | 0x850000); /* OK */
4671 for (channel = 0; channel < NUM_CHANNELS; channel++)
		write_mchbar32(0x260 + (channel << 10),
			       (read_mchbar32(0x260 + (channel << 10)) & ~0xf00000)
			       | 0x8000000
			       | ((info.populated_ranks_mask[channel] & 3) << 20));
4678
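	/* On cold boot run the JEDEC DRAM init sequence; afterwards issue one
	   read per populated rank (purpose unclear, presumably to latch or
	   verify the mode registers). */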
4679 if (!s3resume)
4680 jedec_init(&info);
4681
4682 int totalrank = 0;
4683 for (channel = 0; channel < NUM_CHANNELS; channel++)
4684 for (slot = 0; slot < NUM_SLOTS; slot++)
4685 for (rank = 0; rank < NUM_RANKS; rank++)
4686 if (info.populated_ranks[channel][slot][rank]) {
4687 jedec_read(&info, channel, slot, rank,
4688 totalrank, 0xa, 0x400);
4689 totalrank++;
4690 }
4691
4692 write_mchbar8(0x12c, 0x9f);
4693
4694 read_mchbar8(0x271); // 2 // !!!!
4695 write_mchbar8(0x271, 0xe);
4696 read_mchbar8(0x671); // !!!!
4697 write_mchbar8(0x671, 0xe);
4698
4699 if (!s3resume) {
4700 for (channel = 0; channel < NUM_CHANNELS; channel++) {
			write_mchbar32(0x294 + (channel << 10),
				       (info.populated_ranks_mask[channel] & 3) << 16);
			write_mchbar16(0x298 + (channel << 10),
				       info.populated_ranks[channel][0][0]
				       | (info.populated_ranks[channel][0][1] << 5));
4714 write_mchbar32(0x29c + (channel << 10), 0x77a);
4715 }
4716 read_mchbar32(0x2c0); /// !!!
4717 write_mchbar32(0x2c0, 0x6009cc00);
4718
4719 {
4720 u8 a, b;
4721 a = read_mchbar8(0x243); // !!!!
4722 b = read_mchbar8(0x643); // !!!!
4723 write_mchbar8(0x243, a | 2);
4724 write_mchbar8(0x643, b | 2);
4725 }
4726
4727 write_1d0(7, 0x19b, 3, 1);
4728 write_1d0(7, 0x1c0, 3, 1);
4729 write_1d0(4, 0x1c6, 4, 1);
4730 write_1d0(4, 0x1cc, 4, 1);
4731 read_1d0(0x151, 4); // = 0x408c6d74 // !!!!
4732 write_1d0(4, 0x151, 4, 1);
4733 write_mchbar32(0x584, 0xfffff);
4734 write_mchbar32(0x984, 0xfffff);
4735
4736 for (channel = 0; channel < NUM_CHANNELS; channel++)
4737 for (slot = 0; slot < NUM_SLOTS; slot++)
4738 for (rank = 0; rank < NUM_RANKS; rank++)
					if (info.populated_ranks[channel][slot][rank])
						config_rank(&info, s3resume,
							    channel, slot, rank);
4745
4746 write_mchbar8(0x243, 0x1);
4747 write_mchbar8(0x643, 0x1);
4748 }
4749
4750 /* was == 1 but is common */
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004751 pci_write_config16(NORTHBRIDGE, 0xc8, 3);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004752 write_26c(0, 0x820);
4753 write_26c(1, 0x820);
4754 write_mchbar32(0x130, read_mchbar32(0x130) | 2);
4755 /* end */
4756
4757 if (s3resume) {
4758 for (channel = 0; channel < NUM_CHANNELS; channel++) {
			write_mchbar32(0x294 + (channel << 10),
				       (info.populated_ranks_mask[channel] & 3) << 16);
			write_mchbar16(0x298 + (channel << 10),
				       info.populated_ranks[channel][0][0]
				       | (info.populated_ranks[channel][0][1] << 5));
4772 write_mchbar32(0x29c + (channel << 10), 0x77a);
4773 }
4774 read_mchbar32(0x2c0); /// !!!
4775 write_mchbar32(0x2c0, 0x6009cc00);
4776 }
4777
4778 write_mchbar32(0xfa4, read_mchbar32(0xfa4) & ~0x01000002);
4779 write_mchbar32(0xfb0, 0x2000e019);
4780
4781#if !REAL
4782 printf("CP16\n");
4783#endif
4784
4785 /* Before training. */
4786 timestamp_add_now(103);
4787
4788 if (!s3resume)
4789 ram_training(&info);
4790
4791 /* After training. */
Paul Menzel9e817bf2015-05-28 07:32:48 +02004792 timestamp_add_now(104);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004793
4794 dump_timings(&info);
4795
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004796 program_modules_memory_map(&info, 0);
4797 program_total_memory_map(&info);
4798
4799 if (info.non_interleaved_part_mb != 0 && info.interleaved_part_mb != 0)
4800 write_mchbar8(0x111, 0x20 | (0 << 2) | (1 << 6) | (0 << 7));
4801 else if (have_match_ranks(&info, 0, 4) && have_match_ranks(&info, 1, 4))
4802 write_mchbar8(0x111, 0x20 | (3 << 2) | (0 << 6) | (1 << 7));
4803 else if (have_match_ranks(&info, 0, 2) && have_match_ranks(&info, 1, 2))
4804 write_mchbar8(0x111, 0x20 | (3 << 2) | (0 << 6) | (0 << 7));
4805 else
4806 write_mchbar8(0x111, 0x20 | (3 << 2) | (1 << 6) | (0 << 7));
4807
4808 write_mchbar32(0xfac, read_mchbar32(0xfac) & ~0x80000000); // OK
4809 write_mchbar32(0xfb4, 0x4800); // OK
4810 write_mchbar32(0xfb8, (info.revision < 8) ? 0x20 : 0x0); // OK
4811 write_mchbar32(0xe94, 0x7ffff); // OK
4812 write_mchbar32(0xfc0, 0x80002040); // OK
4813 write_mchbar32(0xfc4, 0x701246); // OK
4814 write_mchbar8(0xfc8, read_mchbar8(0xfc8) & ~0x70); // OK
4815 write_mchbar32(0xe5c, 0x1000000 | read_mchbar32(0xe5c)); // OK
4816 write_mchbar32(0x1a70, (read_mchbar32(0x1a70) | 0x00200000) & ~0x00100000); // OK
4817 write_mchbar32(0x50, 0x700b0); // OK
4818 write_mchbar32(0x3c, 0x10); // OK
4819 write_mchbar8(0x1aa8, (read_mchbar8(0x1aa8) & ~0x35) | 0xa); // OK
4820 write_mchbar8(0xff4, read_mchbar8(0xff4) | 0x2); // OK
4821 write_mchbar32(0xff8, (read_mchbar32(0xff8) & ~0xe008) | 0x1020); // OK
4822
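	/* Set up the IOMMU/VT-d units: program their base addresses (bit 0
	   apparently acting as an enable) and write magic values to offset
	   0xffc of each window; the exact meaning is unknown. */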
4823#if REAL
4824 write_mchbar32(0xd00, IOMMU_BASE2 | 1);
4825 write_mchbar32(0xd40, IOMMU_BASE1 | 1);
4826 write_mchbar32(0xdc0, IOMMU_BASE4 | 1);
4827
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08004828 write32p(IOMMU_BASE1 | 0xffc, 0x80000000);
4829 write32p(IOMMU_BASE2 | 0xffc, 0xc0000000);
4830 write32p(IOMMU_BASE4 | 0xffc, 0x80000000);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004831
4832#else
4833 {
4834 u32 eax;
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08004835 eax = read32p(0xffc + (read_mchbar32(0xd00) & ~1)) | 0x08000000; // = 0xe911714b// OK
4836 write32p(0xffc + (read_mchbar32(0xd00) & ~1), eax); // OK
4837 eax = read32p(0xffc + (read_mchbar32(0xdc0) & ~1)) | 0x40000000; // = 0xe911714b// OK
4838 write32p(0xffc + (read_mchbar32(0xdc0) & ~1), eax); // OK
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004839 }
4840#endif
4841
4842 {
4843 u32 eax;
4844
4845 eax = info.fsb_frequency / 9;
4846 write_mchbar32(0xfcc, (read_mchbar32(0xfcc) & 0xfffc0000) | (eax * 0x280) | (eax * 0x5000) | eax | 0x40000); // OK
4847 write_mchbar32(0x20, 0x33001); //OK
4848 }
4849
4850 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4851 write_mchbar32(0x220 + (channel << 10), read_mchbar32(0x220 + (channel << 10)) & ~0x7770); //OK
4852 if (info.max_slots_used_in_channel == 1)
4853 write_mchbar16(0x237 + (channel << 10), (read_mchbar16(0x237 + (channel << 10)) | 0x0201)); //OK
4854 else
4855 write_mchbar16(0x237 + (channel << 10), (read_mchbar16(0x237 + (channel << 10)) & ~0x0201)); //OK
4856
4857 write_mchbar8(0x241 + (channel << 10), read_mchbar8(0x241 + (channel << 10)) | 1); // OK
4858
4859 if (info.clock_speed_index <= 1
4860 && (info.silicon_revision == 2
4861 || info.silicon_revision == 3))
4862 write_mchbar32(0x248 + (channel << 10), (read_mchbar32(0x248 + (channel << 10)) | 0x00102000)); // OK
4863 else
4864 write_mchbar32(0x248 + (channel << 10), (read_mchbar32(0x248 + (channel << 10)) & ~0x00102000)); // OK
4865 }
4866
4867 write_mchbar32(0x115, read_mchbar32(0x115) | 0x1000000); // OK
4868
4869 {
4870 u8 al;
4871 al = 0xd;
4872 if (!(info.silicon_revision == 0 || info.silicon_revision == 1))
4873 al += 2;
4874 al |= ((1 << (info.max_slots_used_in_channel - 1)) - 1) << 4;
4875 write_mchbar32(0x210, (al << 16) | 0x20); // OK
4876 }
4877
4878 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4879 write_mchbar32(0x288 + (channel << 10), 0x70605040); // OK
4880 write_mchbar32(0x28c + (channel << 10), 0xfffec080); // OK
4881 write_mchbar32(0x290 + (channel << 10), 0x282091c | ((info.max_slots_used_in_channel - 1) << 0x16)); // OK
4882 }
4883 u32 reg1c;
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004884 pci_read_config32(NORTHBRIDGE, 0x40); // = DEFAULT_EPBAR | 0x001 // OK
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08004885 reg1c = read32p(DEFAULT_EPBAR | 0x01c); // = 0x8001 // OK
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004886 pci_read_config32(NORTHBRIDGE, 0x40); // = DEFAULT_EPBAR | 0x001 // OK
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08004887 write32p(DEFAULT_EPBAR | 0x01c, reg1c); // OK
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004888 read_mchbar8(0xe08); // = 0x0
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004889 pci_read_config32(NORTHBRIDGE, 0xe4); // = 0x316126
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004890 write_mchbar8(0x1210, read_mchbar8(0x1210) | 2); // OK
4891 write_mchbar32(0x1200, 0x8800440); // OK
4892 write_mchbar32(0x1204, 0x53ff0453); // OK
4893 write_mchbar32(0x1208, 0x19002043); // OK
4894 write_mchbar16(0x1214, 0x320); // OK
4895
4896 if (info.revision == 0x10 || info.revision == 0x11) {
4897 write_mchbar16(0x1214, 0x220); // OK
4898 write_mchbar8(0x1210, read_mchbar8(0x1210) | 0x40); // OK
4899 }
4900
4901 write_mchbar8(0x1214, read_mchbar8(0x1214) | 0x4); // OK
4902 write_mchbar8(0x120c, 0x1); // OK
4903 write_mchbar8(0x1218, 0x3); // OK
4904 write_mchbar8(0x121a, 0x3); // OK
4905 write_mchbar8(0x121c, 0x3); // OK
4906 write_mchbar16(0xc14, 0x0); // OK
4907 write_mchbar16(0xc20, 0x0); // OK
4908 write_mchbar32(0x1c, 0x0); // OK
4909
4910 /* revision dependent here. */
4911
4912 write_mchbar16(0x1230, read_mchbar16(0x1230) | 0x1f07); // OK
4913
4914 if (info.uma_enabled)
4915 write_mchbar32(0x11f4, read_mchbar32(0x11f4) | 0x10000000); // OK
4916
4917 write_mchbar16(0x1230, read_mchbar16(0x1230) | 0x8000); // OK
4918 write_mchbar8(0x1214, read_mchbar8(0x1214) | 1); // OK
4919
4920 u8 bl, ebpb;
4921 u16 reg_1020;
4922
4923 reg_1020 = read_mchbar32(0x1020); // = 0x6c733c // OK
4924 write_mchbar8(0x1070, 0x1); // OK
4925
4926 write_mchbar32(0x1000, 0x100); // OK
4927 write_mchbar8(0x1007, 0x0); // OK
4928
4929 if (reg_1020 != 0) {
4930 write_mchbar16(0x1018, 0x0); // OK
4931 bl = reg_1020 >> 8;
4932 ebpb = reg_1020 & 0xff;
4933 } else {
4934 ebpb = 0;
4935 bl = 8;
4936 }
4937
4938 rdmsr(0x1a2);
4939
4940 write_mchbar32(0x1014, 0xffffffff); // OK
4941
	write_mchbar32(0x1010, ((((ebpb + 0x7d) << 7) / bl) & 0xff) * !!reg_1020); // OK
4943
4944 write_mchbar8(0x101c, 0xb8); // OK
4945
4946 write_mchbar8(0x123e, (read_mchbar8(0x123e) & 0xf) | 0x60); // OK
4947 if (reg_1020 != 0) {
4948 write_mchbar32(0x123c, (read_mchbar32(0x123c) & ~0x00900000) | 0x600000); // OK
4949 write_mchbar8(0x101c, 0xb8); // OK
4950 }
4951
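	/* Carve out the UMA memory requested by the ME/HECI, if any. */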
4952 setup_heci_uma(&info);
4953
4954 if (info.uma_enabled) {
4955 u16 ax;
4956 write_mchbar32(0x11b0, read_mchbar32(0x11b0) | 0x4000); // OK
4957 write_mchbar32(0x11b4, read_mchbar32(0x11b4) | 0x4000); // OK
4958 write_mchbar16(0x1190, read_mchbar16(0x1190) | 0x4000); // OK
4959
4960 ax = read_mchbar16(0x1190) & 0xf00; // = 0x480a // OK
4961 write_mchbar16(0x1170, ax | (read_mchbar16(0x1170) & 0x107f) | 0x4080); // OK
4962 write_mchbar16(0x1170, read_mchbar16(0x1170) | 0x1000); // OK
4963#if REAL
4964 udelay(1000);
4965#endif
4966 u16 ecx;
		for (ecx = 0xffff; ecx && (read_mchbar16(0x1170) & 0x1000); ecx--)
			;	// OK
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004968 write_mchbar16(0x1190, read_mchbar16(0x1190) & ~0x4000); // OK
4969 }
4970
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004971 pci_write_config8(SOUTHBRIDGE, GEN_PMCON_2,
4972 pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2) & ~0x80);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004973 udelay(10000);
Vladimir Serbinenkoc7db28c2014-02-19 22:09:33 +01004974 write_mchbar16(0x2ca8, 0x8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004975
4976#if REAL
4977 udelay(1000);
4978 dump_timings(&info);
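	/* Set up or recover cbmem; on a cold boot also stash the training
	   results (MRC cache) so S3 resume and later boots can reuse them.
	   If cbmem is missing on resume, the resume has failed. */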
Vladimir Serbinenkob16f0922014-06-07 16:27:27 +02004979 cbmem_wasnot_inited = cbmem_recovery(s3resume);
4980
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004981 if (!s3resume)
4982 save_timings(&info);
Vladimir Serbinenkob16f0922014-06-07 16:27:27 +02004983 if (s3resume && cbmem_wasnot_inited) {
4984 u32 reg32;
4985 printk(BIOS_ERR, "Failed S3 resume.\n");
4986 ram_check(0x100000, 0x200000);
4987
4988 /* Clear SLP_TYPE. */
4989 reg32 = inl(DEFAULT_PMBASE + 0x04);
4990 outl(reg32 & ~(7 << 10), DEFAULT_PMBASE + 0x04);
4991
4992 /* Failed S3 resume, reset to come up cleanly */
4993 outb(0xe, 0xcf9);
Patrick Georgi546953c2014-11-29 10:38:17 +01004994 halt();
Vladimir Serbinenkob16f0922014-06-07 16:27:27 +02004995 }
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004996#endif
4997}