/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2013 Vladimir Serbinenko.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <stdlib.h>
#include <compiler.h>
#include <console/console.h>
#include <string.h>
#include <arch/io.h>
#include <cpu/x86/msr.h>
#include <cbmem.h>
#include <arch/cbfs.h>
#include <cbfs.h>
#include <ip_checksum.h>
#include <pc80/mc146818rtc.h>
#include <device/pci_def.h>
#include <device/device.h>
#include <arch/cpu.h>
#include <halt.h>
#include <spd.h>
#include "raminit.h"
#include "chip.h"
#include <timestamp.h>
#include <cpu/x86/mtrr.h>
#include <cpu/intel/speedstep.h>
#include <cpu/intel/turbo.h>
#include <mrc_cache.h>
#include <arch/early_variables.h>

#include "nehalem.h"

#include <southbridge/intel/ibexpeak/me.h>
#include <delay.h>

#define NORTHBRIDGE PCI_DEV(0, 0, 0)
#define SOUTHBRIDGE PCI_DEV(0, 0x1f, 0)
#define GMA PCI_DEV (0, 0x2, 0x0)
#define HECIDEV PCI_DEV(0, 0x16, 0)
#define HECIBAR 0x10

#define FOR_ALL_RANKS \
	for (channel = 0; channel < NUM_CHANNELS; channel++) \
		for (slot = 0; slot < NUM_SLOTS; slot++) \
			for (rank = 0; rank < NUM_RANKS; rank++)

#define FOR_POPULATED_RANKS \
	for (channel = 0; channel < NUM_CHANNELS; channel++) \
		for (slot = 0; slot < NUM_SLOTS; slot++) \
			for (rank = 0; rank < NUM_RANKS; rank++) \
				if (info->populated_ranks[channel][slot][rank])

#define FOR_POPULATED_RANKS_BACKWARDS \
	for (channel = NUM_CHANNELS - 1; channel >= 0; channel--) \
		for (slot = 0; slot < NUM_SLOTS; slot++) \
			for (rank = 0; rank < NUM_RANKS; rank++) \
				if (info->populated_ranks[channel][slot][rank])

/* [REG_178][CHANNEL][2 * SLOT + RANK][LANE] */
typedef struct {
	u8 smallest;
	u8 largest;
} timing_bounds_t[2][2][2][9];

#define MRC_CACHE_VERSION 1

struct ram_training {
	/* [TM][CHANNEL][SLOT][RANK][LANE] */
	u16 lane_timings[4][2][2][2][9];
	u16 reg_178;
	u16 reg_10b;

	u8 reg178_center;
	u8 reg178_smallest;
	u8 reg178_largest;
	timing_bounds_t timing_bounds[2];
	u16 timing_offset[2][2][2][9];
	u16 timing2_offset[2][2][2][9];
	u16 timing2_bounds[2][2][2][9][2];
	u8 reg274265[2][3];	/* [CHANNEL][REGISTER] */
	u8 reg2ca9_bit0;
	u32 reg_6dc;
	u32 reg_6e8;
};

#include <lib.h>		/* Prototypes */

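/*
 * Thin wrappers around the MCHBAR MMIO window of the integrated memory
 * controller. The raw register offsets used throughout this file come from
 * reverse engineering, so comments in this file describe their apparent
 * purpose rather than documented behaviour.
 */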
static inline void write_mchbar32(u32 addr, u32 val)
{
	MCHBAR32(addr) = val;
}

static inline void write_mchbar16(u32 addr, u16 val)
{
	MCHBAR16(addr) = val;
}

static inline void write_mchbar8(u32 addr, u8 val)
{
	MCHBAR8(addr) = val;
}


static inline u32 read_mchbar32(u32 addr)
{
	return MCHBAR32(addr);
}

static inline u16 read_mchbar16(u32 addr)
{
	return MCHBAR16(addr);
}

static inline u8 read_mchbar8(u32 addr)
{
	return MCHBAR8(addr);
}

static void clflush(u32 addr)
{
	asm volatile ("clflush (%0)"::"r" (addr));
}

typedef struct _u128 {
	u64 lo;
	u64 hi;
} u128;

static void read128(u32 addr, u64 * out)
{
	u128 ret;
	u128 stor;
	asm volatile ("movdqu %%xmm0, %0\n"
		      "movdqa (%2), %%xmm0\n"
		      "movdqu %%xmm0, %1\n"
		      "movdqu %0, %%xmm0":"+m" (stor), "=m"(ret):"r"(addr));
	out[0] = ret.lo;
	out[1] = ret.hi;
}

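/*
 * The next two helpers implement an indirect register access protocol
 * observed on this memory controller: MCHBAR 0x1d0 acts as a command/busy
 * register, 0x1d4 carries the write payload and 0x1d8 returns read data.
 * Bit 23 of 0x1d0 appears to be a busy flag that is polled before and after
 * each access. This description is inferred from the code, not from public
 * documentation.
 */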
/* OK */
static void write_1d0(u32 val, u16 addr, int bits, int flag)
{
	write_mchbar32(0x1d0, 0);
	while (read_mchbar32(0x1d0) & 0x800000);
	write_mchbar32(0x1d4,
		       (val & ((1 << bits) - 1)) | (2 << bits) | (flag << bits));
	write_mchbar32(0x1d0, 0x40000000 | addr);
	while (read_mchbar32(0x1d0) & 0x800000);
}

/* OK */
static u16 read_1d0(u16 addr, int split)
{
	u32 val;
	write_mchbar32(0x1d0, 0);
	while (read_mchbar32(0x1d0) & 0x800000);
	write_mchbar32(0x1d0,
		       0x80000000 | (((read_mchbar8(0x246) >> 2) & 3) +
				     0x361 - addr));
	while (read_mchbar32(0x1d0) & 0x800000);
	val = read_mchbar32(0x1d8);
	write_1d0(0, 0x33d, 0, 0);
	write_1d0(0, 0x33d, 0, 0);
	val &= ((1 << split) - 1);
	// printk (BIOS_ERR, "R1D0C [%x] => %x\n", addr, val);
	return val;
}

static void write32p(uintptr_t addr, uint32_t val)
{
	write32((void *)addr, val);
}

static uint32_t read32p(uintptr_t addr)
{
	return read32((void *)addr);
}

static void sfence(void)
{
	asm volatile ("sfence");
}

static inline u16 get_lane_offset(int slot, int rank, int lane)
{
	return 0x124 * lane + ((lane & 4) ? 0x23e : 0) + 11 * rank + 22 * slot -
	    0x452 * (lane == 8);
}

static inline u16 get_timing_register_addr(int lane, int tm, int slot, int rank)
{
	const u16 offs[] = { 0x1d, 0xa8, 0xe6, 0x5c };
	return get_lane_offset(slot, rank, lane) + offs[(tm + 3) % 4];
}

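/*
 * Per-lane timing registers are addressed relative to a lane/slot/rank
 * offset computed by get_lane_offset(). For example, lane 0 of slot 0,
 * rank 0 with timing set 0 (tm == 0) resolves to 0 + offs[3] = 0x5c in the
 * per-channel register file accessed through read_500()/write_500() below.
 */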
static u32 gav_real(int line, u32 in)
{
	// printk (BIOS_DEBUG, "%d: GAV: %x\n", line, in);
	return in;
}

#define gav(x) gav_real (__LINE__, (x))

struct raminfo {
	u16 clock_speed_index;	/* clock_speed (REAL, not DDR) / 133.(3) - 3 */
	u16 fsb_frequency;	/* in 1.(1)/2 MHz.  */
	u8 is_x16_module[2][2];	/* [CHANNEL][SLOT] */
	u8 density[2][2];	/* [CHANNEL][SLOT] */
	u8 populated_ranks[2][2][2];	/* [CHANNEL][SLOT][RANK] */
	int rank_start[2][2][2];
	u8 cas_latency;
	u8 board_lane_delay[9];
	u8 use_ecc;
	u8 revision;
	u8 max_supported_clock_speed_index;
	u8 uma_enabled;
	u8 spd[2][2][151];	/* [CHANNEL][SLOT][BYTE] */
	u8 silicon_revision;
	u8 populated_ranks_mask[2];
	u8 max_slots_used_in_channel;
	u8 mode4030[2];
	u16 avg4044[2];
	u16 max4048[2];
	unsigned total_memory_mb;
	unsigned interleaved_part_mb;
	unsigned non_interleaved_part_mb;

	u32 heci_bar;
	u64 heci_uma_addr;
	unsigned memory_reserved_for_heci_mb;

	struct ram_training training;
	u32 last_500_command[2];

	u32 delay46_ps[2];
	u32 delay54_ps[2];
	u8 revision_flag_1;
	u8 some_delay_1_cycle_floor;
	u8 some_delay_2_halfcycles_ceil;
	u8 some_delay_3_ps_rounded;

	const struct ram_training *cached_training;
};

/* Global allocation of timings_car */
timing_bounds_t timings_car[64] CAR_GLOBAL;

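/*
 * read_500()/write_500() mirror the 0x1d0 protocol above but operate on the
 * per-channel register file at MCHBAR 0x500 + (channel << 10): 0x500 is the
 * command/busy register, 0x504 the write payload and 0x508 the read data.
 * Again, this is how the code behaves, not a documented interface.
 */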
static void
write_500(struct raminfo *info, int channel, u32 val, u16 addr, int bits,
	  int flag);

/* OK */
static u16
read_500(struct raminfo *info, int channel, u16 addr, int split)
{
	u32 val;
	info->last_500_command[channel] = 0x80000000;
	write_mchbar32(0x500 + (channel << 10), 0);
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
	write_mchbar32(0x500 + (channel << 10),
		       0x80000000 |
		       (((read_mchbar8(0x246 + (channel << 10)) >> 2) &
			 3) + 0xb88 - addr));
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
	val = read_mchbar32(0x508 + (channel << 10));
	return val & ((1 << split) - 1);
}

/* OK */
static void
write_500(struct raminfo *info, int channel, u32 val, u16 addr, int bits,
	  int flag)
{
	if (info->last_500_command[channel] == 0x80000000) {
		info->last_500_command[channel] = 0x40000000;
		write_500(info, channel, 0, 0xb61, 0, 0);
	}
	write_mchbar32(0x500 + (channel << 10), 0);
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
	write_mchbar32(0x504 + (channel << 10),
		       (val & ((1 << bits) - 1)) | (2 << bits) | (flag << bits));
	write_mchbar32(0x500 + (channel << 10), 0x40000000 | addr);
	while (read_mchbar32(0x500 + (channel << 10)) & 0x800000);
}

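/*
 * Simple read/write test of the first 256 bytes of a rank. A fixed bit mask
 * selects which cache lines receive an all-ones pattern; the return value
 * is a per-byte-lane OK bitmap (bits 0-7), so a fully working rank yields
 * 0xff.
 */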
static int rw_test(int rank)
{
	const u32 mask = 0xf00fc33c;
	int ok = 0xff;
	int i;
	for (i = 0; i < 64; i++)
		write32p((rank << 28) | (i << 2), 0);
	sfence();
	for (i = 0; i < 64; i++)
		gav(read32p((rank << 28) | (i << 2)));
	sfence();
	for (i = 0; i < 32; i++) {
		u32 pat = (((mask >> i) & 1) ? 0xffffffff : 0);
		write32p((rank << 28) | (i << 3), pat);
		write32p((rank << 28) | (i << 3) | 4, pat);
	}
	sfence();
	for (i = 0; i < 32; i++) {
		u8 pat = (((mask >> i) & 1) ? 0xff : 0);
		int j;
		u32 val;
		gav(val = read32p((rank << 28) | (i << 3)));
		for (j = 0; j < 4; j++)
			if (((val >> (j * 8)) & 0xff) != pat)
				ok &= ~(1 << j);
		gav(val = read32p((rank << 28) | (i << 3) | 4));
		for (j = 0; j < 4; j++)
			if (((val >> (j * 8)) & 0xff) != pat)
				ok &= ~(16 << j);
	}
	sfence();
	for (i = 0; i < 64; i++)
		write32p((rank << 28) | (i << 2), 0);
	sfence();
	for (i = 0; i < 64; i++)
		gav(read32p((rank << 28) | (i << 2)));

	return ok;
}

static void
program_timings(struct raminfo *info, u16 base, int channel, int slot, int rank)
{
	int lane;
	for (lane = 0; lane < 8; lane++) {
		write_500(info, channel,
			  base +
			  info->training.
			  lane_timings[2][channel][slot][rank][lane],
			  get_timing_register_addr(lane, 2, slot, rank), 9, 0);
		write_500(info, channel,
			  base +
			  info->training.
			  lane_timings[3][channel][slot][rank][lane],
			  get_timing_register_addr(lane, 3, slot, rank), 9, 0);
	}
}

static void write_26c(int channel, u16 si)
{
	write_mchbar32(0x26c + (channel << 10), 0x03243f35);
	write_mchbar32(0x268 + (channel << 10), 0xcfc00000 | (si << 9));
	write_mchbar16(0x2b9 + (channel << 10), si);
}

static u32 get_580(int channel, u8 addr)
{
	u32 ret;
	gav(read_1d0(0x142, 3));
	write_mchbar8(0x5ff, 0x0);	/* OK */
	write_mchbar8(0x5ff, 0x80);	/* OK */
	write_mchbar32(0x580 + (channel << 10), 0x8493c012 | addr);
	write_mchbar8(0x580 + (channel << 10),
		      read_mchbar8(0x580 + (channel << 10)) | 1);
	while (!((ret = read_mchbar32(0x580 + (channel << 10))) & 0x10000));
	write_mchbar8(0x580 + (channel << 10),
		      read_mchbar8(0x580 + (channel << 10)) & ~1);
	return ret;
}

const int cached_config = 0;

#define NUM_CHANNELS 2
#define NUM_SLOTS 2
#define NUM_RANKS 2
#define RANK_SHIFT 28
#define CHANNEL_SHIFT 10

#include "raminit_tables.c"

static void seq9(struct raminfo *info, int channel, int slot, int rank)
{
	int i, lane;

	for (i = 0; i < 2; i++)
		for (lane = 0; lane < 8; lane++)
			write_500(info, channel,
				  info->training.lane_timings[i + 1][channel]
				  [slot][rank][lane],
				  get_timing_register_addr(lane, i + 1, slot,
							   rank), 9, 0);

	write_1d0(1, 0x103, 6, 1);
	for (lane = 0; lane < 8; lane++)
		write_500(info, channel,
			  info->training.
			  lane_timings[0][channel][slot][rank][lane],
			  get_timing_register_addr(lane, 0, slot, rank), 9, 0);

	for (i = 0; i < 2; i++) {
		for (lane = 0; lane < 8; lane++)
			write_500(info, channel,
				  info->training.lane_timings[i + 1][channel]
				  [slot][rank][lane],
				  get_timing_register_addr(lane, i + 1, slot,
							   rank), 9, 0);
		gav(get_580(channel, ((i + 1) << 2) | (rank << 5)));
	}

	gav(read_1d0(0x142, 3));	// = 0x10408118
	write_mchbar8(0x5ff, 0x0);	/* OK */
	write_mchbar8(0x5ff, 0x80);	/* OK */
	write_1d0(0x2, 0x142, 3, 1);
	for (lane = 0; lane < 8; lane++) {
		// printk (BIOS_ERR, "before: %x\n", info->training.lane_timings[2][channel][slot][rank][lane]);
		info->training.lane_timings[2][channel][slot][rank][lane] =
		    read_500(info, channel,
			     get_timing_register_addr(lane, 2, slot, rank), 9);
		//printk (BIOS_ERR, "after: %x\n", info->training.lane_timings[2][channel][slot][rank][lane]);
		info->training.lane_timings[3][channel][slot][rank][lane] =
		    info->training.lane_timings[2][channel][slot][rank][lane] +
		    0x20;
	}
}

static int count_ranks_in_channel(struct raminfo *info, int channel)
{
	int slot, rank;
	int res = 0;
	for (slot = 0; slot < NUM_SLOTS; slot++)
		for (rank = 0; rank < NUM_SLOTS; rank++)
			res += info->populated_ranks[channel][slot][rank];
	return res;
}

static void
config_rank(struct raminfo *info, int s3resume, int channel, int slot, int rank)
{
	int add;

	write_1d0(0, 0x178, 7, 1);
	seq9(info, channel, slot, rank);
	program_timings(info, 0x80, channel, slot, rank);

	if (channel == 0)
		add = count_ranks_in_channel(info, 1);
	else
		add = 0;
	if (!s3resume)
		gav(rw_test(rank + add));
	program_timings(info, 0x00, channel, slot, rank);
	if (!s3resume)
		gav(rw_test(rank + add));
	if (!s3resume)
		gav(rw_test(rank + add));
	write_1d0(0, 0x142, 3, 1);
	write_1d0(0, 0x103, 6, 1);

	gav(get_580(channel, 0xc | (rank << 5)));
	gav(read_1d0(0x142, 3));

	write_mchbar8(0x5ff, 0x0);	/* OK */
	write_mchbar8(0x5ff, 0x80);	/* OK */
}

static void set_4cf(struct raminfo *info, int channel, u8 val)
{
	gav(read_500(info, channel, 0x4cf, 4));	// = 0xc2300cf9
	write_500(info, channel, val, 0x4cf, 4, 1);
	gav(read_500(info, channel, 0x659, 4));	// = 0x80300839
	write_500(info, channel, val, 0x659, 4, 1);
	gav(read_500(info, channel, 0x697, 4));	// = 0x80300839
	write_500(info, channel, val, 0x697, 4, 1);
}

static void set_334(int zero)
{
	int j, k, channel;
	const u32 val3[] = { 0x2a2b2a2b, 0x26272627, 0x2e2f2e2f, 0x2a2b };
	u32 vd8[2][16];

	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		for (j = 0; j < 4; j++) {
			u32 a = (j == 1) ? 0x29292929 : 0x31313131;
			u32 lmask = (j == 3) ? 0xffff : 0xffffffff;
			u16 c;
			if ((j == 0 || j == 3) && zero)
				c = 0;
			else if (j == 3)
				c = 0x5f;
			else
				c = 0x5f5f;

			for (k = 0; k < 2; k++) {
				write_mchbar32(0x138 + 8 * k,
					       (channel << 26) | (j << 24));
				gav(vd8[1][(channel << 3) | (j << 1) | k] =
				    read_mchbar32(0x138 + 8 * k));
				gav(vd8[0][(channel << 3) | (j << 1) | k] =
				    read_mchbar32(0x13c + 8 * k));
			}

			write_mchbar32(0x334 + (channel << 10) + (j * 0x44),
				       zero ? 0 : val3[j]);
			write_mchbar32(0x32c + (channel << 10) + (j * 0x44),
				       zero ? 0 : (0x18191819 & lmask));
			write_mchbar16(0x34a + (channel << 10) + (j * 0x44), c);
			write_mchbar32(0x33c + (channel << 10) + (j * 0x44),
				       zero ? 0 : (a & lmask));
			write_mchbar32(0x344 + (channel << 10) + (j * 0x44),
				       zero ? 0 : (a & lmask));
		}
	}

	write_mchbar32(0x130, read_mchbar32(0x130) | 1);	/* OK */
	while (read_mchbar8(0x130) & 1);	/* OK */
}

static void rmw_1d0(u16 addr, u32 and, u32 or, int split, int flag)
{
	u32 v;
	v = read_1d0(addr, split);
	write_1d0((v & and) | or, addr, split, flag);
}

static int find_highest_bit_set(u16 val)
{
	int i;
	for (i = 15; i >= 0; i--)
		if (val & (1 << i))
			return i;
	return -1;
}

static int find_lowest_bit_set32(u32 val)
{
	int i;
	for (i = 0; i < 32; i++)
		if (val & (1 << i))
			return i;
	return -1;
}

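/*
 * Byte offsets into the DDR3 SPD that this code consumes. The names follow
 * the JEDEC DDR3 SPD layout (e.g. byte 4 is density/banks, bytes 10/11 the
 * medium timebase dividend/divisor, byte 12 tCKmin, byte 63 the rank-1
 * address mirroring flag).
 */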
enum {
	DEVICE_TYPE = 2,
	MODULE_TYPE = 3,
	DENSITY = 4,
	RANKS_AND_DQ = 7,
	MEMORY_BUS_WIDTH = 8,
	TIMEBASE_DIVIDEND = 10,
	TIMEBASE_DIVISOR = 11,
	CYCLETIME = 12,

	CAS_LATENCIES_LSB = 14,
	CAS_LATENCIES_MSB = 15,
	CAS_LATENCY_TIME = 16,
	THERMAL_AND_REFRESH = 31,
	REFERENCE_RAW_CARD_USED = 62,
	RANK1_ADDRESS_MAPPING = 63
};

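/*
 * calculate_timings() derives a common clock and CAS latency from the SPDs.
 * All times are in picoseconds: the SPD medium timebase is
 * 1000 * dividend / divisor, e.g. dividend 1 and divisor 8 give 125 ps, so a
 * tCKmin of 10 MTB units is 1250 ps (DDR3-1600). A supported CAS latency is
 * then picked to match tAAmin at the chosen clock.
 */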
static void calculate_timings(struct raminfo *info)
{
	unsigned cycletime;
	unsigned cas_latency_time;
	unsigned supported_cas_latencies;
	unsigned channel, slot;
	unsigned clock_speed_index;
	unsigned min_cas_latency;
	unsigned cas_latency;
	unsigned max_clock_index;

	/* Find common CAS latency */
	supported_cas_latencies = 0x3fe;
	for (channel = 0; channel < NUM_CHANNELS; channel++)
		for (slot = 0; slot < NUM_SLOTS; slot++)
			if (info->populated_ranks[channel][slot][0])
				supported_cas_latencies &=
				    2 *
				    (info->spd[channel][slot][CAS_LATENCIES_LSB] |
				     (info->spd[channel][slot][CAS_LATENCIES_MSB]
				      << 8));

	max_clock_index = min(3, info->max_supported_clock_speed_index);

	cycletime = min_cycletime[max_clock_index];
	cas_latency_time = min_cas_latency_time[max_clock_index];

	for (channel = 0; channel < NUM_CHANNELS; channel++)
		for (slot = 0; slot < NUM_SLOTS; slot++)
			if (info->populated_ranks[channel][slot][0]) {
				unsigned timebase;
				timebase =
				    1000 *
				    info->spd[channel][slot][TIMEBASE_DIVIDEND] /
				    info->spd[channel][slot][TIMEBASE_DIVISOR];
				cycletime =
				    max(cycletime,
					timebase *
					info->spd[channel][slot][CYCLETIME]);
				cas_latency_time =
				    max(cas_latency_time,
					timebase *
					info->spd[channel][slot]
					[CAS_LATENCY_TIME]);
			}
	for (clock_speed_index = 0; clock_speed_index < 3; clock_speed_index++) {
		if (cycletime == min_cycletime[clock_speed_index])
			break;
		if (cycletime > min_cycletime[clock_speed_index]) {
			clock_speed_index--;
			cycletime = min_cycletime[clock_speed_index];
			break;
		}
	}
	min_cas_latency = CEIL_DIV(cas_latency_time, cycletime);
	cas_latency = 0;
	while (supported_cas_latencies) {
		cas_latency = find_highest_bit_set(supported_cas_latencies) + 3;
		if (cas_latency <= min_cas_latency)
			break;
		supported_cas_latencies &=
		    ~(1 << find_highest_bit_set(supported_cas_latencies));
	}

	if (cas_latency != min_cas_latency && clock_speed_index)
		clock_speed_index--;

	if (cas_latency * min_cycletime[clock_speed_index] > 20000)
		die("Couldn't configure DRAM");
	info->clock_speed_index = clock_speed_index;
	info->cas_latency = cas_latency;
}

static void program_base_timings(struct raminfo *info)
{
	unsigned channel;
	unsigned slot, rank, lane;
	unsigned extended_silicon_revision;
	int i;

	extended_silicon_revision = info->silicon_revision;
	if (info->silicon_revision == 0)
		for (channel = 0; channel < NUM_CHANNELS; channel++)
			for (slot = 0; slot < NUM_SLOTS; slot++)
				if ((info->spd[channel][slot][MODULE_TYPE] &
				     0xF) == 3)
					extended_silicon_revision = 4;

	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		for (slot = 0; slot < NUM_SLOTS; slot++)
			for (rank = 0; rank < NUM_SLOTS; rank++) {
				int card_timing_2;
				if (!info->populated_ranks[channel][slot][rank])
					continue;

				for (lane = 0; lane < 9; lane++) {
					int tm_reg;
					int card_timing;

					card_timing = 0;
					if ((info->spd[channel][slot]
					     [MODULE_TYPE] & 0xF) == 3) {
						int reference_card;
						reference_card =
						    info->spd[channel][slot]
						    [REFERENCE_RAW_CARD_USED] & 0x1f;
						if (reference_card == 3)
							card_timing =
							    u16_ffd1188[0][lane]
							    [info->clock_speed_index];
						if (reference_card == 5)
							card_timing =
							    u16_ffd1188[1][lane]
							    [info->clock_speed_index];
					}

					info->training.
					    lane_timings[0][channel][slot][rank]
					    [lane] =
					    u8_FFFD1218[info->clock_speed_index];
					info->training.
					    lane_timings[1][channel][slot][rank]
					    [lane] = 256;

					for (tm_reg = 2; tm_reg < 4; tm_reg++)
						info->training.
						    lane_timings[tm_reg]
						    [channel][slot][rank][lane] =
						    u8_FFFD1240[channel]
						    [extended_silicon_revision]
						    [lane][2 * slot + rank]
						    [info->clock_speed_index]
						    + info->max4048[channel]
						    + u8_FFFD0C78[channel]
						    [extended_silicon_revision]
						    [info->mode4030[channel]]
						    [slot][rank]
						    [info->clock_speed_index]
						    + card_timing;
					for (tm_reg = 0; tm_reg < 4; tm_reg++)
						write_500(info, channel,
							  info->training.
							  lane_timings[tm_reg]
							  [channel][slot][rank]
							  [lane],
							  get_timing_register_addr
							  (lane, tm_reg, slot,
							   rank), 9, 0);
				}

				card_timing_2 = 0;
				if (!(extended_silicon_revision != 4
				      || (info->populated_ranks_mask[channel] &
					  5) == 5)) {
					if ((info->spd[channel][slot]
					     [REFERENCE_RAW_CARD_USED] & 0x1F)
					    == 3)
						card_timing_2 =
						    u16_FFFE0EB8[0]
						    [info->clock_speed_index];
					if ((info->spd[channel][slot]
					     [REFERENCE_RAW_CARD_USED] & 0x1F)
					    == 5)
						card_timing_2 =
						    u16_FFFE0EB8[1]
						    [info->clock_speed_index];
				}

				for (i = 0; i < 3; i++)
					write_500(info, channel,
						  (card_timing_2 +
						   info->max4048[channel]
						   + u8_FFFD0EF8[channel]
						   [extended_silicon_revision]
						   [info->mode4030[channel]]
						   [info->clock_speed_index]),
						  u16_fffd0c50[i][slot][rank],
						  8, 1);
				write_500(info, channel,
					  (info->max4048[channel] +
					   u8_FFFD0C78[channel]
					   [extended_silicon_revision]
					   [info->mode4030[channel]]
					   [slot][rank]
					   [info->clock_speed_index]),
					  u16_fffd0c70[slot][rank], 7, 1);
			}
		if (!info->populated_ranks_mask[channel])
			continue;
		for (i = 0; i < 3; i++)
			write_500(info, channel,
				  (info->max4048[channel] +
				   info->avg4044[channel]
				   + u8_FFFD17E0[channel]
				   [extended_silicon_revision]
				   [info->mode4030[channel]]
				   [info->clock_speed_index]),
				  u16_fffd0c68[i], 8, 1);
	}
}

static unsigned int fsbcycle_ps(struct raminfo *info)
{
	return 900000 / info->fsb_frequency;
}

/* The time of DDR transfer in ps.  */
static unsigned int halfcycle_ps(struct raminfo *info)
{
	return 3750 / (info->clock_speed_index + 3);
}

/* The time of clock cycle in ps.  */
static unsigned int cycle_ps(struct raminfo *info)
{
	return 2 * halfcycle_ps(info);
}

/* Frequency in 1.(1)=10/9 MHz units. */
static unsigned frequency_11(struct raminfo *info)
{
	return (info->clock_speed_index + 3) * 120;
}

/* Frequency in 0.1 MHz units. */
static unsigned frequency_01(struct raminfo *info)
{
	return 100 * frequency_11(info) / 9;
}

static unsigned ps_to_halfcycles(struct raminfo *info, unsigned int ps)
{
	return (frequency_11(info) * 2) * ps / 900000;
}

static unsigned ns_to_cycles(struct raminfo *info, unsigned int ns)
{
	return (frequency_11(info)) * ns / 900;
}
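
/*
 * Unit helpers: clock_speed_index 0/1/2 corresponds to a 400/533/667 MHz
 * memory clock (DDR3-800/1066/1333), so for example halfcycle_ps() returns
 * 3750 / (0 + 3) = 1250 ps for DDR3-800 and frequency_11() returns
 * 3 * 120 = 360, i.e. 400 MHz expressed in 10/9 MHz units.
 */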

static void compute_derived_timings(struct raminfo *info)
{
	unsigned channel, slot, rank;
	int extended_silicon_revision;
	int some_delay_1_ps;
	int some_delay_2_ps;
	int some_delay_2_halfcycles_ceil;
	int some_delay_2_halfcycles_floor;
	int some_delay_3_ps;
	int some_delay_3_halfcycles;
	int some_delay_3_ps_rounded;
	int some_delay_1_cycle_ceil;
	int some_delay_1_cycle_floor;

	some_delay_3_halfcycles = 0;
	some_delay_3_ps_rounded = 0;
	extended_silicon_revision = info->silicon_revision;
	if (!info->silicon_revision)
		for (channel = 0; channel < NUM_CHANNELS; channel++)
			for (slot = 0; slot < NUM_SLOTS; slot++)
				if ((info->spd[channel][slot][MODULE_TYPE] &
				     0xF) == 3)
					extended_silicon_revision = 4;
	if (info->board_lane_delay[7] < 5)
		info->board_lane_delay[7] = 5;
	info->revision_flag_1 = 2;
	if (info->silicon_revision == 2 || info->silicon_revision == 3)
		info->revision_flag_1 = 0;
	if (info->revision < 16)
		info->revision_flag_1 = 0;

	if (info->revision < 8)
		info->revision_flag_1 = 0;
	if (info->revision >= 8 && (info->silicon_revision == 0
				    || info->silicon_revision == 1))
		some_delay_2_ps = 735;
	else
		some_delay_2_ps = 750;

	if (info->revision >= 0x10 && (info->silicon_revision == 0
				       || info->silicon_revision == 1))
		some_delay_1_ps = 3929;
	else
		some_delay_1_ps = 3490;

	some_delay_1_cycle_floor = some_delay_1_ps / cycle_ps(info);
	some_delay_1_cycle_ceil = some_delay_1_ps / cycle_ps(info);
	if (some_delay_1_ps % cycle_ps(info))
		some_delay_1_cycle_ceil++;
	else
		some_delay_1_cycle_floor--;
	info->some_delay_1_cycle_floor = some_delay_1_cycle_floor;
	if (info->revision_flag_1)
		some_delay_2_ps = halfcycle_ps(info) >> 6;
	some_delay_2_ps +=
	    max(some_delay_1_ps - 30,
		2 * halfcycle_ps(info) * (some_delay_1_cycle_ceil - 1) + 1000) +
	    375;
	some_delay_3_ps =
	    halfcycle_ps(info) - some_delay_2_ps % halfcycle_ps(info);
	if (info->revision_flag_1) {
		if (some_delay_3_ps < 150)
			some_delay_3_halfcycles = 0;
		else
			some_delay_3_halfcycles =
			    (some_delay_3_ps << 6) / halfcycle_ps(info);
		some_delay_3_ps_rounded =
		    halfcycle_ps(info) * some_delay_3_halfcycles >> 6;
	}
	some_delay_2_halfcycles_ceil =
	    (some_delay_2_ps + halfcycle_ps(info) - 1) / halfcycle_ps(info) -
	    2 * (some_delay_1_cycle_ceil - 1);
	if (info->revision_flag_1 && some_delay_3_ps < 150)
		some_delay_2_halfcycles_ceil++;
	some_delay_2_halfcycles_floor = some_delay_2_halfcycles_ceil;
	if (info->revision < 0x10)
		some_delay_2_halfcycles_floor =
		    some_delay_2_halfcycles_ceil - 1;
	if (!info->revision_flag_1)
		some_delay_2_halfcycles_floor++;
	info->some_delay_2_halfcycles_ceil = some_delay_2_halfcycles_ceil;
	info->some_delay_3_ps_rounded = some_delay_3_ps_rounded;
	if ((info->populated_ranks[0][0][0] && info->populated_ranks[0][1][0])
	    || (info->populated_ranks[1][0][0]
		&& info->populated_ranks[1][1][0]))
		info->max_slots_used_in_channel = 2;
	else
		info->max_slots_used_in_channel = 1;
	for (channel = 0; channel < 2; channel++)
		write_mchbar32(0x244 + (channel << 10),
			       ((info->revision < 8) ? 1 : 0x200)
			       | ((2 - info->max_slots_used_in_channel) << 17)
			       | (channel << 21)
			       | (info->some_delay_1_cycle_floor << 18)
			       | 0x9510);
	if (info->max_slots_used_in_channel == 1) {
		info->mode4030[0] = (count_ranks_in_channel(info, 0) == 2);
		info->mode4030[1] = (count_ranks_in_channel(info, 1) == 2);
	} else {
		info->mode4030[0] = ((count_ranks_in_channel(info, 0) == 1)
				     || (count_ranks_in_channel(info, 0) ==
					 2)) ? 2 : 3;	/* 2 if 1 or 2 ranks */
		info->mode4030[1] = ((count_ranks_in_channel(info, 1) == 1)
				     || (count_ranks_in_channel(info, 1) ==
					 2)) ? 2 : 3;
	}
	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		int max_of_unk;
		int min_of_unk_2;

		int i, count;
		int sum;

		if (!info->populated_ranks_mask[channel])
			continue;

		max_of_unk = 0;
		min_of_unk_2 = 32767;

		sum = 0;
		count = 0;
		for (i = 0; i < 3; i++) {
			int unk1;
			if (info->revision < 8)
				unk1 =
				    u8_FFFD1891[0][channel]
				    [info->clock_speed_index][i];
			else if (!(info->revision >= 0x10
				   || info->revision_flag_1))
				unk1 =
				    u8_FFFD1891[1][channel]
				    [info->clock_speed_index][i];
			else
				unk1 = 0;
			for (slot = 0; slot < NUM_SLOTS; slot++)
				for (rank = 0; rank < NUM_RANKS; rank++) {
					int a = 0;
					int b = 0;

					if (!info->populated_ranks[channel]
					    [slot][rank])
						continue;
					if (extended_silicon_revision == 4
					    && (info->populated_ranks_mask
						[channel] & 5) != 5) {
						if ((info->spd[channel][slot]
						     [REFERENCE_RAW_CARD_USED] &
						     0x1F) == 3) {
							a = u16_ffd1178[0]
							    [info->clock_speed_index];
							b = u16_fe0eb8[0]
							    [info->clock_speed_index];
						} else
						    if ((info->spd[channel][slot]
							 [REFERENCE_RAW_CARD_USED]
							 & 0x1F) == 5) {
							a = u16_ffd1178[1]
							    [info->clock_speed_index];
							b = u16_fe0eb8[1]
							    [info->clock_speed_index];
						}
					}
					min_of_unk_2 = min(min_of_unk_2, a);
					min_of_unk_2 = min(min_of_unk_2, b);
					if (rank == 0) {
						sum += a;
						count++;
					}
					{
						int t;
						t = b +
						    u8_FFFD0EF8[channel]
						    [extended_silicon_revision]
						    [info->mode4030[channel]]
						    [info->clock_speed_index];
						if (unk1 >= t)
							max_of_unk =
							    max(max_of_unk,
								unk1 - t);
					}
				}
			{
				int t =
				    u8_FFFD17E0[channel]
				    [extended_silicon_revision]
				    [info->mode4030[channel]]
				    [info->clock_speed_index] + min_of_unk_2;
				if (unk1 >= t)
					max_of_unk = max(max_of_unk, unk1 - t);
			}
		}

		info->avg4044[channel] = sum / count;
		info->max4048[channel] = max_of_unk;
	}
}

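/*
 * jedec_read() issues a JEDEC mode-register command: the MCHBAR 0x271/0x671
 * fields select the command mode and the "value" is encoded on the address
 * lines of a dummy read to the target rank. Odd ranks with mirrored address
 * mapping (SPD byte 63) get their address/bank bits swapped accordingly.
 * This is the apparent mechanism as read from the code below.
 */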
static void jedec_read(struct raminfo *info,
		       int channel, int slot, int rank,
		       int total_rank, u8 addr3, unsigned int value)
{
	/* Handle mirrored mapping.  */
	if ((rank & 1) && (info->spd[channel][slot][RANK1_ADDRESS_MAPPING] & 1))
		addr3 =
		    (addr3 & 0xCF) | ((addr3 & 0x10) << 1) | ((addr3 >> 1) &
							      0x10);
	write_mchbar8(0x271, addr3 | (read_mchbar8(0x271) & 0xC1));
	write_mchbar8(0x671, addr3 | (read_mchbar8(0x671) & 0xC1));

	/* Handle mirrored mapping.  */
	if ((rank & 1) && (info->spd[channel][slot][RANK1_ADDRESS_MAPPING] & 1))
		value =
		    (value & ~0x1f8) | ((value >> 1) & 0xa8) | ((value & 0xa8)
								 << 1);

	read32p((value << 3) | (total_rank << 28));

	write_mchbar8(0x271, (read_mchbar8(0x271) & 0xC3) | 2);
	write_mchbar8(0x671, (read_mchbar8(0x671) & 0xC3) | 2);

	read32p(total_rank << 28);
}

enum {
	MR1_RZQ12 = 512,
	MR1_RZQ2 = 64,
	MR1_RZQ4 = 4,
	MR1_ODS34OHM = 2
};

enum {
	MR0_BT_INTERLEAVED = 8,
	MR0_DLL_RESET_ON = 256
};

enum {
	MR2_RTT_WR_DISABLED = 0,
	MR2_RZQ2 = 1 << 10
};

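/*
 * jedec_init() walks all populated ranks and programs the DDR3 mode
 * registers through jedec_read() (addr3 values 0x28, 0x38, 0x18 and 6, i.e.
 * apparently MR2, MR3, MR1 and MR0 in that order), setting write recovery,
 * CAS latency, ODT/driver strength and optional auto self-refresh based on
 * the SPD data gathered earlier.
 */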
static void jedec_init(struct raminfo *info)
{
	int write_recovery;
	int channel, slot, rank;
	int total_rank;
	int dll_on;
	int self_refresh_temperature;
	int auto_self_refresh;

	auto_self_refresh = 1;
	self_refresh_temperature = 1;
	if (info->board_lane_delay[3] <= 10) {
		if (info->board_lane_delay[3] <= 8)
			write_recovery = info->board_lane_delay[3] - 4;
		else
			write_recovery = 5;
	} else {
		write_recovery = 6;
	}
	FOR_POPULATED_RANKS {
		auto_self_refresh &=
		    (info->spd[channel][slot][THERMAL_AND_REFRESH] >> 2) & 1;
		self_refresh_temperature &=
		    info->spd[channel][slot][THERMAL_AND_REFRESH] & 1;
	}
	if (auto_self_refresh == 1)
		self_refresh_temperature = 0;

	dll_on = ((info->silicon_revision != 2 && info->silicon_revision != 3)
		  || (info->populated_ranks[0][0][0]
		      && info->populated_ranks[0][1][0])
		  || (info->populated_ranks[1][0][0]
		      && info->populated_ranks[1][1][0]));

	total_rank = 0;

	for (channel = NUM_CHANNELS - 1; channel >= 0; channel--) {
		int rtt, rtt_wr = MR2_RTT_WR_DISABLED;
		int rzq_reg58e;

		if (info->silicon_revision == 2 || info->silicon_revision == 3) {
			rzq_reg58e = 64;
			rtt = MR1_RZQ2;
			if (info->clock_speed_index != 0) {
				rzq_reg58e = 4;
				if (info->populated_ranks_mask[channel] == 3)
					rtt = MR1_RZQ4;
			}
		} else {
			if ((info->populated_ranks_mask[channel] & 5) == 5) {
				rtt = MR1_RZQ12;
				rzq_reg58e = 64;
				rtt_wr = MR2_RZQ2;
			} else {
				rzq_reg58e = 4;
				rtt = MR1_RZQ4;
			}
		}

		write_mchbar16(0x588 + (channel << 10), 0x0);
		write_mchbar16(0x58a + (channel << 10), 0x4);
		write_mchbar16(0x58c + (channel << 10), rtt | MR1_ODS34OHM);
		write_mchbar16(0x58e + (channel << 10), rzq_reg58e | 0x82);
		write_mchbar16(0x590 + (channel << 10), 0x1282);

		for (slot = 0; slot < NUM_SLOTS; slot++)
			for (rank = 0; rank < NUM_RANKS; rank++)
				if (info->populated_ranks[channel][slot][rank]) {
					jedec_read(info, channel, slot, rank,
						   total_rank, 0x28,
						   rtt_wr |
						   (info->clock_speed_index << 3)
						   | (auto_self_refresh << 6) |
						   (self_refresh_temperature <<
						    7));
					jedec_read(info, channel, slot, rank,
						   total_rank, 0x38, 0);
					jedec_read(info, channel, slot, rank,
						   total_rank, 0x18,
						   rtt | MR1_ODS34OHM);
					jedec_read(info, channel, slot, rank,
						   total_rank, 6,
						   (dll_on << 12) |
						   (write_recovery << 9)
						   | ((info->cas_latency - 4) <<
						      4) | MR0_BT_INTERLEAVED |
						   MR0_DLL_RESET_ON);
					total_rank++;
				}
	}
}

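/*
 * Per-rank size/attribute registers: each populated rank contributes either
 * a fixed 256 MiB (pre-JEDEC placeholder mapping) or its real size derived
 * from the SPD density and device width, and the running per-channel total
 * is written in 64 MiB units.
 */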
static void program_modules_memory_map(struct raminfo *info, int pre_jedec)
{
	unsigned channel, slot, rank;
	unsigned int total_mb[2] = { 0, 0 };	/* total memory per channel in MB */
	unsigned int channel_0_non_interleaved;

	FOR_ALL_RANKS {
		if (info->populated_ranks[channel][slot][rank]) {
			total_mb[channel] +=
			    pre_jedec ? 256 : (256 <<
					       info->density[channel][slot] >>
					       info->is_x16_module[channel][slot]);
			write_mchbar8(0x208 + rank + 2 * slot + (channel << 10),
				      (pre_jedec ? (1 | ((1 + 1) << 1))
				       : (info->is_x16_module[channel][slot] |
					  ((info->density[channel][slot] + 1) <<
					   1))) | 0x80);
		}
		write_mchbar16(0x200 + (channel << 10) + 4 * slot + 2 * rank,
			       total_mb[channel] >> 6);
	}

	info->total_memory_mb = total_mb[0] + total_mb[1];

	info->interleaved_part_mb =
	    pre_jedec ? 0 : 2 * min(total_mb[0], total_mb[1]);
	info->non_interleaved_part_mb =
	    total_mb[0] + total_mb[1] - info->interleaved_part_mb;
	channel_0_non_interleaved = total_mb[0] - info->interleaved_part_mb / 2;
	write_mchbar32(0x100,
		       channel_0_non_interleaved |
		       (info->non_interleaved_part_mb << 16));
	if (!pre_jedec)
		write_mchbar16(0x104, info->interleaved_part_mb);
}

static void program_board_delay(struct raminfo *info)
{
	int cas_latency_shift;
	int some_delay_ns;
	int some_delay_3_half_cycles;

	unsigned channel, i;
	int high_multiplier;
	int lane_3_delay;
	int cas_latency_derived;

	high_multiplier = 0;
	some_delay_ns = 200;
	some_delay_3_half_cycles = 4;
	cas_latency_shift = info->silicon_revision == 0
	    || info->silicon_revision == 1 ? 1 : 0;
	if (info->revision < 8) {
		some_delay_ns = 600;
		cas_latency_shift = 0;
	}
	{
		int speed_bit;
		speed_bit =
		    ((info->clock_speed_index > 1
		      || (info->silicon_revision != 2
			  && info->silicon_revision != 3))) ^ (info->revision >=
							       0x10);
		write_500(info, 0, speed_bit | ((!info->use_ecc) << 1), 0x60e,
			  3, 1);
		write_500(info, 1, speed_bit | ((!info->use_ecc) << 1), 0x60e,
			  3, 1);
		if (info->revision >= 0x10 && info->clock_speed_index <= 1
		    && (info->silicon_revision == 2
			|| info->silicon_revision == 3))
			rmw_1d0(0x116, 5, 2, 4, 1);
	}
	write_mchbar32(0x120,
		       (1 << (info->max_slots_used_in_channel + 28)) |
		       0x188e7f9f);

	write_mchbar8(0x124,
		      info->board_lane_delay[4] +
		      ((frequency_01(info) + 999) / 1000));
	write_mchbar16(0x125, 0x1360);
	write_mchbar8(0x127, 0x40);
	if (info->fsb_frequency < frequency_11(info) / 2) {
		unsigned some_delay_2_half_cycles;
		high_multiplier = 1;
		some_delay_2_half_cycles =
		    ps_to_halfcycles(info,
				     ((3 * fsbcycle_ps(info)) >> 1) +
				     (halfcycle_ps(info) *
				      reg178_min[info->clock_speed_index] >> 6)
				     + 4 * halfcycle_ps(info)
				     + 2230);
		some_delay_3_half_cycles =
		    min((some_delay_2_half_cycles +
			 (frequency_11(info) * 2) * (28 -
						     some_delay_2_half_cycles) /
			 (frequency_11(info) * 2 -
			  4 * (info->fsb_frequency))) >> 3, 7);
	}
	if (read_mchbar8(0x2ca9) & 1)
		some_delay_3_half_cycles = 3;
	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		write_mchbar32(0x220 + (channel << 10),
			       read_mchbar32(0x220 +
					     (channel << 10)) | 0x18001117);
		write_mchbar32(0x224 + (channel << 10),
			       (info->max_slots_used_in_channel - 1)
			       | ((info->cas_latency - 5 -
				   info->clock_speed_index) << 21)
			       | ((info->max_slots_used_in_channel +
				   info->cas_latency - cas_latency_shift -
				   4) << 16)
			       | ((info->cas_latency - cas_latency_shift - 4) <<
				  26)
			       | ((info->cas_latency - info->clock_speed_index +
				   info->max_slots_used_in_channel - 6) << 8));
		write_mchbar32(0x228 + (channel << 10),
			       info->max_slots_used_in_channel);
		write_mchbar8(0x239 + (channel << 10), 32);
		write_mchbar32(0x248 + (channel << 10),
			       (high_multiplier << 24) |
			       (some_delay_3_half_cycles << 25) | 0x840000);
		write_mchbar32(0x278 + (channel << 10), 0xc362042);
		write_mchbar32(0x27c + (channel << 10), 0x8b000062);
		write_mchbar32(0x24c + (channel << 10),
			       ((!!info->clock_speed_index) << 17) |
			       (((2 + info->clock_speed_index -
				  (!!info->clock_speed_index))) << 12) |
			       0x10200);

		write_mchbar8(0x267 + (channel << 10), 0x4);
		write_mchbar16(0x272 + (channel << 10), 0x155);
		write_mchbar32(0x2bc + (channel << 10),
			       (read_mchbar32(0x2bc + (channel << 10)) &
				0xFF000000)
			       | 0x707070);

		write_500(info, channel,
			  ((!info->populated_ranks[channel][1][1])
			   | (!info->populated_ranks[channel][1][0] << 1)
			   | (!info->populated_ranks[channel][0][1] << 2)
			   | (!info->populated_ranks[channel][0][0] << 3)),
			  0x4c9, 4, 1);
	}

	write_mchbar8(0x2c4, ((1 + (info->clock_speed_index != 0)) << 6) | 0xC);
	{
		u8 freq_divisor = 2;
		if (info->fsb_frequency == frequency_11(info))
			freq_divisor = 3;
		else if (2 * info->fsb_frequency < 3 * (frequency_11(info) / 2))
			freq_divisor = 1;
		else
			freq_divisor = 2;
		write_mchbar32(0x2c0, (freq_divisor << 11) | 0x6009c400);
	}

	if (info->board_lane_delay[3] <= 10) {
		if (info->board_lane_delay[3] <= 8)
			lane_3_delay = info->board_lane_delay[3];
		else
			lane_3_delay = 10;
	} else {
		lane_3_delay = 12;
	}
	cas_latency_derived = info->cas_latency - info->clock_speed_index + 2;
	if (info->clock_speed_index > 1)
		cas_latency_derived++;
	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		write_mchbar32(0x240 + (channel << 10),
			       ((info->clock_speed_index == 0) * 0x11000) |
			       0x1002100 |
			       ((2 + info->clock_speed_index) << 4) |
			       (info->cas_latency - 3));
		write_500(info, channel, (info->clock_speed_index << 1) | 1,
			  0x609, 6, 1);
		write_500(info, channel,
			  info->clock_speed_index + 2 * info->cas_latency - 7,
			  0x601, 6, 1);

		write_mchbar32(0x250 + (channel << 10),
			       ((lane_3_delay + info->clock_speed_index +
				 9) << 6)
			       | (info->board_lane_delay[7] << 2)
			       | (info->board_lane_delay[4] << 16)
			       | (info->board_lane_delay[1] << 25)
			       | (info->board_lane_delay[1] << 29)
			       | 1);
		write_mchbar32(0x254 + (channel << 10),
			       (info->board_lane_delay[1] >> 3)
			       | ((info->board_lane_delay[8] +
				   4 * info->use_ecc) << 6)
			       | 0x80
			       | (info->board_lane_delay[6] << 1)
			       | (info->board_lane_delay[2] << 28)
			       | (cas_latency_derived << 16) | 0x4700000);
		write_mchbar32(0x258 + (channel << 10),
			       ((info->board_lane_delay[5] +
				 info->clock_speed_index + 9) << 12)
			       | ((info->clock_speed_index -
				   info->cas_latency + 12) << 8)
			       | (info->board_lane_delay[2] << 17)
			       | (info->board_lane_delay[4] << 24)
			       | 0x47);
		write_mchbar32(0x25c + (channel << 10),
			       (info->board_lane_delay[1] << 1)
			       | (info->board_lane_delay[0] << 8) | 0x1da50000);
		write_mchbar8(0x264 + (channel << 10), 0xff);
		write_mchbar8(0x5f8 + (channel << 10),
			      (cas_latency_shift << 3) | info->use_ecc);
	}

	program_modules_memory_map(info, 1);

	write_mchbar16(0x610,
		       (min(ns_to_cycles(info, some_delay_ns) / 2, 127) << 9)
		       | (read_mchbar16(0x610) & 0x1C3) | 0x3C);
	write_mchbar16(0x612, read_mchbar16(0x612) | 0x100);
	write_mchbar16(0x214, read_mchbar16(0x214) | 0x3E00);
	for (i = 0; i < 8; i++) {
		pci_write_config32(PCI_DEV (QUICKPATH_BUS, 0, 1), 0x80 + 4 * i,
				   (info->total_memory_mb - 64) | !i | 2);
		pci_write_config32(PCI_DEV (QUICKPATH_BUS, 0, 1), 0xc0 + 4 * i, 0);
	}
}

#define DEFAULT_PCI_MMIO_SIZE 2048
#define HOST_BRIDGE PCI_DEVFN(0, 0)

static unsigned int get_mmio_size(void)
{
	const struct device *dev;
	const struct northbridge_intel_nehalem_config *cfg = NULL;

	dev = dev_find_slot(0, HOST_BRIDGE);
	if (dev)
		cfg = dev->chip_info;

	/* If this is zero, it just means devicetree.cb didn't set it */
	if (!cfg || cfg->pci_mmio_size == 0)
		return DEFAULT_PCI_MMIO_SIZE;
	else
		return cfg->pci_mmio_size;
}

#define BETTER_MEMORY_MAP 0

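/*
 * program_total_memory_map() lays out the physical address map: TOM is the
 * total DRAM size, TOLUD the top of the DRAM window below 4 GiB (shrunk to
 * leave room for the PCI MMIO hole, IGD and GTT stolen memory), and memory
 * above TOLUD is remapped above 4 GiB up to TOUUD. For example, with 4 GiB
 * of DRAM and a 2 GiB MMIO hole, roughly the upper 2 GiB ends up remapped
 * over the 4 GiB boundary. TSEG for SMM is carved out just below the GTT
 * base.
 */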
static void program_total_memory_map(struct raminfo *info)
{
	unsigned int TOM, TOLUD, TOUUD;
	unsigned int quickpath_reserved;
	unsigned int REMAPbase;
	unsigned int uma_base_igd;
	unsigned int uma_base_gtt;
	unsigned int mmio_size;
	int memory_remap;
	unsigned int memory_map[8];
	int i;
	unsigned int current_limit;
	unsigned int tseg_base;
	int uma_size_igd = 0, uma_size_gtt = 0;

	memset(memory_map, 0, sizeof(memory_map));

	if (info->uma_enabled) {
		u16 t = pci_read_config16(NORTHBRIDGE, D0F0_GGC);
		gav(t);
		const int uma_sizes_gtt[16] =
		    { 0, 1, 0, 2, 0, 0, 0, 0, 0, 2, 3, 4, 42, 42, 42, 42 };
		/* Igd memory */
		const int uma_sizes_igd[16] = {
			0, 0, 0, 0, 0, 32, 48, 64, 128, 256, 96, 160, 224, 352,
			256, 512
		};

		uma_size_igd = uma_sizes_igd[(t >> 4) & 0xF];
		uma_size_gtt = uma_sizes_gtt[(t >> 8) & 0xF];
	}

	mmio_size = get_mmio_size();

	TOM = info->total_memory_mb;
	if (TOM == 4096)
		TOM = 4032;
	TOUUD = ALIGN_DOWN(TOM - info->memory_reserved_for_heci_mb, 64);
	TOLUD = ALIGN_DOWN(min(4096 - mmio_size +
			       ALIGN_UP(uma_size_igd + uma_size_gtt, 64),
			       TOUUD), 64);
	memory_remap = 0;
	if (TOUUD - TOLUD > 64) {
		memory_remap = 1;
		REMAPbase = max(4096, TOUUD);
		TOUUD = TOUUD - TOLUD + 4096;
	}
	if (TOUUD > 4096)
		memory_map[2] = TOUUD | 1;
	quickpath_reserved = 0;

	{
		u32 t;

		gav(t = pci_read_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 0x68));
		if (t & 0x800)
			quickpath_reserved =
			    (1 << find_lowest_bit_set32(t >> 20));
	}
	if (memory_remap)
		TOUUD -= quickpath_reserved;

	uma_base_igd = TOLUD - uma_size_igd;
	uma_base_gtt = uma_base_igd - uma_size_gtt;
	tseg_base = ALIGN_DOWN(uma_base_gtt, 64) - (CONFIG_SMM_TSEG_SIZE >> 20);
	if (!memory_remap)
		tseg_base -= quickpath_reserved;
	tseg_base = ALIGN_DOWN(tseg_base, 8);

	pci_write_config16(NORTHBRIDGE, D0F0_TOLUD, TOLUD << 4);
	pci_write_config16(NORTHBRIDGE, D0F0_TOM, TOM >> 6);
	if (memory_remap) {
		pci_write_config16(NORTHBRIDGE, D0F0_REMAPBASE, REMAPbase >> 6);
		pci_write_config16(NORTHBRIDGE, D0F0_REMAPLIMIT, (TOUUD - 64) >> 6);
	}
	pci_write_config16(NORTHBRIDGE, D0F0_TOUUD, TOUUD);

	if (info->uma_enabled) {
		pci_write_config32(NORTHBRIDGE, D0F0_IGD_BASE, uma_base_igd << 20);
		pci_write_config32(NORTHBRIDGE, D0F0_GTT_BASE, uma_base_gtt << 20);
	}
	pci_write_config32(NORTHBRIDGE, TSEG, tseg_base << 20);

	current_limit = 0;
	memory_map[0] = ALIGN_DOWN(uma_base_gtt, 64) | 1;
	memory_map[1] = 4096;
	for (i = 0; i < ARRAY_SIZE(memory_map); i++) {
		current_limit = max(current_limit, memory_map[i] & ~1);
		pci_write_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 4 * i + 0x80,
				   (memory_map[i] & 1) |
				   ALIGN_DOWN(current_limit - 1, 64) | 2);
		pci_write_config32(PCI_DEV(QUICKPATH_BUS, 0, 1), 4 * i + 0xc0, 0);
	}
}

static void collect_system_info(struct raminfo *info)
{
	u32 capid0[3];
	int i;
	unsigned channel;

	/* Wait for some bit, maybe TXT clear. */
	while (!(read8((u8 *)0xfed40000) & (1 << 7)));

	if (!info->heci_bar)
		gav(info->heci_bar =
		    pci_read_config32(HECIDEV, HECIBAR) & 0xFFFFFFF8);
	if (!info->memory_reserved_for_heci_mb) {
		/* Wait for ME to be ready */
		intel_early_me_init();
		info->memory_reserved_for_heci_mb = intel_early_me_uma_size();
	}

	for (i = 0; i < 3; i++)
		gav(capid0[i] =
		    pci_read_config32(NORTHBRIDGE, D0F0_CAPID0 | (i << 2)));
	gav(info->revision = pci_read_config8(NORTHBRIDGE, PCI_REVISION_ID));
	info->max_supported_clock_speed_index = (~capid0[1] & 7);

	if ((capid0[1] >> 11) & 1)
		info->uma_enabled = 0;
	else
		gav(info->uma_enabled =
		    pci_read_config8(NORTHBRIDGE, D0F0_DEVEN) & 8);
	/* Unrecognised: [0000:fffd3d2d] 37f81.37f82 ! CPUID: eax: 00000001; ecx: 00000e00 => 00020655.00010800.029ae3ff.bfebfbff */
	info->silicon_revision = 0;

	if (capid0[2] & 2) {
		info->silicon_revision = 0;
		info->max_supported_clock_speed_index = 2;
		for (channel = 0; channel < NUM_CHANNELS; channel++)
			if (info->populated_ranks[channel][0][0]
			    && (info->spd[channel][0][MODULE_TYPE] & 0xf) ==
			    3) {
				info->silicon_revision = 2;
				info->max_supported_clock_speed_index = 1;
			}
	} else {
		switch (((capid0[2] >> 18) & 1) + 2 * ((capid0[1] >> 3) & 1)) {
		case 1:
		case 2:
			info->silicon_revision = 3;
			break;
		case 3:
			info->silicon_revision = 0;
			break;
		case 0:
			info->silicon_revision = 2;
			break;
		}
		switch (pci_read_config16(NORTHBRIDGE, PCI_DEVICE_ID)) {
		case 0x40:
			info->silicon_revision = 0;
			break;
		case 0x48:
			info->silicon_revision = 1;
			break;
		}
	}
}

static void write_training_data(struct raminfo *info)
{
	int tm, channel, slot, rank, lane;
	if (info->revision < 8)
		return;

	for (tm = 0; tm < 4; tm++)
		for (channel = 0; channel < NUM_CHANNELS; channel++)
			for (slot = 0; slot < NUM_SLOTS; slot++)
				for (rank = 0; rank < NUM_RANKS; rank++)
					for (lane = 0; lane < 9; lane++)
						write_500(info, channel,
							  info->
							  cached_training->
							  lane_timings[tm]
							  [channel][slot][rank]
							  [lane],
							  get_timing_register_addr
							  (lane, tm, slot,
							   rank), 9, 0);
	write_1d0(info->cached_training->reg_178, 0x178, 7, 1);
	write_1d0(info->cached_training->reg_10b, 0x10b, 6, 1);
}

static void dump_timings(struct raminfo *info)
{
	int channel, slot, rank, lane, i;
	printk(BIOS_DEBUG, "Timings:\n");
	FOR_POPULATED_RANKS {
		printk(BIOS_DEBUG, "channel %d, slot %d, rank %d\n", channel,
		       slot, rank);
		for (lane = 0; lane < 9; lane++) {
			printk(BIOS_DEBUG, "lane %d: ", lane);
			for (i = 0; i < 4; i++) {
				printk(BIOS_DEBUG, "%x (%x) ",
				       read_500(info, channel,
						get_timing_register_addr
						(lane, i, slot, rank),
						9),
				       info->training.
				       lane_timings[i][channel][slot][rank]
				       [lane]);
			}
			printk(BIOS_DEBUG, "\n");
		}
	}
	printk(BIOS_DEBUG, "[178] = %x (%x)\n", read_1d0(0x178, 7),
	       info->training.reg_178);
	printk(BIOS_DEBUG, "[10b] = %x (%x)\n", read_1d0(0x10b, 6),
	       info->training.reg_10b);
}

/* Read the timings and other registers that need to be restored verbatim,
   and store them in CBMEM (the MRC cache) for use on S3 resume.
 */
static void save_timings(struct raminfo *info)
{
	struct ram_training train;
	int channel, slot, rank, lane, i;

	train = info->training;
	FOR_POPULATED_RANKS for (lane = 0; lane < 9; lane++)
		for (i = 0; i < 4; i++)
			train.lane_timings[i][channel][slot][rank][lane] =
			    read_500(info, channel,
				     get_timing_register_addr(lane, i, slot,
							      rank), 9);
	train.reg_178 = read_1d0(0x178, 7);
	train.reg_10b = read_1d0(0x10b, 6);

	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		u32 reg32;
		reg32 = read_mchbar32 ((channel << 10) + 0x274);
		train.reg274265[channel][0] = reg32 >> 16;
		train.reg274265[channel][1] = reg32 & 0xffff;
		train.reg274265[channel][2] = read_mchbar16 ((channel << 10) + 0x265) >> 8;
	}
	train.reg2ca9_bit0 = read_mchbar8(0x2ca9) & 1;
	train.reg_6dc = read_mchbar32 (0x6dc);
	train.reg_6e8 = read_mchbar32 (0x6e8);

	printk (BIOS_SPEW, "[6dc] = %x\n", train.reg_6dc);
	printk (BIOS_SPEW, "[6e8] = %x\n", train.reg_6e8);

	/* Save the MRC S3 restore data to cbmem */
	mrc_cache_stash_data(MRC_TRAINING_DATA, MRC_CACHE_VERSION,
			     &train, sizeof(train));
}

static const struct ram_training *get_cached_training(void)
{
	struct region_device rdev;
	if (mrc_cache_get_current(MRC_TRAINING_DATA, MRC_CACHE_VERSION,
				  &rdev))
		return 0;
	return (void *)rdev_mmap_full(&rdev);
}

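/*
 * HECI (MEI) transport helpers. As used below, the controller behind
 * DEFAULT_HECIBAR exposes a circular buffer: offset 0x00 is the host write
 * window, 0x04 the host control/status register (whose upper byte encodes
 * the buffer depth), 0x08 the ME read window and 0x0c the ME control/status
 * register. Messages are framed with struct mei_header and sent in
 * buffer-sized chunks.
 */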
1714/* FIXME: add timeout. */
1715static void wait_heci_ready(void)
1716{
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02001717 while (!(read32(DEFAULT_HECIBAR + 0xc) & 8)); // = 0x8000000c
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001718 write32((DEFAULT_HECIBAR + 0x4),
1719 (read32(DEFAULT_HECIBAR + 0x4) & ~0x10) | 0xc);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001720}
1721
1722/* FIXME: add timeout. */
1723static void wait_heci_cb_avail(int len)
1724{
1725 union {
1726 struct mei_csr csr;
1727 u32 raw;
1728 } csr;
1729
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02001730 while (!(read32(DEFAULT_HECIBAR + 0xc) & 8));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001731
1732 do
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001733 csr.raw = read32(DEFAULT_HECIBAR + 0x4);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001734 while (len >
1735 csr.csr.buffer_depth - (csr.csr.buffer_write_ptr -
1736 csr.csr.buffer_read_ptr));
1737}
1738
Elyes HAOUASfd051dc2018-07-08 12:39:34 +02001739static void send_heci_packet(struct mei_header *head, u32 *payload)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001740{
1741 int len = (head->length + 3) / 4;
1742 int i;
1743
1744 wait_heci_cb_avail(len + 1);
1745
	write32(DEFAULT_HECIBAR + 0, *(u32 *) head);
	for (i = 0; i < len - 1; i++)
		write32(DEFAULT_HECIBAR + 0, payload[i]);

	/* Only head->length bytes of the final dword are valid; mask off any
	   trailing bytes so stale buffer contents are not sent to the ME. */
	if (head->length & 3)
		write32(DEFAULT_HECIBAR + 0, payload[i]
			& ((1 << (8 * (head->length & 3))) - 1));
	else
		write32(DEFAULT_HECIBAR + 0, payload[i]);
1752 write32(DEFAULT_HECIBAR + 0x4, read32(DEFAULT_HECIBAR + 0x4) | 0x4);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001753}
1754
1755static void
Elyes HAOUASfd051dc2018-07-08 12:39:34 +02001756send_heci_message(u8 *msg, int len, u8 hostaddress, u8 clientaddress)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001757{
1758 struct mei_header head;
1759 int maxlen;
1760
1761 wait_heci_ready();
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001762 maxlen = (read32(DEFAULT_HECIBAR + 0x4) >> 24) * 4 - 4;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001763
1764 while (len) {
1765 int cur = len;
1766 if (cur > maxlen) {
1767 cur = maxlen;
1768 head.is_complete = 0;
1769 } else
1770 head.is_complete = 1;
1771 head.length = cur;
1772 head.reserved = 0;
1773 head.client_address = clientaddress;
1774 head.host_address = hostaddress;
1775 send_heci_packet(&head, (u32 *) msg);
1776 len -= cur;
1777 msg += cur;
1778 }
1779}
1780
1781/* FIXME: Add timeout. */
1782static int
Elyes HAOUASfd051dc2018-07-08 12:39:34 +02001783recv_heci_packet(struct raminfo *info, struct mei_header *head, u32 *packet,
1784 u32 *packet_size)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001785{
1786 union {
1787 struct mei_csr csr;
1788 u32 raw;
1789 } csr;
1790 int i = 0;
1791
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001792 write32(DEFAULT_HECIBAR + 0x4, read32(DEFAULT_HECIBAR + 0x4) | 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001793 do {
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001794 csr.raw = read32(DEFAULT_HECIBAR + 0xc);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001795 }
1796 while (csr.csr.buffer_write_ptr == csr.csr.buffer_read_ptr);
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001797 *(u32 *) head = read32(DEFAULT_HECIBAR + 0x8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001798 if (!head->length) {
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001799 write32(DEFAULT_HECIBAR + 0x4,
1800 read32(DEFAULT_HECIBAR + 0x4) | 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001801 *packet_size = 0;
1802 return 0;
1803 }
1804 if (head->length + 4 > 4 * csr.csr.buffer_depth
1805 || head->length > *packet_size) {
1806 *packet_size = 0;
1807 return -1;
1808 }
1809
1810 do
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001811 csr.raw = read32(DEFAULT_HECIBAR + 0xc);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001812 while ((head->length + 3) >> 2 >
1813 csr.csr.buffer_write_ptr - csr.csr.buffer_read_ptr);
1814
1815 for (i = 0; i < (head->length + 3) >> 2; i++)
		packet[i] = read32(DEFAULT_HECIBAR + 0x8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001817 *packet_size = head->length;
1818 if (!csr.csr.ready)
1819 *packet_size = 0;
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001820 write32(DEFAULT_HECIBAR + 0x4, read32(DEFAULT_HECIBAR + 0x4) | 4);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001821 return 0;
1822}
1823
1824/* FIXME: Add timeout. */
1825static int
Elyes HAOUASfd051dc2018-07-08 12:39:34 +02001826recv_heci_message(struct raminfo *info, u32 *message, u32 *message_size)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001827{
1828 struct mei_header head;
1829 int current_position;
1830
1831 current_position = 0;
1832 while (1) {
1833 u32 current_size;
1834 current_size = *message_size - current_position;
1835 if (recv_heci_packet
1836 (info, &head, message + (current_position >> 2),
1837 &current_size) == -1)
1838 break;
1839 if (!current_size)
1840 break;
1841 current_position += current_size;
1842 if (head.is_complete) {
1843 *message_size = current_position;
1844 return 0;
1845 }
1846
1847 if (current_position >= *message_size)
1848 break;
1849 }
1850 *message_size = 0;
1851 return -1;
1852}
1853
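/*
 * Report the memory set aside for the ME. The MKHI "set UMA" request below
 * carries the host address and size (in MiB) of the region reserved at the
 * top of memory; the ME is expected to answer with the same command code
 * with bit 7 set. The field names in the two structures are descriptive
 * guesses for otherwise undocumented fields.
 */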
1854static void send_heci_uma_message(struct raminfo *info)
1855{
1856 struct uma_reply {
1857 u8 group_id;
1858 u8 command;
1859 u8 reserved;
1860 u8 result;
1861 u8 field2;
1862 u8 unk3[0x48 - 4 - 1];
Stefan Reinauer6a001132017-07-13 02:20:27 +02001863 } __packed reply;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001864 struct uma_message {
1865 u8 group_id;
1866 u8 cmd;
1867 u8 reserved;
1868 u8 result;
1869 u32 c2;
1870 u64 heci_uma_addr;
1871 u32 memory_reserved_for_heci_mb;
1872 u16 c3;
Stefan Reinauer6a001132017-07-13 02:20:27 +02001873 } __packed msg = {
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001874 0, MKHI_SET_UMA, 0, 0,
1875 0x82,
1876 info->heci_uma_addr, info->memory_reserved_for_heci_mb, 0};
1877 u32 reply_size;
1878
1879 send_heci_message((u8 *) & msg, sizeof(msg), 0, 7);
1880
1881 reply_size = sizeof(reply);
1882 if (recv_heci_message(info, (u32 *) & reply, &reply_size) == -1)
1883 return;
1884
1885 if (reply.command != (MKHI_SET_UMA | (1 << 7)))
1886 die("HECI init failed\n");
1887}
1888
1889static void setup_heci_uma(struct raminfo *info)
1890{
1891 u32 reg44;
1892
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001893 reg44 = pci_read_config32(HECIDEV, 0x44); // = 0x80010020
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001894 info->memory_reserved_for_heci_mb = 0;
1895 info->heci_uma_addr = 0;
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001896 if (!((reg44 & 0x10000) && !(pci_read_config32(HECIDEV, 0x40) & 0x20)))
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001897 return;
1898
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001899 info->heci_bar = pci_read_config32(HECIDEV, 0x10) & 0xFFFFFFF0;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001900 info->memory_reserved_for_heci_mb = reg44 & 0x3f;
1901 info->heci_uma_addr =
1902 ((u64)
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001903 ((((u64) pci_read_config16(NORTHBRIDGE, D0F0_TOM)) << 6) -
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001904 info->memory_reserved_for_heci_mb)) << 20;
1905
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001906 pci_read_config32(NORTHBRIDGE, DMIBAR);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001907 if (info->memory_reserved_for_heci_mb) {
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001908 write32(DEFAULT_DMIBAR + 0x14,
1909 read32(DEFAULT_DMIBAR + 0x14) & ~0x80);
1910 write32(DEFAULT_RCBA + 0x14,
1911 read32(DEFAULT_RCBA + 0x14) & ~0x80);
1912 write32(DEFAULT_DMIBAR + 0x20,
1913 read32(DEFAULT_DMIBAR + 0x20) & ~0x80);
1914 write32(DEFAULT_RCBA + 0x20,
1915 read32(DEFAULT_RCBA + 0x20) & ~0x80);
1916 write32(DEFAULT_DMIBAR + 0x2c,
1917 read32(DEFAULT_DMIBAR + 0x2c) & ~0x80);
1918 write32(DEFAULT_RCBA + 0x30,
1919 read32(DEFAULT_RCBA + 0x30) & ~0x80);
1920 write32(DEFAULT_DMIBAR + 0x38,
1921 read32(DEFAULT_DMIBAR + 0x38) & ~0x80);
1922 write32(DEFAULT_RCBA + 0x40,
1923 read32(DEFAULT_RCBA + 0x40) & ~0x80);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001924
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08001925 write32(DEFAULT_RCBA + 0x40, 0x87000080); // OK
1926 write32(DEFAULT_DMIBAR + 0x38, 0x87000080); // OK
1927 while (read16(DEFAULT_RCBA + 0x46) & 2
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02001928 && read16(DEFAULT_DMIBAR + 0x3e) & 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001929 }
1930
1931 write_mchbar32(0x24, 0x10000 + info->memory_reserved_for_heci_mb);
1932
1933 send_heci_uma_message(info);
1934
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03001935 pci_write_config32(HECIDEV, 0x10, 0x0);
1936 pci_write_config8(HECIDEV, 0x4, 0x0);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001937
1938}
1939
1940static int have_match_ranks(struct raminfo *info, int channel, int ranks)
1941{
1942 int ranks_in_channel;
1943 ranks_in_channel = info->populated_ranks[channel][0][0]
1944 + info->populated_ranks[channel][0][1]
1945 + info->populated_ranks[channel][1][0]
1946 + info->populated_ranks[channel][1][1];
1947
1948 /* empty channel */
1949 if (ranks_in_channel == 0)
1950 return 1;
1951
1952 if (ranks_in_channel != ranks)
1953 return 0;
1954 /* single slot */
1955 if (info->populated_ranks[channel][0][0] !=
1956 info->populated_ranks[channel][1][0])
1957 return 1;
1958 if (info->populated_ranks[channel][0][1] !=
1959 info->populated_ranks[channel][1][1])
1960 return 1;
1961 if (info->is_x16_module[channel][0] != info->is_x16_module[channel][1])
1962 return 0;
1963 if (info->density[channel][0] != info->density[channel][1])
1964 return 0;
1965 return 1;
1966}
1967
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01001968static void read_4090(struct raminfo *info)
1969{
1970 int i, channel, slot, rank, lane;
1971 for (i = 0; i < 2; i++)
1972 for (slot = 0; slot < NUM_SLOTS; slot++)
1973 for (rank = 0; rank < NUM_RANKS; rank++)
1974 for (lane = 0; lane < 9; lane++)
1975 info->training.
1976 lane_timings[0][i][slot][rank][lane]
1977 = 32;
1978
1979 for (i = 1; i < 4; i++)
1980 for (channel = 0; channel < NUM_CHANNELS; channel++)
1981 for (slot = 0; slot < NUM_SLOTS; slot++)
1982 for (rank = 0; rank < NUM_RANKS; rank++)
1983 for (lane = 0; lane < 9; lane++) {
1984 info->training.
1985 lane_timings[i][channel]
1986 [slot][rank][lane] =
1987 read_500(info, channel,
1988 get_timing_register_addr
1989 (lane, i, slot,
1990 rank), 9)
1991 + (i == 1) * 11; // !!!!
1992 }
1993
1994}
1995
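/*
 * Pattern generator for the first memory test: given a dword index it
 * returns a deterministic value, so write_testing() can fill a rank and
 * check_testing() can recompute the expected data and XOR it against what
 * was read back. The invmask table and the 480-entry period appear to be
 * chosen to exercise every lane; that reading is inferred from the code
 * itself.
 */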
1996static u32 get_etalon2(int flip, u32 addr)
1997{
1998 const u16 invmask[] = {
1999 0xaaaa, 0x6db6, 0x4924, 0xeeee, 0xcccc, 0x8888, 0x7bde, 0x739c,
2000 0x6318, 0x4210, 0xefbe, 0xcf3c, 0x8e38, 0x0c30, 0x0820
2001 };
2002 u32 ret;
2003 u32 comp4 = addr / 480;
2004 addr %= 480;
2005 u32 comp1 = addr & 0xf;
2006 u32 comp2 = (addr >> 4) & 1;
2007 u32 comp3 = addr >> 5;
2008
2009 if (comp4)
2010 ret = 0x1010101 << (comp4 - 1);
2011 else
2012 ret = 0;
2013 if (flip ^ (((invmask[comp3] >> comp1) ^ comp2) & 1))
2014 ret = ~ret;
2015
2016 return ret;
2017}
2018
2019static void disable_cache(void)
2020{
2021 msr_t msr = {.lo = 0, .hi = 0 };
2022
Alexandru Gagniuc86091f92015-09-30 20:23:09 -07002023 wrmsr(MTRR_PHYS_BASE(3), msr);
2024 wrmsr(MTRR_PHYS_MASK(3), msr);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002025}
2026
2027static void enable_cache(unsigned int base, unsigned int size)
2028{
2029 msr_t msr;
2030 msr.lo = base | MTRR_TYPE_WRPROT;
2031 msr.hi = 0;
Alexandru Gagniuc86091f92015-09-30 20:23:09 -07002032 wrmsr(MTRR_PHYS_BASE(3), msr);
	msr.lo = ((~(ALIGN_DOWN(size + 4096, 4096) - 1) | MTRR_PHYS_MASK_VALID)
		  & 0xffffffff);
2035 msr.hi = 0x0000000f;
Alexandru Gagniuc86091f92015-09-30 20:23:09 -07002036 wrmsr(MTRR_PHYS_MASK(3), msr);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002037}
2038
2039static void flush_cache(u32 start, u32 size)
2040{
2041 u32 end;
2042 u32 addr;
2043
2044 end = start + (ALIGN_DOWN(size + 4096, 4096));
2045 for (addr = start; addr < end; addr += 64)
2046 clflush(addr);
2047}
2048
2049static void clear_errors(void)
2050{
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03002051 pci_write_config8(NORTHBRIDGE, 0xc0, 0x01);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002052}
2053
2054static void write_testing(struct raminfo *info, int totalrank, int flip)
2055{
2056 int nwrites = 0;
2057 /* in 8-byte units. */
2058 u32 offset;
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002059 u8 *base;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002060
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002061 base = (u8 *)(totalrank << 28);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002062 for (offset = 0; offset < 9 * 480; offset += 2) {
2063 write32(base + offset * 8, get_etalon2(flip, offset));
2064 write32(base + offset * 8 + 4, get_etalon2(flip, offset));
2065 write32(base + offset * 8 + 8, get_etalon2(flip, offset + 1));
2066 write32(base + offset * 8 + 12, get_etalon2(flip, offset + 1));
2067 nwrites += 4;
2068 if (nwrites >= 320) {
2069 clear_errors();
2070 nwrites = 0;
2071 }
2072 }
2073}
2074
2075static u8 check_testing(struct raminfo *info, u8 total_rank, int flip)
2076{
2077 u8 failmask = 0;
2078 int i;
2079 int comp1, comp2, comp3;
2080 u32 failxor[2] = { 0, 0 };
2081
2082 enable_cache((total_rank << 28), 1728 * 5 * 4);
2083
2084 for (comp3 = 0; comp3 < 9 && failmask != 0xff; comp3++) {
2085 for (comp1 = 0; comp1 < 4; comp1++)
2086 for (comp2 = 0; comp2 < 60; comp2++) {
2087 u32 re[4];
2088 u32 curroffset =
2089 comp3 * 8 * 60 + 2 * comp1 + 8 * comp2;
2090 read128((total_rank << 28) | (curroffset << 3),
2091 (u64 *) re);
2092 failxor[0] |=
2093 get_etalon2(flip, curroffset) ^ re[0];
2094 failxor[1] |=
2095 get_etalon2(flip, curroffset) ^ re[1];
2096 failxor[0] |=
2097 get_etalon2(flip, curroffset | 1) ^ re[2];
2098 failxor[1] |=
2099 get_etalon2(flip, curroffset | 1) ^ re[3];
2100 }
2101 for (i = 0; i < 8; i++)
2102 if ((0xff << (8 * (i % 4))) & failxor[i / 4])
2103 failmask |= 1 << i;
2104 }
2105 disable_cache();
2106 flush_cache((total_rank << 28), 1728 * 5 * 4);
2107 return failmask;
2108}
2109
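/*
 * Seed tables for the second (type-2) test pattern. get_etalon() combines
 * seed1, get_seed2() and make_shift() to derive a pseudo-random 32-bit word
 * from the target address, matching what write_testing_type2() and
 * check_testing_type2() generate and verify. The constants look like
 * hand-picked stress patterns; their origin is not documented.
 */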
2110const u32 seed1[0x18] = {
2111 0x3a9d5ab5, 0x576cb65b, 0x555773b6, 0x2ab772ee,
2112 0x555556ee, 0x3a9d5ab5, 0x576cb65b, 0x555773b6,
2113 0x2ab772ee, 0x555556ee, 0x5155a555, 0x5155a555,
2114 0x5155a555, 0x5155a555, 0x3a9d5ab5, 0x576cb65b,
2115 0x555773b6, 0x2ab772ee, 0x555556ee, 0x55d6b4a5,
2116 0x366d6b3a, 0x2ae5ddbb, 0x3b9ddbb7, 0x55d6b4a5,
2117};
2118
2119static u32 get_seed2(int a, int b)
2120{
2121 const u32 seed2[5] = {
2122 0x55555555, 0x33333333, 0x2e555a55, 0x55555555,
2123 0x5b6db6db,
2124 };
2125 u32 r;
2126 r = seed2[(a + (a >= 10)) / 5];
2127 return b ? ~r : r;
2128}
2129
2130static int make_shift(int comp2, int comp5, int x)
2131{
2132 const u8 seed3[32] = {
2133 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2134 0x00, 0x00, 0x38, 0x1c, 0x3c, 0x18, 0x38, 0x38,
2135 0x38, 0x38, 0x38, 0x38, 0x0f, 0x0f, 0x0f, 0x0f,
2136 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f,
2137 };
2138
2139 return (comp2 - ((seed3[comp5] >> (x & 7)) & 1)) & 0x1f;
2140}
2141
2142static u32 get_etalon(int flip, u32 addr)
2143{
2144 u32 mask_byte = 0;
2145 int comp1 = (addr >> 1) & 1;
2146 int comp2 = (addr >> 3) & 0x1f;
2147 int comp3 = (addr >> 8) & 0xf;
2148 int comp4 = (addr >> 12) & 0xf;
2149 int comp5 = (addr >> 16) & 0x1f;
2150 u32 mask_bit = ~(0x10001 << comp3);
2151 u32 part1;
2152 u32 part2;
2153 int byte;
2154
2155 part2 =
2156 ((seed1[comp5] >>
2157 make_shift(comp2, comp5,
2158 (comp3 >> 3) | (comp1 << 2) | 2)) & 1) ^ flip;
2159 part1 =
2160 ((seed1[comp5] >>
2161 make_shift(comp2, comp5,
2162 (comp3 >> 3) | (comp1 << 2) | 0)) & 1) ^ flip;
2163
2164 for (byte = 0; byte < 4; byte++)
2165 if ((get_seed2(comp5, comp4) >>
2166 make_shift(comp2, comp5, (byte | (comp1 << 2)))) & 1)
2167 mask_byte |= 0xff << (8 * byte);
2168
2169 return (mask_bit & mask_byte) | (part1 << comp3) | (part2 <<
2170 (comp3 + 16));
2171}
2172
2173static void
2174write_testing_type2(struct raminfo *info, u8 totalrank, u8 region, u8 block,
2175 char flip)
2176{
2177 int i;
2178 for (i = 0; i < 2048; i++)
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002179 write32p((totalrank << 28) | (region << 25) | (block << 16) |
2180 (i << 2), get_etalon(flip, (block << 16) | (i << 2)));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002181}
2182
2183static u8
2184check_testing_type2(struct raminfo *info, u8 totalrank, u8 region, u8 block,
2185 char flip)
2186{
2187 u8 failmask = 0;
2188 u32 failxor[2];
2189 int i;
2190 int comp1, comp2, comp3;
2191
2192 failxor[0] = 0;
2193 failxor[1] = 0;
2194
2195 enable_cache(totalrank << 28, 134217728);
2196 for (comp3 = 0; comp3 < 2 && failmask != 0xff; comp3++) {
2197 for (comp1 = 0; comp1 < 16; comp1++)
2198 for (comp2 = 0; comp2 < 64; comp2++) {
2199 u32 addr =
2200 (totalrank << 28) | (region << 25) | (block
2201 << 16)
2202 | (comp3 << 12) | (comp2 << 6) | (comp1 <<
2203 2);
2204 failxor[comp1 & 1] |=
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08002205 read32p(addr) ^ get_etalon(flip, addr);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002206 }
2207 for (i = 0; i < 8; i++)
2208 if ((0xff << (8 * (i % 4))) & failxor[i / 4])
2209 failmask |= 1 << i;
2210 }
2211 disable_cache();
2212 flush_cache((totalrank << 28) | (region << 25) | (block << 16), 16384);
2213 return failmask;
2214}
2215
2216static int check_bounded(unsigned short *vals, u16 bound)
2217{
2218 int i;
2219
2220 for (i = 0; i < 8; i++)
2221 if (vals[i] < bound)
2222 return 0;
2223 return 1;
2224}
2225
2226enum state {
2227 BEFORE_USABLE = 0, AT_USABLE = 1, AT_MARGIN = 2, COMPLETE = 3
2228};
2229
2230static int validate_state(enum state *in)
2231{
2232 int i;
2233 for (i = 0; i < 8; i++)
2234 if (in[i] != COMPLETE)
2235 return 0;
2236 return 1;
2237}
2238
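/*
 * Per-lane search state machine used by the training loops. While the
 * caller sweeps a delay value (val), each lane advances BEFORE_USABLE ->
 * AT_USABLE -> AT_MARGIN -> COMPLETE: it reaches AT_MARGIN after `margin`
 * consecutive passing steps (res_low records the first passing value) and
 * completes on the first failure after that, or when the sweep reaches
 * uplimit (res_high records the last passing value).
 */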
2239static void
Elyes HAOUASfd051dc2018-07-08 12:39:34 +02002240do_fsm(enum state *state, u16 *counter,
2241 u8 fail_mask, int margin, int uplimit,
2242 u8 *res_low, u8 *res_high, u8 val)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002243{
2244 int lane;
2245
2246 for (lane = 0; lane < 8; lane++) {
2247 int is_fail = (fail_mask >> lane) & 1;
2248 switch (state[lane]) {
2249 case BEFORE_USABLE:
2250 if (!is_fail) {
2251 counter[lane] = 1;
2252 state[lane] = AT_USABLE;
2253 break;
2254 }
2255 counter[lane] = 0;
2256 state[lane] = BEFORE_USABLE;
2257 break;
2258 case AT_USABLE:
2259 if (!is_fail) {
2260 ++counter[lane];
2261 if (counter[lane] >= margin) {
2262 state[lane] = AT_MARGIN;
2263 res_low[lane] = val - margin + 1;
2264 break;
2265 }
				state[lane] = AT_USABLE;
2267 break;
2268 }
2269 counter[lane] = 0;
2270 state[lane] = BEFORE_USABLE;
2271 break;
2272 case AT_MARGIN:
2273 if (is_fail) {
2274 state[lane] = COMPLETE;
2275 res_high[lane] = val - 1;
2276 } else {
2277 counter[lane]++;
2278 state[lane] = AT_MARGIN;
2279 if (val == uplimit) {
2280 state[lane] = COMPLETE;
2281 res_high[lane] = uplimit;
2282 }
2283 }
2284 break;
2285 case COMPLETE:
2286 break;
2287 }
2288 }
2289}
2290
2291static void
2292train_ram_at_178(struct raminfo *info, u8 channel, int slot, int rank,
2293 u8 total_rank, u8 reg_178, int first_run, int niter,
2294 timing_bounds_t * timings)
2295{
2296 int lane;
2297 enum state state[8];
2298 u16 count[8];
2299 u8 lower_usable[8];
2300 u8 upper_usable[8];
2301 unsigned short num_sucessfully_checked[8];
2302 u8 secondary_total_rank;
2303 u8 reg1b3;
2304
2305 if (info->populated_ranks_mask[1]) {
2306 if (channel == 1)
2307 secondary_total_rank =
2308 info->populated_ranks[1][0][0] +
2309 info->populated_ranks[1][0][1]
2310 + info->populated_ranks[1][1][0] +
2311 info->populated_ranks[1][1][1];
2312 else
2313 secondary_total_rank = 0;
2314 } else
2315 secondary_total_rank = total_rank;
2316
2317 {
2318 int i;
2319 for (i = 0; i < 8; i++)
2320 state[i] = BEFORE_USABLE;
2321 }
2322
2323 if (!first_run) {
2324 int is_all_ok = 1;
2325 for (lane = 0; lane < 8; lane++)
2326 if (timings[reg_178][channel][slot][rank][lane].
2327 smallest ==
2328 timings[reg_178][channel][slot][rank][lane].
2329 largest) {
2330 timings[reg_178][channel][slot][rank][lane].
2331 smallest = 0;
2332 timings[reg_178][channel][slot][rank][lane].
2333 largest = 0;
2334 is_all_ok = 0;
2335 }
2336 if (is_all_ok) {
2337 int i;
2338 for (i = 0; i < 8; i++)
2339 state[i] = COMPLETE;
2340 }
2341 }
2342
2343 for (reg1b3 = 0; reg1b3 < 0x30 && !validate_state(state); reg1b3++) {
2344 u8 failmask = 0;
2345 write_1d0(reg1b3 ^ 32, 0x1b3, 6, 1);
2346 write_1d0(reg1b3 ^ 32, 0x1a3, 6, 1);
2347 failmask = check_testing(info, total_rank, 0);
2348 write_mchbar32(0xfb0, read_mchbar32(0xfb0) | 0x00030000);
2349 do_fsm(state, count, failmask, 5, 47, lower_usable,
2350 upper_usable, reg1b3);
2351 }
2352
2353 if (reg1b3) {
2354 write_1d0(0, 0x1b3, 6, 1);
2355 write_1d0(0, 0x1a3, 6, 1);
2356 for (lane = 0; lane < 8; lane++) {
2357 if (state[lane] == COMPLETE) {
2358 timings[reg_178][channel][slot][rank][lane].
2359 smallest =
2360 lower_usable[lane] +
2361 (info->training.
2362 lane_timings[0][channel][slot][rank][lane]
2363 & 0x3F) - 32;
2364 timings[reg_178][channel][slot][rank][lane].
2365 largest =
2366 upper_usable[lane] +
2367 (info->training.
2368 lane_timings[0][channel][slot][rank][lane]
2369 & 0x3F) - 32;
2370 }
2371 }
2372 }
2373
2374 if (!first_run) {
2375 for (lane = 0; lane < 8; lane++)
2376 if (state[lane] == COMPLETE) {
2377 write_500(info, channel,
2378 timings[reg_178][channel][slot][rank]
2379 [lane].smallest,
2380 get_timing_register_addr(lane, 0,
2381 slot, rank),
2382 9, 1);
2383 write_500(info, channel,
2384 timings[reg_178][channel][slot][rank]
2385 [lane].smallest +
2386 info->training.
2387 lane_timings[1][channel][slot][rank]
2388 [lane]
2389 -
2390 info->training.
2391 lane_timings[0][channel][slot][rank]
2392 [lane], get_timing_register_addr(lane,
2393 1,
2394 slot,
2395 rank),
2396 9, 1);
2397 num_sucessfully_checked[lane] = 0;
2398 } else
2399 num_sucessfully_checked[lane] = -1;
2400
2401 do {
2402 u8 failmask = 0;
2403 int i;
2404 for (i = 0; i < niter; i++) {
2405 if (failmask == 0xFF)
2406 break;
2407 failmask |=
2408 check_testing_type2(info, total_rank, 2, i,
2409 0);
2410 failmask |=
2411 check_testing_type2(info, total_rank, 3, i,
2412 1);
2413 }
2414 write_mchbar32(0xfb0,
2415 read_mchbar32(0xfb0) | 0x00030000);
2416 for (lane = 0; lane < 8; lane++)
2417 if (num_sucessfully_checked[lane] != 0xffff) {
2418 if ((1 << lane) & failmask) {
2419 if (timings[reg_178][channel]
2420 [slot][rank][lane].
2421 largest <=
2422 timings[reg_178][channel]
2423 [slot][rank][lane].smallest)
2424 num_sucessfully_checked
2425 [lane] = -1;
2426 else {
2427 num_sucessfully_checked
2428 [lane] = 0;
2429 timings[reg_178]
2430 [channel][slot]
2431 [rank][lane].
2432 smallest++;
2433 write_500(info, channel,
2434 timings
2435 [reg_178]
2436 [channel]
2437 [slot][rank]
2438 [lane].
2439 smallest,
2440 get_timing_register_addr
2441 (lane, 0,
2442 slot, rank),
2443 9, 1);
2444 write_500(info, channel,
2445 timings
2446 [reg_178]
2447 [channel]
2448 [slot][rank]
2449 [lane].
2450 smallest +
2451 info->
2452 training.
2453 lane_timings
2454 [1][channel]
2455 [slot][rank]
2456 [lane]
2457 -
2458 info->
2459 training.
2460 lane_timings
2461 [0][channel]
2462 [slot][rank]
2463 [lane],
2464 get_timing_register_addr
2465 (lane, 1,
2466 slot, rank),
2467 9, 1);
2468 }
2469 } else
2470 num_sucessfully_checked[lane]++;
2471 }
2472 }
2473 while (!check_bounded(num_sucessfully_checked, 2));
2474
2475 for (lane = 0; lane < 8; lane++)
2476 if (state[lane] == COMPLETE) {
2477 write_500(info, channel,
2478 timings[reg_178][channel][slot][rank]
2479 [lane].largest,
2480 get_timing_register_addr(lane, 0,
2481 slot, rank),
2482 9, 1);
2483 write_500(info, channel,
2484 timings[reg_178][channel][slot][rank]
2485 [lane].largest +
2486 info->training.
2487 lane_timings[1][channel][slot][rank]
2488 [lane]
2489 -
2490 info->training.
2491 lane_timings[0][channel][slot][rank]
2492 [lane], get_timing_register_addr(lane,
2493 1,
2494 slot,
2495 rank),
2496 9, 1);
2497 num_sucessfully_checked[lane] = 0;
2498 } else
2499 num_sucessfully_checked[lane] = -1;
2500
2501 do {
2502 int failmask = 0;
2503 int i;
2504 for (i = 0; i < niter; i++) {
2505 if (failmask == 0xFF)
2506 break;
2507 failmask |=
2508 check_testing_type2(info, total_rank, 2, i,
2509 0);
2510 failmask |=
2511 check_testing_type2(info, total_rank, 3, i,
2512 1);
2513 }
2514
2515 write_mchbar32(0xfb0,
2516 read_mchbar32(0xfb0) | 0x00030000);
2517 for (lane = 0; lane < 8; lane++) {
2518 if (num_sucessfully_checked[lane] != 0xffff) {
2519 if ((1 << lane) & failmask) {
2520 if (timings[reg_178][channel]
2521 [slot][rank][lane].
2522 largest <=
2523 timings[reg_178][channel]
2524 [slot][rank][lane].
2525 smallest) {
2526 num_sucessfully_checked
2527 [lane] = -1;
2528 } else {
2529 num_sucessfully_checked
2530 [lane] = 0;
2531 timings[reg_178]
2532 [channel][slot]
2533 [rank][lane].
2534 largest--;
2535 write_500(info, channel,
2536 timings
2537 [reg_178]
2538 [channel]
2539 [slot][rank]
2540 [lane].
2541 largest,
2542 get_timing_register_addr
2543 (lane, 0,
2544 slot, rank),
2545 9, 1);
2546 write_500(info, channel,
2547 timings
2548 [reg_178]
2549 [channel]
2550 [slot][rank]
2551 [lane].
2552 largest +
2553 info->
2554 training.
2555 lane_timings
2556 [1][channel]
2557 [slot][rank]
2558 [lane]
2559 -
2560 info->
2561 training.
2562 lane_timings
2563 [0][channel]
2564 [slot][rank]
2565 [lane],
2566 get_timing_register_addr
2567 (lane, 1,
2568 slot, rank),
2569 9, 1);
2570 }
2571 } else
2572 num_sucessfully_checked[lane]++;
2573 }
2574 }
2575 }
2576 while (!check_bounded(num_sucessfully_checked, 3));
2577
2578 for (lane = 0; lane < 8; lane++) {
2579 write_500(info, channel,
2580 info->training.
2581 lane_timings[0][channel][slot][rank][lane],
2582 get_timing_register_addr(lane, 0, slot, rank),
2583 9, 1);
2584 write_500(info, channel,
2585 info->training.
2586 lane_timings[1][channel][slot][rank][lane],
2587 get_timing_register_addr(lane, 1, slot, rank),
2588 9, 1);
2589 if (timings[reg_178][channel][slot][rank][lane].
2590 largest <=
2591 timings[reg_178][channel][slot][rank][lane].
2592 smallest) {
2593 timings[reg_178][channel][slot][rank][lane].
2594 largest = 0;
2595 timings[reg_178][channel][slot][rank][lane].
2596 smallest = 0;
2597 }
2598 }
2599 }
2600}
2601
2602static void set_10b(struct raminfo *info, u8 val)
2603{
2604 int channel;
2605 int slot, rank;
2606 int lane;
2607
2608 if (read_1d0(0x10b, 6) == val)
2609 return;
2610
2611 write_1d0(val, 0x10b, 6, 1);
2612
2613 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 9; lane++) {
2614 u16 reg_500;
2615 reg_500 = read_500(info, channel,
2616 get_timing_register_addr(lane, 0, slot,
2617 rank), 9);
2618 if (val == 1) {
2619 if (lut16[info->clock_speed_index] <= reg_500)
2620 reg_500 -= lut16[info->clock_speed_index];
2621 else
2622 reg_500 = 0;
2623 } else {
2624 reg_500 += lut16[info->clock_speed_index];
2625 }
2626 write_500(info, channel, reg_500,
2627 get_timing_register_addr(lane, 0, slot, rank), 9, 1);
2628 }
2629}
2630
2631static void set_ecc(int onoff)
2632{
2633 int channel;
2634 for (channel = 0; channel < NUM_CHANNELS; channel++) {
2635 u8 t;
2636 t = read_mchbar8((channel << 10) + 0x5f8);
2637 if (onoff)
2638 t |= 1;
2639 else
2640 t &= ~1;
2641 write_mchbar8((channel << 10) + 0x5f8, t);
2642 }
2643}
2644
2645static void set_178(u8 val)
2646{
2647 if (val >= 31)
2648 val = val - 31;
2649 else
2650 val = 63 - val;
2651
2652 write_1d0(2 * val, 0x178, 7, 1);
2653}
2654
2655static void
2656write_500_timings_type(struct raminfo *info, int channel, int slot, int rank,
2657 int type)
2658{
2659 int lane;
2660
2661 for (lane = 0; lane < 8; lane++)
2662 write_500(info, channel,
2663 info->training.
2664 lane_timings[type][channel][slot][rank][lane],
2665 get_timing_register_addr(lane, type, slot, rank), 9,
2666 0);
2667}
2668
2669static void
2670try_timing_offsets(struct raminfo *info, int channel,
2671 int slot, int rank, int totalrank)
2672{
2673 u16 count[8];
2674 enum state state[8];
2675 u8 lower_usable[8], upper_usable[8];
2676 int lane;
2677 int i;
2678 int flip = 1;
2679 int timing_offset;
2680
2681 for (i = 0; i < 8; i++)
2682 state[i] = BEFORE_USABLE;
2683
2684 memset(count, 0, sizeof(count));
2685
2686 for (lane = 0; lane < 8; lane++)
2687 write_500(info, channel,
2688 info->training.
2689 lane_timings[2][channel][slot][rank][lane] + 32,
2690 get_timing_register_addr(lane, 3, slot, rank), 9, 1);
2691
2692 for (timing_offset = 0; !validate_state(state) && timing_offset < 64;
2693 timing_offset++) {
2694 u8 failmask;
2695 write_1d0(timing_offset ^ 32, 0x1bb, 6, 1);
2696 failmask = 0;
2697 for (i = 0; i < 2 && failmask != 0xff; i++) {
2698 flip = !flip;
2699 write_testing(info, totalrank, flip);
2700 failmask |= check_testing(info, totalrank, flip);
2701 }
2702 do_fsm(state, count, failmask, 10, 63, lower_usable,
2703 upper_usable, timing_offset);
2704 }
2705 write_1d0(0, 0x1bb, 6, 1);
2706 dump_timings(info);
2707 if (!validate_state(state))
2708 die("Couldn't discover DRAM timings (1)\n");
2709
2710 for (lane = 0; lane < 8; lane++) {
2711 u8 bias = 0;
2712
2713 if (info->silicon_revision) {
2714 int usable_length;
2715
2716 usable_length = upper_usable[lane] - lower_usable[lane];
2717 if (usable_length >= 20) {
2718 bias = usable_length / 2 - 10;
2719 if (bias >= 2)
2720 bias = 2;
2721 }
2722 }
2723 write_500(info, channel,
2724 info->training.
2725 lane_timings[2][channel][slot][rank][lane] +
2726 (upper_usable[lane] + lower_usable[lane]) / 2 - bias,
2727 get_timing_register_addr(lane, 3, slot, rank), 9, 1);
2728 info->training.timing2_bounds[channel][slot][rank][lane][0] =
2729 info->training.lane_timings[2][channel][slot][rank][lane] +
2730 lower_usable[lane];
2731 info->training.timing2_bounds[channel][slot][rank][lane][1] =
2732 info->training.lane_timings[2][channel][slot][rank][lane] +
2733 upper_usable[lane];
2734 info->training.timing2_offset[channel][slot][rank][lane] =
2735 info->training.lane_timings[2][channel][slot][rank][lane];
2736 }
2737}
2738
2739static u8
2740choose_training(struct raminfo *info, int channel, int slot, int rank,
2741 int lane, timing_bounds_t * timings, u8 center_178)
2742{
2743 u16 central_weight;
2744 u16 side_weight;
2745 unsigned int sum = 0, count = 0;
2746 u8 span;
2747 u8 lower_margin, upper_margin;
2748 u8 reg_178;
2749 u8 result;
2750
2751 span = 12;
2752 central_weight = 20;
2753 side_weight = 20;
2754 if (info->silicon_revision == 1 && channel == 1) {
2755 central_weight = 5;
2756 side_weight = 20;
2757 if ((info->
2758 populated_ranks_mask[1] ^ (info->
2759 populated_ranks_mask[1] >> 2)) &
2760 1)
2761 span = 18;
2762 }
2763 if ((info->populated_ranks_mask[0] & 5) == 5) {
2764 central_weight = 20;
2765 side_weight = 20;
2766 }
2767 if (info->clock_speed_index >= 2
2768 && (info->populated_ranks_mask[0] & 5) == 5 && slot == 1) {
2769 if (info->silicon_revision == 1) {
2770 switch (channel) {
2771 case 0:
2772 if (lane == 1) {
2773 central_weight = 10;
2774 side_weight = 20;
2775 }
2776 break;
2777 case 1:
2778 if (lane == 6) {
2779 side_weight = 5;
2780 central_weight = 20;
2781 }
2782 break;
2783 }
2784 }
2785 if (info->silicon_revision == 0 && channel == 0 && lane == 0) {
2786 side_weight = 5;
2787 central_weight = 20;
2788 }
2789 }
2790 for (reg_178 = center_178 - span; reg_178 <= center_178 + span;
2791 reg_178 += span) {
2792 u8 smallest;
2793 u8 largest;
2794 largest = timings[reg_178][channel][slot][rank][lane].largest;
2795 smallest = timings[reg_178][channel][slot][rank][lane].smallest;
2796 if (largest - smallest + 1 >= 5) {
2797 unsigned int weight;
2798 if (reg_178 == center_178)
2799 weight = central_weight;
2800 else
2801 weight = side_weight;
2802 sum += weight * (largest + smallest);
2803 count += weight;
2804 }
2805 }
2806 dump_timings(info);
2807 if (count == 0)
2808 die("Couldn't discover DRAM timings (2)\n");
2809 result = sum / (2 * count);
2810 lower_margin =
2811 result - timings[center_178][channel][slot][rank][lane].smallest;
2812 upper_margin =
2813 timings[center_178][channel][slot][rank][lane].largest - result;
2814 if (upper_margin < 10 && lower_margin > 10)
2815 result -= min(lower_margin - 10, 10 - upper_margin);
2816 if (upper_margin > 10 && lower_margin < 10)
2817 result += min(upper_margin - 10, 10 - lower_margin);
2818 return result;
2819}
2820
2821#define STANDARD_MIN_MARGIN 5
2822
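/*
 * Pick the operating point for register 0x178. For every candidate value
 * the narrowest per-lane eye (largest - smallest + 1) over all populated
 * ranks is taken as its margin; candidates with at least
 * STANDARD_MIN_MARGIN contribute to a weighted average that becomes the
 * center. The threshold loop afterwards records the surrounding region that
 * still meets a progressively relaxed margin as
 * reg178_smallest/reg178_largest for later verification of cached training
 * data.
 */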
2823static u8 choose_reg178(struct raminfo *info, timing_bounds_t * timings)
2824{
2825 u16 margin[64];
2826 int lane, rank, slot, channel;
2827 u8 reg178;
2828 int count = 0, sum = 0;
2829
2830 for (reg178 = reg178_min[info->clock_speed_index];
2831 reg178 < reg178_max[info->clock_speed_index];
2832 reg178 += reg178_step[info->clock_speed_index]) {
2833 margin[reg178] = -1;
2834 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 8; lane++) {
2835 int curmargin =
2836 timings[reg178][channel][slot][rank][lane].largest -
2837 timings[reg178][channel][slot][rank][lane].
2838 smallest + 1;
2839 if (curmargin < margin[reg178])
2840 margin[reg178] = curmargin;
2841 }
2842 if (margin[reg178] >= STANDARD_MIN_MARGIN) {
2843 u16 weight;
2844 weight = margin[reg178] - STANDARD_MIN_MARGIN;
2845 sum += weight * reg178;
2846 count += weight;
2847 }
2848 }
2849 dump_timings(info);
2850 if (count == 0)
2851 die("Couldn't discover DRAM timings (3)\n");
2852
2853 u8 threshold;
2854
2855 for (threshold = 30; threshold >= 5; threshold--) {
2856 int usable_length = 0;
		int smallest_found = 0;
2858 for (reg178 = reg178_min[info->clock_speed_index];
2859 reg178 < reg178_max[info->clock_speed_index];
2860 reg178 += reg178_step[info->clock_speed_index])
2861 if (margin[reg178] >= threshold) {
2862 usable_length +=
2863 reg178_step[info->clock_speed_index];
2864 info->training.reg178_largest =
2865 reg178 -
2866 2 * reg178_step[info->clock_speed_index];
2867
				if (!smallest_found) {
					smallest_found = 1;
2870 info->training.reg178_smallest =
2871 reg178 +
2872 reg178_step[info->
2873 clock_speed_index];
2874 }
2875 }
2876 if (usable_length >= 0x21)
2877 break;
2878 }
2879
2880 return sum / count;
2881}
2882
2883static int check_cached_sanity(struct raminfo *info)
2884{
2885 int lane;
2886 int slot, rank;
2887 int channel;
2888
2889 if (!info->cached_training)
2890 return 0;
2891
2892 for (channel = 0; channel < NUM_CHANNELS; channel++)
2893 for (slot = 0; slot < NUM_SLOTS; slot++)
2894 for (rank = 0; rank < NUM_RANKS; rank++)
2895 for (lane = 0; lane < 8 + info->use_ecc; lane++) {
2896 u16 cached_value, estimation_value;
2897 cached_value =
2898 info->cached_training->
2899 lane_timings[1][channel][slot][rank]
2900 [lane];
2901 if (cached_value >= 0x18
2902 && cached_value <= 0x1E7) {
2903 estimation_value =
2904 info->training.
2905 lane_timings[1][channel]
2906 [slot][rank][lane];
2907 if (estimation_value <
2908 cached_value - 24)
2909 return 0;
2910 if (estimation_value >
2911 cached_value + 24)
2912 return 0;
2913 }
2914 }
2915 return 1;
2916}
2917
2918static int try_cached_training(struct raminfo *info)
2919{
2920 u8 saved_243[2];
2921 u8 tm;
2922
2923 int channel, slot, rank, lane;
2924 int flip = 1;
2925 int i, j;
2926
2927 if (!check_cached_sanity(info))
2928 return 0;
2929
2930 info->training.reg178_center = info->cached_training->reg178_center;
2931 info->training.reg178_smallest = info->cached_training->reg178_smallest;
2932 info->training.reg178_largest = info->cached_training->reg178_largest;
2933 memcpy(&info->training.timing_bounds,
2934 &info->cached_training->timing_bounds,
2935 sizeof(info->training.timing_bounds));
2936 memcpy(&info->training.timing_offset,
2937 &info->cached_training->timing_offset,
2938 sizeof(info->training.timing_offset));
2939
2940 write_1d0(2, 0x142, 3, 1);
2941 saved_243[0] = read_mchbar8(0x243);
2942 saved_243[1] = read_mchbar8(0x643);
2943 write_mchbar8(0x243, saved_243[0] | 2);
2944 write_mchbar8(0x643, saved_243[1] | 2);
2945 set_ecc(0);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03002946 pci_write_config16(NORTHBRIDGE, 0xc8, 3);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01002947 if (read_1d0(0x10b, 6) & 1)
2948 set_10b(info, 0);
2949 for (tm = 0; tm < 2; tm++) {
2950 int totalrank;
2951
2952 set_178(tm ? info->cached_training->reg178_largest : info->
2953 cached_training->reg178_smallest);
2954
2955 totalrank = 0;
		/* Check the timing ranges. With i == 0 we check the smallest
		   bound and with i == 1 the largest bound. With j == 0 we
		   check that memory still works right at the bound, whereas
		   with j == 1 we check that it fails just outside of the
		   bound. */
2961 FOR_POPULATED_RANKS_BACKWARDS {
2962 for (i = 0; i < 2; i++) {
2963 for (lane = 0; lane < 8; lane++) {
2964 write_500(info, channel,
2965 info->cached_training->
2966 timing2_bounds[channel][slot]
2967 [rank][lane][i],
2968 get_timing_register_addr(lane,
2969 3,
2970 slot,
2971 rank),
2972 9, 1);
2973
2974 if (!i)
2975 write_500(info, channel,
2976 info->
2977 cached_training->
2978 timing2_offset
2979 [channel][slot][rank]
2980 [lane],
2981 get_timing_register_addr
2982 (lane, 2, slot, rank),
2983 9, 1);
2984 write_500(info, channel,
2985 i ? info->cached_training->
2986 timing_bounds[tm][channel]
2987 [slot][rank][lane].
2988 largest : info->
2989 cached_training->
2990 timing_bounds[tm][channel]
2991 [slot][rank][lane].smallest,
2992 get_timing_register_addr(lane,
2993 0,
2994 slot,
2995 rank),
2996 9, 1);
2997 write_500(info, channel,
2998 info->cached_training->
2999 timing_offset[channel][slot]
3000 [rank][lane] +
3001 (i ? info->cached_training->
3002 timing_bounds[tm][channel]
3003 [slot][rank][lane].
3004 largest : info->
3005 cached_training->
3006 timing_bounds[tm][channel]
3007 [slot][rank][lane].
3008 smallest) - 64,
3009 get_timing_register_addr(lane,
3010 1,
3011 slot,
3012 rank),
3013 9, 1);
3014 }
3015 for (j = 0; j < 2; j++) {
3016 u8 failmask;
3017 u8 expected_failmask;
3018 char reg1b3;
3019
3020 reg1b3 = (j == 1) + 4;
3021 reg1b3 =
3022 j == i ? reg1b3 : (-reg1b3) & 0x3f;
3023 write_1d0(reg1b3, 0x1bb, 6, 1);
3024 write_1d0(reg1b3, 0x1b3, 6, 1);
3025 write_1d0(reg1b3, 0x1a3, 6, 1);
3026
3027 flip = !flip;
3028 write_testing(info, totalrank, flip);
3029 failmask =
3030 check_testing(info, totalrank,
3031 flip);
3032 expected_failmask =
3033 j == 0 ? 0x00 : 0xff;
3034 if (failmask != expected_failmask)
3035 goto fail;
3036 }
3037 }
3038 totalrank++;
3039 }
3040 }
3041
3042 set_178(info->cached_training->reg178_center);
3043 if (info->use_ecc)
3044 set_ecc(1);
3045 write_training_data(info);
	write_1d0(0, 0x142, 3, 1);
3047 info->training = *info->cached_training;
3048
3049 write_1d0(0, 0x1bb, 6, 1);
3050 write_1d0(0, 0x1b3, 6, 1);
3051 write_1d0(0, 0x1a3, 6, 1);
3052 write_mchbar8(0x243, saved_243[0]);
3053 write_mchbar8(0x643, saved_243[1]);
3054
3055 return 1;
3056
3057fail:
3058 FOR_POPULATED_RANKS {
3059 write_500_timings_type(info, channel, slot, rank, 1);
3060 write_500_timings_type(info, channel, slot, rank, 2);
3061 write_500_timings_type(info, channel, slot, rank, 3);
3062 }
3063
3064 write_1d0(0, 0x1bb, 6, 1);
3065 write_1d0(0, 0x1b3, 6, 1);
3066 write_1d0(0, 0x1a3, 6, 1);
3067 write_mchbar8(0x243, saved_243[0]);
3068 write_mchbar8(0x643, saved_243[1]);
3069
3070 return 0;
3071}
3072
3073static void do_ram_training(struct raminfo *info)
3074{
3075 u8 saved_243[2];
3076 int totalrank = 0;
3077 u8 reg_178;
3078 int niter;
3079
Matthias Gazzaridfa51252018-05-19 00:44:20 +02003080 timing_bounds_t *timings = timings_car;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003081 int lane, rank, slot, channel;
3082 u8 reg178_center;
3083
3084 write_1d0(2, 0x142, 3, 1);
3085 saved_243[0] = read_mchbar8(0x243);
3086 saved_243[1] = read_mchbar8(0x643);
3087 write_mchbar8(0x243, saved_243[0] | 2);
3088 write_mchbar8(0x643, saved_243[1] | 2);
3089 switch (info->clock_speed_index) {
3090 case 0:
3091 niter = 5;
3092 break;
3093 case 1:
3094 niter = 10;
3095 break;
3096 default:
3097 niter = 19;
3098 break;
3099 }
3100 set_ecc(0);
3101
3102 FOR_POPULATED_RANKS_BACKWARDS {
3103 int i;
3104
3105 write_500_timings_type(info, channel, slot, rank, 0);
3106
3107 write_testing(info, totalrank, 0);
3108 for (i = 0; i < niter; i++) {
3109 write_testing_type2(info, totalrank, 2, i, 0);
3110 write_testing_type2(info, totalrank, 3, i, 1);
3111 }
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003112 pci_write_config8(NORTHBRIDGE, 0xc0, 0x01);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003113 totalrank++;
3114 }
3115
3116 if (reg178_min[info->clock_speed_index] <
3117 reg178_max[info->clock_speed_index])
3118 memset(timings[reg178_min[info->clock_speed_index]], 0,
3119 sizeof(timings[0]) *
3120 (reg178_max[info->clock_speed_index] -
3121 reg178_min[info->clock_speed_index]));
3122 for (reg_178 = reg178_min[info->clock_speed_index];
3123 reg_178 < reg178_max[info->clock_speed_index];
3124 reg_178 += reg178_step[info->clock_speed_index]) {
3125 totalrank = 0;
3126 set_178(reg_178);
3127 for (channel = NUM_CHANNELS - 1; channel >= 0; channel--)
3128 for (slot = 0; slot < NUM_SLOTS; slot++)
3129 for (rank = 0; rank < NUM_RANKS; rank++) {
3130 memset(&timings[reg_178][channel][slot]
3131 [rank][0].smallest, 0, 16);
3132 if (info->
3133 populated_ranks[channel][slot]
3134 [rank]) {
3135 train_ram_at_178(info, channel,
3136 slot, rank,
3137 totalrank,
3138 reg_178, 1,
3139 niter,
3140 timings);
3141 totalrank++;
3142 }
3143 }
3144 }
3145
3146 reg178_center = choose_reg178(info, timings);
3147
3148 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 8; lane++) {
3149 info->training.timing_bounds[0][channel][slot][rank][lane].
3150 smallest =
3151 timings[info->training.
3152 reg178_smallest][channel][slot][rank][lane].
3153 smallest;
3154 info->training.timing_bounds[0][channel][slot][rank][lane].
3155 largest =
3156 timings[info->training.
3157 reg178_smallest][channel][slot][rank][lane].largest;
3158 info->training.timing_bounds[1][channel][slot][rank][lane].
3159 smallest =
3160 timings[info->training.
3161 reg178_largest][channel][slot][rank][lane].smallest;
3162 info->training.timing_bounds[1][channel][slot][rank][lane].
3163 largest =
3164 timings[info->training.
3165 reg178_largest][channel][slot][rank][lane].largest;
3166 info->training.timing_offset[channel][slot][rank][lane] =
3167 info->training.lane_timings[1][channel][slot][rank][lane]
3168 -
3169 info->training.lane_timings[0][channel][slot][rank][lane] +
3170 64;
3171 }
3172
3173 if (info->silicon_revision == 1
3174 && (info->
3175 populated_ranks_mask[1] ^ (info->
3176 populated_ranks_mask[1] >> 2)) & 1) {
3177 int ranks_after_channel1;
3178
3179 totalrank = 0;
3180 for (reg_178 = reg178_center - 18;
3181 reg_178 <= reg178_center + 18; reg_178 += 18) {
3182 totalrank = 0;
3183 set_178(reg_178);
3184 for (slot = 0; slot < NUM_SLOTS; slot++)
3185 for (rank = 0; rank < NUM_RANKS; rank++) {
3186 if (info->
3187 populated_ranks[1][slot][rank]) {
3188 train_ram_at_178(info, 1, slot,
3189 rank,
3190 totalrank,
3191 reg_178, 0,
3192 niter,
3193 timings);
3194 totalrank++;
3195 }
3196 }
3197 }
3198 ranks_after_channel1 = totalrank;
3199
3200 for (reg_178 = reg178_center - 12;
3201 reg_178 <= reg178_center + 12; reg_178 += 12) {
3202 totalrank = ranks_after_channel1;
3203 set_178(reg_178);
3204 for (slot = 0; slot < NUM_SLOTS; slot++)
3205 for (rank = 0; rank < NUM_RANKS; rank++)
3206 if (info->
3207 populated_ranks[0][slot][rank]) {
3208 train_ram_at_178(info, 0, slot,
3209 rank,
3210 totalrank,
3211 reg_178, 0,
3212 niter,
3213 timings);
3214 totalrank++;
3215 }
3216
3217 }
3218 } else {
3219 for (reg_178 = reg178_center - 12;
3220 reg_178 <= reg178_center + 12; reg_178 += 12) {
3221 totalrank = 0;
3222 set_178(reg_178);
3223 FOR_POPULATED_RANKS_BACKWARDS {
3224 train_ram_at_178(info, channel, slot, rank,
3225 totalrank, reg_178, 0, niter,
3226 timings);
3227 totalrank++;
3228 }
3229 }
3230 }
3231
3232 set_178(reg178_center);
3233 FOR_POPULATED_RANKS_BACKWARDS for (lane = 0; lane < 8; lane++) {
3234 u16 tm0;
3235
3236 tm0 =
3237 choose_training(info, channel, slot, rank, lane, timings,
3238 reg178_center);
3239 write_500(info, channel, tm0,
3240 get_timing_register_addr(lane, 0, slot, rank), 9, 1);
3241 write_500(info, channel,
3242 tm0 +
3243 info->training.
3244 lane_timings[1][channel][slot][rank][lane] -
3245 info->training.
3246 lane_timings[0][channel][slot][rank][lane],
3247 get_timing_register_addr(lane, 1, slot, rank), 9, 1);
3248 }
3249
3250 totalrank = 0;
3251 FOR_POPULATED_RANKS_BACKWARDS {
3252 try_timing_offsets(info, channel, slot, rank, totalrank);
3253 totalrank++;
3254 }
3255 write_mchbar8(0x243, saved_243[0]);
3256 write_mchbar8(0x643, saved_243[1]);
3257 write_1d0(0, 0x142, 3, 1);
3258 info->training.reg178_center = reg178_center;
3259}
3260
3261static void ram_training(struct raminfo *info)
3262{
3263 u16 saved_fc4;
3264
3265 saved_fc4 = read_mchbar16(0xfc4);
3266 write_mchbar16(0xfc4, 0xffff);
3267
3268 if (info->revision >= 8)
3269 read_4090(info);
3270
3271 if (!try_cached_training(info))
3272 do_ram_training(info);
3273 if ((info->silicon_revision == 2 || info->silicon_revision == 3)
3274 && info->clock_speed_index < 2)
3275 set_10b(info, 1);
3276 write_mchbar16(0xfc4, saved_fc4);
3277}
3278
3279static unsigned gcd(unsigned a, unsigned b)
3280{
3281 unsigned t;
3282 if (a > b) {
3283 t = a;
3284 a = b;
3285 b = t;
3286 }
	/* invariant: a <= b. */
3288 while (a) {
3289 t = b % a;
3290 b = a;
3291 a = t;
3292 }
3293 return b;
3294}
3295
3296static inline int div_roundup(int a, int b)
3297{
Edward O'Callaghan7116ac82014-07-08 01:53:24 +10003298 return CEIL_DIV(a, b);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003299}
3300
3301static unsigned lcm(unsigned a, unsigned b)
3302{
3303 return (a * b) / gcd(a, b);
3304}
3305
3306struct stru1 {
3307 u8 freqs_reversed;
3308 u8 freq_diff_reduced;
3309 u8 freq_min_reduced;
3310 u8 divisor_f4_to_fmax;
3311 u8 divisor_f3_to_fmax;
3312 u8 freq4_to_max_remainder;
3313 u8 freq3_to_2_remainder;
3314 u8 freq3_to_2_remaindera;
3315 u8 freq4_to_2_remainder;
3316 int divisor_f3_to_f1, divisor_f4_to_f2;
3317 int common_time_unit_ps;
3318 int freq_max_reduced;
3319};
3320
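/*
 * Express two delays in terms of the two clocks freq1/freq2. Both
 * frequencies are reduced by their GCD and a common time unit of
 * div_roundup(900000, lcm(freq1, freq2)) is derived; the delays
 * (num_cycles_*) are converted into counts of that unit and split into
 * divisors and remainders relative to the reduced frequencies, which is the
 * layout the 0x2d5x/0x6xx registers appear to expect. As a made-up
 * illustration (not real hardware values): with freq1 = 120 and freq2 = 90,
 * g = 30 and the reduced frequencies are 4 and 3, so a delay is first
 * scaled by the common unit and then divided by 4 or 3 to obtain the
 * divisor/remainder pairs.
 */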
3321static void
3322compute_frequence_ratios(struct raminfo *info, u16 freq1, u16 freq2,
3323 int num_cycles_2, int num_cycles_1, int round_it,
3324 int add_freqs, struct stru1 *result)
3325{
3326 int g;
3327 int common_time_unit_ps;
3328 int freq1_reduced, freq2_reduced;
3329 int freq_min_reduced;
3330 int freq_max_reduced;
3331 int freq3, freq4;
3332
3333 g = gcd(freq1, freq2);
3334 freq1_reduced = freq1 / g;
3335 freq2_reduced = freq2 / g;
3336 freq_min_reduced = min(freq1_reduced, freq2_reduced);
3337 freq_max_reduced = max(freq1_reduced, freq2_reduced);
3338
3339 common_time_unit_ps = div_roundup(900000, lcm(freq1, freq2));
3340 freq3 = div_roundup(num_cycles_2, common_time_unit_ps) - 1;
3341 freq4 = div_roundup(num_cycles_1, common_time_unit_ps) - 1;
3342 if (add_freqs) {
3343 freq3 += freq2_reduced;
3344 freq4 += freq1_reduced;
3345 }
3346
3347 if (round_it) {
3348 result->freq3_to_2_remainder = 0;
3349 result->freq3_to_2_remaindera = 0;
3350 result->freq4_to_max_remainder = 0;
3351 result->divisor_f4_to_f2 = 0;
3352 result->divisor_f3_to_f1 = 0;
3353 } else {
3354 if (freq2_reduced < freq1_reduced) {
3355 result->freq3_to_2_remainder =
3356 result->freq3_to_2_remaindera =
3357 freq3 % freq1_reduced - freq1_reduced + 1;
3358 result->freq4_to_max_remainder =
3359 -(freq4 % freq1_reduced);
3360 result->divisor_f3_to_f1 = freq3 / freq1_reduced;
3361 result->divisor_f4_to_f2 =
3362 (freq4 -
3363 (freq1_reduced - freq2_reduced)) / freq2_reduced;
3364 result->freq4_to_2_remainder =
3365 -(char)((freq1_reduced - freq2_reduced) +
3366 ((u8) freq4 -
3367 (freq1_reduced -
3368 freq2_reduced)) % (u8) freq2_reduced);
3369 } else {
3370 if (freq2_reduced > freq1_reduced) {
3371 result->freq4_to_max_remainder =
3372 (freq4 % freq2_reduced) - freq2_reduced + 1;
3373 result->freq4_to_2_remainder =
3374 freq4 % freq_max_reduced -
3375 freq_max_reduced + 1;
3376 } else {
3377 result->freq4_to_max_remainder =
3378 -(freq4 % freq2_reduced);
3379 result->freq4_to_2_remainder =
3380 -(char)(freq4 % freq_max_reduced);
3381 }
3382 result->divisor_f4_to_f2 = freq4 / freq2_reduced;
3383 result->divisor_f3_to_f1 =
3384 (freq3 -
3385 (freq2_reduced - freq1_reduced)) / freq1_reduced;
3386 result->freq3_to_2_remainder = -(freq3 % freq2_reduced);
3387 result->freq3_to_2_remaindera =
3388 -(char)((freq_max_reduced - freq_min_reduced) +
3389 (freq3 -
3390 (freq_max_reduced -
3391 freq_min_reduced)) % freq1_reduced);
3392 }
3393 }
3394 result->divisor_f3_to_fmax = freq3 / freq_max_reduced;
3395 result->divisor_f4_to_fmax = freq4 / freq_max_reduced;
3396 if (round_it) {
3397 if (freq2_reduced > freq1_reduced) {
3398 if (freq3 % freq_max_reduced)
3399 result->divisor_f3_to_fmax++;
3400 }
3401 if (freq2_reduced < freq1_reduced) {
3402 if (freq4 % freq_max_reduced)
3403 result->divisor_f4_to_fmax++;
3404 }
3405 }
3406 result->freqs_reversed = (freq2_reduced < freq1_reduced);
3407 result->freq_diff_reduced = freq_max_reduced - freq_min_reduced;
3408 result->freq_min_reduced = freq_min_reduced;
3409 result->common_time_unit_ps = common_time_unit_ps;
3410 result->freq_max_reduced = freq_max_reduced;
3411}
3412
3413static void
3414set_2d5x_reg(struct raminfo *info, u16 reg, u16 freq1, u16 freq2,
3415 int num_cycles_2, int num_cycles_1, int num_cycles_3,
3416 int num_cycles_4, int reverse)
3417{
3418 struct stru1 vv;
3419 char multiplier;
3420
3421 compute_frequence_ratios(info, freq1, freq2, num_cycles_2, num_cycles_1,
3422 0, 1, &vv);
3423
3424 multiplier =
3425 div_roundup(max
3426 (div_roundup(num_cycles_2, vv.common_time_unit_ps) +
3427 div_roundup(num_cycles_3, vv.common_time_unit_ps),
3428 div_roundup(num_cycles_1,
3429 vv.common_time_unit_ps) +
3430 div_roundup(num_cycles_4, vv.common_time_unit_ps))
3431 + vv.freq_min_reduced - 1, vv.freq_max_reduced) - 1;
3432
	u32 y = (u8) ((vv.freq_max_reduced - vv.freq_min_reduced) +
		      vv.freq_max_reduced * multiplier)
	    | (vv.freqs_reversed << 8)
	    | ((u8) (vv.freq_min_reduced * multiplier) << 16)
	    | ((u8) (vv.freq_min_reduced * multiplier) << 24);
	u32 x = vv.freq3_to_2_remaindera
	    | (vv.freq4_to_2_remainder << 8)
	    | (vv.divisor_f3_to_f1 << 16)
	    | (vv.divisor_f4_to_f2 << 20)
	    | (vv.freq_min_reduced << 24);
3448 if (reverse) {
3449 write_mchbar32(reg, y);
3450 write_mchbar32(reg + 4, x);
3451 } else {
3452 write_mchbar32(reg + 4, y);
3453 write_mchbar32(reg, x);
3454 }
3455}
3456
3457static void
3458set_6d_reg(struct raminfo *info, u16 reg, u16 freq1, u16 freq2,
3459 int num_cycles_1, int num_cycles_2, int num_cycles_3,
3460 int num_cycles_4)
3461{
3462 struct stru1 ratios1;
3463 struct stru1 ratios2;
3464
3465 compute_frequence_ratios(info, freq1, freq2, num_cycles_1, num_cycles_2,
3466 0, 1, &ratios2);
3467 compute_frequence_ratios(info, freq1, freq2, num_cycles_3, num_cycles_4,
3468 0, 1, &ratios1);
	printk(BIOS_SPEW, "[%x] <= %x\n", reg,
	       ratios1.freq4_to_max_remainder
	       | (ratios2.freq4_to_max_remainder << 8)
	       | (ratios1.divisor_f4_to_fmax << 16)
	       | (ratios2.divisor_f4_to_fmax << 20));
	write_mchbar32(reg,
		       ratios1.freq4_to_max_remainder
		       | (ratios2.freq4_to_max_remainder << 8)
		       | (ratios1.divisor_f4_to_fmax << 16)
		       | (ratios2.divisor_f4_to_fmax << 20));
3483}
3484
3485static void
3486set_2dx8_reg(struct raminfo *info, u16 reg, u8 mode, u16 freq1, u16 freq2,
3487 int num_cycles_2, int num_cycles_1, int round_it, int add_freqs)
3488{
3489 struct stru1 ratios;
3490
3491 compute_frequence_ratios(info, freq1, freq2, num_cycles_2, num_cycles_1,
3492 round_it, add_freqs, &ratios);
	switch (mode) {
	case 0:
		write_mchbar32(reg + 4,
			       ratios.freq_diff_reduced
			       | (ratios.freqs_reversed << 8));
		write_mchbar32(reg,
			       ratios.freq3_to_2_remainder
			       | (ratios.freq4_to_max_remainder << 8)
			       | (ratios.divisor_f3_to_fmax << 16)
			       | (ratios.divisor_f4_to_fmax << 20)
			       | (ratios.freq_min_reduced << 24));
		break;

	case 1:
		write_mchbar32(reg,
			       ratios.freq3_to_2_remainder
			       | (ratios.divisor_f3_to_fmax << 16));
		break;

	case 2:
		write_mchbar32(reg,
			       ratios.freq3_to_2_remainder
			       | (ratios.freq4_to_max_remainder << 8)
			       | (ratios.divisor_f3_to_fmax << 16)
			       | (ratios.divisor_f4_to_fmax << 20));
		break;

	case 4:
		write_mchbar32(reg,
			       (ratios.divisor_f3_to_fmax << 4)
			       | (ratios.divisor_f4_to_fmax << 8)
			       | (ratios.freqs_reversed << 12)
			       | (ratios.freq_min_reduced << 16)
			       | (ratios.freq_diff_reduced << 24));
		break;
	}
3536}
3537
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003538static void set_2dxx_series(struct raminfo *info, int s3resume)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003539{
3540 set_2dx8_reg(info, 0x2d00, 0, 0x78, frequency_11(info) / 2, 1359, 1005,
3541 0, 1);
3542 set_2dx8_reg(info, 0x2d08, 0, 0x78, 0x78, 3273, 5033, 1, 1);
3543 set_2dx8_reg(info, 0x2d10, 0, 0x78, info->fsb_frequency, 1475, 1131, 0,
3544 1);
3545 set_2dx8_reg(info, 0x2d18, 0, 2 * info->fsb_frequency,
3546 frequency_11(info), 1231, 1524, 0, 1);
3547 set_2dx8_reg(info, 0x2d20, 0, 2 * info->fsb_frequency,
3548 frequency_11(info) / 2, 1278, 2008, 0, 1);
3549 set_2dx8_reg(info, 0x2d28, 0, info->fsb_frequency, frequency_11(info),
3550 1167, 1539, 0, 1);
3551 set_2dx8_reg(info, 0x2d30, 0, info->fsb_frequency,
3552 frequency_11(info) / 2, 1403, 1318, 0, 1);
3553 set_2dx8_reg(info, 0x2d38, 0, info->fsb_frequency, 0x78, 3460, 5363, 1,
3554 1);
3555 set_2dx8_reg(info, 0x2d40, 0, info->fsb_frequency, 0x3c, 2792, 5178, 1,
3556 1);
3557 set_2dx8_reg(info, 0x2d48, 0, 2 * info->fsb_frequency, 0x78, 2738, 4610,
3558 1, 1);
3559 set_2dx8_reg(info, 0x2d50, 0, info->fsb_frequency, 0x78, 2819, 5932, 1,
3560 1);
3561 set_2dx8_reg(info, 0x6d4, 1, info->fsb_frequency,
3562 frequency_11(info) / 2, 4000, 0, 0, 0);
3563 set_2dx8_reg(info, 0x6d8, 2, info->fsb_frequency,
3564 frequency_11(info) / 2, 4000, 4000, 0, 0);
3565
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003566 if (s3resume) {
3567 printk (BIOS_SPEW, "[6dc] <= %x\n", info->cached_training->reg_6dc);
3568 write_mchbar32(0x6dc, info->cached_training->reg_6dc);
3569 } else
3570 set_6d_reg(info, 0x6dc, 2 * info->fsb_frequency, frequency_11(info), 0,
3571 info->delay46_ps[0], 0,
3572 info->delay54_ps[0]);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003573 set_2dx8_reg(info, 0x6e0, 1, 2 * info->fsb_frequency,
3574 frequency_11(info), 2500, 0, 0, 0);
3575 set_2dx8_reg(info, 0x6e4, 1, 2 * info->fsb_frequency,
3576 frequency_11(info) / 2, 3500, 0, 0, 0);
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003577 if (s3resume) {
3578 printk (BIOS_SPEW, "[6e8] <= %x\n", info->cached_training->reg_6e8);
3579 write_mchbar32(0x6e8, info->cached_training->reg_6e8);
3580 } else
3581 set_6d_reg(info, 0x6e8, 2 * info->fsb_frequency, frequency_11(info), 0,
3582 info->delay46_ps[1], 0,
3583 info->delay54_ps[1]);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003584 set_2d5x_reg(info, 0x2d58, 0x78, 0x78, 864, 1195, 762, 786, 0);
3585 set_2d5x_reg(info, 0x2d60, 0x195, info->fsb_frequency, 1352, 725, 455,
3586 470, 0);
3587 set_2d5x_reg(info, 0x2d68, 0x195, 0x3c, 2707, 5632, 3277, 2207, 0);
3588 set_2d5x_reg(info, 0x2d70, 0x195, frequency_11(info) / 2, 1276, 758,
3589 454, 459, 0);
3590 set_2d5x_reg(info, 0x2d78, 0x195, 0x78, 1021, 799, 510, 513, 0);
3591 set_2d5x_reg(info, 0x2d80, info->fsb_frequency, 0xe1, 0, 2862, 2579,
3592 2588, 0);
3593 set_2d5x_reg(info, 0x2d88, info->fsb_frequency, 0xe1, 0, 2690, 2405,
3594 2405, 0);
3595 set_2d5x_reg(info, 0x2da0, 0x78, 0xe1, 0, 2560, 2264, 2251, 0);
3596 set_2d5x_reg(info, 0x2da8, 0x195, frequency_11(info), 1060, 775, 484,
3597 480, 0);
3598 set_2d5x_reg(info, 0x2db0, 0x195, 0x78, 4183, 6023, 2217, 2048, 0);
3599 write_mchbar32(0x2dbc, ((frequency_11(info) / 2) - 1) | 0xe00000);
3600 write_mchbar32(0x2db8, ((info->fsb_frequency - 1) << 16) | 0x77);
3601}
3602
3603static u16 get_max_timing(struct raminfo *info, int channel)
3604{
3605 int slot, rank, lane;
3606 u16 ret = 0;
3607
3608 if ((read_mchbar8(0x2ca8) >> 2) < 1)
3609 return 384;
3610
3611 if (info->revision < 8)
3612 return 256;
3613
3614 for (slot = 0; slot < NUM_SLOTS; slot++)
3615 for (rank = 0; rank < NUM_RANKS; rank++)
3616 if (info->populated_ranks[channel][slot][rank])
3617 for (lane = 0; lane < 8 + info->use_ecc; lane++)
3618 ret = max(ret, read_500(info, channel,
3619 get_timing_register_addr
3620 (lane, 0, slot,
3621 rank), 9));
3622 return ret;
3623}
3624
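/*
 * Derive the per-channel turnaround delays programmed into MCHBAR
 * 0x274/0x265 and the delay46/delay54 values later fed to set_6d_reg().
 * The computation starts from the worst-case lane timing of the channel
 * (get_max_timing) plus several stepping-dependent fudge terms, and clamps
 * both delays to a 2500 ps floor; hitting the floor on delay46 also sets
 * bit 0 of MCHBAR 0x2ca9. The individual constants come from the original
 * vendor-derived code and are not otherwise documented.
 */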
3625static void set_274265(struct raminfo *info)
3626{
3627 int delay_a_ps, delay_b_ps, delay_c_ps, delay_d_ps;
3628 int delay_e_ps, delay_e_cycles, delay_f_cycles;
3629 int delay_e_over_cycle_ps;
3630 int cycletime_ps;
3631 int channel;
3632
3633 delay_a_ps = 4 * halfcycle_ps(info) + 6 * fsbcycle_ps(info);
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003634 info->training.reg2ca9_bit0 = 0;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003635 for (channel = 0; channel < NUM_CHANNELS; channel++) {
3636 cycletime_ps =
3637 900000 / lcm(2 * info->fsb_frequency, frequency_11(info));
3638 delay_d_ps =
3639 (halfcycle_ps(info) * get_max_timing(info, channel) >> 6)
3640 - info->some_delay_3_ps_rounded + 200;
3641 if (!
3642 ((info->silicon_revision == 0
3643 || info->silicon_revision == 1)
3644 && (info->revision >= 8)))
3645 delay_d_ps += halfcycle_ps(info) * 2;
3646 delay_d_ps +=
3647 halfcycle_ps(info) * (!info->revision_flag_1 +
3648 info->some_delay_2_halfcycles_ceil +
3649 2 * info->some_delay_1_cycle_floor +
3650 info->clock_speed_index +
3651 2 * info->cas_latency - 7 + 11);
3652 delay_d_ps += info->revision >= 8 ? 2758 : 4428;
3653
3654 write_mchbar32(0x140,
3655 (read_mchbar32(0x140) & 0xfaffffff) | 0x2000000);
3656 write_mchbar32(0x138,
3657 (read_mchbar32(0x138) & 0xfaffffff) | 0x2000000);
3658 if ((read_mchbar8(0x144) & 0x1f) > 0x13)
3659 delay_d_ps += 650;
3660 delay_c_ps = delay_d_ps + 1800;
3661 if (delay_c_ps <= delay_a_ps)
3662 delay_e_ps = 0;
3663 else
3664 delay_e_ps =
3665 cycletime_ps * div_roundup(delay_c_ps - delay_a_ps,
3666 cycletime_ps);
3667
3668 delay_e_over_cycle_ps = delay_e_ps % (2 * halfcycle_ps(info));
3669 delay_e_cycles = delay_e_ps / (2 * halfcycle_ps(info));
3670 delay_f_cycles =
3671 div_roundup(2500 - delay_e_over_cycle_ps,
3672 2 * halfcycle_ps(info));
3673 if (delay_f_cycles > delay_e_cycles) {
3674 info->delay46_ps[channel] = delay_e_ps;
3675 delay_e_cycles = 0;
3676 } else {
3677 info->delay46_ps[channel] =
3678 delay_e_over_cycle_ps +
3679 2 * halfcycle_ps(info) * delay_f_cycles;
3680 delay_e_cycles -= delay_f_cycles;
3681 }
3682
3683 if (info->delay46_ps[channel] < 2500) {
3684 info->delay46_ps[channel] = 2500;
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003685 info->training.reg2ca9_bit0 = 1;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003686 }
3687 delay_b_ps = halfcycle_ps(info) + delay_c_ps;
3688 if (delay_b_ps <= delay_a_ps)
3689 delay_b_ps = 0;
3690 else
3691 delay_b_ps -= delay_a_ps;
3692 info->delay54_ps[channel] =
3693 cycletime_ps * div_roundup(delay_b_ps,
3694 cycletime_ps) -
3695 2 * halfcycle_ps(info) * delay_e_cycles;
3696 if (info->delay54_ps[channel] < 2500)
3697 info->delay54_ps[channel] = 2500;
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003698 info->training.reg274265[channel][0] = delay_e_cycles;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003699 if (delay_d_ps + 7 * halfcycle_ps(info) <=
3700 24 * halfcycle_ps(info))
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003701 info->training.reg274265[channel][1] = 0;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003702 else
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003703 info->training.reg274265[channel][1] =
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003704 div_roundup(delay_d_ps + 7 * halfcycle_ps(info),
3705 4 * halfcycle_ps(info)) - 6;
3706 write_mchbar32((channel << 10) + 0x274,
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003707 info->training.reg274265[channel][1]
3708 | (info->training.reg274265[channel][0] << 16));
3709 info->training.reg274265[channel][2] =
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003710 div_roundup(delay_c_ps + 3 * fsbcycle_ps(info),
3711 4 * halfcycle_ps(info)) + 1;
3712 write_mchbar16((channel << 10) + 0x265,
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003713 info->training.reg274265[channel][2] << 8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003714 }
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003715 if (info->training.reg2ca9_bit0)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003716 write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) | 1);
3717 else
3718 write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) & ~1);
3719}
3720
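/*
 * S3 resume path: replay the 0x274/0x265 values (and the 0x2ca9 bit) that
 * set_274265() computed and cached during the original boot.
 */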
3721static void restore_274265(struct raminfo *info)
3722{
3723 int channel;
3724
3725 for (channel = 0; channel < NUM_CHANNELS; channel++) {
3726 write_mchbar32((channel << 10) + 0x274,
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003727 (info->cached_training->reg274265[channel][0] << 16)
3728 | info->cached_training->reg274265[channel][1]);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003729 write_mchbar16((channel << 10) + 0x265,
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003730 info->cached_training->reg274265[channel][2] << 8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003731 }
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01003732 if (info->cached_training->reg2ca9_bit0)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003733 write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) | 1);
3734 else
3735 write_mchbar8(0x2ca9, read_mchbar8(0x2ca9) & ~1);
3736}
3737
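/*
 * Early DMI link setup.  The values written to DMIBAR, MCHBAR and the GPIO
 * block appear to mirror what the vendor firmware programs; their meaning
 * is not documented.
 */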
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003738static void dmi_setup(void)
3739{
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08003740 gav(read8(DEFAULT_DMIBAR + 0x254));
3741 write8(DEFAULT_DMIBAR + 0x254, 0x1);
3742 write16(DEFAULT_DMIBAR + 0x1b8, 0x18f2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003743 read_mchbar16(0x48);
3744 write_mchbar16(0x48, 0x2);
3745
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08003746 write32(DEFAULT_DMIBAR + 0xd68, read32(DEFAULT_DMIBAR + 0xd68) | 0x08000000);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003747
3748 outl((gav(inl(DEFAULT_GPIOBASE | 0x38)) & ~0x140000) | 0x400000,
3749 DEFAULT_GPIOBASE | 0x38);
3750 gav(inb(DEFAULT_GPIOBASE | 0xe)); // = 0xfdcaff6e
3751}
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003752
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003753void chipset_init(const int s3resume)
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003754{
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003755 u8 x2ca8;
Vladimir Serbinenko55391c42014-08-03 14:51:00 +02003756 u16 ggc;
3757 u8 gfxsize;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003758
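/*
 * MCHBAR 0x2ca8 appears to be used as a progress marker by this code: a
 * stale value means an earlier memory init was interrupted, so trigger a
 * full reset instead of continuing.
 */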
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003759 x2ca8 = read_mchbar8(0x2ca8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003760 if ((x2ca8 & 1) || (x2ca8 == 8 && !s3resume)) {
3761 printk(BIOS_DEBUG, "soft reset detected, rebooting properly\n");
3762 write_mchbar8(0x2ca8, 0);
Vladimir Serbinenkoe1eef692014-02-19 22:08:51 +01003763 outb(0x6, 0xcf9);
Patrick Georgi546953c2014-11-29 10:38:17 +01003764 halt();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003765 }
Felix Held29a9c072018-07-29 01:34:45 +02003766#if 0
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003767 if (!s3resume) {
3768 pre_raminit_3(x2ca8);
3769 }
Vladimir Serbinenkof62669c2014-01-09 10:59:38 +01003770 pre_raminit_4a(x2ca8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003771#endif
3772
3773 dmi_setup();
3774
3775 write_mchbar16(0x1170, 0xa880);
3776 write_mchbar8(0x11c1, 0x1);
3777 write_mchbar16(0x1170, 0xb880);
3778 read_mchbar8(0x1210);
3779 write_mchbar8(0x1210, 0x84);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003780
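/*
 * The gfx_uma_size CMOS option selects the graphics UMA size (0 maps to
 * 32 MiB).  It is folded into the GGC value, which is first written with
 * bit 1 set and then again with the final value once the device setup in
 * between is done.
 */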
Vladimir Serbinenko55391c42014-08-03 14:51:00 +02003781 if (get_option(&gfxsize, "gfx_uma_size") != CB_SUCCESS) {
3782 /* 0 for 32MB */
3783 gfxsize = 0;
3784 }
3785
3786 ggc = 0xb00 | ((gfxsize + 5) << 4);
3787
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003788 pci_write_config16(NORTHBRIDGE, D0F0_GGC, ggc | 2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003789
3790 u16 deven;
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003791 deven = pci_read_config16(NORTHBRIDGE, D0F0_DEVEN); // = 0x3
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003792
3793 if (deven & 8) {
3794 write_mchbar8(0x2c30, 0x20);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003795 pci_read_config8(NORTHBRIDGE, 0x8); // = 0x18
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003796 write_mchbar16(0x2c30, read_mchbar16(0x2c30) | 0x200);
3797 write_mchbar16(0x2c32, 0x434);
3798 read_mchbar32(0x2c44);
3799 write_mchbar32(0x2c44, 0x1053687);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003800 pci_read_config8(GMA, 0x62); // = 0x2
3801 pci_write_config8(GMA, 0x62, 0x2);
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08003802 read8(DEFAULT_RCBA + 0x2318);
3803 write8(DEFAULT_RCBA + 0x2318, 0x47);
3804 read8(DEFAULT_RCBA + 0x2320);
3805 write8(DEFAULT_RCBA + 0x2320, 0xfc);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003806 }
3807
3808 read_mchbar32(0x30);
3809 write_mchbar32(0x30, 0x40);
3810
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003811 pci_write_config16(NORTHBRIDGE, D0F0_GGC, ggc);
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08003812 gav(read32(DEFAULT_RCBA + 0x3428));
3813 write32(DEFAULT_RCBA + 0x3428, 0x1d);
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003814}
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003815
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003816void raminit(const int s3resume, const u8 *spd_addrmap)
3817{
3818 unsigned channel, slot, lane, rank;
3819 int i;
3820 struct raminfo info;
3821 u8 x2ca8;
3822 u16 deven;
Vladimir Serbinenkob16f0922014-06-07 16:27:27 +02003823 int cbmem_wasnot_inited;
Vladimir Serbinenko9817a372014-02-19 22:07:12 +01003824
3825 x2ca8 = read_mchbar8(0x2ca8);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003826 deven = pci_read_config16(NORTHBRIDGE, D0F0_DEVEN);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003827
3828 memset(&info, 0x5a, sizeof(info));
3829
3830 info.last_500_command[0] = 0;
3831 info.last_500_command[1] = 0;
3832
3833 info.fsb_frequency = 135 * 2;
3834 info.board_lane_delay[0] = 0x14;
3835 info.board_lane_delay[1] = 0x07;
3836 info.board_lane_delay[2] = 0x07;
3837 info.board_lane_delay[3] = 0x08;
3838 info.board_lane_delay[4] = 0x56;
3839 info.board_lane_delay[5] = 0x04;
3840 info.board_lane_delay[6] = 0x04;
3841 info.board_lane_delay[7] = 0x05;
3842 info.board_lane_delay[8] = 0x10;
3843
3844 info.training.reg_178 = 0;
3845 info.training.reg_10b = 0;
3846
3847 info.heci_bar = 0;
3848 info.memory_reserved_for_heci_mb = 0;
3849
3850 /* before SPD */
3851 timestamp_add_now(101);
3852
Felix Held29a9c072018-07-29 01:34:45 +02003853 if (!s3resume || 1) { // FIXME: condition is always true, so SPD is re-read even on S3 resume
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003854 pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2); // = 0x80
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003855
3856 collect_system_info(&info);
3857
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003858 /* Enable SMBUS. */
3859 enable_smbus();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003860
3861 memset(&info.populated_ranks, 0, sizeof(info.populated_ranks));
3862
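/*
 * Probe the SPD EEPROM of every populated slot over SMBus and cache the
 * bytes the timing calculations need.  Anything other than unbuffered DDR3
 * with at most two ranks and x8/x16 devices makes the boot die.
 */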
3863 info.use_ecc = 1;
3864 for (channel = 0; channel < NUM_CHANNELS; channel++)
Vladimir Serbinenko2ab8ec72014-02-20 14:34:56 +01003865 for (slot = 0; slot < NUM_SLOTS; slot++) {
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003866 int v;
3867 int try;
3868 int addr;
3869 const u8 useful_addresses[] = {
3870 DEVICE_TYPE,
3871 MODULE_TYPE,
3872 DENSITY,
3873 RANKS_AND_DQ,
3874 MEMORY_BUS_WIDTH,
3875 TIMEBASE_DIVIDEND,
3876 TIMEBASE_DIVISOR,
3877 CYCLETIME,
3878 CAS_LATENCIES_LSB,
3879 CAS_LATENCIES_MSB,
3880 CAS_LATENCY_TIME,
3881 0x11, 0x12, 0x13, 0x14, 0x15,
3882 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
3883 0x1c, 0x1d,
3884 THERMAL_AND_REFRESH,
3885 0x20,
3886 REFERENCE_RAW_CARD_USED,
3887 RANK1_ADDRESS_MAPPING,
3888 0x75, 0x76, 0x77, 0x78,
3889 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e,
3890 0x7f, 0x80, 0x81, 0x82, 0x83, 0x84,
3891 0x85, 0x86, 0x87, 0x88,
3892 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e,
3893 0x8f, 0x90, 0x91, 0x92, 0x93, 0x94,
3894 0x95
3895 };
Vladimir Serbinenko902626c2014-02-16 17:22:26 +01003896 if (!spd_addrmap[2 * channel + slot])
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003897 continue;
3898 for (try = 0; try < 5; try++) {
Vladimir Serbinenko902626c2014-02-16 17:22:26 +01003899 v = smbus_read_byte(spd_addrmap[2 * channel + slot],
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003900 DEVICE_TYPE);
3901 if (v >= 0)
3902 break;
3903 }
3904 if (v < 0)
3905 continue;
3906 for (addr = 0;
3907 addr <
3908 sizeof(useful_addresses) /
3909 sizeof(useful_addresses[0]); addr++)
3910 gav(info.
3911 spd[channel][0][useful_addresses
3912 [addr]] =
Vladimir Serbinenko902626c2014-02-16 17:22:26 +01003913 smbus_read_byte(spd_addrmap[2 * channel + slot],
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003914 useful_addresses
3915 [addr]));
3916 if (info.spd[channel][0][DEVICE_TYPE] != 11)
3917 die("Only DDR3 is supported");
3918
3919 v = info.spd[channel][0][RANKS_AND_DQ];
3920 info.populated_ranks[channel][0][0] = 1;
3921 info.populated_ranks[channel][0][1] =
3922 ((v >> 3) & 7);
3923 if (((v >> 3) & 7) > 1)
3924 die("At most 2 ranks are supported");
3925 if ((v & 7) == 0 || (v & 7) > 2)
3926 die("Only x8 and x16 modules are supported");
3927 if ((info.
3928 spd[channel][slot][MODULE_TYPE] & 0xF) != 2
3929 && (info.
3930 spd[channel][slot][MODULE_TYPE] & 0xF)
3931 != 3)
3932 die("Registered memory is not supported");
3933 info.is_x16_module[channel][0] = (v & 7) - 1;
3934 info.density[channel][slot] =
3935 info.spd[channel][slot][DENSITY] & 0xF;
3936 if (!
3937 (info.
3938 spd[channel][slot][MEMORY_BUS_WIDTH] &
3939 0x18))
3940 info.use_ecc = 0;
3941 }
3942
3943 gav(0x55);
3944
3945 for (channel = 0; channel < NUM_CHANNELS; channel++) {
3946 int v = 0;
3947 for (slot = 0; slot < NUM_SLOTS; slot++)
3948 for (rank = 0; rank < NUM_RANKS; rank++)
3949 v |= info.
3950 populated_ranks[channel][slot][rank]
3951 << (2 * slot + rank);
3952 info.populated_ranks_mask[channel] = v;
3953 }
3954
3955 gav(0x55);
3956
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003957 gav(pci_read_config32(NORTHBRIDGE, D0F0_CAPID0 + 4));
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003958 }
3959
3960 /* after SPD */
3961 timestamp_add_now(102);
3962
3963 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) & 0xfc);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003964
3965 collect_system_info(&info);
3966 calculate_timings(&info);
3967
Felix Held29a9c072018-07-29 01:34:45 +02003968#if 0
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003969 pci_write_config8(NORTHBRIDGE, 0xdf, 0x82);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003970#endif
3971
3972 if (!s3resume) {
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003973 u8 reg8 = pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003974 if (x2ca8 == 0 && (reg8 & 0x80)) {
3975 /* Don't enable S4-assertion stretch. Makes trouble on roda/rk9.
3976 reg8 = pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa4);
3977 pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa4, reg8 | 0x08);
3978 */
3979
3980 /* Clear bit7. */
3981
3982 pci_write_config8(SOUTHBRIDGE, GEN_PMCON_2,
3983 (reg8 & ~(1 << 7)));
3984
3985 printk(BIOS_INFO,
3986 "Interrupted RAM init, reset required.\n");
3987 outb(0x6, 0xcf9);
Patrick Georgi546953c2014-11-29 10:38:17 +01003988 halt();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003989 }
3990 }
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003991
3992 if (!s3resume && x2ca8 == 0)
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03003993 pci_write_config8(SOUTHBRIDGE, GEN_PMCON_2,
3994 pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2) | 0x80);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01003995
3996 compute_derived_timings(&info);
3997
3998 if (x2ca8 == 0) {
3999 gav(read_mchbar8(0x164));
4000 write_mchbar8(0x164, 0x26);
4001 write_mchbar16(0x2c20, 0x10);
4002 }
4003
4004 write_mchbar32(0x18b4, read_mchbar32(0x18b4) | 0x210000); /* OK */
4005 write_mchbar32(0x1890, read_mchbar32(0x1890) | 0x2000000); /* OK */
4006 write_mchbar32(0x18b4, read_mchbar32(0x18b4) | 0x8000);
4007
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004008 gav(pci_read_config32(PCI_DEV(0xff, 2, 1), 0x50)); // !!!!
4009 pci_write_config8(PCI_DEV(0xff, 2, 1), 0x54, 0x12);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004010
4011 gav(read_mchbar16(0x2c10)); // !!!!
4012 write_mchbar16(0x2c10, 0x412);
4013 gav(read_mchbar16(0x2c10)); // !!!!
4014 write_mchbar16(0x2c12, read_mchbar16(0x2c12) | 0x100); /* OK */
4015
4016 gav(read_mchbar8(0x2ca8)); // !!!!
4017 write_mchbar32(0x1804,
4018 (read_mchbar32(0x1804) & 0xfffffffc) | 0x8400080);
4019
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004020 pci_read_config32(PCI_DEV(0xff, 2, 1), 0x6c); // !!!!
4021 pci_write_config32(PCI_DEV(0xff, 2, 1), 0x6c, 0x40a0a0);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004022 gav(read_mchbar32(0x1c04)); // !!!!
4023 gav(read_mchbar32(0x1804)); // !!!!
4024
4025 if (x2ca8 == 0) {
4026 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1);
4027 }
4028
4029 write_mchbar32(0x18d8, 0x120000);
4030 write_mchbar32(0x18dc, 0x30a484a);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004031 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xe0, 0x0);
4032 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xf4, 0x9444a);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004033 write_mchbar32(0x18d8, 0x40000);
4034 write_mchbar32(0x18dc, 0xb000000);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004035 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xe0, 0x60000);
4036 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xf4, 0x0);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004037 write_mchbar32(0x18d8, 0x180000);
4038 write_mchbar32(0x18dc, 0xc0000142);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004039 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xe0, 0x20000);
4040 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xf4, 0x142);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004041 write_mchbar32(0x18d8, 0x1e0000);
4042
4043 gav(read_mchbar32(0x18dc)); // !!!!
4044 write_mchbar32(0x18dc, 0x3);
4045 gav(read_mchbar32(0x18dc)); // !!!!
4046
4047 if (x2ca8 == 0) {
4048 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1); // guess
4049 }
4050
4051 write_mchbar32(0x188c, 0x20bc09);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004052 pci_write_config32(PCI_DEV(0xff, 2, 1), 0xd0, 0x40b0c09);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004053 write_mchbar32(0x1a10, 0x4200010e);
4054 write_mchbar32(0x18b8, read_mchbar32(0x18b8) | 0x200);
4055 gav(read_mchbar32(0x1918)); // !!!!
4056 write_mchbar32(0x1918, 0x332);
4057
4058 gav(read_mchbar32(0x18b8)); // !!!!
4059 write_mchbar32(0x18b8, 0xe00);
4060 gav(read_mchbar32(0x182c)); // !!!!
4061 write_mchbar32(0x182c, 0x10202);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004062 gav(pci_read_config32(PCI_DEV(0xff, 2, 1), 0x94)); // !!!!
4063 pci_write_config32(PCI_DEV(0xff, 2, 1), 0x94, 0x10202);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004064 write_mchbar32(0x1a1c, read_mchbar32(0x1a1c) & 0x8fffffff);
4065 write_mchbar32(0x1a70, read_mchbar32(0x1a70) | 0x100000);
4066
4067 write_mchbar32(0x18b4, read_mchbar32(0x18b4) & 0xffff7fff);
4068 gav(read_mchbar32(0x1a68)); // !!!!
4069 write_mchbar32(0x1a68, 0x343800);
4070 gav(read_mchbar32(0x1e68)); // !!!!
4071 gav(read_mchbar32(0x1a68)); // !!!!
4072
4073 if (x2ca8 == 0) {
4074 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1); // guess
4075 }
4076
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004077 pci_read_config32(PCI_DEV(0xff, 2, 0), 0x048); // !!!!
4078 pci_write_config32(PCI_DEV(0xff, 2, 0), 0x048, 0x140000);
4079 pci_read_config32(PCI_DEV(0xff, 2, 0), 0x058); // !!!!
4080 pci_write_config32(PCI_DEV(0xff, 2, 0), 0x058, 0x64555);
4081 pci_read_config32(PCI_DEV(0xff, 2, 0), 0x058); // !!!!
4082 pci_read_config32(PCI_DEV (0xff, 0, 0), 0xd0); // !!!!
4083 pci_write_config32(PCI_DEV (0xff, 0, 0), 0xd0, 0x180);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004084 gav(read_mchbar32(0x1af0)); // !!!!
4085 gav(read_mchbar32(0x1af0)); // !!!!
4086 write_mchbar32(0x1af0, 0x1f020003);
4087 gav(read_mchbar32(0x1af0)); // !!!!
4088
Edward O'Callaghan42b716f2014-06-26 21:38:52 +10004089 if (x2ca8 == 0) {
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004090 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) | 1); // guess
4091 }
4092
4093 gav(read_mchbar32(0x1890)); // !!!!
4094 write_mchbar32(0x1890, 0x80102);
4095 gav(read_mchbar32(0x18b4)); // !!!!
4096 write_mchbar32(0x18b4, 0x216000);
4097 write_mchbar32(0x18a4, 0x22222222);
4098 write_mchbar32(0x18a8, 0x22222222);
4099 write_mchbar32(0x18ac, 0x22222);
4100
4101 udelay(1000);
4102
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01004103 info.cached_training = get_cached_training();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004104
Vladimir Serbinenkof7a42de2014-01-09 11:10:04 +01004105 if (x2ca8 == 0) {
4106 int j;
4107 if (s3resume && info.cached_training) {
4108 restore_274265(&info);
4109 printk(BIOS_DEBUG, "reg2ca9_bit0 = %x\n",
4110 info.cached_training->reg2ca9_bit0);
4111 for (i = 0; i < 2; i++)
4112 for (j = 0; j < 3; j++)
4113 printk(BIOS_DEBUG, "reg274265[%d][%d] = %x\n",
4114 i, j, info.cached_training->reg274265[i][j]);
4115 } else {
4116 set_274265(&info);
4117 printk(BIOS_DEBUG, "reg2ca9_bit0 = %x\n",
4118 info.training.reg2ca9_bit0);
4119 for (i = 0; i < 2; i++)
4120 for (j = 0; j < 3; j++)
4121 printk(BIOS_DEBUG, "reg274265[%d][%d] = %x\n",
4122 i, j, info.training.reg274265[i][j]);
4123 }
4124
4125 set_2dxx_series(&info, s3resume);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004126
4127 if (!(deven & 8)) {
4128 read_mchbar32(0x2cb0);
4129 write_mchbar32(0x2cb0, 0x40);
4130 }
4131
4132 udelay(1000);
4133
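/*
 * With the integrated graphics device enabled (DEVEN bit 3), program a
 * large block of MCHBAR tables.  The values are taken as-is from the
 * reverse-engineered register dumps and are not further documented.
 */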
4134 if (deven & 8) {
4135 write_mchbar32(0xff8, 0x1800 | read_mchbar32(0xff8));
4136 read_mchbar32(0x2cb0);
4137 write_mchbar32(0x2cb0, 0x00);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004138 pci_read_config8(PCI_DEV (0, 0x2, 0x0), 0x4c);
4139 pci_read_config8(PCI_DEV (0, 0x2, 0x0), 0x4c);
4140 pci_read_config8(PCI_DEV (0, 0x2, 0x0), 0x4e);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004141
4142 read_mchbar8(0x1150);
4143 read_mchbar8(0x1151);
4144 read_mchbar8(0x1022);
4145 read_mchbar8(0x16d0);
4146 write_mchbar32(0x1300, 0x60606060);
4147 write_mchbar32(0x1304, 0x60606060);
4148 write_mchbar32(0x1308, 0x78797a7b);
4149 write_mchbar32(0x130c, 0x7c7d7e7f);
4150 write_mchbar32(0x1310, 0x60606060);
4151 write_mchbar32(0x1314, 0x60606060);
4152 write_mchbar32(0x1318, 0x60606060);
4153 write_mchbar32(0x131c, 0x60606060);
4154 write_mchbar32(0x1320, 0x50515253);
4155 write_mchbar32(0x1324, 0x54555657);
4156 write_mchbar32(0x1328, 0x58595a5b);
4157 write_mchbar32(0x132c, 0x5c5d5e5f);
4158 write_mchbar32(0x1330, 0x40414243);
4159 write_mchbar32(0x1334, 0x44454647);
4160 write_mchbar32(0x1338, 0x48494a4b);
4161 write_mchbar32(0x133c, 0x4c4d4e4f);
4162 write_mchbar32(0x1340, 0x30313233);
4163 write_mchbar32(0x1344, 0x34353637);
4164 write_mchbar32(0x1348, 0x38393a3b);
4165 write_mchbar32(0x134c, 0x3c3d3e3f);
4166 write_mchbar32(0x1350, 0x20212223);
4167 write_mchbar32(0x1354, 0x24252627);
4168 write_mchbar32(0x1358, 0x28292a2b);
4169 write_mchbar32(0x135c, 0x2c2d2e2f);
4170 write_mchbar32(0x1360, 0x10111213);
4171 write_mchbar32(0x1364, 0x14151617);
4172 write_mchbar32(0x1368, 0x18191a1b);
4173 write_mchbar32(0x136c, 0x1c1d1e1f);
4174 write_mchbar32(0x1370, 0x10203);
4175 write_mchbar32(0x1374, 0x4050607);
4176 write_mchbar32(0x1378, 0x8090a0b);
4177 write_mchbar32(0x137c, 0xc0d0e0f);
4178 write_mchbar8(0x11cc, 0x4e);
4179 write_mchbar32(0x1110, 0x73970404);
4180 write_mchbar32(0x1114, 0x72960404);
4181 write_mchbar32(0x1118, 0x6f950404);
4182 write_mchbar32(0x111c, 0x6d940404);
4183 write_mchbar32(0x1120, 0x6a930404);
4184 write_mchbar32(0x1124, 0x68a41404);
4185 write_mchbar32(0x1128, 0x66a21404);
4186 write_mchbar32(0x112c, 0x63a01404);
4187 write_mchbar32(0x1130, 0x609e1404);
4188 write_mchbar32(0x1134, 0x5f9c1404);
4189 write_mchbar32(0x1138, 0x5c961404);
4190 write_mchbar32(0x113c, 0x58a02404);
4191 write_mchbar32(0x1140, 0x54942404);
4192 write_mchbar32(0x1190, 0x900080a);
4193 write_mchbar16(0x11c0, 0xc40b);
4194 write_mchbar16(0x11c2, 0x303);
4195 write_mchbar16(0x11c4, 0x301);
4196 read_mchbar32(0x1190);
4197 write_mchbar32(0x1190, 0x8900080a);
4198 write_mchbar32(0x11b8, 0x70c3000);
4199 write_mchbar8(0x11ec, 0xa);
4200 write_mchbar16(0x1100, 0x800);
4201 read_mchbar32(0x11bc);
4202 write_mchbar32(0x11bc, 0x1e84800);
4203 write_mchbar16(0x11ca, 0xfa);
4204 write_mchbar32(0x11e4, 0x4e20);
4205 write_mchbar8(0x11bc, 0xf);
4206 write_mchbar16(0x11da, 0x19);
4207 write_mchbar16(0x11ba, 0x470c);
4208 write_mchbar32(0x1680, 0xe6ffe4ff);
4209 write_mchbar32(0x1684, 0xdeffdaff);
4210 write_mchbar32(0x1688, 0xd4ffd0ff);
4211 write_mchbar32(0x168c, 0xccffc6ff);
4212 write_mchbar32(0x1690, 0xc0ffbeff);
4213 write_mchbar32(0x1694, 0xb8ffb0ff);
4214 write_mchbar32(0x1698, 0xa8ff0000);
4215 write_mchbar32(0x169c, 0xc00);
4216 write_mchbar32(0x1290, 0x5000000);
4217 }
4218
4219 write_mchbar32(0x124c, 0x15040d00);
4220 write_mchbar32(0x1250, 0x7f0000);
4221 write_mchbar32(0x1254, 0x1e220004);
4222 write_mchbar32(0x1258, 0x4000004);
4223 write_mchbar32(0x1278, 0x0);
4224 write_mchbar32(0x125c, 0x0);
4225 write_mchbar32(0x1260, 0x0);
4226 write_mchbar32(0x1264, 0x0);
4227 write_mchbar32(0x1268, 0x0);
4228 write_mchbar32(0x126c, 0x0);
4229 write_mchbar32(0x1270, 0x0);
4230 write_mchbar32(0x1274, 0x0);
4231 }
4232
4233 if ((deven & 8) && x2ca8 == 0) {
4234 write_mchbar16(0x1214, 0x320);
4235 write_mchbar32(0x1600, 0x40000000);
4236 read_mchbar32(0x11f4);
4237 write_mchbar32(0x11f4, 0x10000000);
4238 read_mchbar16(0x1230);
4239 write_mchbar16(0x1230, 0x8000);
4240 write_mchbar32(0x1400, 0x13040020);
4241 write_mchbar32(0x1404, 0xe090120);
4242 write_mchbar32(0x1408, 0x5120220);
4243 write_mchbar32(0x140c, 0x5120330);
4244 write_mchbar32(0x1410, 0xe090220);
4245 write_mchbar32(0x1414, 0x1010001);
4246 write_mchbar32(0x1418, 0x1110000);
4247 write_mchbar32(0x141c, 0x9020020);
4248 write_mchbar32(0x1420, 0xd090220);
4249 write_mchbar32(0x1424, 0x2090220);
4250 write_mchbar32(0x1428, 0x2090330);
4251 write_mchbar32(0x142c, 0xd090220);
4252 write_mchbar32(0x1430, 0x1010001);
4253 write_mchbar32(0x1434, 0x1110000);
4254 write_mchbar32(0x1438, 0x11040020);
4255 write_mchbar32(0x143c, 0x4030220);
4256 write_mchbar32(0x1440, 0x1060220);
4257 write_mchbar32(0x1444, 0x1060330);
4258 write_mchbar32(0x1448, 0x4030220);
4259 write_mchbar32(0x144c, 0x1010001);
4260 write_mchbar32(0x1450, 0x1110000);
4261 write_mchbar32(0x1454, 0x4010020);
4262 write_mchbar32(0x1458, 0xb090220);
4263 write_mchbar32(0x145c, 0x1090220);
4264 write_mchbar32(0x1460, 0x1090330);
4265 write_mchbar32(0x1464, 0xb090220);
4266 write_mchbar32(0x1468, 0x1010001);
4267 write_mchbar32(0x146c, 0x1110000);
4268 write_mchbar32(0x1470, 0xf040020);
4269 write_mchbar32(0x1474, 0xa090220);
4270 write_mchbar32(0x1478, 0x1120220);
4271 write_mchbar32(0x147c, 0x1120330);
4272 write_mchbar32(0x1480, 0xa090220);
4273 write_mchbar32(0x1484, 0x1010001);
4274 write_mchbar32(0x1488, 0x1110000);
4275 write_mchbar32(0x148c, 0x7020020);
4276 write_mchbar32(0x1490, 0x1010220);
4277 write_mchbar32(0x1494, 0x10210);
4278 write_mchbar32(0x1498, 0x10320);
4279 write_mchbar32(0x149c, 0x1010220);
4280 write_mchbar32(0x14a0, 0x1010001);
4281 write_mchbar32(0x14a4, 0x1110000);
4282 write_mchbar32(0x14a8, 0xd040020);
4283 write_mchbar32(0x14ac, 0x8090220);
4284 write_mchbar32(0x14b0, 0x1111310);
4285 write_mchbar32(0x14b4, 0x1111420);
4286 write_mchbar32(0x14b8, 0x8090220);
4287 write_mchbar32(0x14bc, 0x1010001);
4288 write_mchbar32(0x14c0, 0x1110000);
4289 write_mchbar32(0x14c4, 0x3010020);
4290 write_mchbar32(0x14c8, 0x7090220);
4291 write_mchbar32(0x14cc, 0x1081310);
4292 write_mchbar32(0x14d0, 0x1081420);
4293 write_mchbar32(0x14d4, 0x7090220);
4294 write_mchbar32(0x14d8, 0x1010001);
4295 write_mchbar32(0x14dc, 0x1110000);
4296 write_mchbar32(0x14e0, 0xb040020);
4297 write_mchbar32(0x14e4, 0x2030220);
4298 write_mchbar32(0x14e8, 0x1051310);
4299 write_mchbar32(0x14ec, 0x1051420);
4300 write_mchbar32(0x14f0, 0x2030220);
4301 write_mchbar32(0x14f4, 0x1010001);
4302 write_mchbar32(0x14f8, 0x1110000);
4303 write_mchbar32(0x14fc, 0x5020020);
4304 write_mchbar32(0x1500, 0x5090220);
4305 write_mchbar32(0x1504, 0x2071310);
4306 write_mchbar32(0x1508, 0x2071420);
4307 write_mchbar32(0x150c, 0x5090220);
4308 write_mchbar32(0x1510, 0x1010001);
4309 write_mchbar32(0x1514, 0x1110000);
4310 write_mchbar32(0x1518, 0x7040120);
4311 write_mchbar32(0x151c, 0x2090220);
4312 write_mchbar32(0x1520, 0x70b1210);
4313 write_mchbar32(0x1524, 0x70b1310);
4314 write_mchbar32(0x1528, 0x2090220);
4315 write_mchbar32(0x152c, 0x1010001);
4316 write_mchbar32(0x1530, 0x1110000);
4317 write_mchbar32(0x1534, 0x1010110);
4318 write_mchbar32(0x1538, 0x1081310);
4319 write_mchbar32(0x153c, 0x5041200);
4320 write_mchbar32(0x1540, 0x5041310);
4321 write_mchbar32(0x1544, 0x1081310);
4322 write_mchbar32(0x1548, 0x1010001);
4323 write_mchbar32(0x154c, 0x1110000);
4324 write_mchbar32(0x1550, 0x1040120);
4325 write_mchbar32(0x1554, 0x4051210);
4326 write_mchbar32(0x1558, 0xd051200);
4327 write_mchbar32(0x155c, 0xd051200);
4328 write_mchbar32(0x1560, 0x4051210);
4329 write_mchbar32(0x1564, 0x1010001);
4330 write_mchbar32(0x1568, 0x1110000);
4331 write_mchbar16(0x1222, 0x220a);
4332 write_mchbar16(0x123c, 0x1fc0);
4333 write_mchbar16(0x1220, 0x1388);
4334 }
4335
4336 read_mchbar32(0x2c80); // !!!!
4337 write_mchbar32(0x2c80, 0x1053688);
4338 read_mchbar32(0x1c04); // !!!!
4339 write_mchbar32(0x1804, 0x406080);
4340
4341 read_mchbar8(0x2ca8);
4342
4343 if (x2ca8 == 0) {
4344 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) & ~3);
4345 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8) + 4);
4346 write_mchbar32(0x1af0, read_mchbar32(0x1af0) | 0x10);
Patrick Georgi546953c2014-11-29 10:38:17 +01004347 halt();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004348 }
4349
4350 write_mchbar8(0x2ca8, read_mchbar8(0x2ca8));
4351 read_mchbar32(0x2c80); // !!!!
4352 write_mchbar32(0x2c80, 0x53688);
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004353 pci_write_config32(PCI_DEV (0xff, 0, 0), 0x60, 0x20220);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004354 read_mchbar16(0x2c20); // !!!!
4355 read_mchbar16(0x2c10); // !!!!
4356 read_mchbar16(0x2c00); // !!!!
4357 write_mchbar16(0x2c00, 0x8c0);
4358 udelay(1000);
4359 write_1d0(0, 0x33d, 0, 0);
4360 write_500(&info, 0, 0, 0xb61, 0, 0);
4361 write_500(&info, 1, 0, 0xb61, 0, 0);
4362 write_mchbar32(0x1a30, 0x0);
4363 write_mchbar32(0x1a34, 0x0);
4364 write_mchbar16(0x614,
4365 0xb5b | (info.populated_ranks[1][0][0] *
4366 0x404) | (info.populated_ranks[0][0][0] *
4367 0xa0));
4368 write_mchbar16(0x616, 0x26a);
4369 write_mchbar32(0x134, 0x856000);
4370 write_mchbar32(0x160, 0x5ffffff);
4371 read_mchbar32(0x114); // !!!!
4372 write_mchbar32(0x114, 0xc2024440);
4373 read_mchbar32(0x118); // !!!!
4374 write_mchbar32(0x118, 0x4);
4375 for (channel = 0; channel < NUM_CHANNELS; channel++)
4376 write_mchbar32(0x260 + (channel << 10),
4377 0x30809ff |
4378 ((info.
4379 populated_ranks_mask[channel] & 3) << 20));
4380 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4381 write_mchbar16(0x31c + (channel << 10), 0x101);
4382 write_mchbar16(0x360 + (channel << 10), 0x909);
4383 write_mchbar16(0x3a4 + (channel << 10), 0x101);
4384 write_mchbar16(0x3e8 + (channel << 10), 0x101);
4385 write_mchbar32(0x320 + (channel << 10), 0x29002900);
4386 write_mchbar32(0x324 + (channel << 10), 0x0);
4387 write_mchbar32(0x368 + (channel << 10), 0x32003200);
4388 write_mchbar16(0x352 + (channel << 10), 0x505);
4389 write_mchbar16(0x354 + (channel << 10), 0x3c3c);
4390 write_mchbar16(0x356 + (channel << 10), 0x1040);
4391 write_mchbar16(0x39a + (channel << 10), 0x73e4);
4392 write_mchbar16(0x3de + (channel << 10), 0x77ed);
4393 write_mchbar16(0x422 + (channel << 10), 0x1040);
4394 }
4395
4396 write_1d0(0x4, 0x151, 4, 1);
4397 write_1d0(0, 0x142, 3, 1);
4398 rdmsr(0x1ac); // !!!!
4399 write_500(&info, 1, 1, 0x6b3, 4, 1);
4400 write_500(&info, 1, 1, 0x6cf, 4, 1);
4401
4402 rmw_1d0(0x21c, 0x38, 0, 6, 1);
4403
4404 write_1d0(((!info.populated_ranks[1][0][0]) << 1) | ((!info.
4405 populated_ranks[0]
4406 [0][0]) << 0),
4407 0x1d1, 3, 1);
4408 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4409 write_mchbar16(0x38e + (channel << 10), 0x5f5f);
4410 write_mchbar16(0x3d2 + (channel << 10), 0x5f5f);
4411 }
4412
4413 set_334(0);
4414
4415 program_base_timings(&info);
4416
4417 write_mchbar8(0x5ff, read_mchbar8(0x5ff) | 0x80); /* OK */
4418
4419 write_1d0(0x2, 0x1d5, 2, 1);
4420 write_1d0(0x20, 0x166, 7, 1);
4421 write_1d0(0x0, 0xeb, 3, 1);
4422 write_1d0(0x0, 0xf3, 6, 1);
4423
4424 for (channel = 0; channel < NUM_CHANNELS; channel++)
4425 for (lane = 0; lane < 9; lane++) {
4426 u16 addr = 0x125 + get_lane_offset(0, 0, lane);
4427 u8 a;
4428 a = read_500(&info, channel, addr, 6); // = 0x20040080 //!!!!
4429 write_500(&info, channel, a, addr, 6, 1);
4430 }
4431
4432 udelay(1000);
4433
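/*
 * On S3 resume, replay the per-lane timings saved during the original boot
 * instead of retraining.  Without cached training data resume cannot work,
 * so clear the sleep type and force a cold reset.
 */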
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004434 if (s3resume) {
4435 if (info.cached_training == NULL) {
4436 u32 reg32;
4437 printk(BIOS_ERR,
4438 "Couldn't find training data. Rebooting\n");
4439 reg32 = inl(DEFAULT_PMBASE + 0x04);
4440 outl(reg32 & ~(7 << 10), DEFAULT_PMBASE + 0x04);
4441 outb(0xe, 0xcf9);
Patrick Georgi546953c2014-11-29 10:38:17 +01004442 halt();
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004443 }
4444 int tm;
4445 info.training = *info.cached_training;
4446 for (tm = 0; tm < 4; tm++)
4447 for (channel = 0; channel < NUM_CHANNELS; channel++)
4448 for (slot = 0; slot < NUM_SLOTS; slot++)
4449 for (rank = 0; rank < NUM_RANKS; rank++)
4450 for (lane = 0; lane < 9; lane++)
4451 write_500(&info,
4452 channel,
4453 info.training.
4454 lane_timings
4455 [tm][channel]
4456 [slot][rank]
4457 [lane],
4458 get_timing_register_addr
4459 (lane, tm,
4460 slot, rank),
4461 9, 0);
4462 write_1d0(info.cached_training->reg_178, 0x178, 7, 1);
4463 write_1d0(info.cached_training->reg_10b, 0x10b, 6, 1);
4464 }
4465
4466 read_mchbar32(0x1f4); // !!!!
4467 write_mchbar32(0x1f4, 0x20000);
4468 write_mchbar32(0x1f0, 0x1d000200);
4469 read_mchbar8(0x1f0); // !!!!
4470 write_mchbar8(0x1f0, 0x1);
4471 read_mchbar8(0x1f0); // !!!!
4472
4473 program_board_delay(&info);
4474
4475 write_mchbar8(0x5ff, 0x0); /* OK */
4476 write_mchbar8(0x5ff, 0x80); /* OK */
4477 write_mchbar8(0x5f4, 0x1); /* OK */
4478
4479 write_mchbar32(0x130, read_mchbar32(0x130) & 0xfffffffd); // | 2 when ?
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02004480 while (read_mchbar32(0x130) & 1);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004481 gav(read_1d0(0x14b, 7)); // = 0x81023100
4482 write_1d0(0x30, 0x14b, 7, 1);
4483 read_1d0(0xd6, 6); // = 0xfa008080 // !!!!
4484 write_1d0(7, 0xd6, 6, 1);
4485 read_1d0(0x328, 6); // = 0xfa018080 // !!!!
4486 write_1d0(7, 0x328, 6, 1);
4487
4488 for (channel = 0; channel < NUM_CHANNELS; channel++)
4489 set_4cf(&info, channel,
4490 info.populated_ranks[channel][0][0] ? 8 : 0);
4491
4492 read_1d0(0x116, 4); // = 0x4040432 // !!!!
4493 write_1d0(2, 0x116, 4, 1);
4494 read_1d0(0xae, 6); // = 0xe8088080 // !!!!
4495 write_1d0(0, 0xae, 6, 1);
4496 read_1d0(0x300, 4); // = 0x48088080 // !!!!
4497 write_1d0(0, 0x300, 6, 1);
4498 read_mchbar16(0x356); // !!!!
4499 write_mchbar16(0x356, 0x1040);
4500 read_mchbar16(0x756); // !!!!
4501 write_mchbar16(0x756, 0x1040);
4502 write_mchbar32(0x140, read_mchbar32(0x140) & ~0x07000000);
4503 write_mchbar32(0x138, read_mchbar32(0x138) & ~0x07000000);
4504 write_mchbar32(0x130, 0x31111301);
Vladimir Serbinenko25fc5322014-12-07 13:05:44 +01004505 /* Wait until bit 0 of MCHBAR 0x130 clears. */
4506 while (read_mchbar32(0x130) & 1)
4507 ;
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004508
4509 {
4510 u32 t;
4511 u8 val_a1;
4512 val_a1 = read_1d0(0xa1, 6); // = 0x1cf4040 // !!!!
4513 t = read_1d0(0x2f3, 6); // = 0x10a4040 // !!!!
4514 rmw_1d0(0x320, 0x07,
4515 (t & 4) | ((t & 8) >> 2) | ((t & 0x10) >> 4), 6, 1);
4516 rmw_1d0(0x14b, 0x78,
4517 ((((val_a1 >> 2) & 4) | (val_a1 & 8)) >> 2) | (val_a1 &
4518 4), 7,
4519 1);
4520 rmw_1d0(0xce, 0x38,
4521 ((((val_a1 >> 2) & 4) | (val_a1 & 8)) >> 2) | (val_a1 &
4522 4), 6,
4523 1);
4524 }
4525
4526 for (channel = 0; channel < NUM_CHANNELS; channel++)
4527 set_4cf(&info, channel,
4528 info.populated_ranks[channel][0][0] ? 9 : 1);
4529
4530 rmw_1d0(0x116, 0xe, 1, 4, 1); // = 0x4040432 // !!!!
4531 read_mchbar32(0x144); // !!!!
4532 write_1d0(2, 0xae, 6, 1);
4533 write_1d0(2, 0x300, 6, 1);
4534 write_1d0(2, 0x121, 3, 1);
4535 read_1d0(0xd6, 6); // = 0xfa00c0c7 // !!!!
4536 write_1d0(4, 0xd6, 6, 1);
4537 read_1d0(0x328, 6); // = 0xfa00c0c7 // !!!!
4538 write_1d0(4, 0x328, 6, 1);
4539
4540 for (channel = 0; channel < NUM_CHANNELS; channel++)
4541 set_4cf(&info, channel,
4542 info.populated_ranks[channel][0][0] ? 9 : 0);
4543
4544 write_mchbar32(0x130,
4545 0x11111301 | (info.
4546 populated_ranks[1][0][0] << 30) | (info.
4547 populated_ranks
4548 [0][0]
4549 [0] <<
4550 29));
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02004551 while (read_mchbar8(0x130) & 1); // !!!!
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004552 read_1d0(0xa1, 6); // = 0x1cf4054 // !!!!
4553 read_1d0(0x2f3, 6); // = 0x10a4054 // !!!!
4554 read_1d0(0x21c, 6); // = 0xafa00c0 // !!!!
4555 write_1d0(0, 0x21c, 6, 1);
4556 read_1d0(0x14b, 7); // = 0x810231b0 // !!!!
4557 write_1d0(0x35, 0x14b, 7, 1);
4558
4559 for (channel = 0; channel < NUM_CHANNELS; channel++)
4560 set_4cf(&info, channel,
4561 info.populated_ranks[channel][0][0] ? 0xb : 0x2);
4562
4563 set_334(1);
4564
4565 write_mchbar8(0x1e8, 0x4); /* OK */
4566
4567 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4568 write_500(&info, channel,
4569 0x3 & ~(info.populated_ranks_mask[channel]), 0x6b7, 2,
4570 1);
4571 write_500(&info, channel, 0x3, 0x69b, 2, 1);
4572 }
4573 write_mchbar32(0x2d0, (read_mchbar32(0x2d0) & 0xff2c01ff) | 0x200000); /* OK */
4574 write_mchbar16(0x6c0, 0x14a0); /* OK */
4575 write_mchbar32(0x6d0, (read_mchbar32(0x6d0) & 0xff0080ff) | 0x8000); /* OK */
4576 write_mchbar16(0x232, 0x8);
4577 write_mchbar32(0x234, (read_mchbar32(0x234) & 0xfffbfffb) | 0x40004); /* 0x40004 or 0 depending on ? */
4578 write_mchbar32(0x34, (read_mchbar32(0x34) & 0xfffffffd) | 5); /* OK */
4579 write_mchbar32(0x128, 0x2150d05);
4580 write_mchbar8(0x12c, 0x1f); /* OK */
4581 write_mchbar8(0x12d, 0x56); /* OK */
4582 write_mchbar8(0x12e, 0x31);
4583 write_mchbar8(0x12f, 0x0); /* OK */
4584 write_mchbar8(0x271, 0x2); /* OK */
4585 write_mchbar8(0x671, 0x2); /* OK */
4586 write_mchbar8(0x1e8, 0x4); /* OK */
4587 for (channel = 0; channel < NUM_CHANNELS; channel++)
4588 write_mchbar32(0x294 + (channel << 10),
4589 (info.populated_ranks_mask[channel] & 3) << 16);
4590 write_mchbar32(0x134, (read_mchbar32(0x134) & 0xfc01ffff) | 0x10000); /* OK */
4591 write_mchbar32(0x134, (read_mchbar32(0x134) & 0xfc85ffff) | 0x850000); /* OK */
4592 for (channel = 0; channel < NUM_CHANNELS; channel++)
4593 write_mchbar32(0x260 + (channel << 10),
4594 (read_mchbar32(0x260 + (channel << 10)) &
4595 ~0xf00000) | 0x8000000 | ((info.
4596 populated_ranks_mask
4597 [channel] & 3) <<
4598 20));
4599
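/*
 * Cold boot only: run the JEDEC initialization sequence on the DIMMs.  The
 * per-rank reads that follow are issued on both cold boot and resume.
 */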
4600 if (!s3resume)
4601 jedec_init(&info);
4602
4603 int totalrank = 0;
4604 for (channel = 0; channel < NUM_CHANNELS; channel++)
4605 for (slot = 0; slot < NUM_SLOTS; slot++)
4606 for (rank = 0; rank < NUM_RANKS; rank++)
4607 if (info.populated_ranks[channel][slot][rank]) {
4608 jedec_read(&info, channel, slot, rank,
4609 totalrank, 0xa, 0x400);
4610 totalrank++;
4611 }
4612
4613 write_mchbar8(0x12c, 0x9f);
4614
4615 read_mchbar8(0x271); // 2 // !!!!
4616 write_mchbar8(0x271, 0xe);
4617 read_mchbar8(0x671); // !!!!
4618 write_mchbar8(0x671, 0xe);
4619
4620 if (!s3resume) {
4621 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4622 write_mchbar32(0x294 + (channel << 10),
4623 (info.
4624 populated_ranks_mask[channel] & 3) <<
4625 16);
4626 write_mchbar16(0x298 + (channel << 10),
4627 (info.
4628 populated_ranks[channel][0][0]) | (info.
4629 populated_ranks
4630 [channel]
4631 [0]
4632 [1]
4633 <<
4634 5));
4635 write_mchbar32(0x29c + (channel << 10), 0x77a);
4636 }
4637 read_mchbar32(0x2c0); /// !!!
4638 write_mchbar32(0x2c0, 0x6009cc00);
4639
4640 {
4641 u8 a, b;
4642 a = read_mchbar8(0x243); // !!!!
4643 b = read_mchbar8(0x643); // !!!!
4644 write_mchbar8(0x243, a | 2);
4645 write_mchbar8(0x643, b | 2);
4646 }
4647
4648 write_1d0(7, 0x19b, 3, 1);
4649 write_1d0(7, 0x1c0, 3, 1);
4650 write_1d0(4, 0x1c6, 4, 1);
4651 write_1d0(4, 0x1cc, 4, 1);
4652 read_1d0(0x151, 4); // = 0x408c6d74 // !!!!
4653 write_1d0(4, 0x151, 4, 1);
4654 write_mchbar32(0x584, 0xfffff);
4655 write_mchbar32(0x984, 0xfffff);
4656
4657 for (channel = 0; channel < NUM_CHANNELS; channel++)
4658 for (slot = 0; slot < NUM_SLOTS; slot++)
4659 for (rank = 0; rank < NUM_RANKS; rank++)
4660 if (info.
4661 populated_ranks[channel][slot]
4662 [rank])
4663 config_rank(&info, s3resume,
4664 channel, slot,
4665 rank);
4666
4667 write_mchbar8(0x243, 0x1);
4668 write_mchbar8(0x643, 0x1);
4669 }
4670
4671 /* Originally only in the == 1 case, but it is common to both paths. */
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004672 pci_write_config16(NORTHBRIDGE, 0xc8, 3);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004673 write_26c(0, 0x820);
4674 write_26c(1, 0x820);
4675 write_mchbar32(0x130, read_mchbar32(0x130) | 2);
4676 /* end */
4677
4678 if (s3resume) {
4679 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4680 write_mchbar32(0x294 + (channel << 10),
4681 (info.
4682 populated_ranks_mask[channel] & 3) <<
4683 16);
4684 write_mchbar16(0x298 + (channel << 10),
4685 (info.
4686 populated_ranks[channel][0][0]) | (info.
4687 populated_ranks
4688 [channel]
4689 [0]
4690 [1]
4691 <<
4692 5));
4693 write_mchbar32(0x29c + (channel << 10), 0x77a);
4694 }
4695 read_mchbar32(0x2c0); /// !!!
4696 write_mchbar32(0x2c0, 0x6009cc00);
4697 }
4698
4699 write_mchbar32(0xfa4, read_mchbar32(0xfa4) & ~0x01000002);
4700 write_mchbar32(0xfb0, 0x2000e019);
4701
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004702 /* Before training. */
4703 timestamp_add_now(103);
4704
4705 if (!s3resume)
4706 ram_training(&info);
4707
4708 /* After training. */
Paul Menzel9e817bf2015-05-28 07:32:48 +02004709 timestamp_add_now(104);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004710
4711 dump_timings(&info);
4712
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004713 program_modules_memory_map(&info, 0);
4714 program_total_memory_map(&info);
4715
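/*
 * Choose the mode bits in MCHBAR 0x111 based on how the populated ranks of
 * the two channels match up, i.e. on how much of the memory can be
 * interleaved between the channels.
 */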
4716 if (info.non_interleaved_part_mb != 0 && info.interleaved_part_mb != 0)
4717 write_mchbar8(0x111, 0x20 | (0 << 2) | (1 << 6) | (0 << 7));
4718 else if (have_match_ranks(&info, 0, 4) && have_match_ranks(&info, 1, 4))
4719 write_mchbar8(0x111, 0x20 | (3 << 2) | (0 << 6) | (1 << 7));
4720 else if (have_match_ranks(&info, 0, 2) && have_match_ranks(&info, 1, 2))
4721 write_mchbar8(0x111, 0x20 | (3 << 2) | (0 << 6) | (0 << 7));
4722 else
4723 write_mchbar8(0x111, 0x20 | (3 << 2) | (1 << 6) | (0 << 7));
4724
4725 write_mchbar32(0xfac, read_mchbar32(0xfac) & ~0x80000000); // OK
4726 write_mchbar32(0xfb4, 0x4800); // OK
4727 write_mchbar32(0xfb8, (info.revision < 8) ? 0x20 : 0x0); // OK
4728 write_mchbar32(0xe94, 0x7ffff); // OK
4729 write_mchbar32(0xfc0, 0x80002040); // OK
4730 write_mchbar32(0xfc4, 0x701246); // OK
4731 write_mchbar8(0xfc8, read_mchbar8(0xfc8) & ~0x70); // OK
4732 write_mchbar32(0xe5c, 0x1000000 | read_mchbar32(0xe5c)); // OK
4733 write_mchbar32(0x1a70, (read_mchbar32(0x1a70) | 0x00200000) & ~0x00100000); // OK
4734 write_mchbar32(0x50, 0x700b0); // OK
4735 write_mchbar32(0x3c, 0x10); // OK
4736 write_mchbar8(0x1aa8, (read_mchbar8(0x1aa8) & ~0x35) | 0xa); // OK
4737 write_mchbar8(0xff4, read_mchbar8(0xff4) | 0x2); // OK
4738 write_mchbar32(0xff8, (read_mchbar32(0xff8) & ~0xe008) | 0x1020); // OK
4739
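/*
 * Program the IOMMU (VT-d) unit base addresses and write to offset 0xffc
 * of each unit, presumably a control/enable register.  The disabled branch
 * below instead adjusts units whose bases were already set up and is kept
 * for reference.
 */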
Felix Held29a9c072018-07-29 01:34:45 +02004740#if 1
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004741 write_mchbar32(0xd00, IOMMU_BASE2 | 1);
4742 write_mchbar32(0xd40, IOMMU_BASE1 | 1);
4743 write_mchbar32(0xdc0, IOMMU_BASE4 | 1);
4744
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08004745 write32p(IOMMU_BASE1 | 0xffc, 0x80000000);
4746 write32p(IOMMU_BASE2 | 0xffc, 0xc0000000);
4747 write32p(IOMMU_BASE4 | 0xffc, 0x80000000);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004748
4749#else
4750 {
4751 u32 eax;
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08004752 eax = read32p(0xffc + (read_mchbar32(0xd00) & ~1)) | 0x08000000; // = 0xe911714b// OK
4753 write32p(0xffc + (read_mchbar32(0xd00) & ~1), eax); // OK
4754 eax = read32p(0xffc + (read_mchbar32(0xdc0) & ~1)) | 0x40000000; // = 0xe911714b// OK
4755 write32p(0xffc + (read_mchbar32(0xdc0) & ~1), eax); // OK
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004756 }
4757#endif
4758
4759 {
4760 u32 eax;
4761
4762 eax = info.fsb_frequency / 9;
4763 write_mchbar32(0xfcc, (read_mchbar32(0xfcc) & 0xfffc0000) | (eax * 0x280) | (eax * 0x5000) | eax | 0x40000); // OK
4764 write_mchbar32(0x20, 0x33001); //OK
4765 }
4766
4767 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4768 write_mchbar32(0x220 + (channel << 10), read_mchbar32(0x220 + (channel << 10)) & ~0x7770); //OK
4769 if (info.max_slots_used_in_channel == 1)
4770 write_mchbar16(0x237 + (channel << 10), (read_mchbar16(0x237 + (channel << 10)) | 0x0201)); //OK
4771 else
4772 write_mchbar16(0x237 + (channel << 10), (read_mchbar16(0x237 + (channel << 10)) & ~0x0201)); //OK
4773
4774 write_mchbar8(0x241 + (channel << 10), read_mchbar8(0x241 + (channel << 10)) | 1); // OK
4775
4776 if (info.clock_speed_index <= 1
4777 && (info.silicon_revision == 2
4778 || info.silicon_revision == 3))
4779 write_mchbar32(0x248 + (channel << 10), (read_mchbar32(0x248 + (channel << 10)) | 0x00102000)); // OK
4780 else
4781 write_mchbar32(0x248 + (channel << 10), (read_mchbar32(0x248 + (channel << 10)) & ~0x00102000)); // OK
4782 }
4783
4784 write_mchbar32(0x115, read_mchbar32(0x115) | 0x1000000); // OK
4785
4786 {
4787 u8 al;
4788 al = 0xd;
4789 if (!(info.silicon_revision == 0 || info.silicon_revision == 1))
4790 al += 2;
4791 al |= ((1 << (info.max_slots_used_in_channel - 1)) - 1) << 4;
4792 write_mchbar32(0x210, (al << 16) | 0x20); // OK
4793 }
4794
4795 for (channel = 0; channel < NUM_CHANNELS; channel++) {
4796 write_mchbar32(0x288 + (channel << 10), 0x70605040); // OK
4797 write_mchbar32(0x28c + (channel << 10), 0xfffec080); // OK
4798 write_mchbar32(0x290 + (channel << 10), 0x282091c | ((info.max_slots_used_in_channel - 1) << 0x16)); // OK
4799 }
4800 u32 reg1c;
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004801 pci_read_config32(NORTHBRIDGE, 0x40); // = DEFAULT_EPBAR | 0x001 // OK
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08004802 reg1c = read32p(DEFAULT_EPBAR | 0x01c); // = 0x8001 // OK
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004803 pci_read_config32(NORTHBRIDGE, 0x40); // = DEFAULT_EPBAR | 0x001 // OK
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -08004804 write32p(DEFAULT_EPBAR | 0x01c, reg1c); // OK
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004805 read_mchbar8(0xe08); // = 0x0
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004806 pci_read_config32(NORTHBRIDGE, 0xe4); // = 0x316126
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004807 write_mchbar8(0x1210, read_mchbar8(0x1210) | 2); // OK
4808 write_mchbar32(0x1200, 0x8800440); // OK
4809 write_mchbar32(0x1204, 0x53ff0453); // OK
4810 write_mchbar32(0x1208, 0x19002043); // OK
4811 write_mchbar16(0x1214, 0x320); // OK
4812
4813 if (info.revision == 0x10 || info.revision == 0x11) {
4814 write_mchbar16(0x1214, 0x220); // OK
4815 write_mchbar8(0x1210, read_mchbar8(0x1210) | 0x40); // OK
4816 }
4817
4818 write_mchbar8(0x1214, read_mchbar8(0x1214) | 0x4); // OK
4819 write_mchbar8(0x120c, 0x1); // OK
4820 write_mchbar8(0x1218, 0x3); // OK
4821 write_mchbar8(0x121a, 0x3); // OK
4822 write_mchbar8(0x121c, 0x3); // OK
4823 write_mchbar16(0xc14, 0x0); // OK
4824 write_mchbar16(0xc20, 0x0); // OK
4825 write_mchbar32(0x1c, 0x0); // OK
4826
4827 /* revision dependent here. */
4828
4829 write_mchbar16(0x1230, read_mchbar16(0x1230) | 0x1f07); // OK
4830
4831 if (info.uma_enabled)
4832 write_mchbar32(0x11f4, read_mchbar32(0x11f4) | 0x10000000); // OK
4833
4834 write_mchbar16(0x1230, read_mchbar16(0x1230) | 0x8000); // OK
4835 write_mchbar8(0x1214, read_mchbar8(0x1214) | 1); // OK
4836
4837 u8 bl, ebpb;
4838 u16 reg_1020;
4839
4840 reg_1020 = read_mchbar32(0x1020); // = 0x6c733c // OK
4841 write_mchbar8(0x1070, 0x1); // OK
4842
4843 write_mchbar32(0x1000, 0x100); // OK
4844 write_mchbar8(0x1007, 0x0); // OK
4845
4846 if (reg_1020 != 0) {
4847 write_mchbar16(0x1018, 0x0); // OK
4848 bl = reg_1020 >> 8;
4849 ebpb = reg_1020 & 0xff;
4850 } else {
4851 ebpb = 0;
4852 bl = 8;
4853 }
4854
4855 rdmsr(0x1a2);
4856
4857 write_mchbar32(0x1014, 0xffffffff); // OK
4858
4859 write_mchbar32(0x1010, ((((ebpb + 0x7d) << 7) / bl) & 0xff) * (!!reg_1020)); // OK
4860
4861 write_mchbar8(0x101c, 0xb8); // OK
4862
4863 write_mchbar8(0x123e, (read_mchbar8(0x123e) & 0xf) | 0x60); // OK
4864 if (reg_1020 != 0) {
4865 write_mchbar32(0x123c, (read_mchbar32(0x123c) & ~0x00900000) | 0x600000); // OK
4866 write_mchbar8(0x101c, 0xb8); // OK
4867 }
4868
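/* Set up the UMA region used by the Management Engine (reported over HECI). */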
4869 setup_heci_uma(&info);
4870
4871 if (info.uma_enabled) {
4872 u16 ax;
4873 write_mchbar32(0x11b0, read_mchbar32(0x11b0) | 0x4000); // OK
4874 write_mchbar32(0x11b4, read_mchbar32(0x11b4) | 0x4000); // OK
4875 write_mchbar16(0x1190, read_mchbar16(0x1190) | 0x4000); // OK
4876
4877 ax = read_mchbar16(0x1190) & 0xf00; // = 0x480a // OK
4878 write_mchbar16(0x1170, ax | (read_mchbar16(0x1170) & 0x107f) | 0x4080); // OK
4879 write_mchbar16(0x1170, read_mchbar16(0x1170) | 0x1000); // OK
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004880 udelay(1000);
Felix Held29a9c072018-07-29 01:34:45 +02004881
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004882 u16 ecx;
Elyes HAOUAS7db506c2016-10-02 11:56:39 +02004883 for (ecx = 0xffff; ecx && (read_mchbar16(0x1170) & 0x1000); ecx--); // OK
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004884 write_mchbar16(0x1190, read_mchbar16(0x1190) & ~0x4000); // OK
4885 }
4886
Kyösti Mälkkid45114f2013-07-26 08:53:59 +03004887 pci_write_config8(SOUTHBRIDGE, GEN_PMCON_2,
4888 pci_read_config8(SOUTHBRIDGE, GEN_PMCON_2) & ~0x80);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004889 udelay(10000);
Vladimir Serbinenkoc7db28c2014-02-19 22:09:33 +01004890 write_mchbar16(0x2ca8, 0x8);
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004891
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004892 udelay(1000);
4893 dump_timings(&info);
Vladimir Serbinenkob16f0922014-06-07 16:27:27 +02004894 cbmem_wasnot_inited = cbmem_recovery(s3resume);
4895
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004896 if (!s3resume)
4897 save_timings(&info);
Vladimir Serbinenkob16f0922014-06-07 16:27:27 +02004898 if (s3resume && cbmem_wasnot_inited) {
4899 u32 reg32;
4900 printk(BIOS_ERR, "Failed S3 resume.\n");
4901 ram_check(0x100000, 0x200000);
4902
4903 /* Clear SLP_TYPE. */
4904 reg32 = inl(DEFAULT_PMBASE + 0x04);
4905 outl(reg32 & ~(7 << 10), DEFAULT_PMBASE + 0x04);
4906
4907 /* Failed S3 resume, reset to come up cleanly */
4908 outb(0xe, 0xcf9);
Patrick Georgi546953c2014-11-29 10:38:17 +01004909 halt();
Vladimir Serbinenkob16f0922014-06-07 16:27:27 +02004910 }
Vladimir Serbinenkoc6f6be02013-11-12 22:32:08 +01004911}