/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2014 Damien Zammit <damien@zamaudio.com>
 * Copyright (C) 2014 Vladimir Serbinenko <phcoder@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <console/console.h>
#include <console/usb.h>
#include <bootmode.h>
#include <string.h>
#include <arch/io.h>
#include <cbmem.h>
#include <arch/cbfs.h>
#include <cbfs.h>
#include <halt.h>
#include <ip_checksum.h>
#include <pc80/mc146818rtc.h>
#include <device/pci_def.h>
#include "raminit_native.h"
#include "sandybridge.h"
#include <delay.h>
#include <lib.h>

/* Management Engine is in the southbridge */
#include "southbridge/intel/bd82x6x/me.h"
/* For SPD. */
#include "southbridge/intel/bd82x6x/smbus.h"
#include "arch/cpu.h"
#include "cpu/x86/msr.h"

/* FIXME: no ECC support. */
/* FIXME: no support for 3-channel chipsets. */

/*
 * Register description:
 * Intel provides a command queue of depth four.
 * Every command is configured by using multiple registers.
 * When executing the command queue, you have to provide the depth used.
 *
 * Known registers:
 * Channel X = [0, 1]
 * Command queue index Y = [0, 1, 2, 3]
 *
 * DEFAULT_MCHBAR + 0x4220 + 0x400 * X + 4 * Y: command io register
 *  Controls the DRAM command signals
 *  Bit 0: !RAS
 *  Bit 1: !CAS
 *  Bit 2: !WE
 *
 * DEFAULT_MCHBAR + 0x4200 + 0x400 * X + 4 * Y: addr bankslot io register
 *  Controls the address, bank address and slotrank signals
 *  Bit 0-15 : Address
 *  Bit 20-22: Bank Address
 *  Bit 24-25: slotrank
 *
 * DEFAULT_MCHBAR + 0x4230 + 0x400 * X + 4 * Y: idle register
 *  Controls the idle time after issuing this DRAM command
 *  Bit 16-32: number of clock cycles to idle
 *
 * DEFAULT_MCHBAR + 0x4284 + 0x400 * channel: execute command queue
 *  Starts to execute all queued commands
 *  Bit 0    : start DRAM command execution
 *  Bit 16-20: (number of queued commands - 1) * 4
 */
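
/*
 * Illustrative sketch only (not called anywhere): queueing a single ZQCS
 * command for slotrank 0 on channel 0 and then executing a queue of depth
 * one, mirroring how the helpers below (e.g. write_reset()) drive the
 * registers described above. The magic values are assumptions taken from
 * those helpers, not documented constants:
 *
 *	write32(DEFAULT_MCHBAR + 0x4220, 0x0f003);		// command signals (ZQCS)
 *	write32(DEFAULT_MCHBAR + 0x4230, 0x80c01);		// idle time after the command
 *	write32(DEFAULT_MCHBAR + 0x4200, (0 << 24) | 0x60000);	// slotrank 0, address/bank bits
 *	write32(DEFAULT_MCHBAR + 0x4210, 0);
 *	write32(DEFAULT_MCHBAR + 0x4284, 0x400001);		// queue depth 1, start execution
 */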

#define BASEFREQ 133
#define tDLLK 512

#define IS_SANDY_CPU(x) ((x & 0xffff0) == 0x206a0)
#define IS_SANDY_CPU_C(x) ((x & 0xf) == 4)
#define IS_SANDY_CPU_D0(x) ((x & 0xf) == 5)
#define IS_SANDY_CPU_D1(x) ((x & 0xf) == 6)
#define IS_SANDY_CPU_D2(x) ((x & 0xf) == 7)

#define IS_IVY_CPU(x) ((x & 0xffff0) == 0x306a0)
#define IS_IVY_CPU_C(x) ((x & 0xf) == 4)
#define IS_IVY_CPU_K(x) ((x & 0xf) == 5)
#define IS_IVY_CPU_D(x) ((x & 0xf) == 6)
#define IS_IVY_CPU_E(x) ((x & 0xf) >= 8)

#define NUM_CHANNELS 2
#define NUM_SLOTRANKS 4
#define NUM_SLOTS 2
#define NUM_LANES 8

/* FIXME: Vendor BIOS uses 64 but our algorithms are less
   performant and even 1 seems to be enough in practice. */
#define NUM_PATTERNS 4

typedef struct odtmap_st {
	u16 rttwr;
	u16 rttnom;
} odtmap;

typedef struct dimm_info_st {
	dimm_attr dimm[NUM_CHANNELS][NUM_SLOTS];
} dimm_info;

struct ram_rank_timings {
	/* Register 4024. One byte per slotrank. */
	u8 val_4024;
	/* Register 4028. One nibble per slotrank. */
	u8 val_4028;

	int val_320c;

	struct ram_lane_timings {
		/* lane register offset 0x10. */
		u16 timA;	/* bits 0 - 5, bits 16 - 18 */
		u8 rising;	/* bits 8 - 14 */
		u8 falling;	/* bits 20 - 26. */

		/* lane register offset 0x20. */
		int timC;	/* bit 0 - 5, 19. */
		u16 timB;	/* bits 8 - 13, 15 - 17. */
	} lanes[NUM_LANES];
};

struct ramctr_timing_st;

typedef struct ramctr_timing_st {
	int mobile;

	u16 cas_supported;
	/* tLatencies are in units of ns, scaled by x256 */
	u32 tCK;
	u32 tAA;
	u32 tWR;
	u32 tRCD;
	u32 tRRD;
	u32 tRP;
	u32 tRAS;
	u32 tRFC;
	u32 tWTR;
	u32 tRTP;
	u32 tFAW;
	/* Latencies in terms of clock cycles.
	 * They are saved separately as they are needed for DRAM MRS commands. */
	u8 CAS;			/* CAS read latency */
	u8 CWL;			/* CAS write latency */

	u32 tREFI;
	u32 tMOD;
	u32 tXSOffset;
	u32 tWLO;
	u32 tCKE;
	u32 tXPDLL;
	u32 tXP;
	u32 tAONPD;

	u16 reg_5064b0;		/* bits 0-11. */

	u8 rankmap[NUM_CHANNELS];
	int ref_card_offset[NUM_CHANNELS];
	u32 mad_dimm[NUM_CHANNELS];
	int channel_size_mb[NUM_CHANNELS];
	u32 cmd_stretch[NUM_CHANNELS];

	int reg_c14_offset;
	int reg_320c_range_threshold;

	int edge_offset[3];
	int timC_offset[3];

	int extended_temperature_range;
	int auto_self_refresh;

	int rank_mirror[NUM_CHANNELS][NUM_SLOTRANKS];

	struct ram_rank_timings timings[NUM_CHANNELS][NUM_SLOTRANKS];
} ramctr_timing;

#define SOUTHBRIDGE PCI_DEV(0, 0x1f, 0)
#define NORTHBRIDGE PCI_DEV(0, 0x0, 0)
#define FOR_ALL_LANES for (lane = 0; lane < NUM_LANES; lane++)
#define FOR_ALL_CHANNELS for (channel = 0; channel < NUM_CHANNELS; channel++)
#define FOR_ALL_POPULATED_RANKS for (slotrank = 0; slotrank < NUM_SLOTRANKS; slotrank++) if (ctrl->rankmap[channel] & (1 << slotrank))
#define FOR_ALL_POPULATED_CHANNELS for (channel = 0; channel < NUM_CHANNELS; channel++) if (ctrl->rankmap[channel])
#define MAX_EDGE_TIMING 71
#define MAX_TIMC 127
#define MAX_TIMB 511
#define MAX_TIMA 127

static void program_timings(ramctr_timing * ctrl, int channel);

static const char *ecc_decoder[] = {
	"inactive",
	"active on IO",
	"disabled on IO",
	"active"
};

static void wait_txt_clear(void)
{
	struct cpuid_result cp;

	cp = cpuid_ext(0x1, 0x0);
	/* Check if TXT is supported. */
	if (!(cp.ecx & 0x40))
		return;
	/* Some TXT public bit. */
	if (!(read32((void *)0xfed30010) & 1))
		return;
	/* Wait for TXT clear. */
	while (!(read8((void *)0xfed40000) & (1 << 7))) ;
}

static void sfence(void)
{
	asm volatile ("sfence");
}

static void toggle_io_reset(void) {
	/* Toggle IO reset bit. */
	u32 r32 = read32(DEFAULT_MCHBAR + 0x5030);
	write32(DEFAULT_MCHBAR + 0x5030, r32 | 0x20);
	udelay(1);
	write32(DEFAULT_MCHBAR + 0x5030, r32 & ~0x20);
	udelay(1);
}

/*
 * Dump to the log the memory controller configuration as read from the
 * memory controller registers.
 */
static void report_memory_config(void)
{
	u32 addr_decoder_common, addr_decode_ch[NUM_CHANNELS];
	int i;

	addr_decoder_common = MCHBAR32(0x5000);
	addr_decode_ch[0] = MCHBAR32(0x5004);
	addr_decode_ch[1] = MCHBAR32(0x5008);

	printk(BIOS_DEBUG, "memcfg DDR3 clock %d MHz\n",
	       (MCHBAR32(0x5e04) * 13333 * 2 + 50) / 100);
	printk(BIOS_DEBUG, "memcfg channel assignment: A: %d, B: %d, C: %d\n",
	       addr_decoder_common & 3, (addr_decoder_common >> 2) & 3,
	       (addr_decoder_common >> 4) & 3);

	for (i = 0; i < ARRAY_SIZE(addr_decode_ch); i++) {
		u32 ch_conf = addr_decode_ch[i];
		printk(BIOS_DEBUG, "memcfg channel[%d] config (%8.8x):\n", i,
		       ch_conf);
		printk(BIOS_DEBUG, "   ECC %s\n",
		       ecc_decoder[(ch_conf >> 24) & 3]);
		printk(BIOS_DEBUG, "   enhanced interleave mode %s\n",
		       ((ch_conf >> 22) & 1) ? "on" : "off");
		printk(BIOS_DEBUG, "   rank interleave %s\n",
		       ((ch_conf >> 21) & 1) ? "on" : "off");
		printk(BIOS_DEBUG, "   DIMMA %d MB width x%d %s rank%s\n",
		       ((ch_conf >> 0) & 0xff) * 256,
		       ((ch_conf >> 19) & 1) ? 16 : 8,
		       ((ch_conf >> 17) & 1) ? "dual" : "single",
		       ((ch_conf >> 16) & 1) ? "" : ", selected");
		printk(BIOS_DEBUG, "   DIMMB %d MB width x%d %s rank%s\n",
		       ((ch_conf >> 8) & 0xff) * 256,
		       ((ch_conf >> 20) & 1) ? 16 : 8,
		       ((ch_conf >> 18) & 1) ? "dual" : "single",
		       ((ch_conf >> 16) & 1) ? ", selected" : "");
	}
}

void read_spd(spd_raw_data * spd, u8 addr)
{
	int j;
	for (j = 0; j < 256; j++)
		(*spd)[j] = do_smbus_read_byte(SMBUS_IO_BASE, addr, j);
}

static void dram_find_spds_ddr3(spd_raw_data * spd, dimm_info * dimm,
				ramctr_timing * ctrl)
{
	int dimms = 0;
	int channel, slot, spd_slot;

	memset(ctrl->rankmap, 0, sizeof(ctrl->rankmap));

	ctrl->extended_temperature_range = 1;
	ctrl->auto_self_refresh = 1;

	FOR_ALL_CHANNELS {
		ctrl->channel_size_mb[channel] = 0;

		for (slot = 0; slot < NUM_SLOTS; slot++) {
			spd_slot = 2 * channel + slot;
			spd_decode_ddr3(&dimm->dimm[channel][slot], spd[spd_slot]);
			if (dimm->dimm[channel][slot].dram_type != SPD_MEMORY_TYPE_SDRAM_DDR3) {
				// set dimm invalid
				dimm->dimm[channel][slot].ranks = 0;
				dimm->dimm[channel][slot].size_mb = 0;
				continue;
			}

			dram_print_spd_ddr3(&dimm->dimm[channel][slot]);
			dimms++;
			ctrl->rank_mirror[channel][slot * 2] = 0;
			ctrl->rank_mirror[channel][slot * 2 + 1] = dimm->dimm[channel][slot].flags.pins_mirrored;
			ctrl->channel_size_mb[channel] += dimm->dimm[channel][slot].size_mb;

			ctrl->auto_self_refresh &= dimm->dimm[channel][slot].flags.asr;
			ctrl->extended_temperature_range &= dimm->dimm[channel][slot].flags.ext_temp_refresh;

			ctrl->rankmap[channel] |= ((1 << dimm->dimm[channel][slot].ranks) - 1) << (2 * slot);
			printk(BIOS_DEBUG, "rankmap[%d] = 0x%x\n", channel, ctrl->rankmap[channel]);
		}
		if ((ctrl->rankmap[channel] & 3) && (ctrl->rankmap[channel] & 0xc)
		    && dimm->dimm[channel][0].reference_card <= 5 && dimm->dimm[channel][1].reference_card <= 5) {
			const int ref_card_offset_table[6][6] = {
				{ 0, 0, 0, 0, 2, 2, },
				{ 0, 0, 0, 0, 2, 2, },
				{ 0, 0, 0, 0, 2, 2, },
				{ 0, 0, 0, 0, 1, 1, },
				{ 2, 2, 2, 1, 0, 0, },
				{ 2, 2, 2, 1, 0, 0, },
			};
			ctrl->ref_card_offset[channel] = ref_card_offset_table[dimm->dimm[channel][0].reference_card]
				[dimm->dimm[channel][1].reference_card];
		} else
			ctrl->ref_card_offset[channel] = 0;
	}

	if (!dimms)
		die("No DIMMs were found");
}

static void dram_find_common_params(const dimm_info * dimms,
				    ramctr_timing * ctrl)
{
	size_t valid_dimms;
	int channel, slot;
	ctrl->cas_supported = 0xff;
	valid_dimms = 0;
	FOR_ALL_CHANNELS for (slot = 0; slot < 2; slot++) {
		const dimm_attr *dimm = &dimms->dimm[channel][slot];
		if (dimm->dram_type != SPD_MEMORY_TYPE_SDRAM_DDR3)
			continue;
		valid_dimms++;

		/* Find all possible CAS combinations */
		ctrl->cas_supported &= dimm->cas_supported;

		/* Find the worst-case (largest) latencies that satisfy all DIMMs */
		ctrl->tCK = MAX(ctrl->tCK, dimm->tCK);
		ctrl->tAA = MAX(ctrl->tAA, dimm->tAA);
		ctrl->tWR = MAX(ctrl->tWR, dimm->tWR);
		ctrl->tRCD = MAX(ctrl->tRCD, dimm->tRCD);
		ctrl->tRRD = MAX(ctrl->tRRD, dimm->tRRD);
		ctrl->tRP = MAX(ctrl->tRP, dimm->tRP);
		ctrl->tRAS = MAX(ctrl->tRAS, dimm->tRAS);
		ctrl->tRFC = MAX(ctrl->tRFC, dimm->tRFC);
		ctrl->tWTR = MAX(ctrl->tWTR, dimm->tWTR);
		ctrl->tRTP = MAX(ctrl->tRTP, dimm->tRTP);
		ctrl->tFAW = MAX(ctrl->tFAW, dimm->tFAW);
	}

	if (!ctrl->cas_supported)
		die("Unsupported DIMM combination. "
		    "DIMMs do not support a common CAS latency");
	if (!valid_dimms)
		die("No valid DIMMs found");
}

static u8 get_CWL(u8 CAS)
{
	/* Get CWL based on CAS using the following rule:
	 *       _________________________________________
	 * CAS:  | 4T | 5T | 6T | 7T | 8T | 9T | 10T | 11T |
	 * CWL:  | 5T | 5T | 5T | 6T | 6T | 7T |  7T |  8T |
	 */
	static const u8 cas_cwl_map[] = { 5, 5, 5, 6, 6, 7, 7, 8 };
	if (CAS > 11)
		return 8;
	return cas_cwl_map[CAS - 4];
}

/* Frequency multiplier. */
static u32 get_FRQ(u32 tCK)
{
	u32 FRQ;
	FRQ = 256000 / (tCK * BASEFREQ);
	if (FRQ > 8)
		return 8;
	if (FRQ < 3)
		return 3;
	return FRQ;
}
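
/*
 * Worked example (illustration only): DDR3-1600 has tCK = 1.25 ns, stored
 * here as 1.25 * 256 = 320. get_FRQ(320) = 256000 / (320 * 133) = 6, and
 * 6 * BASEFREQ = 798, roughly an 800 MHz memory clock, i.e. DDR3-1600.
 * The clamp to the range [3, 8] therefore corresponds to DDR3-800 up to
 * DDR3-2133.
 */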

static u32 get_REFI(u32 tCK)
{
	/* Get REFI based on MCU frequency using the following rule:
	 *        ____________________________________________
	 * FRQ : |  3   |  4   |  5   |  6   |  7   |  8   |
	 * REFI: | 3120 | 4160 | 5200 | 6240 | 7280 | 8320 |
	 */
	static const u32 frq_refi_map[] =
	    { 3120, 4160, 5200, 6240, 7280, 8320 };
	return frq_refi_map[get_FRQ(tCK) - 3];
}

static u8 get_XSOffset(u32 tCK)
{
	/* Get XSOffset based on MCU frequency using the following rule:
	 *            _________________________
	 * FRQ      : | 3 | 4 | 5 | 6 |  7 |  8 |
	 * XSOffset : | 4 | 6 | 7 | 8 | 10 | 11 |
	 */
	static const u8 frq_xs_map[] = { 4, 6, 7, 8, 10, 11 };
	return frq_xs_map[get_FRQ(tCK) - 3];
}

static u8 get_MOD(u32 tCK)
{
	/* Get MOD based on MCU frequency using the following rule:
	 *        _____________________________
	 * FRQ : |  3 |  4 |  5 |  6 |  7 |  8 |
	 * MOD : | 12 | 12 | 12 | 12 | 15 | 16 |
	 */
	static const u8 frq_mod_map[] = { 12, 12, 12, 12, 15, 16 };
	return frq_mod_map[get_FRQ(tCK) - 3];
}

static u8 get_WLO(u32 tCK)
{
	/* Get WLO based on MCU frequency using the following rule:
	 *        _______________________
	 * FRQ : | 3 | 4 | 5 | 6 | 7 | 8 |
	 * WLO : | 4 | 5 | 6 | 6 | 8 | 8 |
	 */
	static const u8 frq_wlo_map[] = { 4, 5, 6, 6, 8, 8 };
	return frq_wlo_map[get_FRQ(tCK) - 3];
}

static u8 get_CKE(u32 tCK)
{
	/* Get CKE based on MCU frequency using the following rule:
	 *        _______________________
	 * FRQ : | 3 | 4 | 5 | 6 | 7 | 8 |
	 * CKE : | 3 | 3 | 4 | 4 | 5 | 6 |
	 */
	static const u8 frq_cke_map[] = { 3, 3, 4, 4, 5, 6 };
	return frq_cke_map[get_FRQ(tCK) - 3];
}

static u8 get_XPDLL(u32 tCK)
{
	/* Get XPDLL based on MCU frequency using the following rule:
	 *          _____________________________
	 * FRQ   : |  3 |  4 |  5 |  6 |  7 |  8 |
	 * XPDLL : | 10 | 13 | 16 | 20 | 23 | 26 |
	 */
	static const u8 frq_xpdll_map[] = { 10, 13, 16, 20, 23, 26 };
	return frq_xpdll_map[get_FRQ(tCK) - 3];
}

static u8 get_XP(u32 tCK)
{
	/* Get XP based on MCU frequency using the following rule:
	 *        _______________________
	 * FRQ : | 3 | 4 | 5 | 6 | 7 | 8 |
	 * XP  : | 3 | 4 | 4 | 5 | 6 | 7 |
	 */
	static const u8 frq_xp_map[] = { 3, 4, 4, 5, 6, 7 };
	return frq_xp_map[get_FRQ(tCK) - 3];
}

static u8 get_AONPD(u32 tCK)
{
	/* Get AONPD based on MCU frequency using the following rule:
	 *          ________________________
	 * FRQ   : | 3 | 4 | 5 | 6 | 7 |  8 |
	 * AONPD : | 4 | 5 | 6 | 8 | 8 | 10 |
	 */
	static const u8 frq_aonpd_map[] = { 4, 5, 6, 8, 8, 10 };
	return frq_aonpd_map[get_FRQ(tCK) - 3];
}

static u32 get_COMP2(u32 tCK)
{
	/* Get COMP2 based on MCU frequency using the following rule:
	 *         ___________________________________________________________
	 * FRQ  : |    3    |    4    |    5    |    6    |    7    |    8    |
	 * COMP : | D6BEDCC | CE7C34C | CA57A4C | C6369CC | C42514C | C21410C |
	 */
	static const u32 frq_comp2_map[] = { 0xD6BEDCC, 0xCE7C34C, 0xCA57A4C,
		0xC6369CC, 0xC42514C, 0xC21410C
	};
	return frq_comp2_map[get_FRQ(tCK) - 3];
}

static void dram_timing(ramctr_timing * ctrl)
{
	u8 val;
	u32 val32;

	/* Maximum supported DDR3 frequency is 1066 MHz (DDR3-2133), so make
	 * sure we cap it if we have faster DIMMs.
	 * Then, align it to the closest JEDEC standard frequency. */
	if (ctrl->tCK <= TCK_1066MHZ) {
		ctrl->tCK = TCK_1066MHZ;
		ctrl->edge_offset[0] = 16;
		ctrl->edge_offset[1] = 7;
		ctrl->edge_offset[2] = 7;
		ctrl->timC_offset[0] = 18;
		ctrl->timC_offset[1] = 7;
		ctrl->timC_offset[2] = 7;
		ctrl->reg_c14_offset = 16;
		ctrl->reg_5064b0 = 0x218;
		ctrl->reg_320c_range_threshold = 13;
	} else if (ctrl->tCK <= TCK_933MHZ) {
		ctrl->tCK = TCK_933MHZ;
		ctrl->edge_offset[0] = 14;
		ctrl->edge_offset[1] = 6;
		ctrl->edge_offset[2] = 6;
		ctrl->timC_offset[0] = 15;
		ctrl->timC_offset[1] = 6;
		ctrl->timC_offset[2] = 6;
		ctrl->reg_c14_offset = 14;
		ctrl->reg_5064b0 = 0x1d5;
		ctrl->reg_320c_range_threshold = 15;
	} else if (ctrl->tCK <= TCK_800MHZ) {
		ctrl->tCK = TCK_800MHZ;
		ctrl->edge_offset[0] = 13;
		ctrl->edge_offset[1] = 5;
		ctrl->edge_offset[2] = 5;
		ctrl->timC_offset[0] = 14;
		ctrl->timC_offset[1] = 5;
		ctrl->timC_offset[2] = 5;
		ctrl->reg_c14_offset = 12;
		ctrl->reg_5064b0 = 0x193;
		ctrl->reg_320c_range_threshold = 15;
	} else if (ctrl->tCK <= TCK_666MHZ) {
		ctrl->tCK = TCK_666MHZ;
		ctrl->edge_offset[0] = 10;
		ctrl->edge_offset[1] = 4;
		ctrl->edge_offset[2] = 4;
		ctrl->timC_offset[0] = 11;
		ctrl->timC_offset[1] = 4;
		ctrl->timC_offset[2] = 4;
		ctrl->reg_c14_offset = 10;
		ctrl->reg_5064b0 = 0x150;
		ctrl->reg_320c_range_threshold = 16;
	} else if (ctrl->tCK <= TCK_533MHZ) {
		ctrl->tCK = TCK_533MHZ;
		ctrl->edge_offset[0] = 8;
		ctrl->edge_offset[1] = 3;
		ctrl->edge_offset[2] = 3;
		ctrl->timC_offset[0] = 9;
		ctrl->timC_offset[1] = 3;
		ctrl->timC_offset[2] = 3;
		ctrl->reg_c14_offset = 8;
		ctrl->reg_5064b0 = 0x10d;
		ctrl->reg_320c_range_threshold = 17;
	} else {
		ctrl->tCK = TCK_400MHZ;
		ctrl->edge_offset[0] = 6;
		ctrl->edge_offset[1] = 2;
		ctrl->edge_offset[2] = 2;
		ctrl->timC_offset[0] = 6;
		ctrl->timC_offset[1] = 2;
		ctrl->timC_offset[2] = 2;
		ctrl->reg_c14_offset = 8;
		ctrl->reg_5064b0 = 0xcd;
		ctrl->reg_320c_range_threshold = 17;
	}

	val32 = (1000 << 8) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected DRAM frequency: %u MHz\n", val32);

	/* Find CAS and CWL latencies */
	val = (ctrl->tAA + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Minimum CAS latency  : %uT\n", val);
	/* Find lowest supported CAS latency that satisfies the minimum value */
	while (!((ctrl->cas_supported >> (val - 4)) & 1)
	       && (ctrl->cas_supported >> (val - 4))) {
		val++;
	}
	/* Is CAS supported */
	if (!(ctrl->cas_supported & (1 << (val - 4))))
		printk(BIOS_DEBUG, "CAS not supported\n");
	printk(BIOS_DEBUG, "Selected CAS latency : %uT\n", val);
	ctrl->CAS = val;
	ctrl->CWL = get_CWL(ctrl->CAS);
	printk(BIOS_DEBUG, "Selected CWL latency : %uT\n", ctrl->CWL);

	/* Find tRCD */
	ctrl->tRCD = (ctrl->tRCD + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tRCD        : %uT\n", ctrl->tRCD);

	ctrl->tRP = (ctrl->tRP + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tRP         : %uT\n", ctrl->tRP);

	/* Find tRAS */
	ctrl->tRAS = (ctrl->tRAS + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tRAS        : %uT\n", ctrl->tRAS);

	/* Find tWR */
	ctrl->tWR = (ctrl->tWR + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tWR         : %uT\n", ctrl->tWR);

	/* Find tFAW */
	ctrl->tFAW = (ctrl->tFAW + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tFAW        : %uT\n", ctrl->tFAW);

	/* Find tRRD */
	ctrl->tRRD = (ctrl->tRRD + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tRRD        : %uT\n", ctrl->tRRD);

	/* Find tRTP */
	ctrl->tRTP = (ctrl->tRTP + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tRTP        : %uT\n", ctrl->tRTP);

	/* Find tWTR */
	ctrl->tWTR = (ctrl->tWTR + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tWTR        : %uT\n", ctrl->tWTR);

	/* Refresh-to-Active or Refresh-to-Refresh (tRFC) */
	ctrl->tRFC = (ctrl->tRFC + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tRFC        : %uT\n", ctrl->tRFC);

	ctrl->tREFI = get_REFI(ctrl->tCK);
	ctrl->tMOD = get_MOD(ctrl->tCK);
	ctrl->tXSOffset = get_XSOffset(ctrl->tCK);
	ctrl->tWLO = get_WLO(ctrl->tCK);
	ctrl->tCKE = get_CKE(ctrl->tCK);
	ctrl->tXPDLL = get_XPDLL(ctrl->tCK);
	ctrl->tXP = get_XP(ctrl->tCK);
	ctrl->tAONPD = get_AONPD(ctrl->tCK);
}

static void dram_freq(ramctr_timing * ctrl)
{
	if (ctrl->tCK > TCK_400MHZ) {
		printk(BIOS_ERR,
		       "DRAM frequency is under the lowest supported frequency (400 MHz). "
		       "Increasing to 400 MHz as a last resort.\n");
		ctrl->tCK = TCK_400MHZ;
	}
	while (1) {
		u8 val2;
		u32 reg1 = 0;

		/* Step 1 - Set target PCU frequency */

		if (ctrl->tCK <= TCK_1066MHZ) {
			ctrl->tCK = TCK_1066MHZ;
		} else if (ctrl->tCK <= TCK_933MHZ) {
			ctrl->tCK = TCK_933MHZ;
		} else if (ctrl->tCK <= TCK_800MHZ) {
			ctrl->tCK = TCK_800MHZ;
		} else if (ctrl->tCK <= TCK_666MHZ) {
			ctrl->tCK = TCK_666MHZ;
		} else if (ctrl->tCK <= TCK_533MHZ) {
			ctrl->tCK = TCK_533MHZ;
		} else if (ctrl->tCK <= TCK_400MHZ) {
			ctrl->tCK = TCK_400MHZ;
		} else {
			die("No lock frequency found");
		}

		/* Frequency multiplier. */
		u32 FRQ = get_FRQ(ctrl->tCK);

		/* Step 2 - Select frequency in the MCU */
		reg1 = FRQ;
		reg1 |= 0x80000000;	// set running bit
		MCHBAR32(0x5e00) = reg1;
		while (reg1 & 0x80000000) {
			printk(BIOS_DEBUG, " PLL busy...");
			reg1 = MCHBAR32(0x5e00);
		}
		printk(BIOS_DEBUG, "done\n");

		/* Step 3 - Verify lock frequency */
		reg1 = MCHBAR32(0x5e04);
		val2 = (u8) reg1;
		if (val2 >= FRQ) {
			printk(BIOS_DEBUG, "MCU frequency is set at : %d MHz\n",
			       (1000 << 8) / ctrl->tCK);
			return;
		}
		printk(BIOS_DEBUG, "PLL didn't lock. Retrying at lower frequency\n");
		ctrl->tCK++;
	}
}

static void dram_xover(ramctr_timing * ctrl)
{
	u32 reg;
	int channel;

	FOR_ALL_CHANNELS {
		// enable xover clk
		printk(BIOS_DEBUG, "[%x] = %x\n", channel * 0x100 + 0xc14,
		       (ctrl->rankmap[channel] << 24));
		MCHBAR32(channel * 0x100 + 0xc14) = (ctrl->rankmap[channel] << 24);

		// enable xover ctl
		reg = 0;
		if (ctrl->rankmap[channel] & 0x5) {
			reg |= 0x20000;
		}
		if (ctrl->rankmap[channel] & 0xa) {
			reg |= 0x4000000;
		}
		// enable xover cmd
		reg |= 0x4000;
		printk(BIOS_DEBUG, "[%x] = %x\n", 0x100 * channel + 0x320c,
		       reg);
		MCHBAR32(0x100 * channel + 0x320c) = reg;
	}
}

static void dram_timing_regs(ramctr_timing * ctrl)
{
	u32 reg, addr, val32, cpu, stretch;
	struct cpuid_result cpures;
	int channel;

	FOR_ALL_CHANNELS {
		// DBP
		reg = 0;
		reg |= ctrl->tRCD;
		reg |= (ctrl->tRP << 4);
		reg |= (ctrl->CAS << 8);
		reg |= (ctrl->CWL << 12);
		reg |= (ctrl->tRAS << 16);
		printk(BIOS_DEBUG, "[%x] = %x\n", 0x400 * channel + 0x4000,
		       reg);
		MCHBAR32(0x400 * channel + 0x4000) = reg;

		// RAP
		reg = 0;
		reg |= ctrl->tRRD;
		reg |= (ctrl->tRTP << 4);
		reg |= (ctrl->tCKE << 8);
		reg |= (ctrl->tWTR << 12);
		reg |= (ctrl->tFAW << 16);
		reg |= (ctrl->tWR << 24);
		reg |= (3 << 30);
		printk(BIOS_DEBUG, "[%x] = %x\n", 0x400 * channel + 0x4004,
		       reg);
		MCHBAR32(0x400 * channel + 0x4004) = reg;

		// OTHP
		addr = 0x400 * channel + 0x400c;
		reg = 0;
		reg |= ctrl->tXPDLL;
		reg |= (ctrl->tXP << 5);
		reg |= (ctrl->tAONPD << 8);
		reg |= 0xa0000;
		printk(BIOS_DEBUG, "[%x] = %x\n", addr, reg);
		MCHBAR32(addr) = reg;

		MCHBAR32(0x400 * channel + 0x4014) = 0;

		MCHBAR32(addr) |= 0x00020000;

		// ODT stretch
		reg = 0;

		cpures = cpuid(0);
		cpu = cpures.eax;
		if (IS_IVY_CPU(cpu)
		    || (IS_SANDY_CPU(cpu) && IS_SANDY_CPU_D2(cpu))) {
			stretch = 2;
			addr = 0x400 * channel + 0x400c;
			printk(BIOS_DEBUG, "[%x] = %x\n",
			       0x400 * channel + 0x400c, reg);
			reg = MCHBAR32(addr);

			if (((ctrl->rankmap[channel] & 3) == 0)
			    || (ctrl->rankmap[channel] & 0xc) == 0) {

				// Rank 0 - operate on rank 2
				reg = (reg & ~0xc0000) | (stretch << 18);

				// Rank 2 - operate on rank 0
				reg = (reg & ~0x30000) | (stretch << 16);

				printk(BIOS_DEBUG, "[%x] = %x\n", addr, reg);
				MCHBAR32(addr) = reg;
			}

		} else if (IS_SANDY_CPU(cpu) && IS_SANDY_CPU_C(cpu)) {
			stretch = 3;
			addr = 0x400 * channel + 0x401c;
			reg = MCHBAR32(addr);

			if (((ctrl->rankmap[channel] & 3) == 0)
			    || (ctrl->rankmap[channel] & 0xc) == 0) {

				// Rank 0 - operate on rank 2
				reg = (reg & ~0x3000) | (stretch << 12);

				// Rank 2 - operate on rank 0
				reg = (reg & ~0xc00) | (stretch << 10);

				printk(BIOS_DEBUG, "[%x] = %x\n", addr, reg);
				MCHBAR32(addr) = reg;
			}
		} else {
			stretch = 0;
		}

		// REFI
		reg = 0;
		val32 = ctrl->tREFI;
		reg = (reg & ~0xffff) | val32;
		val32 = ctrl->tRFC;
		reg = (reg & ~0x1ff0000) | (val32 << 16);
		val32 = (u32) (ctrl->tREFI * 9) / 1024;
		reg = (reg & ~0xfe000000) | (val32 << 25);
		printk(BIOS_DEBUG, "[%x] = %x\n", 0x400 * channel + 0x4298,
		       reg);
		MCHBAR32(0x400 * channel + 0x4298) = reg;

		MCHBAR32(0x400 * channel + 0x4294) |= 0xff;

		// SRFTP
		reg = 0;
		val32 = tDLLK;
		reg = (reg & ~0xfff) | val32;
		val32 = ctrl->tXSOffset;
		reg = (reg & ~0xf000) | (val32 << 12);
		val32 = tDLLK - ctrl->tXSOffset;
		reg = (reg & ~0x3ff0000) | (val32 << 16);
		val32 = ctrl->tMOD - 8;
		reg = (reg & ~0xf0000000) | (val32 << 28);
		printk(BIOS_DEBUG, "[%x] = %x\n", 0x400 * channel + 0x42a4,
		       reg);
		MCHBAR32(0x400 * channel + 0x42a4) = reg;
	}
}

static void dram_dimm_mapping(dimm_info * info, ramctr_timing * ctrl)
{
	u32 reg, val32;
	int channel;

	FOR_ALL_CHANNELS {
		dimm_attr *dimmA = 0;
		dimm_attr *dimmB = 0;
		reg = 0;
		val32 = 0;
		if (info->dimm[channel][0].size_mb >=
		    info->dimm[channel][1].size_mb) {
			// dimm 0 is bigger, set it to dimmA
			dimmA = &info->dimm[channel][0];
			dimmB = &info->dimm[channel][1];
			reg |= (0 << 16);
		} else {
			// dimm 1 is bigger, set it to dimmA
			dimmA = &info->dimm[channel][1];
			dimmB = &info->dimm[channel][0];
			reg |= (1 << 16);
		}
		// dimmA
		if (dimmA && (dimmA->ranks > 0)) {
			val32 = dimmA->size_mb / 256;
			reg = (reg & ~0xff) | val32;
			val32 = dimmA->ranks - 1;
			reg = (reg & ~0x20000) | (val32 << 17);
			val32 = (dimmA->width / 8) - 1;
			reg = (reg & ~0x80000) | (val32 << 19);
		}
		// dimmB
		if (dimmB && (dimmB->ranks > 0)) {
			val32 = dimmB->size_mb / 256;
			reg = (reg & ~0xff00) | (val32 << 8);
			val32 = dimmB->ranks - 1;
			reg = (reg & ~0x40000) | (val32 << 18);
			val32 = (dimmB->width / 8) - 1;
			reg = (reg & ~0x100000) | (val32 << 20);
		}
		reg = (reg & ~0x200000) | (1 << 21);	// rank interleave
		reg = (reg & ~0x400000) | (1 << 22);	// enhanced interleave

		// Save MAD-DIMM register
		if ((dimmA && (dimmA->ranks > 0))
		    || (dimmB && (dimmB->ranks > 0))) {
			ctrl->mad_dimm[channel] = reg;
		} else {
			ctrl->mad_dimm[channel] = 0;
		}
	}
}

static void dram_dimm_set_mapping(ramctr_timing * ctrl)
{
	int channel;
	FOR_ALL_CHANNELS {
		MCHBAR32(0x5004 + channel * 4) = ctrl->mad_dimm[channel];
	}
}

static void dram_zones(ramctr_timing * ctrl, int training)
{
	u32 reg, ch0size, ch1size;
	u8 val;
	reg = 0;
	val = 0;
	if (training) {
		ch0size = ctrl->channel_size_mb[0] ? 256 : 0;
		ch1size = ctrl->channel_size_mb[1] ? 256 : 0;
	} else {
		ch0size = ctrl->channel_size_mb[0];
		ch1size = ctrl->channel_size_mb[1];
	}

	if (ch0size >= ch1size) {
		reg = MCHBAR32(0x5014);
		val = ch1size / 256;
		reg = (reg & ~0xff000000) | val << 24;
		reg = (reg & ~0xff0000) | (2 * val) << 16;
		MCHBAR32(0x5014) = reg;
		MCHBAR32(0x5000) = 0x24;
	} else {
		reg = MCHBAR32(0x5014);
		val = ch0size / 256;
		reg = (reg & ~0xff000000) | val << 24;
		reg = (reg & ~0xff0000) | (2 * val) << 16;
		MCHBAR32(0x5014) = reg;
		MCHBAR32(0x5000) = 0x21;
	}
}

static void dram_memorymap(ramctr_timing * ctrl, int me_uma_size)
{
	u32 reg, val, reclaim;
	u32 tom, gfxstolen, gttsize;
	size_t tsegsize, mmiosize, toludbase, touudbase, gfxstolenbase, gttbase,
	    tsegbase, mestolenbase;
	size_t tsegbasedelta, remapbase, remaplimit;
	uint16_t ggc;

	mmiosize = 0x400;

	ggc = pci_read_config16(NORTHBRIDGE, GGC);
	if (!(ggc & 2)) {
		gfxstolen = ((ggc >> 3) & 0x1f) * 32;
		gttsize = ((ggc >> 8) & 0x3);
	} else {
		gfxstolen = 0;
		gttsize = 0;
	}

	tsegsize = CONFIG_SMM_TSEG_SIZE >> 20;

	tom = ctrl->channel_size_mb[0] + ctrl->channel_size_mb[1];

	mestolenbase = tom - me_uma_size;

	toludbase = MIN(4096 - mmiosize + gfxstolen + gttsize + tsegsize,
			tom - me_uma_size);
	gfxstolenbase = toludbase - gfxstolen;
	gttbase = gfxstolenbase - gttsize;

	tsegbase = gttbase - tsegsize;

	// Round tsegbase down to nearest address aligned to tsegsize
	tsegbasedelta = tsegbase & (tsegsize - 1);
	tsegbase &= ~(tsegsize - 1);

	gttbase -= tsegbasedelta;
	gfxstolenbase -= tsegbasedelta;
	toludbase -= tsegbasedelta;

	// Test if it is possible to reclaim a hole in the ram addressing
	if (tom - me_uma_size > toludbase) {
		// Reclaim is possible
		reclaim = 1;
		remapbase = MAX(4096, tom - me_uma_size);
		remaplimit =
		    remapbase + MIN(4096, tom - me_uma_size) - toludbase - 1;
		touudbase = remaplimit + 1;
	} else {
		// Reclaim not possible
		reclaim = 0;
		touudbase = tom - me_uma_size;
	}

	// Update memory map in pci-e configuration space

	// TOM (top of memory)
	reg = pcie_read_config32(PCI_DEV(0, 0, 0), 0xa0);
	val = tom & 0xfff;
	reg = (reg & ~0xfff00000) | (val << 20);
	printk(BIOS_DEBUG, "PCI:[%x] = %x\n", 0xa0, reg);
	pcie_write_config32(PCI_DEV(0, 0, 0), 0xa0, reg);

	reg = pcie_read_config32(PCI_DEV(0, 0, 0), 0xa4);
	val = tom & 0xfffff000;
	reg = (reg & ~0x000fffff) | (val >> 12);
	printk(BIOS_DEBUG, "PCI:[%x] = %x\n", 0xa4, reg);
	pcie_write_config32(PCI_DEV(0, 0, 0), 0xa4, reg);

	// TOLUD (top of low used dram)
	reg = pcie_read_config32(PCI_DEV(0, 0, 0), 0xbc);
	val = toludbase & 0xfff;
	reg = (reg & ~0xfff00000) | (val << 20);
	printk(BIOS_DEBUG, "PCI:[%x] = %x\n", 0xbc, reg);
	pcie_write_config32(PCI_DEV(0, 0, 0), 0xbc, reg);

	// TOUUD LSB (top of upper usable dram)
	reg = pcie_read_config32(PCI_DEV(0, 0, 0), 0xa8);
	val = touudbase & 0xfff;
	reg = (reg & ~0xfff00000) | (val << 20);
	printk(BIOS_DEBUG, "PCI:[%x] = %x\n", 0xa8, reg);
	pcie_write_config32(PCI_DEV(0, 0, 0), 0xa8, reg);

	// TOUUD MSB
	reg = pcie_read_config32(PCI_DEV(0, 0, 0), 0xac);
	val = touudbase & 0xfffff000;
	reg = (reg & ~0x000fffff) | (val >> 12);
	printk(BIOS_DEBUG, "PCI:[%x] = %x\n", 0xac, reg);
	pcie_write_config32(PCI_DEV(0, 0, 0), 0xac, reg);

	if (reclaim) {
		// REMAP BASE
		pcie_write_config32(PCI_DEV(0, 0, 0), 0x90, remapbase << 20);
		pcie_write_config32(PCI_DEV(0, 0, 0), 0x94, remapbase >> 12);

		// REMAP LIMIT
		pcie_write_config32(PCI_DEV(0, 0, 0), 0x98, remaplimit << 20);
		pcie_write_config32(PCI_DEV(0, 0, 0), 0x9c, remaplimit >> 12);
	}
	// TSEG
	reg = pcie_read_config32(PCI_DEV(0, 0, 0), 0xb8);
	val = tsegbase & 0xfff;
	reg = (reg & ~0xfff00000) | (val << 20);
	printk(BIOS_DEBUG, "PCI:[%x] = %x\n", 0xb8, reg);
	pcie_write_config32(PCI_DEV(0, 0, 0), 0xb8, reg);

	// GFX stolen memory
	reg = pcie_read_config32(PCI_DEV(0, 0, 0), 0xb0);
	val = gfxstolenbase & 0xfff;
	reg = (reg & ~0xfff00000) | (val << 20);
	printk(BIOS_DEBUG, "PCI:[%x] = %x\n", 0xb0, reg);
	pcie_write_config32(PCI_DEV(0, 0, 0), 0xb0, reg);

	// GTT stolen memory
	reg = pcie_read_config32(PCI_DEV(0, 0, 0), 0xb4);
	val = gttbase & 0xfff;
	reg = (reg & ~0xfff00000) | (val << 20);
	printk(BIOS_DEBUG, "PCI:[%x] = %x\n", 0xb4, reg);
	pcie_write_config32(PCI_DEV(0, 0, 0), 0xb4, reg);

	if (me_uma_size) {
		reg = pcie_read_config32(PCI_DEV(0, 0, 0), 0x7c);
		val = (0x80000 - me_uma_size) & 0xfffff000;
		reg = (reg & ~0x000fffff) | (val >> 12);
		printk(BIOS_DEBUG, "PCI:[%x] = %x\n", 0x7c, reg);
		pcie_write_config32(PCI_DEV(0, 0, 0), 0x7c, reg);

		// ME base
		reg = pcie_read_config32(PCI_DEV(0, 0, 0), 0x70);
		val = mestolenbase & 0xfff;
		reg = (reg & ~0xfff00000) | (val << 20);
		printk(BIOS_DEBUG, "PCI:[%x] = %x\n", 0x70, reg);
		pcie_write_config32(PCI_DEV(0, 0, 0), 0x70, reg);

		reg = pcie_read_config32(PCI_DEV(0, 0, 0), 0x74);
		val = mestolenbase & 0xfffff000;
		reg = (reg & ~0x000fffff) | (val >> 12);
		printk(BIOS_DEBUG, "PCI:[%x] = %x\n", 0x74, reg);
		pcie_write_config32(PCI_DEV(0, 0, 0), 0x74, reg);

		// ME mask
		reg = pcie_read_config32(PCI_DEV(0, 0, 0), 0x78);
		val = (0x80000 - me_uma_size) & 0xfff;
		reg = (reg & ~0xfff00000) | (val << 20);
		reg = (reg & ~0x400) | (1 << 10);	// set lockbit on ME mem

		reg = (reg & ~0x800) | (1 << 11);	// set ME memory enable
		printk(BIOS_DEBUG, "PCI:[%x] = %x\n", 0x78, reg);
		pcie_write_config32(PCI_DEV(0, 0, 0), 0x78, reg);
	}
}

static void dram_ioregs(ramctr_timing * ctrl)
{
	u32 reg, comp2;

	int channel;

	// IO clock
	FOR_ALL_CHANNELS {
		MCHBAR32(0xc00 + 0x100 * channel) = ctrl->rankmap[channel];
	}

	// IO command
	FOR_ALL_CHANNELS {
		MCHBAR32(0x3200 + 0x100 * channel) = ctrl->rankmap[channel];
	}

	// IO control
	FOR_ALL_POPULATED_CHANNELS {
		program_timings(ctrl, channel);
	}

	// Rcomp
	printk(BIOS_DEBUG, "RCOMP...");
	reg = 0;
	while (reg == 0) {
		reg = MCHBAR32(0x5084) & 0x10000;
	}
	printk(BIOS_DEBUG, "done\n");

	// Set comp2
	comp2 = get_COMP2(ctrl->tCK);
	MCHBAR32(0x3714) = comp2;
	printk(BIOS_DEBUG, "COMP2 done\n");

	// Set comp1
	FOR_ALL_POPULATED_CHANNELS {
		reg = MCHBAR32(0x1810 + channel * 0x100);	//ch0
		reg = (reg & ~0xe00) | (1 << 9);	//odt
		reg = (reg & ~0xe00000) | (1 << 21);	//clk drive up
		reg = (reg & ~0x38000000) | (1 << 27);	//ctl drive up
		MCHBAR32(0x1810 + channel * 0x100) = reg;
	}
	printk(BIOS_DEBUG, "COMP1 done\n");

	printk(BIOS_DEBUG, "FORCE RCOMP and wait 20us...");
	MCHBAR32(0x5f08) |= 0x100;
	udelay(20);
	printk(BIOS_DEBUG, "done\n");
}

static void wait_428c(int channel)
{
	while (1) {
		if (read32(DEFAULT_MCHBAR + 0x428c + (channel << 10)) & 0x50)
			return;
	}
}

static void write_reset(ramctr_timing * ctrl)
{
	int channel, slotrank;

	/* choose a populated channel. */
	channel = (ctrl->rankmap[0]) ? 0 : 1;

	wait_428c(channel);

	/* choose a populated rank. */
	slotrank = (ctrl->rankmap[channel] & 1) ? 0 : 2;

	/* DRAM command ZQCS */
	write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x0f003);
	write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel, 0x80c01);

	write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
		(slotrank << 24) | 0x60000);

	write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0);

	write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 0x400001);
	wait_428c(channel);
}

static void dram_jedecreset(ramctr_timing * ctrl)
{
	u32 reg, addr;
	int channel;

	while (!(MCHBAR32(0x5084) & 0x10000)) ;
	do {
		reg = MCHBAR32(0x428c);
	} while ((reg & 0x14) == 0);

	// Set state of memory controller
	reg = 0x112;
	MCHBAR32(0x5030) = reg;
	MCHBAR32(0x4ea0) = 0;
	reg |= 2;		//ddr reset
	MCHBAR32(0x5030) = reg;

	// Assert dimm reset signal
	reg = MCHBAR32(0x5030);
	reg &= ~0x2;
	MCHBAR32(0x5030) = reg;

	// Wait 200us
	udelay(200);

	// Deassert dimm reset signal
	MCHBAR32(0x5030) |= 2;

	// Wait 500us
	udelay(500);

	// Enable DCLK
	MCHBAR32(0x5030) |= 4;

	// XXX Wait 20ns
	udelay(1);

	FOR_ALL_CHANNELS {
		// Set valid rank CKE
		reg = 0;
		reg = (reg & ~0xf) | ctrl->rankmap[channel];
		addr = 0x400 * channel + 0x42a0;
		MCHBAR32(addr) = reg;

		// Wait 10ns for ranks to settle
		//udelay(0.01);

		reg = (reg & ~0xf0) | (ctrl->rankmap[channel] << 4);
		MCHBAR32(addr) = reg;

		// Write reset using a NOP
		write_reset(ctrl);
	}
}

static odtmap get_ODT(ramctr_timing * ctrl, u8 rank)
{
	/* Get ODT based on rankmap: */
	int dimms_per_ch = 0;
	int channel;

	FOR_ALL_CHANNELS {
		dimms_per_ch = max((ctrl->rankmap[channel] & 1)
				   + ((ctrl->rankmap[channel] >> 2) & 1),
				   dimms_per_ch);
	}

	if (dimms_per_ch == 1) {
		return (const odtmap){60, 60};
	} else if (dimms_per_ch == 2) {
		return (const odtmap){120, 30};
	} else {
		printk(BIOS_DEBUG,
		       "Huh, no dimms? m0 = %d m1 = %d dpc = %d\n",
		       ctrl->rankmap[0],
		       ctrl->rankmap[1], dimms_per_ch);
		die("");
	}
}

static void write_mrreg(ramctr_timing * ctrl, int channel, int slotrank,
			int reg, u32 val)
{
	wait_428c(channel);

	printram("MRd: %x <= %x\n", reg, val);

	if (ctrl->rank_mirror[channel][slotrank]) {
		/* DDR3 Rank1 Address mirror
		 * swap the following pins:
		 * A3<->A4, A5<->A6, A7<->A8, BA0<->BA1 */
		reg = ((reg >> 1) & 1) | ((reg << 1) & 2);
		val = (val & ~0x1f8) | ((val >> 1) & 0xa8)
		    | ((val & 0xa8) << 1);
	}

	printram("MRd: %x <= %x\n", reg, val);

	/* DRAM command MRS */
	write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x0f000);
	write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel, 0x41001);
	write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
		(slotrank << 24) | (reg << 20) | val | 0x60000);
	write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0);

	/* DRAM command MRS */
	write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel, 0x1f000);
	write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel, 0x41001);
	write32(DEFAULT_MCHBAR + 0x4204 + 0x400 * channel,
		(slotrank << 24) | (reg << 20) | val | 0x60000);
	write32(DEFAULT_MCHBAR + 0x4214 + 0x400 * channel, 0);

	/* DRAM command MRS */
	write32(DEFAULT_MCHBAR + 0x4228 + 0x400 * channel, 0x0f000);
	write32(DEFAULT_MCHBAR + 0x4238 + 0x400 * channel,
		0x1001 | (ctrl->tMOD << 16));
	write32(DEFAULT_MCHBAR + 0x4208 + 0x400 * channel,
		(slotrank << 24) | (reg << 20) | val | 0x60000);
	write32(DEFAULT_MCHBAR + 0x4218 + 0x400 * channel, 0);
	write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 0x80001);
}

static u32 make_mr0(ramctr_timing * ctrl, u8 rank)
{
	u16 mr0reg, mch_cas, mch_wr;
	static const u8 mch_wr_t[12] = { 1, 2, 3, 4, 0, 5, 0, 6, 0, 7, 0, 0 };

	/* DLL Reset - self clearing - set after CLK frequency has been changed */
	mr0reg = 0x100;

	// Convert CAS to MCH register friendly
	if (ctrl->CAS < 12) {
		mch_cas = (u16) ((ctrl->CAS - 4) << 1);
	} else {
		mch_cas = (u16) (ctrl->CAS - 12);
		mch_cas = ((mch_cas << 1) | 0x1);
	}

	// Convert tWR to MCH register friendly
	mch_wr = mch_wr_t[ctrl->tWR - 5];

	mr0reg = (mr0reg & ~0x4) | (mch_cas & 0x1);
	mr0reg = (mr0reg & ~0x70) | ((mch_cas & 0xe) << 3);
	mr0reg = (mr0reg & ~0xe00) | (mch_wr << 9);

	// Precharge PD - Fast (desktop) 0x1 or slow (mobile) 0x0 - mostly power-saving feature
	mr0reg = (mr0reg & ~0x1000) | (!ctrl->mobile << 12);
	return mr0reg;
}
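
/*
 * Worked example (illustration only, derived from the code above): CAS = 9,
 * tWR = 10 on a desktop (non-mobile) board gives mch_cas = (9 - 4) << 1 =
 * 0xa and mch_wr = mch_wr_t[10 - 5] = 5, so make_mr0() builds
 *   0x100 (DLL reset) | ((0xa & 0xe) << 3) | (5 << 9) | (1 << 12) = 0x1b50,
 * which matches the JEDEC MR0 encoding of CL9, WR10, DLL reset and fast
 * precharge power-down.
 */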

static void dram_mr0(ramctr_timing * ctrl, u8 rank)
{
	int channel;

	FOR_ALL_POPULATED_CHANNELS write_mrreg(ctrl, channel, rank, 0,
					       make_mr0(ctrl, rank));
}

static u32 encode_odt(u32 odt)
{
	switch (odt) {
	case 30:
		return (1 << 9) | (1 << 2);	// RZQ/8, RZQ/4
	case 60:
		return (1 << 2);	// RZQ/4
	case 120:
		return (1 << 6);	// RZQ/2
	default:
	case 0:
		return 0;
	}
}

static u32 make_mr1(ramctr_timing * ctrl, u8 rank)
{
	odtmap odt;
	u32 mr1reg;

	odt = get_ODT(ctrl, rank);
	mr1reg = 0x2;

	mr1reg |= encode_odt(odt.rttnom);

	return mr1reg;
}

static void dram_mr1(ramctr_timing * ctrl, u8 rank)
{
	u16 mr1reg;
	int channel;

	mr1reg = make_mr1(ctrl, rank);

	FOR_ALL_CHANNELS {
		write_mrreg(ctrl, channel, rank, 1, mr1reg);
	}
}

static void dram_mr2(ramctr_timing * ctrl, u8 rank)
{
	u16 pasr, cwl, mr2reg;
	odtmap odt;
	int channel;
	int srt;

	pasr = 0;
	cwl = ctrl->CWL - 5;
	odt = get_ODT(ctrl, rank);

	srt = ctrl->extended_temperature_range && !ctrl->auto_self_refresh;

	mr2reg = 0;
	mr2reg = (mr2reg & ~0x7) | pasr;
	mr2reg = (mr2reg & ~0x38) | (cwl << 3);
	mr2reg = (mr2reg & ~0x40) | (ctrl->auto_self_refresh << 6);
	mr2reg = (mr2reg & ~0x80) | (srt << 7);
	mr2reg |= (odt.rttwr / 60) << 9;

	FOR_ALL_CHANNELS {
		write_mrreg(ctrl, channel, rank, 2, mr2reg);
	}
}

static void dram_mr3(ramctr_timing * ctrl, u8 rank)
{
	int channel;

	FOR_ALL_CHANNELS {
		write_mrreg(ctrl, channel, rank, 3, 0);
	}
}

static void dram_mrscommands(ramctr_timing * ctrl)
{
	u8 rank;
	u32 reg, addr;
	int channel;

	for (rank = 0; rank < 4; rank++) {
		// MR2
		printram("MR2 rank %d...", rank);
		dram_mr2(ctrl, rank);
		printram("done\n");

		// MR3
		printram("MR3 rank %d...", rank);
		dram_mr3(ctrl, rank);
		printram("done\n");

		// MR1
		printram("MR1 rank %d...", rank);
		dram_mr1(ctrl, rank);
		printram("done\n");

		// MR0
		printram("MR0 rank %d...", rank);
		dram_mr0(ctrl, rank);
		printram("done\n");
	}

	/* DRAM command NOP */
	write32(DEFAULT_MCHBAR + 0x4e20, 0x7);
	write32(DEFAULT_MCHBAR + 0x4e30, 0xf1001);
	write32(DEFAULT_MCHBAR + 0x4e00, 0x60002);
	write32(DEFAULT_MCHBAR + 0x4e10, 0);

	/* DRAM command ZQCL */
	write32(DEFAULT_MCHBAR + 0x4e24, 0x1f003);
	write32(DEFAULT_MCHBAR + 0x4e34, 0x1901001);
	write32(DEFAULT_MCHBAR + 0x4e04, 0x60400);
	write32(DEFAULT_MCHBAR + 0x4e14, 0x288);

	/* Execute command queue on all channels? */
	write32(DEFAULT_MCHBAR + 0x4e84, 0x40004);

	// Drain
	FOR_ALL_CHANNELS {
		// Wait for ref drained
		wait_428c(channel);
	}

	// Refresh enable
	MCHBAR32(0x5030) |= 8;

	FOR_ALL_POPULATED_CHANNELS {
		addr = 0x400 * channel + 0x4020;
		reg = MCHBAR32(addr);
		reg &= ~0x200000;
		MCHBAR32(addr) = reg;

		wait_428c(channel);

		rank = (ctrl->rankmap[channel] & 1) ? 0 : 2;

		// Drain
		wait_428c(channel);

		/* DRAM command ZQCS */
		write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x0f003);
		write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel, 0x659001);
		write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
			(rank << 24) | 0x60000);
		write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0x3e0);
		write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 0x1);

		// Drain
		wait_428c(channel);
	}
}

const u32 lane_registers[] = {
	0x0000, 0x0200, 0x0400, 0x0600,
	0x1000, 0x1200, 0x1400, 0x1600,
	0x0800
};

static void program_timings(ramctr_timing * ctrl, int channel)
{
	u32 reg32, reg_4024, reg_c14, reg_c18, reg_4028;
	int lane;
	int slotrank, slot;
	int full_shift = 0;
	u16 slot320c[NUM_SLOTS];

	FOR_ALL_POPULATED_RANKS {
		if (full_shift < -ctrl->timings[channel][slotrank].val_320c)
			full_shift = -ctrl->timings[channel][slotrank].val_320c;
	}

	for (slot = 0; slot < NUM_SLOTS; slot++)
		switch ((ctrl->rankmap[channel] >> (2 * slot)) & 3) {
		case 0:
		default:
			slot320c[slot] = 0x7f;
			break;
		case 1:
			slot320c[slot] =
			    ctrl->timings[channel][2 * slot + 0].val_320c +
			    full_shift;
			break;
		case 2:
			slot320c[slot] =
			    ctrl->timings[channel][2 * slot + 1].val_320c +
			    full_shift;
			break;
		case 3:
			slot320c[slot] =
			    (ctrl->timings[channel][2 * slot].val_320c +
			     ctrl->timings[channel][2 * slot + 1].val_320c) / 2 +
			    full_shift;
			break;
		}

	reg32 = (1 << 17) | (1 << 14);
	reg32 |= ((slot320c[0] & 0x3f) << 6) | ((slot320c[0] & 0x40) << 9);
	reg32 |= (slot320c[1] & 0x7f) << 18;
	reg32 |= (full_shift & 0x3f) | ((full_shift & 0x40) << 6);

	MCHBAR32(0x320c + 0x100 * channel) = reg32;

	reg_c14 = ctrl->rankmap[channel] << 24;
	reg_c18 = 0;

	FOR_ALL_POPULATED_RANKS {
		int shift =
		    ctrl->timings[channel][slotrank].val_320c + full_shift;
		int offset_val_c14;
		if (shift < 0)
			shift = 0;
		offset_val_c14 = ctrl->reg_c14_offset + shift;
		reg_c14 |= (offset_val_c14 & 0x3f) << (6 * slotrank);
		reg_c18 |= ((offset_val_c14 >> 6) & 1) << slotrank;
	}

	MCHBAR32(0xc14 + channel * 0x100) = reg_c14;
	MCHBAR32(0xc18 + channel * 0x100) = reg_c18;

	reg_4028 = MCHBAR32(0x4028 + 0x400 * channel);
	reg_4028 &= 0xffff0000;

	reg_4024 = 0;

	FOR_ALL_POPULATED_RANKS {
		int post_timA_min_high = 7, post_timA_max_high = 0;
		int pre_timA_min_high = 7, pre_timA_max_high = 0;
		int shift_402x = 0;
		int shift =
		    ctrl->timings[channel][slotrank].val_320c + full_shift;

		if (shift < 0)
			shift = 0;

		FOR_ALL_LANES {
			if (post_timA_min_high >
			    ((ctrl->timings[channel][slotrank].lanes[lane].timA + shift) >> 6))
				post_timA_min_high =
				    ((ctrl->timings[channel][slotrank].lanes[lane].timA + shift) >> 6);
			if (pre_timA_min_high >
			    (ctrl->timings[channel][slotrank].lanes[lane].timA >> 6))
				pre_timA_min_high =
				    (ctrl->timings[channel][slotrank].lanes[lane].timA >> 6);
			if (post_timA_max_high <
			    ((ctrl->timings[channel][slotrank].lanes[lane].timA + shift) >> 6))
				post_timA_max_high =
				    ((ctrl->timings[channel][slotrank].lanes[lane].timA + shift) >> 6);
			if (pre_timA_max_high <
			    (ctrl->timings[channel][slotrank].lanes[lane].timA >> 6))
				pre_timA_max_high =
				    (ctrl->timings[channel][slotrank].lanes[lane].timA >> 6);
		}

		if (pre_timA_max_high - pre_timA_min_high <
		    post_timA_max_high - post_timA_min_high)
			shift_402x = +1;
		else if (pre_timA_max_high - pre_timA_min_high >
			 post_timA_max_high - post_timA_min_high)
			shift_402x = -1;

		reg_4028 |=
		    (ctrl->timings[channel][slotrank].val_4028 + shift_402x -
		     post_timA_min_high) << (4 * slotrank);
		reg_4024 |=
		    (ctrl->timings[channel][slotrank].val_4024 +
		     shift_402x) << (8 * slotrank);

		FOR_ALL_LANES {
			MCHBAR32(lane_registers[lane] + 0x10 + 0x100 * channel +
				 4 * slotrank) =
			    (((ctrl->timings[channel][slotrank].lanes[lane].timA + shift) & 0x3f)
			     | ((ctrl->timings[channel][slotrank].lanes[lane].rising + shift) << 8)
			     | (((ctrl->timings[channel][slotrank].lanes[lane].timA + shift -
				  (post_timA_min_high << 6)) & 0x1c0) << 10)
			     | (ctrl->timings[channel][slotrank].lanes[lane].falling << 20));

			MCHBAR32(lane_registers[lane] + 0x20 + 0x100 * channel +
				 4 * slotrank) =
			    (((ctrl->timings[channel][slotrank].lanes[lane].timC + shift) & 0x3f)
			     | (((ctrl->timings[channel][slotrank].lanes[lane].timB + shift) & 0x3f) << 8)
			     | (((ctrl->timings[channel][slotrank].lanes[lane].timB + shift) & 0x1c0) << 9)
			     | (((ctrl->timings[channel][slotrank].lanes[lane].timC + shift) & 0x40) << 13));
		}
	}
	MCHBAR32(0x4024 + 0x400 * channel) = reg_4024;
	MCHBAR32(0x4028 + 0x400 * channel) = reg_4028;
}

static void test_timA(ramctr_timing * ctrl, int channel, int slotrank)
{
	wait_428c(channel);

	/* DRAM command MRS
	 * write MR3 MPR enable
	 * in this mode only RD and RDA are allowed
	 * all reads return a predefined pattern */
	write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x1f000);
	write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
		(0xc01 | (ctrl->tMOD << 16)));
	write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
		(slotrank << 24) | 0x360004);
	write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0);

	/* DRAM command RD */
	write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel, 0x1f105);
	write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel, 0x4040c01);
	write32(DEFAULT_MCHBAR + 0x4204 + 0x400 * channel, (slotrank << 24));
	write32(DEFAULT_MCHBAR + 0x4214 + 0x400 * channel, 0);

	/* DRAM command RD */
	write32(DEFAULT_MCHBAR + 0x4228 + 0x400 * channel, 0x1f105);
	write32(DEFAULT_MCHBAR + 0x4238 + 0x400 * channel,
		0x100f | ((ctrl->CAS + 36) << 16));
	write32(DEFAULT_MCHBAR + 0x4208 + 0x400 * channel,
		(slotrank << 24) | 0x60000);
	write32(DEFAULT_MCHBAR + 0x4218 + 0x400 * channel, 0);

	/* DRAM command MRS
	 * write MR3 MPR disable */
	write32(DEFAULT_MCHBAR + 0x422c + 0x400 * channel, 0x1f000);
	write32(DEFAULT_MCHBAR + 0x423c + 0x400 * channel,
		(0xc01 | (ctrl->tMOD << 16)));
	write32(DEFAULT_MCHBAR + 0x420c + 0x400 * channel,
		(slotrank << 24) | 0x360000);
	write32(DEFAULT_MCHBAR + 0x421c + 0x400 * channel, 0);

	write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 0xc0001);

	wait_428c(channel);
}

static int does_lane_work(ramctr_timing * ctrl, int channel, int slotrank,
			  int lane)
{
	u32 timA = ctrl->timings[channel][slotrank].lanes[lane].timA;
	return ((read32
		 (DEFAULT_MCHBAR + lane_registers[lane] + channel * 0x100 + 4 +
		  ((timA / 32) & 1) * 4)
		 >> (timA % 32)) & 1);
}

struct run {
	int middle;
	int end;
	int start;
	int all;
	int length;
};

static struct run get_longest_zero_run(int *seq, int sz)
{
	int i, ls;
	int bl = 0, bs = 0;
	struct run ret;

	ls = 0;
	for (i = 0; i < 2 * sz; i++)
		if (seq[i % sz]) {
			if (i - ls > bl) {
				bl = i - ls;
				bs = ls;
			}
			ls = i + 1;
		}
	if (bl == 0) {
		ret.middle = sz / 2;
		ret.start = 0;
		ret.end = sz;
		ret.all = 1;
		return ret;
	}

	ret.start = bs % sz;
	ret.end = (bs + bl - 1) % sz;
	ret.middle = (bs + (bl - 1) / 2) % sz;
	ret.length = bl;
	ret.all = 0;

	return ret;
}
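
/*
 * Illustrative example: for seq = {1, 0, 0, 0, 1, 0, 0, 1} and sz = 8 the
 * longest run of zeroes spans indices 1..3, so start = 1, end = 3,
 * middle = 2 and length = 3. The sequence is scanned twice (2 * sz) so
 * that a run wrapping around the end of the array is also found, which is
 * why start, end and middle are reduced modulo sz.
 */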

static void discover_timA_coarse(ramctr_timing * ctrl, int channel,
				 int slotrank, int *upperA)
{
	int timA;
	int statistics[NUM_LANES][128];
	int lane;

	for (timA = 0; timA < 128; timA++) {
		FOR_ALL_LANES {
			ctrl->timings[channel][slotrank].lanes[lane].timA = timA;
		}
		program_timings(ctrl, channel);

		test_timA(ctrl, channel, slotrank);

		FOR_ALL_LANES {
			statistics[lane][timA] =
			    !does_lane_work(ctrl, channel, slotrank, lane);
			printram("Astat: %d, %d, %d, %x, %x\n",
				 channel, slotrank, lane, timA,
				 statistics[lane][timA]);
		}
	}
	FOR_ALL_LANES {
		struct run rn = get_longest_zero_run(statistics[lane], 128);
		ctrl->timings[channel][slotrank].lanes[lane].timA = rn.middle;
		upperA[lane] = rn.end;
		if (upperA[lane] < rn.middle)
			upperA[lane] += 128;
		printram("Aval: %d, %d, %d, %x\n", channel, slotrank,
			 lane, ctrl->timings[channel][slotrank].lanes[lane].timA);
		printram("Aend: %d, %d, %d, %x\n", channel, slotrank,
			 lane, upperA[lane]);
	}
}
1770
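/* Fine timA search: sweep +/- 25 steps around upperA + 0x40 and repeat the
 * test 100 times per setting to get stable statistics. The midpoint between
 * the last setting that never worked and the first setting that always
 * worked is used to refine timA relative to upperA. */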
1771static void discover_timA_fine(ramctr_timing * ctrl, int channel, int slotrank,
1772 int *upperA)
1773{
1774 int timA_delta;
1775 int statistics[NUM_LANES][51];
1776 int lane, i;
1777
1778 memset(statistics, 0, sizeof(statistics));
1779
1780 for (timA_delta = -25; timA_delta <= 25; timA_delta++) {
1781 FOR_ALL_LANES ctrl->timings[channel][slotrank].lanes[lane].
1782 timA = upperA[lane] + timA_delta + 0x40;
1783 program_timings(ctrl, channel);
1784
1785 for (i = 0; i < 100; i++) {
1786 test_timA(ctrl, channel, slotrank);
1787 FOR_ALL_LANES {
1788 statistics[lane][timA_delta + 25] +=
1789 does_lane_work(ctrl, channel, slotrank,
1790 lane);
1791 }
1792 }
1793 }
1794 FOR_ALL_LANES {
1795 int last_zero, first_all;
1796
1797 for (last_zero = -25; last_zero <= 25; last_zero++)
1798 if (statistics[lane][last_zero + 25])
1799 break;
1800 last_zero--;
1801 for (first_all = -25; first_all <= 25; first_all++)
1802 if (statistics[lane][first_all + 25] == 100)
1803 break;
1804
1805 printram("lane %d: %d, %d\n", lane, last_zero,
1806 first_all);
1807
1808 ctrl->timings[channel][slotrank].lanes[lane].timA =
1809 (last_zero + first_all) / 2 + upperA[lane];
1810 printram("Aval: %d, %d, %d, %x\n", channel, slotrank,
1811 lane, ctrl->timings[channel][slotrank].lanes[lane].timA);
1812 }
1813}
1814
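/* The values at 0x4024/0x4028 (presumably roundtrip and I/O latency)
 * interact with timA. Keep adjusting them until the test passes on all
 * lanes: if no lane works, decrease the 0x4024 value; if only some lanes
 * work, increase the 0x4028 value and move the already-working lanes a
 * full phase (128 timA steps) up so that all lanes share one window. */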
1815static void discover_402x(ramctr_timing * ctrl, int channel, int slotrank,
1816 int *upperA)
1817{
1818 int works[NUM_LANES];
1819 int lane;
1820 while (1) {
1821 int all_works = 1, some_works = 0;
1822 program_timings(ctrl, channel);
1823 test_timA(ctrl, channel, slotrank);
1824 FOR_ALL_LANES {
1825 works[lane] =
1826 !does_lane_work(ctrl, channel, slotrank, lane);
1827 if (works[lane])
1828 some_works = 1;
1829 else
1830 all_works = 0;
1831 }
1832 if (all_works)
1833 return;
1834 if (!some_works) {
1835 if (ctrl->timings[channel][slotrank].val_4024 < 2)
1836 die("402x discovery failed");
1837 ctrl->timings[channel][slotrank].val_4024 -= 2;
1838 printram("4024 -= 2;\n");
1839 continue;
1840 }
1841 ctrl->timings[channel][slotrank].val_4028 += 2;
1842 printram("4028 += 2;\n");
1843 if (ctrl->timings[channel][slotrank].val_4028 >= 0x10)
1844 die("402x discovery failed");
1845 FOR_ALL_LANES if (works[lane]) {
1846 ctrl->timings[channel][slotrank].lanes[lane].timA +=
1847 128;
1848 upperA[lane] += 128;
1849 printram("increment %d, %d, %d\n", channel,
1850 slotrank, lane);
1851 }
1852 }
1853}
1854
1855struct timA_minmax {
1856 int timA_min_high, timA_max_high;
1857};
1858
1859static void pre_timA_change(ramctr_timing * ctrl, int channel, int slotrank,
1860 struct timA_minmax *mnmx)
1861{
1862 int lane;
1863 mnmx->timA_min_high = 7;
1864 mnmx->timA_max_high = 0;
1865
1866 FOR_ALL_LANES {
1867 if (mnmx->timA_min_high >
1868 (ctrl->timings[channel][slotrank].lanes[lane].timA >> 6))
1869 mnmx->timA_min_high =
1870 (ctrl->timings[channel][slotrank].lanes[lane].
1871 timA >> 6);
1872 if (mnmx->timA_max_high <
1873 (ctrl->timings[channel][slotrank].lanes[lane].timA >> 6))
1874 mnmx->timA_max_high =
1875 (ctrl->timings[channel][slotrank].lanes[lane].
1876 timA >> 6);
1877 }
1878}
1879
1880static void post_timA_change(ramctr_timing * ctrl, int channel, int slotrank,
1881 struct timA_minmax *mnmx)
1882{
1883 struct timA_minmax post;
1884 int shift_402x = 0;
1885
1886 /* Get changed maxima. */
1887 pre_timA_change(ctrl, channel, slotrank, &post);
1888
1889 if (mnmx->timA_max_high - mnmx->timA_min_high <
1890 post.timA_max_high - post.timA_min_high)
1891 shift_402x = +1;
1892 else if (mnmx->timA_max_high - mnmx->timA_min_high >
1893 post.timA_max_high - post.timA_min_high)
1894 shift_402x = -1;
1895 else
1896 shift_402x = 0;
1897
1898 ctrl->timings[channel][slotrank].val_4028 += shift_402x;
1899 ctrl->timings[channel][slotrank].val_4024 += shift_402x;
1900 printram("4024 += %d;\n", shift_402x);
1901 printram("4028 += %d;\n", shift_402x);
1902}
1903
Patrick Rudolph371d2912015-10-09 13:33:25 +02001904/* Compensate the skew between DQS and DQ.
1905 * To ease PCB design, a small skew between the Data Strobe (DQS) signal and
1906 * the Data (DQ) signals is allowed.
1907 * The controller has to measure and compensate this skew for every byte-lane.
1908 * By delaying either all DQ signals or the DQS signal, a full phase
1909 * shift can be introduced.
1910 * It is assumed that all DQ signals of one byte-lane have the same routing delay.
1911 *
1912 * To measure the actual skew, the DRAM is placed in "read leveling" mode.
1913 * In read leveling mode the DRAM chip outputs an alternating periodic pattern.
1914 * The memory controller iterates over all possible phase-shift values
1915 * and issues read commands at each step.
1916 * With DQS and DQ in phase, the data read back is expected to alternate on every byte:
1917 * 0xFF 0x00 0xFF ...
1918 * Once the controller has detected this pattern, a bit in the result register is
1919 * set for the current phase shift.
1920 */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07001921static void read_training(ramctr_timing * ctrl)
1922{
1923 int channel, slotrank, lane;
1924
1925 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS {
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07001926 int all_high, some_high;
1927 int upperA[NUM_LANES];
1928 struct timA_minmax mnmx;
1929
1930 wait_428c(channel);
Patrick Rudolph371d2912015-10-09 13:33:25 +02001931
1932 /* DRAM command PREA */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07001933 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x1f002);
1934 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
1935 0xc01 | (ctrl->tRP << 16));
1936 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
1937 (slotrank << 24) | 0x60400);
1938 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0);
1939 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 1);
1940
1941 write32(DEFAULT_MCHBAR + 0x3400, (slotrank << 2) | 0x8001);
1942
1943 ctrl->timings[channel][slotrank].val_4028 = 4;
1944 ctrl->timings[channel][slotrank].val_4024 = 55;
1945 program_timings(ctrl, channel);
1946
1947 discover_timA_coarse(ctrl, channel, slotrank, upperA);
1948
1949 all_high = 1;
1950 some_high = 0;
1951 FOR_ALL_LANES {
1952 if (ctrl->timings[channel][slotrank].lanes[lane].
1953 timA >= 0x40)
1954 some_high = 1;
1955 else
1956 all_high = 0;
1957 }
1958
1959 if (all_high) {
1960 ctrl->timings[channel][slotrank].val_4028--;
1961 printram("4028--;\n");
1962 FOR_ALL_LANES {
1963 ctrl->timings[channel][slotrank].lanes[lane].
1964 timA -= 0x40;
1965 upperA[lane] -= 0x40;
1966
1967 }
1968 } else if (some_high) {
1969 ctrl->timings[channel][slotrank].val_4024++;
1970 ctrl->timings[channel][slotrank].val_4028++;
1971 printram("4024++;\n");
1972 printram("4028++;\n");
1973 }
1974
1975 program_timings(ctrl, channel);
1976
1977 pre_timA_change(ctrl, channel, slotrank, &mnmx);
1978
1979 discover_402x(ctrl, channel, slotrank, upperA);
1980
1981 post_timA_change(ctrl, channel, slotrank, &mnmx);
1982 pre_timA_change(ctrl, channel, slotrank, &mnmx);
1983
1984 discover_timA_fine(ctrl, channel, slotrank, upperA);
1985
1986 post_timA_change(ctrl, channel, slotrank, &mnmx);
1987 pre_timA_change(ctrl, channel, slotrank, &mnmx);
1988
1989 FOR_ALL_LANES {
1990 ctrl->timings[channel][slotrank].lanes[lane].timA -= mnmx.timA_min_high * 0x40;
1991 }
1992 ctrl->timings[channel][slotrank].val_4028 -= mnmx.timA_min_high;
1993 printram("4028 -= %d;\n", mnmx.timA_min_high);
1994
1995 post_timA_change(ctrl, channel, slotrank, &mnmx);
1996
1997 printram("4/8: %d, %d, %x, %x\n", channel, slotrank,
1998 ctrl->timings[channel][slotrank].val_4024,
1999 ctrl->timings[channel][slotrank].val_4028);
2000
2001 FOR_ALL_LANES
2002 printram("%d, %d, %d, %x\n", channel, slotrank,
2003 lane,
2004 ctrl->timings[channel][slotrank].lanes[lane].timA);
2005
2006 write32(DEFAULT_MCHBAR + 0x3400, 0);
2007
Patrick Rudolph9b515682015-10-09 13:43:51 +02002008 toggle_io_reset();
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002009 }
2010
2011 FOR_ALL_POPULATED_CHANNELS {
2012 program_timings(ctrl, channel);
2013 }
2014 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS FOR_ALL_LANES {
2015 write32(DEFAULT_MCHBAR + 0x4080 + 0x400 * channel
2016 + 4 * lane, 0);
2017 }
2018}
2019
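/* Issue a write burst (ACT, NOP, WR, NOP) followed by a read-back
 * (PREA, ACT, RD, PREA) on the given slotrank. The per-lane result
 * registers at 0x4340 are cleared first and are read back as error
 * statistics by the caller. */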
2020static void test_timC(ramctr_timing * ctrl, int channel, int slotrank)
2021{
2022 int lane;
2023
2024 FOR_ALL_LANES {
2025 write32(DEFAULT_MCHBAR + 0x4340 + 0x400 * channel + 4 * lane, 0);
2026 read32(DEFAULT_MCHBAR + 0x4140 + 0x400 * channel + 4 * lane);
2027 }
2028
2029 wait_428c(channel);
2030
Patrick Rudolph371d2912015-10-09 13:33:25 +02002031 /* DRAM command ACT */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002032 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x1f006);
2033 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
2034 (max((ctrl->tFAW >> 2) + 1, ctrl->tRRD) << 10)
2035 | 4 | (ctrl->tRCD << 16));
2036
2037 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
2038 (slotrank << 24) | (6 << 16));
2039
2040 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0x244);
2041
Patrick Rudolph371d2912015-10-09 13:33:25 +02002042 /* DRAM command NOP */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002043 write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel, 0x1f207);
2044 write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel, 0x8041001);
2045 write32(DEFAULT_MCHBAR + 0x4204 + 0x400 * channel,
2046 (slotrank << 24) | 8);
2047 write32(DEFAULT_MCHBAR + 0x4214 + 0x400 * channel, 0x3e0);
2048
Patrick Rudolph371d2912015-10-09 13:33:25 +02002049 /* DRAM command WR */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002050 write32(DEFAULT_MCHBAR + 0x4228 + 0x400 * channel, 0x1f201);
2051 write32(DEFAULT_MCHBAR + 0x4238 + 0x400 * channel, 0x80411f4);
2052 write32(DEFAULT_MCHBAR + 0x4208 + 0x400 * channel, (slotrank << 24));
2053 write32(DEFAULT_MCHBAR + 0x4218 + 0x400 * channel, 0x242);
2054
Patrick Rudolph371d2912015-10-09 13:33:25 +02002055 /* DRAM command NOP */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002056 write32(DEFAULT_MCHBAR + 0x422c + 0x400 * channel, 0x1f207);
2057 write32(DEFAULT_MCHBAR + 0x423c + 0x400 * channel,
2058 0x8000c01 | ((ctrl->CWL + ctrl->tWTR + 5) << 16));
2059 write32(DEFAULT_MCHBAR + 0x420c + 0x400 * channel,
2060 (slotrank << 24) | 8);
2061 write32(DEFAULT_MCHBAR + 0x421c + 0x400 * channel, 0x3e0);
2062
2063 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 0xc0001);
2064
2065 wait_428c(channel);
2066
Patrick Rudolph371d2912015-10-09 13:33:25 +02002067 /* DRAM command PREA */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002068 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x1f002);
2069 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
2070 0xc01 | (ctrl->tRP << 16));
2071 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
2072 (slotrank << 24) | 0x60400);
2073 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0x240);
2074
Patrick Rudolph371d2912015-10-09 13:33:25 +02002075 /* DRAM command ACT */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002076 write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel, 0x1f006);
2077 write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel,
2078 (max(ctrl->tRRD, (ctrl->tFAW >> 2) + 1) << 10)
2079 | 8 | (ctrl->CAS << 16));
2080
2081 write32(DEFAULT_MCHBAR + 0x4204 + 0x400 * channel,
2082 (slotrank << 24) | 0x60000);
2083
2084 write32(DEFAULT_MCHBAR + 0x4214 + 0x400 * channel, 0x244);
2085
Patrick Rudolph371d2912015-10-09 13:33:25 +02002086 /* DRAM command RD */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002087 write32(DEFAULT_MCHBAR + 0x4228 + 0x400 * channel, 0x1f105);
2088 write32(DEFAULT_MCHBAR + 0x4238 + 0x400 * channel,
2089 0x40011f4 | (max(ctrl->tRTP, 8) << 16));
2090 write32(DEFAULT_MCHBAR + 0x4208 + 0x400 * channel, (slotrank << 24));
2091 write32(DEFAULT_MCHBAR + 0x4218 + 0x400 * channel, 0x242);
2092
Patrick Rudolph371d2912015-10-09 13:33:25 +02002093 /* DRAM command PREA */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002094 write32(DEFAULT_MCHBAR + 0x422c + 0x400 * channel, 0x1f002);
2095 write32(DEFAULT_MCHBAR + 0x423c + 0x400 * channel,
2096 0xc01 | (ctrl->tRP << 16));
2097 write32(DEFAULT_MCHBAR + 0x420c + 0x400 * channel,
2098 (slotrank << 24) | 0x60400);
2099 write32(DEFAULT_MCHBAR + 0x421c + 0x400 * channel, 0x240);
2100 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 0xc0001);
2101 wait_428c(channel);
2102}
2103
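/* Sweep timC (write data timing) over its full range on all lanes, run the
 * write/read-back test for every setting, and center timC in the longest
 * window that produced no errors on each lane. */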
2104static void discover_timC(ramctr_timing * ctrl, int channel, int slotrank)
2105{
2106 int timC;
2107 int statistics[NUM_LANES][MAX_TIMC + 1];
2108 int lane;
2109
2110 wait_428c(channel);
2111
Patrick Rudolph371d2912015-10-09 13:33:25 +02002112 /* DRAM command PREA */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002113 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x1f002);
2114 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
2115 0xc01 | (ctrl->tRP << 16));
2116 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
2117 (slotrank << 24) | 0x60400);
2118 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0x240);
2119 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 1);
2120
2121 for (timC = 0; timC <= MAX_TIMC; timC++) {
2122 FOR_ALL_LANES ctrl->timings[channel][slotrank].lanes[lane].
2123 timC = timC;
2124 program_timings(ctrl, channel);
2125
2126 test_timC(ctrl, channel, slotrank);
2127
2128 FOR_ALL_LANES {
2129 statistics[lane][timC] =
2130 read32(DEFAULT_MCHBAR + 0x4340 + 4 * lane +
2131 0x400 * channel);
2132 printram("Cstat: %d, %d, %d, %x, %x\n",
2133 channel, slotrank, lane, timC,
2134 statistics[lane][timC]);
2135 }
2136 }
2137 FOR_ALL_LANES {
2138 struct run rn =
2139 get_longest_zero_run(statistics[lane], MAX_TIMC + 1);
2140 ctrl->timings[channel][slotrank].lanes[lane].timC = rn.middle;
2141 if (rn.all)
2142 printk(BIOS_CRIT, "timC discovery failed\n");
2143 printram("Cval: %d, %d, %d, %x\n", channel, slotrank,
2144 lane, ctrl->timings[channel][slotrank].lanes[lane].timC);
2145 }
2146}
2147
2148static int get_precedening_channels(ramctr_timing * ctrl, int target_channel)
2149{
2150 int channel, ret = 0;
2151 FOR_ALL_POPULATED_CHANNELS if (channel < target_channel)
2152 ret++;
2153 return ret;
2154}
2155
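/* Fill this channel's slice of the test-pattern buffer in DRAM (at physical
 * address 0x04000000) with a repeating a/a/b/b word pattern. The per-channel
 * offset of 0x40 bytes presumably matches the channel interleaving at this
 * address. */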
2156static void fill_pattern0(ramctr_timing * ctrl, int channel, u32 a, u32 b)
2157{
2158 unsigned j;
2159 unsigned channel_offset =
2160 get_precedening_channels(ctrl, channel) * 0x40;
2161 printram("channel_offset=%x\n", channel_offset);
2162 for (j = 0; j < 16; j++)
2163 write32((void *)(0x04000000 + channel_offset + 4 * j), j & 2 ? b : a);
2164 sfence();
2165}
2166
2167static int num_of_channels(const ramctr_timing * ctrl)
2168{
2169 int ret = 0;
2170 int channel;
2171 FOR_ALL_POPULATED_CHANNELS ret++;
2172 return ret;
2173}
2174
2175static void fill_pattern1(ramctr_timing * ctrl, int channel)
2176{
2177 unsigned j;
2178 unsigned channel_offset =
2179 get_precedening_channels(ctrl, channel) * 0x40;
2180 unsigned channel_step = 0x40 * num_of_channels(ctrl);
2181 for (j = 0; j < 16; j++)
2182 write32((void *)(0x04000000 + channel_offset + j * 4), 0xffffffff);
2183 for (j = 0; j < 16; j++)
2184 write32((void *)(0x04000000 + channel_offset + channel_step + j * 4), 0);
2185 sfence();
2186}
2187
2188static void precharge(ramctr_timing * ctrl)
2189{
2190 int channel, slotrank, lane;
2191
2192 FOR_ALL_POPULATED_CHANNELS {
2193 FOR_ALL_POPULATED_RANKS FOR_ALL_LANES {
2194 ctrl->timings[channel][slotrank].lanes[lane].falling =
2195 16;
2196 ctrl->timings[channel][slotrank].lanes[lane].rising =
2197 16;
2198 } program_timings(ctrl, channel);
2199
2200 FOR_ALL_POPULATED_RANKS {
2201 wait_428c(channel);
2202
Patrick Rudolph371d2912015-10-09 13:33:25 +02002203 /* DRAM command MRS
2204 * write MR3 MPR enable
2205 * in this mode only RD and RDA are allowed
2206 * all reads return a predefined pattern */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002207 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel,
2208 0x1f000);
2209 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
2210 0xc01 | (ctrl->tMOD << 16));
2211 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
2212 (slotrank << 24) | 0x360004);
2213 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0);
2214
Patrick Rudolph371d2912015-10-09 13:33:25 +02002215 /* DRAM command RD */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002216 write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel,
2217 0x1f105);
2218 write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel,
2219 0x4041003);
2220 write32(DEFAULT_MCHBAR + 0x4204 + 0x400 * channel,
2221 (slotrank << 24) | 0);
2222 write32(DEFAULT_MCHBAR + 0x4214 + 0x400 * channel, 0);
2223
Patrick Rudolph371d2912015-10-09 13:33:25 +02002224 /* DRAM command RD */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002225 write32(DEFAULT_MCHBAR + 0x4228 + 0x400 * channel,
2226 0x1f105);
2227 write32(DEFAULT_MCHBAR + 0x4238 + 0x400 * channel,
2228 0x1001 | ((ctrl->CAS + 8) << 16));
2229 write32(DEFAULT_MCHBAR + 0x4208 + 0x400 * channel,
2230 (slotrank << 24) | 0x60000);
2231 write32(DEFAULT_MCHBAR + 0x4218 + 0x400 * channel, 0);
2232
Patrick Rudolph371d2912015-10-09 13:33:25 +02002233 /* DRAM command MRS
2234 * write MR3 MPR disable */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002235 write32(DEFAULT_MCHBAR + 0x422c + 0x400 * channel,
2236 0x1f000);
2237 write32(DEFAULT_MCHBAR + 0x423c + 0x400 * channel,
2238 0xc01 | (ctrl->tMOD << 16));
2239 write32(DEFAULT_MCHBAR + 0x420c + 0x400 * channel,
2240 (slotrank << 24) | 0x360000);
2241 write32(DEFAULT_MCHBAR + 0x421c + 0x400 * channel, 0);
2242 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel,
2243 0xc0001);
2244
2245 wait_428c(channel);
2246 }
2247
2248 FOR_ALL_POPULATED_RANKS FOR_ALL_LANES {
2249 ctrl->timings[channel][slotrank].lanes[lane].falling =
2250 48;
2251 ctrl->timings[channel][slotrank].lanes[lane].rising =
2252 48;
2253 }
2254
2255 program_timings(ctrl, channel);
2256
2257 FOR_ALL_POPULATED_RANKS {
2258 wait_428c(channel);
Patrick Rudolph371d2912015-10-09 13:33:25 +02002259 /* DRAM command MRS
2260 * write MR3 MPR enable
2261 * in this mode only RD and RDA are allowed
2262 * all reads return a predefined pattern */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002263 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel,
2264 0x1f000);
2265 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
2266 0xc01 | (ctrl->tMOD << 16));
2267 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
2268 (slotrank << 24) | 0x360004);
2269 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0);
2270
Patrick Rudolph371d2912015-10-09 13:33:25 +02002271 /* DRAM command RD */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002272 write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel,
2273 0x1f105);
2274 write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel,
2275 0x4041003);
2276 write32(DEFAULT_MCHBAR + 0x4204 + 0x400 * channel,
2277 (slotrank << 24) | 0);
2278 write32(DEFAULT_MCHBAR + 0x4214 + 0x400 * channel, 0);
2279
Patrick Rudolph371d2912015-10-09 13:33:25 +02002280 /* DRAM command RD */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002281 write32(DEFAULT_MCHBAR + 0x4228 + 0x400 * channel,
2282 0x1f105);
2283 write32(DEFAULT_MCHBAR + 0x4238 + 0x400 * channel,
2284 0x1001 | ((ctrl->CAS + 8) << 16));
2285 write32(DEFAULT_MCHBAR + 0x4208 + 0x400 * channel,
2286 (slotrank << 24) | 0x60000);
2287 write32(DEFAULT_MCHBAR + 0x4218 + 0x400 * channel, 0);
2288
Patrick Rudolph371d2912015-10-09 13:33:25 +02002289 /* DRAM command MRS
2290 * write MR3 MPR disable */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002291 write32(DEFAULT_MCHBAR + 0x422c + 0x400 * channel,
2292 0x1f000);
2293 write32(DEFAULT_MCHBAR + 0x423c + 0x400 * channel,
2294 0xc01 | (ctrl->tMOD << 16));
2295
2296 write32(DEFAULT_MCHBAR + 0x420c + 0x400 * channel,
2297 (slotrank << 24) | 0x360000);
2298 write32(DEFAULT_MCHBAR + 0x421c + 0x400 * channel, 0);
2299
2300 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel,
2301 0xc0001);
2302 wait_428c(channel);
2303 }
2304 }
2305}
2306
2307static void test_timB(ramctr_timing * ctrl, int channel, int slotrank)
2308{
Patrick Rudolph371d2912015-10-09 13:33:25 +02002309 /* enable DQs on this slotrank */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002310 write_mrreg(ctrl, channel, slotrank, 1,
2311 0x80 | make_mr1(ctrl, slotrank));
2312
2313 wait_428c(channel);
Patrick Rudolph371d2912015-10-09 13:33:25 +02002314 /* DRAM command NOP */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002315 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x1f207);
2316 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
2317 0x8000c01 | ((ctrl->CWL + ctrl->tWLO) << 16));
2318 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
2319 8 | (slotrank << 24));
2320 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0);
2321
Patrick Rudolph371d2912015-10-09 13:33:25 +02002322 /* DRAM command NOP */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002323 write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel, 0x1f107);
2324 write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel,
2325 0x4000c01 | ((ctrl->CAS + 38) << 16));
2326 write32(DEFAULT_MCHBAR + 0x4204 + 0x400 * channel,
2327 (slotrank << 24) | 4);
2328 write32(DEFAULT_MCHBAR + 0x4214 + 0x400 * channel, 0);
2329
2330 write32(DEFAULT_MCHBAR + 0x400 * channel + 0x4284, 0x40001);
2331 wait_428c(channel);
2332
Patrick Rudolph371d2912015-10-09 13:33:25 +02002333 /* disable DQs on this slotrank */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002334 write_mrreg(ctrl, channel, slotrank, 1,
2335 0x1080 | make_mr1(ctrl, slotrank));
2336}
2337
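/* Write-leveling (timB) search: sweep timB over its 128-step range, record
 * for each setting whether the DRAM fed back a '1' (CLK sampled high on the
 * DQS edge), and place timB at the start of the longest such region, or at
 * 0 when that region wraps around (see the comment below). */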
2338static void discover_timB(ramctr_timing * ctrl, int channel, int slotrank)
2339{
2340 int timB;
2341 int statistics[NUM_LANES][128];
2342 int lane;
2343
2344 write32(DEFAULT_MCHBAR + 0x3400, 0x108052 | (slotrank << 2));
2345
2346 for (timB = 0; timB < 128; timB++) {
2347 FOR_ALL_LANES {
2348 ctrl->timings[channel][slotrank].lanes[lane].timB = timB;
2349 }
2350 program_timings(ctrl, channel);
2351
2352 test_timB(ctrl, channel, slotrank);
2353
2354 FOR_ALL_LANES {
2355 statistics[lane][timB] =
2356 !((read32
2357 (DEFAULT_MCHBAR + lane_registers[lane] +
2358 channel * 0x100 + 4 + ((timB / 32) & 1) * 4)
2359 >> (timB % 32)) & 1);
2360 printram("Bstat: %d, %d, %d, %x, %x\n",
2361 channel, slotrank, lane, timB,
2362 statistics[lane][timB]);
2363 }
2364 }
2365 FOR_ALL_LANES {
2366 struct run rn = get_longest_zero_run(statistics[lane], 128);
Patrick Rudolph9f1fbb92015-08-17 19:24:12 +02002367 if (rn.start < rn.middle) {
2368 ctrl->timings[channel][slotrank].lanes[lane].timB = rn.start;
2369 } else {
2370 /* In this case statistics[lane][0x7f] and statistics[lane][0] are
2371 * both zero.
2372 * Prefer a smaller value over rn.start to prevent failures in
2373 * the following write tests.
2374 */
2375 ctrl->timings[channel][slotrank].lanes[lane].timB = 0;
2376 }
2377
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002378 if (rn.all)
2379 die("timB discovery failed");
2380 printram("Bval: %d, %d, %d, %x\n", channel, slotrank,
2381 lane, ctrl->timings[channel][slotrank].lanes[lane].timB);
2382 }
2383}
2384
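/* Derive a coarse (whole clock) timB correction from the 64-bit feedback
 * value collected in adjust_high_timB(). An all-ones value needs no
 * adjustment; otherwise the position of the first deviation determines a
 * positive or negative adjustment in clocks, which the caller scales by
 * 64 timB steps per clock. */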
2385static int get_timB_high_adjust(u64 val)
2386{
2387 int i;
2388
2389 /* good */
2390 if (val == 0xffffffffffffffffLL)
2391 return 0;
2392
2393 if (val >= 0xf000000000000000LL) {
2394 /* needs negative adjustment */
2395 for (i = 0; i < 8; i++)
2396 if (val << (8 * (7 - i) + 4))
2397 return -i;
2398 } else {
2399 /* needs positive adjustment */
2400 for (i = 0; i < 8; i++)
2401 if (val >> (8 * (7 - i) + 4))
2402 return i;
2403 }
2404 return 8;
2405}
2406
2407static void adjust_high_timB(ramctr_timing * ctrl)
2408{
2409 int channel, slotrank, lane, old;
2410 write32(DEFAULT_MCHBAR + 0x3400, 0x200);
2411 FOR_ALL_POPULATED_CHANNELS {
2412 fill_pattern1(ctrl, channel);
2413 write32(DEFAULT_MCHBAR + 0x4288 + (channel << 10), 1);
2414 }
2415 FOR_ALL_POPULATED_CHANNELS FOR_ALL_POPULATED_RANKS {
2416
2417 write32(DEFAULT_MCHBAR + 0x4288 + 0x400 * channel, 0x10001);
2418
2419 wait_428c(channel);
2420
Patrick Rudolph371d2912015-10-09 13:33:25 +02002421 /* DRAM command ACT */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002422 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x1f006);
2423 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
2424 0xc01 | (ctrl->tRCD << 16));
2425 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
2426 (slotrank << 24) | 0x60000);
2427 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0);
2428
Patrick Rudolph371d2912015-10-09 13:33:25 +02002429 /* DRAM command NOP */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002430 write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel, 0x1f207);
2431 write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel, 0x8040c01);
2432 write32(DEFAULT_MCHBAR + 0x4204 + 0x400 * channel,
2433 (slotrank << 24) | 0x8);
2434 write32(DEFAULT_MCHBAR + 0x4214 + 0x400 * channel, 0x3e0);
2435
Patrick Rudolph371d2912015-10-09 13:33:25 +02002436 /* DRAM command WR */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002437 write32(DEFAULT_MCHBAR + 0x4228 + 0x400 * channel, 0x1f201);
2438 write32(DEFAULT_MCHBAR + 0x4238 + 0x400 * channel, 0x8041003);
2439 write32(DEFAULT_MCHBAR + 0x4208 + 0x400 * channel,
2440 (slotrank << 24));
2441 write32(DEFAULT_MCHBAR + 0x4218 + 0x400 * channel, 0x3e2);
2442
Patrick Rudolph371d2912015-10-09 13:33:25 +02002443 /* DRAM command NOP */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002444 write32(DEFAULT_MCHBAR + 0x422c + 0x400 * channel, 0x1f207);
2445 write32(DEFAULT_MCHBAR + 0x423c + 0x400 * channel,
2446 0x8000c01 | ((ctrl->CWL + ctrl->tWTR + 5) << 16));
2447 write32(DEFAULT_MCHBAR + 0x420c + 0x400 * channel,
2448 (slotrank << 24) | 0x8);
2449 write32(DEFAULT_MCHBAR + 0x421c + 0x400 * channel, 0x3e0);
2450
2451 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 0xc0001);
2452
2453 wait_428c(channel);
2454
Patrick Rudolph371d2912015-10-09 13:33:25 +02002455 /* DRAM command PREA */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002456 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x1f002);
2457 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
2458 0xc01 | ((ctrl->tRP) << 16));
2459 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
2460 (slotrank << 24) | 0x60400);
2461 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0x240);
2462
Patrick Rudolph371d2912015-10-09 13:33:25 +02002463 /* DRAM command ACT */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002464 write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel, 0x1f006);
2465 write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel,
2466 0xc01 | ((ctrl->tRCD) << 16));
2467 write32(DEFAULT_MCHBAR + 0x4204 + 0x400 * channel,
2468 (slotrank << 24) | 0x60000);
2469 write32(DEFAULT_MCHBAR + 0x4214 + 0x400 * channel, 0);
2470
Patrick Rudolph371d2912015-10-09 13:33:25 +02002471 /* DRAM command RD */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002472 write32(DEFAULT_MCHBAR + 0x4228 + 0x400 * channel, 0x3f105);
2473 write32(DEFAULT_MCHBAR + 0x4238 + 0x400 * channel,
2474 0x4000c01 |
2475 ((ctrl->tRP +
2476 ctrl->timings[channel][slotrank].val_4024 +
2477 ctrl->timings[channel][slotrank].val_4028) << 16));
2478 write32(DEFAULT_MCHBAR + 0x4208 + 0x400 * channel,
2479 (slotrank << 24) | 0x60008);
2480 write32(DEFAULT_MCHBAR + 0x4218 + 0x400 * channel, 0);
2481
2482 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 0x80001);
2483 wait_428c(channel);
2484 FOR_ALL_LANES {
2485 u64 res =
2486 read32(DEFAULT_MCHBAR + lane_registers[lane] +
2487 0x100 * channel + 4);
2488 res |=
2489 ((u64) read32(DEFAULT_MCHBAR + lane_registers[lane] +
2490 0x100 * channel + 8)) << 32;
2491 old = ctrl->timings[channel][slotrank].lanes[lane].timB;
2492 ctrl->timings[channel][slotrank].lanes[lane].timB +=
2493 get_timB_high_adjust(res) * 64;
2494
2495 printk(BIOS_DEBUG, "High adjust %d:%016llx\n", lane, res);
2496 printram("Bval+: %d, %d, %d, %x -> %x\n", channel,
2497 slotrank, lane, old,
2498 ctrl->timings[channel][slotrank].lanes[lane].
2499 timB);
2500 }
2501 }
2502 write32(DEFAULT_MCHBAR + 0x3400, 0);
2503}
2504
2505static void write_op(ramctr_timing * ctrl, int channel)
2506{
2507 int slotrank;
2508
2509 wait_428c(channel);
2510
2511 /* choose an existing rank. */
2512 slotrank = !(ctrl->rankmap[channel] & 1) ? 2 : 0;
2513
Patrick Rudolph371d2912015-10-09 13:33:25 +02002514 /* DRAM command ACT */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002515 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x0f003);
2516 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel, 0x41001);
2517
2518 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
2519 (slotrank << 24) | 0x60000);
2520
2521 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0x3e0);
2522
2523 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 1);
2524 wait_428c(channel);
2525}
2526
Patrick Rudolph371d2912015-10-09 13:33:25 +02002527/* Compensate the skew between CMD/ADDR/CLK and DQ/DQS lanes.
2528 * DDR3 adopted the fly-by topology: the data and strobe signals reach
2529 * the chips at different times with respect to the command, address and
2530 * clock signals.
2531 * By delaying either all DQ/DQS or all CMD/ADDR/CLK signals, a full phase
2532 * shift can be introduced.
2533 * It is assumed that the CLK/ADDR/CMD signals have the same routing delay.
2534 *
2535 * To find the required phase shift, the DRAM is placed in "write leveling" mode.
2536 * In this mode the DRAM chip samples the CLK on every DQS edge and feeds back the
2537 * sampled value on the data lanes (DQ).
2538 */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002539static void write_training(ramctr_timing * ctrl)
2540{
2541 int channel, slotrank, lane;
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002542
2543 FOR_ALL_POPULATED_CHANNELS
2544 write32(DEFAULT_MCHBAR + 0x4008 + 0x400 * channel,
2545 read32(DEFAULT_MCHBAR + 0x4008 +
2546 0x400 * channel) | 0x8000000);
2547
2548 FOR_ALL_POPULATED_CHANNELS {
2549 write_op(ctrl, channel);
2550 write32(DEFAULT_MCHBAR + 0x4020 + 0x400 * channel,
2551 read32(DEFAULT_MCHBAR + 0x4020 +
2552 0x400 * channel) | 0x200000);
2553 }
Patrick Rudolph371d2912015-10-09 13:33:25 +02002554
2555 /* refresh disable */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002556 write32(DEFAULT_MCHBAR + 0x5030, read32(DEFAULT_MCHBAR + 0x5030) & ~8);
2557 FOR_ALL_POPULATED_CHANNELS {
2558 write_op(ctrl, channel);
2559 }
2560
Patrick Rudolph371d2912015-10-09 13:33:25 +02002561 /* enable write leveling on all ranks
2562 * disable all DQ outputs
2563 * only NOP is allowed in this mode */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002564 FOR_ALL_CHANNELS
2565 FOR_ALL_POPULATED_RANKS
2566 write_mrreg(ctrl, channel, slotrank, 1,
2567 make_mr1(ctrl, slotrank) | 0x1080);
2568
2569 write32(DEFAULT_MCHBAR + 0x3400, 0x108052);
2570
Patrick Rudolph9b515682015-10-09 13:43:51 +02002571 toggle_io_reset();
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002572
Patrick Rudolph371d2912015-10-09 13:33:25 +02002573 /* find a preliminary timB value; its coarse part gets corrected later in adjust_high_timB() */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002574 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS
2575 discover_timB(ctrl, channel, slotrank);
2576
Patrick Rudolph371d2912015-10-09 13:33:25 +02002577 /* disable write leveling on all ranks */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002578 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS
2579 write_mrreg(ctrl, channel,
2580 slotrank, 1, make_mr1(ctrl, slotrank));
2581
2582 write32(DEFAULT_MCHBAR + 0x3400, 0);
2583
2584 FOR_ALL_POPULATED_CHANNELS
2585 wait_428c(channel);
2586
Patrick Rudolph371d2912015-10-09 13:33:25 +02002587 /* refresh enable */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002588 write32(DEFAULT_MCHBAR + 0x5030, read32(DEFAULT_MCHBAR + 0x5030) | 8);
2589
2590 FOR_ALL_POPULATED_CHANNELS {
2591 write32(DEFAULT_MCHBAR + 0x4020 + 0x400 * channel,
2592 ~0x00200000 & read32(DEFAULT_MCHBAR + 0x4020 +
2593 0x400 * channel));
2594 read32(DEFAULT_MCHBAR + 0x428c + 0x400 * channel);
2595 wait_428c(channel);
2596
Patrick Rudolph371d2912015-10-09 13:33:25 +02002597 /* DRAM command ZQCS */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002598 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x0f003);
2599 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel, 0x659001);
2600 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel, 0x60000);
2601 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0x3e0);
2602
2603 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 1);
2604 wait_428c(channel);
2605 }
2606
Patrick Rudolph9b515682015-10-09 13:43:51 +02002607 toggle_io_reset();
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002608
2609 printram("CPE\n");
2610 precharge(ctrl);
2611 printram("CPF\n");
2612
2613 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS FOR_ALL_LANES {
2614 read32(DEFAULT_MCHBAR + 0x4080 + 0x400 * channel + 4 * lane);
2615 write32(DEFAULT_MCHBAR + 0x4080 + 0x400 * channel + 4 * lane,
2616 0);
2617 }
2618
2619 FOR_ALL_POPULATED_CHANNELS {
2620 fill_pattern0(ctrl, channel, 0xaaaaaaaa, 0x55555555);
2621 write32(DEFAULT_MCHBAR + 0x4288 + (channel << 10), 0);
2622 }
2623
2624 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS
2625 discover_timC(ctrl, channel, slotrank);
2626
2627 FOR_ALL_POPULATED_CHANNELS
2628 program_timings(ctrl, channel);
2629
Patrick Rudolph371d2912015-10-09 13:33:25 +02002630 /* measure and adjust timB timings */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002631 adjust_high_timB(ctrl);
2632
2633 FOR_ALL_POPULATED_CHANNELS
2634 program_timings(ctrl, channel);
2635
2636 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS FOR_ALL_LANES {
2637 read32(DEFAULT_MCHBAR + 0x4080 + 0x400 * channel + 4 * lane);
2638 write32(DEFAULT_MCHBAR + 0x4080 + 0x400 * channel + 4 * lane,
2639 0);
2640 }
2641}
2642
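/* Stress-test the command timing on one slotrank: for several timC offsets
 * write a pattern, read it back, and collect the per-lane error counters.
 * Returns nonzero if at least one lane failed for every tested offset. */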
2643static int test_320c(ramctr_timing * ctrl, int channel, int slotrank)
2644{
2645 struct ram_rank_timings saved_rt = ctrl->timings[channel][slotrank];
2646 int timC_delta;
2647 int lanes_ok = 0;
2648 int ctr = 0;
2649 int lane;
2650
2651 for (timC_delta = -5; timC_delta <= 5; timC_delta++) {
2652 FOR_ALL_LANES {
2653 ctrl->timings[channel][slotrank].lanes[lane].timC =
2654 saved_rt.lanes[lane].timC + timC_delta;
2655 }
2656 program_timings(ctrl, channel);
2657 FOR_ALL_LANES {
2658 write32(DEFAULT_MCHBAR + 4 * lane + 0x4f40, 0);
2659 }
2660
2661 write32(DEFAULT_MCHBAR + 0x4288 + 0x400 * channel, 0x1f);
2662
2663 wait_428c(channel);
Patrick Rudolph371d2912015-10-09 13:33:25 +02002664 /* DRAM command ACT */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002665 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x1f006);
2666 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
2667 ((max(ctrl->tRRD, (ctrl->tFAW >> 2) + 1)) << 10)
2668 | 8 | (ctrl->tRCD << 16));
2669
2670 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
2671 (slotrank << 24) | ctr | 0x60000);
2672
2673 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0x244);
Patrick Rudolph371d2912015-10-09 13:33:25 +02002674 /* DRAM command WR */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002675 write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel, 0x1f201);
2676 write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel,
2677 0x8001020 | ((ctrl->CWL + ctrl->tWTR + 8) << 16));
2678 write32(DEFAULT_MCHBAR + 0x4204 + 0x400 * channel,
2679 (slotrank << 24));
2680 write32(DEFAULT_MCHBAR + 0x4244 + 0x400 * channel, 0x389abcd);
2681 write32(DEFAULT_MCHBAR + 0x4214 + 0x400 * channel, 0x20e42);
2682
Patrick Rudolph371d2912015-10-09 13:33:25 +02002683 /* DRAM command RD */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002684 write32(DEFAULT_MCHBAR + 0x4228 + 0x400 * channel, 0x1f105);
2685 write32(DEFAULT_MCHBAR + 0x4238 + 0x400 * channel,
2686 0x4001020 | (max(ctrl->tRTP, 8) << 16));
2687 write32(DEFAULT_MCHBAR + 0x4208 + 0x400 * channel,
2688 (slotrank << 24));
2689 write32(DEFAULT_MCHBAR + 0x4248 + 0x400 * channel, 0x389abcd);
2690 write32(DEFAULT_MCHBAR + 0x4218 + 0x400 * channel, 0x20e42);
2691
Patrick Rudolph371d2912015-10-09 13:33:25 +02002692 /* DRAM command PRE */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002693 write32(DEFAULT_MCHBAR + 0x422c + 0x400 * channel, 0x1f002);
2694 write32(DEFAULT_MCHBAR + 0x423c + 0x400 * channel, 0xf1001);
2695 write32(DEFAULT_MCHBAR + 0x420c + 0x400 * channel,
2696 (slotrank << 24) | 0x60400);
2697 write32(DEFAULT_MCHBAR + 0x421c + 0x400 * channel, 0x240);
2698
2699 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 0xc0001);
2700 wait_428c(channel);
2701 FOR_ALL_LANES {
2702 u32 r32 =
2703 read32(DEFAULT_MCHBAR + 0x4340 + 4 * lane +
2704 0x400 * channel);
2705
2706 if (r32 == 0)
2707 lanes_ok |= 1 << lane;
2708 }
2709 ctr++;
2710 if (lanes_ok == ((1 << NUM_LANES) - 1))
2711 break;
2712 }
2713
2714 ctrl->timings[channel][slotrank] = saved_rt;
2715
2716 printram("3lanes: %x\n", lanes_ok);
2717 return lanes_ok != ((1 << NUM_LANES) - 1);
2718}
2719
2720#include "raminit_patterns.h"
2721
2722static void fill_pattern5(ramctr_timing * ctrl, int channel, int patno)
2723{
2724 unsigned i, j;
2725 unsigned channel_offset =
2726 get_precedening_channels(ctrl, channel) * 0x40;
2727 unsigned channel_step = 0x40 * num_of_channels(ctrl);
2728
2729 if (patno) {
2730 u8 base8 = 0x80 >> ((patno - 1) % 8);
2731 u32 base = base8 | (base8 << 8) | (base8 << 16) | (base8 << 24);
2732 for (i = 0; i < 32; i++) {
2733 for (j = 0; j < 16; j++) {
2734 u32 val = use_base[patno - 1][i] & (1 << (j / 2)) ? base : 0;
2735 if (invert[patno - 1][i] & (1 << (j / 2)))
2736 val = ~val;
2737 write32((void *)(0x04000000 + channel_offset + i * channel_step +
2738 j * 4), val);
2739 }
2740 }
2741
2742 } else {
2743 for (i = 0; i < sizeof(pattern) / sizeof(pattern[0]); i++) {
2744 for (j = 0; j < 16; j++)
2745 write32((void *)(0x04000000 + channel_offset + i * channel_step +
2746 j * 4), pattern[i][j]);
2747 }
2748 sfence();
2749 }
2750}
2751
2752static void reprogram_320c(ramctr_timing * ctrl)
2753{
2754 int channel, slotrank;
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002755
2756 FOR_ALL_POPULATED_CHANNELS {
2757 wait_428c(channel);
2758
2759 /* choose an existing rank. */
2760 slotrank = !(ctrl->rankmap[channel] & 1) ? 2 : 0;
2761
Patrick Rudolph371d2912015-10-09 13:33:25 +02002762 /* DRAM command ZQCS */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002763 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x0f003);
2764 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel, 0x41001);
2765
2766 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
2767 (slotrank << 24) | 0x60000);
2768
2769 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0x3e0);
2770
2771 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 1);
2772 wait_428c(channel);
2773 write32(DEFAULT_MCHBAR + 0x4020 + 0x400 * channel,
2774 read32(DEFAULT_MCHBAR + 0x4020 +
2775 0x400 * channel) | 0x200000);
2776 }
Patrick Rudolph371d2912015-10-09 13:33:25 +02002777
2778 /* refresh disable */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002779 write32(DEFAULT_MCHBAR + 0x5030, read32(DEFAULT_MCHBAR + 0x5030) & ~8);
2780 FOR_ALL_POPULATED_CHANNELS {
2781 wait_428c(channel);
2782
2783 /* choose an existing rank. */
2784 slotrank = !(ctrl->rankmap[channel] & 1) ? 2 : 0;
2785
Patrick Rudolph371d2912015-10-09 13:33:25 +02002786 /* DRAM command ZQCS */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002787 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x0f003);
2788 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel, 0x41001);
2789
2790 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
2791 (slotrank << 24) | 0x60000);
2792
2793 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0x3e0);
2794
2795 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 1);
2796 wait_428c(channel);
2797 }
2798
2799 /* jedec reset */
2800 dram_jedecreset(ctrl);
2801 /* mrs commands. */
2802 dram_mrscommands(ctrl);
2803
Patrick Rudolph9b515682015-10-09 13:43:51 +02002804 toggle_io_reset();
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002805}
2806
2807#define MIN_C320C_LEN 13
2808
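/* Try one command-rate setting (cmd_stretch): program it on all populated
 * channels, compensate val_4024 for the changed rate, then sweep the command
 * phase (val_320c) from -127 to 127 and keep the middle of the longest
 * passing window. Returns 0 (restoring the saved timings) if any rank has no
 * window of at least MIN_C320C_LEN passing settings, 1 otherwise. */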
2809static int try_cmd_stretch(ramctr_timing * ctrl, int cmd_stretch)
2810{
2811 struct ram_rank_timings saved_timings[NUM_CHANNELS][NUM_SLOTRANKS];
2812 int channel, slotrank;
2813 int c320c;
2814 int stat[NUM_SLOTRANKS][256];
2815
2816 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS {
2817 saved_timings[channel][slotrank] = ctrl->timings[channel][slotrank];
2818 }
2819
2820 FOR_ALL_POPULATED_CHANNELS {
2821 ctrl->cmd_stretch[channel] = cmd_stretch;
2822 }
2823
2824 FOR_ALL_POPULATED_CHANNELS
2825 MCHBAR32(0x4004 + 0x400 * channel) =
2826 ctrl->tRRD
2827 | (ctrl->tRTP << 4)
2828 | (ctrl->tCKE << 8)
2829 | (ctrl->tWTR << 12)
2830 | (ctrl->tFAW << 16)
2831 | (ctrl->tWR << 24)
2832 | (ctrl->cmd_stretch[channel] << 30);
2833
2834
2835 FOR_ALL_CHANNELS {
2836 int delta = 0;
2837 if (ctrl->cmd_stretch[channel] == 2)
2838 delta = 2;
2839 else if (ctrl->cmd_stretch[channel] == 0)
2840 delta = 4;
2841
2842 FOR_ALL_POPULATED_RANKS {
2843 ctrl->timings[channel][slotrank].val_4024 -= delta;
2844 }
2845 }
2846
2847 FOR_ALL_POPULATED_CHANNELS {
2848 for (c320c = -127; c320c <= 127; c320c++) {
2849 FOR_ALL_POPULATED_RANKS {
2850 ctrl->timings[channel][slotrank].val_320c = c320c;
2851 }
2852 program_timings(ctrl, channel);
2853 reprogram_320c(ctrl);
2854 FOR_ALL_POPULATED_RANKS {
2855 stat[slotrank][c320c + 127] =
2856 test_320c(ctrl, channel, slotrank);
2857 printram("3stat: %d, %d, %d: %d\n",
2858 channel, slotrank, c320c,
2859 stat[slotrank][c320c + 127]);
2860 }
2861 }
2862 FOR_ALL_POPULATED_RANKS {
2863 struct run rn =
2864 get_longest_zero_run(stat[slotrank], 255);
2865 ctrl->timings[channel][slotrank].val_320c =
2866 rn.middle - 127;
2867 printram("3val: %d, %d: %d\n", channel,
2868 slotrank,
2869 ctrl->timings[channel][slotrank].val_320c);
2870 if (rn.all || rn.length < MIN_C320C_LEN) {
2871 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS {
2872 ctrl->timings[channel][slotrank] = saved_timings[channel][slotrank];
2873 }
2874 return 0;
2875 }
2876 }
2877 }
2878 return 1;
2879}
2880
Patrick Rudolph371d2912015-10-09 13:33:25 +02002881/* Adjust the CMD phase shift and try multiple command rates.
2882 * A command rate of 2T allows twice the time for address and
2883 * command decode. */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002884static void command_training(ramctr_timing * ctrl)
2885{
2886 int channel;
2887
2888 FOR_ALL_POPULATED_CHANNELS {
2889 fill_pattern5(ctrl, channel, 0);
2890 write32(DEFAULT_MCHBAR + 0x4288 + 0x400 * channel, 0x1f);
2891 }
2892
2893 /* try command rate 1T and 2T */
2894 if (!try_cmd_stretch(ctrl, 0) && !try_cmd_stretch(ctrl, 2))
2895 die("c320c discovery failed");
2896
2897 FOR_ALL_POPULATED_CHANNELS {
2898 program_timings(ctrl, channel);
2899 }
2900
2901 reprogram_320c(ctrl);
2902}
2903
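/* For one slotrank, sweep the DQS rising/falling edge delay over its full
 * range while reading back the MPR pattern, and report the middle of the
 * longest error-free window for every lane in edges[]. */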
2904static void discover_edges_real(ramctr_timing * ctrl, int channel, int slotrank,
2905 int *edges)
2906{
2907 int edge;
2908 int statistics[NUM_LANES][MAX_EDGE_TIMING + 1];
2909 int lane;
2910
2911 for (edge = 0; edge <= MAX_EDGE_TIMING; edge++) {
2912 FOR_ALL_LANES {
2913 ctrl->timings[channel][slotrank].lanes[lane].rising =
2914 edge;
2915 ctrl->timings[channel][slotrank].lanes[lane].falling =
2916 edge;
2917 }
2918 printram("edge %02x\n", edge);
2919 program_timings(ctrl, channel);
2920
2921 FOR_ALL_LANES {
2922 write32(DEFAULT_MCHBAR + 0x4340 + 0x400 * channel +
2923 4 * lane, 0);
2924 read32(DEFAULT_MCHBAR + 0x400 * channel + 4 * lane +
2925 0x4140);
2926 }
2927
2928 wait_428c(channel);
Patrick Rudolph371d2912015-10-09 13:33:25 +02002929 /* DRAM command MRS
2930 * write MR3 MPR enable
2931 * in this mode only RD and RDA are allowed
2932 * all reads return a predefined pattern */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002933 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x1f000);
2934 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
2935 (0xc01 | (ctrl->tMOD << 16)));
2936 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
2937 (slotrank << 24) | 0x360004);
2938 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0);
2939
Patrick Rudolph371d2912015-10-09 13:33:25 +02002940 /* DRAM command RD */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002941 write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel, 0x1f105);
2942 write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel, 0x40411f4);
2943 write32(DEFAULT_MCHBAR + 0x4204 + 0x400 * channel,
2944 (slotrank << 24));
2945 write32(DEFAULT_MCHBAR + 0x4214 + 0x400 * channel, 0);
2946
Patrick Rudolph371d2912015-10-09 13:33:25 +02002947 /* DRAM command RD */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002948 write32(DEFAULT_MCHBAR + 0x4228 + 0x400 * channel, 0x1f105);
2949 write32(DEFAULT_MCHBAR + 0x4238 + 0x400 * channel,
2950 0x1001 | ((ctrl->CAS + 8) << 16));
2951 write32(DEFAULT_MCHBAR + 0x4208 + 0x400 * channel,
2952 (slotrank << 24) | 0x60000);
2953 write32(DEFAULT_MCHBAR + 0x4218 + 0x400 * channel, 0);
2954
Patrick Rudolph371d2912015-10-09 13:33:25 +02002955 /* DRAM command MRS
2956 * MR3 disable MPR */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002957 write32(DEFAULT_MCHBAR + 0x422c + 0x400 * channel, 0x1f000);
2958 write32(DEFAULT_MCHBAR + 0x423c + 0x400 * channel,
2959 (0xc01 | (ctrl->tMOD << 16)));
2960 write32(DEFAULT_MCHBAR + 0x420c + 0x400 * channel,
2961 (slotrank << 24) | 0x360000);
2962 write32(DEFAULT_MCHBAR + 0x421c + 0x400 * channel, 0);
2963
2964 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel, 0xc0001);
2965
2966 wait_428c(channel);
2967
2968 FOR_ALL_LANES {
2969 statistics[lane][edge] =
2970 read32(DEFAULT_MCHBAR + 0x4340 + 0x400 * channel +
2971 lane * 4);
2972 }
2973 }
2974 FOR_ALL_LANES {
2975 struct run rn =
2976 get_longest_zero_run(statistics[lane], MAX_EDGE_TIMING + 1);
2977 edges[lane] = rn.middle;
2978 if (rn.all)
2979 die("edge discovery failed");
2980 printram("eval %d, %d, %d, %02x\n", channel, slotrank,
2981 lane, edges[lane]);
2982 }
2983}
2984
2985static void discover_edges(ramctr_timing * ctrl)
2986{
2987 int falling_edges[NUM_CHANNELS][NUM_SLOTRANKS][NUM_LANES];
2988 int rising_edges[NUM_CHANNELS][NUM_SLOTRANKS][NUM_LANES];
2989 int channel, slotrank, lane;
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002990
2991 write32(DEFAULT_MCHBAR + 0x3400, 0);
2992
Patrick Rudolph9b515682015-10-09 13:43:51 +02002993 toggle_io_reset();
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07002994
2995 FOR_ALL_POPULATED_CHANNELS FOR_ALL_LANES {
2996 write32(DEFAULT_MCHBAR + 4 * lane +
2997 0x400 * channel + 0x4080, 0);
2998 }
2999
3000 FOR_ALL_POPULATED_CHANNELS {
3001 fill_pattern0(ctrl, channel, 0, 0);
3002 write32(DEFAULT_MCHBAR + 0x4288 + (channel << 10), 0);
3003 FOR_ALL_LANES {
3004 read32(DEFAULT_MCHBAR + 0x400 * channel +
3005 lane * 4 + 0x4140);
3006 }
3007
3008 FOR_ALL_POPULATED_RANKS FOR_ALL_LANES {
3009 ctrl->timings[channel][slotrank].lanes[lane].falling =
3010 16;
3011 ctrl->timings[channel][slotrank].lanes[lane].rising =
3012 16;
3013 }
3014
3015 program_timings(ctrl, channel);
3016
3017 FOR_ALL_POPULATED_RANKS {
3018 wait_428c(channel);
3019
Patrick Rudolph371d2912015-10-09 13:33:25 +02003020 /* DRAM command MRS
3021 * MR3 enable MPR
3022 * write MR3 MPR enable
3023 * in this mode only RD and RDA are allowed
3024 * all reads return a predefined pattern */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003025 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel,
3026 0x1f000);
3027 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
3028 0xc01 | (ctrl->tMOD << 16));
3029 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
3030 (slotrank << 24) | 0x360004);
3031 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0);
3032
Patrick Rudolph371d2912015-10-09 13:33:25 +02003033 /* DRAM command RD */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003034 write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel,
3035 0x1f105);
3036 write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel,
3037 0x4041003);
3038 write32(DEFAULT_MCHBAR + 0x4204 + 0x400 * channel,
3039 (slotrank << 24) | 0);
3040 write32(DEFAULT_MCHBAR + 0x4214 + 0x400 * channel, 0);
3041
Patrick Rudolph371d2912015-10-09 13:33:25 +02003042 /* DRAM command RD */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003043 write32(DEFAULT_MCHBAR + 0x4228 + 0x400 * channel,
3044 0x1f105);
3045 write32(DEFAULT_MCHBAR + 0x4238 + 0x400 * channel,
3046 0x1001 | ((ctrl->CAS + 8) << 16));
3047 write32(DEFAULT_MCHBAR + 0x4208 + 0x400 * channel,
3048 (slotrank << 24) | 0x60000);
3049 write32(DEFAULT_MCHBAR + 0x4218 + 0x400 * channel, 0);
3050
Patrick Rudolph371d2912015-10-09 13:33:25 +02003051 /* DRAM command MRS
3052 * MR3 disable MPR */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003053 write32(DEFAULT_MCHBAR + 0x422c + 0x400 * channel,
3054 0x1f000);
3055 write32(DEFAULT_MCHBAR + 0x423c + 0x400 * channel,
3056 0xc01 | (ctrl->tMOD << 16));
3057 write32(DEFAULT_MCHBAR + 0x420c + 0x400 * channel,
3058 (slotrank << 24) | 0x360000);
3059 write32(DEFAULT_MCHBAR + 0x421c + 0x400 * channel, 0);
3060 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel,
3061 0xc0001);
3062
3063 wait_428c(channel);
3064 }
3065
Patrick Rudolph371d2912015-10-09 13:33:25 +02003066 /* XXX: check any measured value ? */
3067
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003068 FOR_ALL_POPULATED_RANKS FOR_ALL_LANES {
3069 ctrl->timings[channel][slotrank].lanes[lane].falling =
3070 48;
3071 ctrl->timings[channel][slotrank].lanes[lane].rising =
3072 48;
3073 }
3074
3075 program_timings(ctrl, channel);
3076
3077 FOR_ALL_POPULATED_RANKS {
3078 wait_428c(channel);
3079
Patrick Rudolph371d2912015-10-09 13:33:25 +02003080 /* DRAM command MRS
3081 * MR3 enable MPR
3082 * write MR3 MPR enable
3083 * in this mode only RD and RDA are allowed
3084 * all reads return a predefined pattern */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003085 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel,
3086 0x1f000);
3087 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
3088 0xc01 | (ctrl->tMOD << 16));
3089 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
3090 (slotrank << 24) | 0x360004);
3091 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0);
3092
Patrick Rudolph371d2912015-10-09 13:33:25 +02003093 /* DRAM command RD */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003094 write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel,
3095 0x1f105);
3096 write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel,
3097 0x4041003);
3098 write32(DEFAULT_MCHBAR + 0x4204 + 0x400 * channel,
3099 (slotrank << 24) | 0);
3100 write32(DEFAULT_MCHBAR + 0x4214 + 0x400 * channel, 0);
3101
Patrick Rudolph371d2912015-10-09 13:33:25 +02003102 /* DRAM command RD */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003103 write32(DEFAULT_MCHBAR + 0x4228 + 0x400 * channel,
3104 0x1f105);
3105 write32(DEFAULT_MCHBAR + 0x4238 + 0x400 * channel,
3106 0x1001 | ((ctrl->CAS + 8) << 16));
3107 write32(DEFAULT_MCHBAR + 0x4208 + 0x400 * channel,
3108 (slotrank << 24) | 0x60000);
3109 write32(DEFAULT_MCHBAR + 0x4218 + 0x400 * channel, 0);
3110
Patrick Rudolph371d2912015-10-09 13:33:25 +02003111 /* DRAM command MRS
3112 * MR3 disable MPR */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003113 write32(DEFAULT_MCHBAR + 0x422c + 0x400 * channel,
3114 0x1f000);
3115 write32(DEFAULT_MCHBAR + 0x423c + 0x400 * channel,
3116 0xc01 | (ctrl->tMOD << 16));
3117 write32(DEFAULT_MCHBAR + 0x420c + 0x400 * channel,
3118 (slotrank << 24) | 0x360000);
3119 write32(DEFAULT_MCHBAR + 0x421c + 0x400 * channel, 0);
3120
3121 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel,
3122 0xc0001);
3123 wait_428c(channel);
3124 }
3125
Patrick Rudolph371d2912015-10-09 13:33:25 +02003126 /* XXX: check any measured value ? */
3127
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003128 FOR_ALL_LANES {
3129 write32(DEFAULT_MCHBAR + 0x4080 + 0x400 * channel +
3130 lane * 4,
3131 ~read32(DEFAULT_MCHBAR + 0x4040 +
3132 0x400 * channel + lane * 4) & 0xff);
3133 }
3134
3135 fill_pattern0(ctrl, channel, 0, 0xffffffff);
3136 write32(DEFAULT_MCHBAR + 0x4288 + (channel << 10), 0);
3137 }
3138
3139 /* FIXME: under some conditions (older chipsets?) vendor BIOS sets both edges to the same value. */
3140 write32(DEFAULT_MCHBAR + 0x4eb0, 0x300);
3141
3142 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS {
3143 discover_edges_real(ctrl, channel, slotrank,
3144 falling_edges[channel][slotrank]);
3145 }
3146
3147 write32(DEFAULT_MCHBAR + 0x4eb0, 0x200);
3148
3149 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS {
3150 discover_edges_real(ctrl, channel, slotrank,
3151 rising_edges[channel][slotrank]);
3152 }
3153
3154 write32(DEFAULT_MCHBAR + 0x4eb0, 0);
3155
3156 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS FOR_ALL_LANES {
3157 ctrl->timings[channel][slotrank].lanes[lane].falling =
3158 falling_edges[channel][slotrank][lane];
3159 ctrl->timings[channel][slotrank].lanes[lane].rising =
3160 rising_edges[channel][slotrank][lane];
3161 }
3162
3163 FOR_ALL_POPULATED_CHANNELS {
3164 program_timings(ctrl, channel);
3165 }
3166
3167 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS FOR_ALL_LANES {
3168 write32(DEFAULT_MCHBAR + 0x4080 + 0x400 * channel + 4 * lane,
3169 0);
3170 }
3171}
3172
3173static void discover_edges_write_real(ramctr_timing * ctrl, int channel,
3174 int slotrank, int *edges)
3175{
3176 int edge;
3177 u32 raw_statistics[MAX_EDGE_TIMING + 1];
3178 int statistics[MAX_EDGE_TIMING + 1];
3179 const int reg3000b24[] = { 0, 0xc, 0x2c };
3180 int lane, i;
3181 int lower[NUM_LANES];
3182 int upper[NUM_LANES];
3183 int pat;
3184
3185 FOR_ALL_LANES {
3186 lower[lane] = 0;
3187 upper[lane] = MAX_EDGE_TIMING;
3188 }
3189
3190 for (i = 0; i < 3; i++) {
3191 write32(DEFAULT_MCHBAR + 0x3000 + 0x100 * channel,
3192 reg3000b24[i] << 24);
3193 for (pat = 0; pat < NUM_PATTERNS; pat++) {
3194 fill_pattern5(ctrl, channel, pat);
3195 write32(DEFAULT_MCHBAR + 0x4288 + 0x400 * channel, 0x1f);
3196 printram("patterned\n");
3197 printram("[%x] = 0x%08x\n(%d, %d)\n",
3198 0x3000 + 0x100 * channel, reg3000b24[i] << 24, channel,
3199 slotrank);
3200 for (edge = 0; edge <= MAX_EDGE_TIMING; edge++) {
3201 FOR_ALL_LANES {
3202 ctrl->timings[channel][slotrank].lanes[lane].
3203 rising = edge;
3204 ctrl->timings[channel][slotrank].lanes[lane].
3205 falling = edge;
3206 }
3207 program_timings(ctrl, channel);
3208
3209 FOR_ALL_LANES {
3210 write32(DEFAULT_MCHBAR + 0x4340 +
3211 0x400 * channel + 4 * lane, 0);
3212 read32(DEFAULT_MCHBAR + 0x400 * channel +
3213 4 * lane + 0x4140);
3214 }
3215 wait_428c(channel);
3216
Patrick Rudolph371d2912015-10-09 13:33:25 +02003217 /* DRAM command ACT */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003218 write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel,
3219 0x1f006);
3220 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
3221 0x4 | (ctrl->tRCD << 16)
3222 | (max(ctrl->tRRD, (ctrl->tFAW >> 2) + 1) <<
3223 10));
3224 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
3225 (slotrank << 24) | 0x60000);
3226 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel,
3227 0x240);
3228
Patrick Rudolph371d2912015-10-09 13:33:25 +02003229 /* DRAM command WR */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003230 write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel,
3231 0x1f201);
3232 write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel,
3233 0x8005020 | ((ctrl->tWTR + ctrl->CWL + 8) <<
3234 16));
3235 write32(DEFAULT_MCHBAR + 0x4204 + 0x400 * channel,
3236 (slotrank << 24));
3237 write32(DEFAULT_MCHBAR + 0x4214 + 0x400 * channel,
3238 0x242);
3239
Patrick Rudolph371d2912015-10-09 13:33:25 +02003240 /* DRAM command RD */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003241 write32(DEFAULT_MCHBAR + 0x4228 + 0x400 * channel,
3242 0x1f105);
3243 write32(DEFAULT_MCHBAR + 0x4238 + 0x400 * channel,
3244 0x4005020 | (max(ctrl->tRTP, 8) << 16));
3245 write32(DEFAULT_MCHBAR + 0x4208 + 0x400 * channel,
3246 (slotrank << 24));
3247 write32(DEFAULT_MCHBAR + 0x4218 + 0x400 * channel,
3248 0x242);
3249
Patrick Rudolph371d2912015-10-09 13:33:25 +02003250 /* DRAM command PRE */
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003251 write32(DEFAULT_MCHBAR + 0x422c + 0x400 * channel,
3252 0x1f002);
3253 write32(DEFAULT_MCHBAR + 0x423c + 0x400 * channel,
3254 0xc01 | (ctrl->tRP << 16));
3255 write32(DEFAULT_MCHBAR + 0x420c + 0x400 * channel,
3256 (slotrank << 24) | 0x60400);
3257 write32(DEFAULT_MCHBAR + 0x421c + 0x400 * channel, 0);
3258
3259 write32(DEFAULT_MCHBAR + 0x4284 + 0x400 * channel,
3260 0xc0001);
3261 wait_428c(channel);
3262 FOR_ALL_LANES {
3263 read32(DEFAULT_MCHBAR + 0x4340 +
3264 0x400 * channel + lane * 4);
3265 }
3266
3267 raw_statistics[edge] =
3268 MCHBAR32(0x436c + 0x400 * channel);
3269 }
3270 FOR_ALL_LANES {
3271 struct run rn;
3272 for (edge = 0; edge <= MAX_EDGE_TIMING; edge++)
3273 statistics[edge] =
3274 !!(raw_statistics[edge] & (1 << lane));
3275 rn = get_longest_zero_run(statistics,
3276 MAX_EDGE_TIMING + 1);
3277 printram("edges: %d, %d, %d: 0x%x-0x%x-0x%x, 0x%x-0x%x\n",
3278 channel, slotrank, i, rn.start, rn.middle,
3279 rn.end, rn.start + ctrl->edge_offset[i],
3280 rn.end - ctrl->edge_offset[i]);
3281 lower[lane] =
3282 max(rn.start + ctrl->edge_offset[i], lower[lane]);
3283 upper[lane] =
3284 min(rn.end - ctrl->edge_offset[i], upper[lane]);
3285 edges[lane] = (lower[lane] + upper[lane]) / 2;
Patrick Rudolph9733e282015-08-16 17:06:30 +02003286 if (rn.all || (lower[lane] > upper[lane]))
3287 die("edge write discovery failed");
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003288
3289 }
3290 }
3291 }
3292
3293 write32(DEFAULT_MCHBAR + 0x3000, 0);
3294 printram("CPA\n");
3295}
3296
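/*
 * Write DQ edge discovery (interpretation based on the code below; the
 * registers involved are not publicly documented): sweep the falling and
 * then the rising edge delay for every lane and keep the midpoint of the
 * widest passing window. Writing 0x300/0x200 to the undocumented register
 * 0x4eb0 appears to select which edge is exercised by each pass.
 */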
3297static void discover_edges_write(ramctr_timing * ctrl)
3298{
3299 int falling_edges[NUM_CHANNELS][NUM_SLOTRANKS][NUM_LANES];
3300 int rising_edges[NUM_CHANNELS][NUM_SLOTRANKS][NUM_LANES];
3301 int channel, slotrank, lane;
3302
3303 /* FIXME: under some conditions (older chipsets?) vendor BIOS sets both edges to the same value. */
3304 write32(DEFAULT_MCHBAR + 0x4eb0, 0x300);
3305
3306 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS {
3307 discover_edges_write_real(ctrl, channel, slotrank,
3308 falling_edges[channel][slotrank]);
3309 }
3310
3311 write32(DEFAULT_MCHBAR + 0x4eb0, 0x200);
3312
3313 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS {
3314 discover_edges_write_real(ctrl, channel, slotrank,
3315 rising_edges[channel][slotrank]);
3316 }
3317
3318 write32(DEFAULT_MCHBAR + 0x4eb0, 0);
3319
3320 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS FOR_ALL_LANES {
3321 ctrl->timings[channel][slotrank].lanes[lane].falling =
3322 falling_edges[channel][slotrank][lane];
3323 ctrl->timings[channel][slotrank].lanes[lane].rising =
3324 rising_edges[channel][slotrank][lane];
3325 }
3326
3327 FOR_ALL_POPULATED_CHANNELS
3328 program_timings(ctrl, channel);
3329
3330 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS FOR_ALL_LANES {
3331 write32(DEFAULT_MCHBAR + 0x4080 + 0x400 * channel + 4 * lane,
3332 0);
3333 }
3334}
3335
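/*
 * Issue a single ACT/WR/RD/PRE sequence against the given slotrank and wait
 * for the command queue to drain. The caller reads the per-lane result bits
 * at 0x436c afterwards to judge whether the current timC setting worked.
 */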
3336static void test_timC_write(ramctr_timing *ctrl, int channel, int slotrank)
3337{
3338 wait_428c(channel);
3339	/* DRAM command ACT */
3340	write32(DEFAULT_MCHBAR + 0x4220 + 0x400 * channel, 0x1f006);
3341 write32(DEFAULT_MCHBAR + 0x4230 + 0x400 * channel,
3342 (max((ctrl->tFAW >> 2) + 1, ctrl->tRRD)
3343 << 10) | (ctrl->tRCD << 16) | 4);
3344 write32(DEFAULT_MCHBAR + 0x4200 + 0x400 * channel,
3345 (slotrank << 24) | 0x60000);
3346 write32(DEFAULT_MCHBAR + 0x4210 + 0x400 * channel, 0x244);
3347
3348	/* DRAM command WR */
3349	write32(DEFAULT_MCHBAR + 0x4224 + 0x400 * channel, 0x1f201);
3350 write32(DEFAULT_MCHBAR + 0x4234 + 0x400 * channel,
3351 0x80011e0 |
3352 ((ctrl->tWTR + ctrl->CWL + 8) << 16));
3353 write32(DEFAULT_MCHBAR + 0x4204 +
3354 0x400 * channel, (slotrank << 24));
3355 write32(DEFAULT_MCHBAR + 0x4214 +
3356 0x400 * channel, 0x242);
3357
3358	/* DRAM command RD */
3359	write32(DEFAULT_MCHBAR + 0x4228 +
3360 0x400 * channel, 0x1f105);
3361 write32(DEFAULT_MCHBAR + 0x4238 +
3362 0x400 * channel,
3363 0x40011e0 | (max(ctrl->tRTP, 8) << 16));
3364 write32(DEFAULT_MCHBAR + 0x4208 +
3365 0x400 * channel, (slotrank << 24));
3366 write32(DEFAULT_MCHBAR + 0x4218 +
3367 0x400 * channel, 0x242);
3368
3369	/* DRAM command PRE */
3370	write32(DEFAULT_MCHBAR + 0x422c +
3371 0x400 * channel, 0x1f002);
3372 write32(DEFAULT_MCHBAR + 0x423c +
3373 0x400 * channel,
3374 0x1001 | (ctrl->tRP << 16));
3375 write32(DEFAULT_MCHBAR + 0x420c +
3376 0x400 * channel,
3377 (slotrank << 24) | 0x60400);
3378 write32(DEFAULT_MCHBAR + 0x421c +
3379 0x400 * channel, 0);
3380
3381 write32(DEFAULT_MCHBAR + 0x4284 +
3382 0x400 * channel, 0xc0001);
3383 wait_428c(channel);
3384}
3385
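/*
 * Write timing (timC) discovery: for three settings of bits 24..29 of the
 * per-channel 0xe3c register and several test patterns, sweep timC on every
 * lane, find the longest run of passing values and program its center.
 * The purpose of 0x4ea8 and of the rege3c_b24 values is not documented.
 */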
3386static void discover_timC_write(ramctr_timing * ctrl)
3387{
3388 const u8 rege3c_b24[3] = { 0, 0xf, 0x2f };
3389 int i, pat;
3390
3391 int lower[NUM_CHANNELS][NUM_SLOTRANKS][NUM_LANES];
3392 int upper[NUM_CHANNELS][NUM_SLOTRANKS][NUM_LANES];
3393 int channel, slotrank, lane;
3394
3395 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS FOR_ALL_LANES {
3396 lower[channel][slotrank][lane] = 0;
3397 upper[channel][slotrank][lane] = MAX_TIMC;
3398 }
3399
3400 write32(DEFAULT_MCHBAR + 0x4ea8, 1);
3401
3402 for (i = 0; i < 3; i++)
3403 FOR_ALL_POPULATED_CHANNELS {
3404 write32(DEFAULT_MCHBAR + 0xe3c + (channel * 0x100),
3405 (rege3c_b24[i] << 24)
3406 | (read32(DEFAULT_MCHBAR + 0xe3c + (channel * 0x100))
3407 & ~0x3f000000));
3408 udelay(2);
3409 for (pat = 0; pat < NUM_PATTERNS; pat++) {
3410 FOR_ALL_POPULATED_RANKS {
3411 int timC;
3412 u32 raw_statistics[MAX_TIMC + 1];
3413 int statistics[MAX_TIMC + 1];
3414
3415 fill_pattern5(ctrl, channel, pat);
3416 write32(DEFAULT_MCHBAR + 0x4288 + 0x400 * channel, 0x1f);
3417 for (timC = 0; timC < MAX_TIMC + 1; timC++) {
3418 FOR_ALL_LANES
3419 ctrl->timings[channel][slotrank].lanes[lane].timC = timC;
3420 program_timings(ctrl, channel);
3421
3422 test_timC_write (ctrl, channel, slotrank);
3423
3424 raw_statistics[timC] =
3425 MCHBAR32(0x436c + 0x400 * channel);
3426 }
3427 FOR_ALL_LANES {
3428 struct run rn;
3429 for (timC = 0; timC <= MAX_TIMC; timC++)
3430 statistics[timC] =
3431 !!(raw_statistics[timC] &
3432 (1 << lane));
3433 rn = get_longest_zero_run(statistics,
3434 MAX_TIMC + 1);
3435 if (rn.all)
3436 die("timC write discovery failed");
3437 printram("timC: %d, %d, %d: 0x%x-0x%x-0x%x, 0x%x-0x%x\n",
3438 channel, slotrank, i, rn.start,
3439 rn.middle, rn.end,
3440 rn.start + ctrl->timC_offset[i],
3441 rn.end - ctrl->timC_offset[i]);
3442 lower[channel][slotrank][lane] =
3443 max(rn.start + ctrl->timC_offset[i],
3444 lower[channel][slotrank][lane]);
3445 upper[channel][slotrank][lane] =
3446 min(rn.end - ctrl->timC_offset[i],
3447 upper[channel][slotrank][lane]);
3448
3449 }
3450 }
3451 }
3452 }
3453
3454 FOR_ALL_CHANNELS {
3455 write32(DEFAULT_MCHBAR + (channel * 0x100) + 0xe3c,
3456 0 | (read32(DEFAULT_MCHBAR + (channel * 0x100) + 0xe3c) &
3457 ~0x3f000000));
3458 udelay(2);
3459 }
3460
3461 write32(DEFAULT_MCHBAR + 0x4ea8, 0);
3462
3463 printram("CPB\n");
3464
3465 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS FOR_ALL_LANES {
3466 printram("timC [%d, %d, %d] = 0x%x\n", channel,
3467 slotrank, lane,
3468 (lower[channel][slotrank][lane] +
3469 upper[channel][slotrank][lane]) / 2);
3470 ctrl->timings[channel][slotrank].lanes[lane].timC =
3471 (lower[channel][slotrank][lane] +
3472 upper[channel][slotrank][lane]) / 2;
3473 }
3474 FOR_ALL_POPULATED_CHANNELS {
3475 program_timings(ctrl, channel);
3476 }
3477}
3478
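/*
 * Bring the per-rank coarse delay val_4028 up to the coarse part (timA >> 6)
 * of the slowest lane found during read training, and move val_4024 by the
 * same delta, which is assumed to keep their relative alignment intact.
 */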
3479static void normalize_training(ramctr_timing * ctrl)
3480{
3481 int channel, slotrank, lane;
3482 int mat = 0;
3483
3484 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS {
3485 int delta;
3486 FOR_ALL_LANES mat =
3487 max(ctrl->timings[channel][slotrank].lanes[lane].timA, mat);
3488 delta = (mat >> 6) - ctrl->timings[channel][slotrank].val_4028;
3489 ctrl->timings[channel][slotrank].val_4024 += delta;
3490 ctrl->timings[channel][slotrank].val_4028 += delta;
3491 }
3492
3493 FOR_ALL_POPULATED_CHANNELS {
3494 program_timings(ctrl, channel);
3495 }
3496}
3497
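/*
 * Mirror the MR0/MR1 mode-register values into per-rank controller registers
 * (offsets 0x0004/0x0008 + lane_registers[slotrank]); presumably the
 * controller replays these on its own, e.g. when leaving self-refresh.
 */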
3498static void write_controller_mr(ramctr_timing * ctrl)
3499{
3500 int channel, slotrank;
3501
3502 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS {
3503 write32(DEFAULT_MCHBAR + 0x0004 + (channel << 8) +
3504 lane_registers[slotrank], make_mr0(ctrl, slotrank));
3505 write32(DEFAULT_MCHBAR + 0x0008 + (channel << 8) +
3506 lane_registers[slotrank], make_mr1(ctrl, slotrank));
3507 }
3508}
3509
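/*
 * Minimal sanity check of the trained configuration: fill each populated
 * rank with a fixed pattern, run one ACT/WR/RD/PRE sequence per rank and
 * die if what appear to be per-lane error counters at 0x4340 are non-zero.
 */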
3510static void channel_test(ramctr_timing * ctrl)
3511{
3512 int channel, slotrank, lane;
3513
3514 FOR_ALL_POPULATED_CHANNELS
3515 if (read32(DEFAULT_MCHBAR + 0x42a0 + (channel << 10)) & 0xa000)
3516 die("Mini channel test failed (1)\n");
3517 FOR_ALL_POPULATED_CHANNELS {
3518 fill_pattern0(ctrl, channel, 0x12345678, 0x98765432);
3519
3520 write32(DEFAULT_MCHBAR + 0x4288 + (channel << 10), 0);
3521 }
3522
3523 for (slotrank = 0; slotrank < 4; slotrank++)
3524 FOR_ALL_CHANNELS
3525 if (ctrl->rankmap[channel] & (1 << slotrank)) {
3526 FOR_ALL_LANES {
3527 write32(DEFAULT_MCHBAR + (0x4f40 + 4 * lane), 0);
3528 write32(DEFAULT_MCHBAR + (0x4d40 + 4 * lane), 0);
3529 }
3530 wait_428c(channel);
3531			/* DRAM command ACT */
3532			write32(DEFAULT_MCHBAR + 0x4220 + (channel << 10), 0x0001f006);
3533 write32(DEFAULT_MCHBAR + 0x4230 + (channel << 10), 0x0028a004);
3534 write32(DEFAULT_MCHBAR + 0x4200 + (channel << 10),
3535 0x00060000 | (slotrank << 24));
3536 write32(DEFAULT_MCHBAR + 0x4210 + (channel << 10), 0x00000244);
3537			/* DRAM command WR */
3538			write32(DEFAULT_MCHBAR + 0x4224 + (channel << 10), 0x0001f201);
3539 write32(DEFAULT_MCHBAR + 0x4234 + (channel << 10), 0x08281064);
3540 write32(DEFAULT_MCHBAR + 0x4204 + (channel << 10),
3541 0x00000000 | (slotrank << 24));
3542 write32(DEFAULT_MCHBAR + 0x4214 + (channel << 10), 0x00000242);
3543			/* DRAM command RD */
3544			write32(DEFAULT_MCHBAR + 0x4228 + (channel << 10), 0x0001f105);
3545 write32(DEFAULT_MCHBAR + 0x4238 + (channel << 10), 0x04281064);
3546 write32(DEFAULT_MCHBAR + 0x4208 + (channel << 10),
3547 0x00000000 | (slotrank << 24));
3548 write32(DEFAULT_MCHBAR + 0x4218 + (channel << 10), 0x00000242);
3549			/* DRAM command PRE */
3550			write32(DEFAULT_MCHBAR + 0x422c + (channel << 10), 0x0001f002);
3551 write32(DEFAULT_MCHBAR + 0x423c + (channel << 10), 0x00280c01);
3552 write32(DEFAULT_MCHBAR + 0x420c + (channel << 10),
3553 0x00060400 | (slotrank << 24));
3554 write32(DEFAULT_MCHBAR + 0x421c + (channel << 10), 0x00000240);
3555 write32(DEFAULT_MCHBAR + 0x4284 + (channel << 10), 0x000c0001);
3556 wait_428c(channel);
3557 FOR_ALL_LANES
3558 if (read32(DEFAULT_MCHBAR + 0x4340 + (channel << 10) + 4 * lane))
3559 die("Mini channel test failed (2)\n");
3560 }
3561}
3562
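/*
 * Program per-channel seeds for the memory scrambler. Scrambling
 * pseudo-randomizes the data driven on the DQ lines (mainly to avoid
 * repetitive supply-noise patterns); fixed seeds are assumed to be good
 * enough here, see the FIXME below.
 */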
3563static void set_scrambling_seed(ramctr_timing * ctrl)
3564{
3565 int channel;
3566
3567 /* FIXME: we hardcode seeds. Do we need to use some PRNG for them?
3568 I don't think so. */
3569 static u32 seeds[NUM_CHANNELS][3] = {
3570 {0x00009a36, 0xbafcfdcf, 0x46d1ab68},
3571 {0x00028bfa, 0x53fe4b49, 0x19ed5483}
3572 };
3573 FOR_ALL_POPULATED_CHANNELS {
3574 MCHBAR32(0x4020 + 0x400 * channel) &= ~0x10000000;
3575 write32(DEFAULT_MCHBAR + 0x4034, seeds[channel][0]);
3576 write32(DEFAULT_MCHBAR + 0x403c, seeds[channel][1]);
3577 write32(DEFAULT_MCHBAR + 0x4038, seeds[channel][2]);
3578 }
3579}
3580
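/*
 * Program an undocumented constant that depends on the CPU stepping:
 * Sandy Bridge D0/D1 get one magic value, everything else (including
 * Ivy Bridge) gets another.
 */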
3581static void set_4f8c(void)
3582{
3583 struct cpuid_result cpures;
3584 u32 cpu;
3585
3586 cpures = cpuid(0);
3587 cpu = (cpures.eax);
3588 if (IS_SANDY_CPU(cpu) && (IS_SANDY_CPU_D0(cpu) || IS_SANDY_CPU_D1(cpu))) {
3589 MCHBAR32(0x4f8c) = 0x141D1519;
3590 } else {
3591 MCHBAR32(0x4f8c) = 0x551D1519;
3592 }
3593}
3594
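/*
 * Before training: force the command bus to be driven on all populated
 * channels (bit 29 of 0x4004, cleared again in set_42a0()) and wait for the
 * command queues to go idle.
 */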
3595static void prepare_training(ramctr_timing * ctrl)
3596{
3597 int channel;
3598
3599 FOR_ALL_POPULATED_CHANNELS {
3600 // Always drive command bus
3601 MCHBAR32(0x4004 + 0x400 * channel) |= 0x20000000;
3602 }
3603
3604 udelay(1);
3605
3606 FOR_ALL_POPULATED_CHANNELS {
3607 wait_428c(channel);
3608 }
3609}
3610
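/*
 * Derive per-channel values for registers 0x4008/0x400c from the spread of
 * the per-rank command-training results (val_320c) and from
 * ref_card_offset. The thresholds used below are not otherwise documented.
 */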
3611static void set_4008c(ramctr_timing * ctrl)
3612{
3613 int channel, slotrank;
3614 u32 reg;
3615 FOR_ALL_POPULATED_CHANNELS {
3616 u32 b20, b4_8_12;
3617 int min_320c = 10000;
3618 int max_320c = -10000;
3619
3620 FOR_ALL_POPULATED_RANKS {
3621 max_320c = max(ctrl->timings[channel][slotrank].val_320c, max_320c);
3622 min_320c = min(ctrl->timings[channel][slotrank].val_320c, min_320c);
3623 }
3624
3625 if (max_320c - min_320c > 51)
3626 b20 = 0;
3627 else
3628 b20 = ctrl->ref_card_offset[channel];
3629
3630 if (ctrl->reg_320c_range_threshold < max_320c - min_320c)
3631 b4_8_12 = 0x3330;
3632 else
3633 b4_8_12 = 0x2220;
3634
3635 reg = read32(DEFAULT_MCHBAR + 0x400c + (channel << 10));
3636 write32(DEFAULT_MCHBAR + 0x400c + (channel << 10),
3637 (reg & 0xFFF0FFFF)
3638 | (ctrl->ref_card_offset[channel] << 16)
3639 | (ctrl->ref_card_offset[channel] << 18));
3640 write32(DEFAULT_MCHBAR + 0x4008 + (channel << 10),
3641 0x0a000000
3642 | (b20 << 20)
3643 | ((ctrl->ref_card_offset[channel] + 2) << 16)
3644 | b4_8_12);
3645 }
3646}
3647
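/*
 * Program the rank population map into 0x42a0 and stop forcing the command
 * bus (undo the bit set in prepare_training()).
 */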
3648static void set_42a0(ramctr_timing * ctrl)
3649{
3650 int channel;
3651 FOR_ALL_POPULATED_CHANNELS {
3652 write32(DEFAULT_MCHBAR + (0x42a0 + 0x400 * channel),
3653 0x00001000 | ctrl->rankmap[channel]);
3654 MCHBAR32(0x4004 + 0x400 * channel) &= ~0x20000000; // OK
3655 }
3656}
3657
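/*
 * Convert nanoseconds to the 500 ns units that register 0x5d10 appears to
 * use, rounding up; e.g. encode_5d10(544) == 2 and encode_5d10(500) == 1.
 */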
3658static int encode_5d10(int ns)
3659{
3660 return (ns + 499) / 500;
3661}
3662
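/*
 * Remaining register setup after training. The three delays packed into
 * 0x5d10 are derived from a cycle count read from 0x4290/0x42a4 (t1) and
 * from values read back at 0x5f10/0x5f20 (t2, t3); what these delays
 * control is not documented.
 */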
3663/* FIXME: values in this function should be hardware revision-dependent. */
3664static void final_registers(ramctr_timing * ctrl)
3665{
3666 int channel;
3667 int t1_cycles = 0, t1_ns = 0, t2_ns;
3668 int t3_ns;
3669 u32 r32;
3670
3671 write32(DEFAULT_MCHBAR + 0x4cd4, 0x00000046);
3672
3673 write32(DEFAULT_MCHBAR + 0x400c, (read32(DEFAULT_MCHBAR + 0x400c) & 0xFFFFCFFF) | 0x1000); // OK
3674 write32(DEFAULT_MCHBAR + 0x440c, (read32(DEFAULT_MCHBAR + 0x440c) & 0xFFFFCFFF) | 0x1000); // OK
3675 write32(DEFAULT_MCHBAR + 0x4cb0, 0x00000740);
3676 write32(DEFAULT_MCHBAR + 0x4380, 0x00000aaa); // OK
3677 write32(DEFAULT_MCHBAR + 0x4780, 0x00000aaa); // OK
3678 write32(DEFAULT_MCHBAR + 0x4f88, 0x5f7003ff); // OK
3679 write32(DEFAULT_MCHBAR + 0x5064, 0x00073000 | ctrl->reg_5064b0); // OK
3680
3681 FOR_ALL_CHANNELS {
3682 switch (ctrl->rankmap[channel]) {
3683 /* Unpopulated channel. */
3684 case 0:
3685 write32(DEFAULT_MCHBAR + 0x4384 + channel * 0x400, 0);
3686 break;
3687 /* Only single-ranked dimms. */
3688 case 1:
3689 case 4:
3690 case 5:
3691 write32(DEFAULT_MCHBAR + 0x4384 + channel * 0x400, 0x373131);
3692 break;
3693 /* Dual-ranked dimms present. */
3694 default:
3695 write32(DEFAULT_MCHBAR + 0x4384 + channel * 0x400, 0x9b6ea1);
3696 break;
3697 }
3698 }
3699
3700 write32 (DEFAULT_MCHBAR + 0x5880, 0xca9171e5);
3701 write32 (DEFAULT_MCHBAR + 0x5888,
3702 (read32 (DEFAULT_MCHBAR + 0x5888) & ~0xffffff) | 0xe4d5d0);
3703 write32 (DEFAULT_MCHBAR + 0x58a8, read32 (DEFAULT_MCHBAR + 0x58a8) & ~0x1f);
3704 write32 (DEFAULT_MCHBAR + 0x4294,
3705 (read32 (DEFAULT_MCHBAR + 0x4294) & ~0x30000)
3706 | (1 << 16));
3707 write32 (DEFAULT_MCHBAR + 0x4694,
3708 (read32 (DEFAULT_MCHBAR + 0x4694) & ~0x30000)
3709 | (1 << 16));
3710
3711 MCHBAR32(0x5030) |= 1; // OK
3712 MCHBAR32(0x5030) |= 0x80; // OK
3713 MCHBAR32(0x5f18) = 0xfa; // OK
3714
3715 /* Find a populated channel. */
3716 FOR_ALL_POPULATED_CHANNELS
3717 break;
3718
3719 t1_cycles = ((read32(DEFAULT_MCHBAR + 0x4290 + channel * 0x400) >> 8) & 0xff);
3720 r32 = read32(DEFAULT_MCHBAR + 0x5064);
3721 if (r32 & 0x20000)
3722 t1_cycles += (r32 & 0xfff);
3723 t1_cycles += (read32(DEFAULT_MCHBAR + channel * 0x400 + 0x42a4) & 0xfff);
3724 t1_ns = t1_cycles * ctrl->tCK / 256 + 544;
3725 if (!(r32 & 0x20000))
3726 t1_ns += 500;
3727
3728 t2_ns = 10 * ((read32(DEFAULT_MCHBAR + 0x5f10) >> 8) & 0xfff);
3729 if ( read32(DEFAULT_MCHBAR + 0x5f00) & 8 )
3730 {
3731 t3_ns = 10 * ((read32(DEFAULT_MCHBAR + 0x5f20) >> 8) & 0xfff);
3732 t3_ns += 10 * (read32(DEFAULT_MCHBAR + 0x5f18) & 0xff);
3733 }
3734 else
3735 {
3736 t3_ns = 500;
3737 }
3738 printk(BIOS_DEBUG, "t123: %d, %d, %d\n",
3739 t1_ns, t2_ns, t3_ns);
3740 write32 (DEFAULT_MCHBAR + 0x5d10,
3741 ((encode_5d10(t1_ns) + encode_5d10(t2_ns)) << 16)
3742 | (encode_5d10(t1_ns) << 8)
3743 | ((encode_5d10(t3_ns) + encode_5d10(t2_ns) + encode_5d10(t1_ns)) << 24)
3744 | (read32(DEFAULT_MCHBAR + 0x5d10) & 0xC0C0C0C0)
3745 | 0xc);
3746}
3747
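/*
 * Store the complete ramctr_timing structure in CBMEM (MRC cache) so that
 * restore_timings() can reuse it on the S3 resume path instead of
 * retraining.
 */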
3748static void save_timings(ramctr_timing * ctrl)
3749{
3750 struct mrc_data_container *mrcdata;
3751 int output_len = ALIGN(sizeof (*ctrl), 16);
3752
3753 /* Save the MRC S3 restore data to cbmem */
3754 mrcdata = cbmem_add
3755 (CBMEM_ID_MRCDATA,
3756 output_len + sizeof(struct mrc_data_container));
3757
3758 printk(BIOS_DEBUG, "Relocate MRC DATA from %p to %p (%u bytes)\n",
3759 ctrl, mrcdata, output_len);
3760
3761 mrcdata->mrc_signature = MRC_DATA_SIGNATURE;
3762 mrcdata->mrc_data_size = output_len;
3763 mrcdata->reserved = 0;
3764 memcpy(mrcdata->mrc_data, ctrl, sizeof (*ctrl));
3765
3766 /* Zero the unused space in aligned buffer. */
3767 if (output_len > sizeof (*ctrl))
3768 memset(mrcdata->mrc_data+sizeof (*ctrl), 0,
3769 output_len - sizeof (*ctrl));
3770
3771 mrcdata->mrc_checksum = compute_ip_checksum(mrcdata->mrc_data,
3772 mrcdata->mrc_data_size);
3773}
3774
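/*
 * S3 resume path: reprogram the timing registers from the cached
 * ramctr_timing data and re-run the JEDEC init steps (CKE, reset NOP, MRS)
 * without any training.
 */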
3775static void restore_timings(ramctr_timing * ctrl)
3776{
3777 int channel, slotrank, lane;
3778
3779 FOR_ALL_POPULATED_CHANNELS
3780 MCHBAR32(0x4004 + 0x400 * channel) =
3781 ctrl->tRRD
3782 | (ctrl->tRTP << 4)
3783 | (ctrl->tCKE << 8)
3784 | (ctrl->tWTR << 12)
3785 | (ctrl->tFAW << 16)
3786 | (ctrl->tWR << 24)
3787 | (ctrl->cmd_stretch[channel] << 30);
3788
3789 udelay(1);
3790
3791 FOR_ALL_POPULATED_CHANNELS {
3792 wait_428c(channel);
3793 }
3794
3795 FOR_ALL_CHANNELS FOR_ALL_POPULATED_RANKS FOR_ALL_LANES {
3796 write32(DEFAULT_MCHBAR + 0x4080 + 0x400 * channel
3797 + 4 * lane, 0);
3798 }
3799
3800 FOR_ALL_POPULATED_CHANNELS
3801 write32(DEFAULT_MCHBAR + 0x4008 + 0x400 * channel,
3802 read32(DEFAULT_MCHBAR + 0x4008 +
3803 0x400 * channel) | 0x8000000);
3804
3805 FOR_ALL_POPULATED_CHANNELS {
3806 udelay (1);
3807 write32(DEFAULT_MCHBAR + 0x4020 + 0x400 * channel,
3808 read32(DEFAULT_MCHBAR + 0x4020 +
3809 0x400 * channel) | 0x200000);
3810 }
3811
3812 printram("CPE\n");
3813
3814 write32(DEFAULT_MCHBAR + 0x3400, 0);
3815 write32(DEFAULT_MCHBAR + 0x4eb0, 0);
3816
3817 printram("CP5b\n");
3818
3819 FOR_ALL_POPULATED_CHANNELS {
3820 program_timings(ctrl, channel);
3821 }
3822
3823 u32 reg, addr;
3824
3825 while (!(MCHBAR32(0x5084) & 0x10000)) ;
3826 do {
3827 reg = MCHBAR32(0x428c);
3828 } while ((reg & 0x14) == 0);
3829
3830 // Set state of memory controller
3831 MCHBAR32(0x5030) = 0x116;
3832 MCHBAR32(0x4ea0) = 0;
3833
3834 // Wait 500us
3835 udelay(500);
3836
3837 FOR_ALL_CHANNELS {
3838 // Set valid rank CKE
3839 reg = 0;
3840 reg = (reg & ~0xf) | ctrl->rankmap[channel];
3841 addr = 0x400 * channel + 0x42a0;
3842 MCHBAR32(addr) = reg;
3843
3844 // Wait 10ns for ranks to settle
3845 //udelay(0.01);
3846
3847 reg = (reg & ~0xf0) | (ctrl->rankmap[channel] << 4);
3848 MCHBAR32(addr) = reg;
3849
3850 // Write reset using a NOP
3851 write_reset(ctrl);
3852 }
3853
3854 /* mrs commands. */
3855 dram_mrscommands(ctrl);
3856
3857 printram("CP5c\n");
3858
3859 write32(DEFAULT_MCHBAR + 0x3000, 0);
3860
3861 FOR_ALL_CHANNELS {
3862 write32(DEFAULT_MCHBAR + (channel * 0x100) + 0xe3c,
3863 0 | (read32(DEFAULT_MCHBAR + (channel * 0x100) + 0xe3c) &
3864 ~0x3f000000));
3865 udelay(2);
3866 }
3867
3868 write32(DEFAULT_MCHBAR + 0x4ea8, 0);
3869}
3870
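/*
 * Top-level DDR3 init entry point: read SPDs and derive timings (or load the
 * cached ones on S3 resume), program the memory controller, run the JEDEC
 * reset/MRS sequence plus read/write/command training, and finally save the
 * results to CBMEM.
 */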
3871void init_dram_ddr3(spd_raw_data * spds, int mobile, int min_tck,
3872 int s3resume)
3873{
3874 int me_uma_size;
3875 int cbmem_was_inited;
3876
3877 MCHBAR32(0x5f00) |= 1;
3878
3879	report_platform_info();
3880
3881	/* Wait for ME to be ready */
3882 intel_early_me_init();
3883	me_uma_size = intel_early_me_uma_size();
3884
3885	printk(BIOS_DEBUG, "Starting native Platform init\n");
3886
3887	u32 reg_5d10;
3888
3889	wait_txt_clear();
3890
3891	wrmsr(0x000002e6, (msr_t) { .lo = 0, .hi = 0 });
3892
3893	reg_5d10 = read32(DEFAULT_MCHBAR + 0x5d10);	// !!! = 0x00000000
3894 if ((pcie_read_config16(SOUTHBRIDGE, 0xa2) & 0xa0) == 0x20 /* 0x0004 */
3895 && reg_5d10 && !s3resume) {
3896 write32(DEFAULT_MCHBAR + 0x5d10, 0);
3897 /* Need reset. */
3898		outb(0x6, 0xcf9);
3899
3900		halt();
3901	}
3902
3903	ramctr_timing ctrl;
3904
3905	memset(&ctrl, 0, sizeof (ctrl));
3906
3907 early_pch_init_native();
3908 early_thermal_init();
3909
3910 ctrl.mobile = mobile;
3911 ctrl.tCK = min_tck;
3912
3913 /* FIXME: for non-S3 we should be able to use timing caching with
3914 proper verification. Right now we use timings only for S3 case.
3915 */
3916 if (s3resume) {
3917 struct mrc_data_container *mrc_cache;
3918
3919 mrc_cache = find_current_mrc_cache();
3920 if (!mrc_cache || mrc_cache->mrc_data_size < sizeof (ctrl)) {
3921 /* Failed S3 resume, reset to come up cleanly */
3922 outb(0x6, 0xcf9);
3923 halt();
3924		}
3925		memcpy(&ctrl, mrc_cache->mrc_data, sizeof (ctrl));
3926	}
3927
3928	if (!s3resume) {
3929 dimm_info info;
3930
3931		/* Get DDR3 SPD data */
3932 dram_find_spds_ddr3(spds, &info, &ctrl);
3933
3934		/* Find fastest common supported parameters */
3935 dram_find_common_params(&info, &ctrl);
Stefan Reinauer00636b02012-04-04 00:08:51 +02003936
Alexandru Gagniucecf2eb42015-09-28 21:39:12 -07003937 dram_dimm_mapping(&info, &ctrl);
3938 }
3939
3940 /* Set MCU frequency */
3941 dram_freq(&ctrl);
3942
3943 if (!s3resume) {
3944 /* Calculate timings */
3945 dram_timing(&ctrl);
3946 }
3947
3948 /* Set version register */
3949 MCHBAR32(0x5034) = 0xC04EB002;
3950
3951 /* Enable crossover */
3952 dram_xover(&ctrl);
3953
3954 /* Set timing and refresh registers */
3955 dram_timing_regs(&ctrl);
3956
3957 /* Power mode preset */
3958 MCHBAR32(0x4e80) = 0x5500;
3959
3960 /* Set scheduler parameters */
3961 MCHBAR32(0x4c20) = 0x10100005;
3962
3963 /* Set cpu specific register */
3964 set_4f8c();
3965
3966 /* Clear IO reset bit */
3967 MCHBAR32(0x5030) &= ~0x20;
3968
3969 /* Set MAD-DIMM registers */
3970 dram_dimm_set_mapping(&ctrl);
3971 printk(BIOS_DEBUG, "Done dimm mapping\n");
3972
3973 /* Zone config */
3974 dram_zones(&ctrl, 1);
3975
3976 /* Set memory map */
3977 dram_memorymap(&ctrl, me_uma_size);
3978 printk(BIOS_DEBUG, "Done memory map\n");
3979
3980 /* Set IO registers */
3981 dram_ioregs(&ctrl);
3982 printk(BIOS_DEBUG, "Done io registers\n");
3983
3984 udelay(1);
3985
3986 if (s3resume) {
3987 restore_timings(&ctrl);
3988 } else {
3989 /* Do jedec ddr3 reset sequence */
3990 dram_jedecreset(&ctrl);
3991 printk(BIOS_DEBUG, "Done jedec reset\n");
3992
3993 /* MRS commands */
3994 dram_mrscommands(&ctrl);
3995 printk(BIOS_DEBUG, "Done MRS commands\n");
3997
3998 /* Prepare for memory training */
3999 prepare_training(&ctrl);
4000
4001 read_training(&ctrl);
4002 write_training(&ctrl);
4003
4004 printram("CP5a\n");
4005
4006 discover_edges(&ctrl);
4007
4008 printram("CP5b\n");
4009
4010 command_training(&ctrl);
4011
4012 printram("CP5c\n");
4013
4014 discover_edges_write(&ctrl);
4015
4016 discover_timC_write(&ctrl);
4017
4018 normalize_training(&ctrl);
4019 }
4020
4021 set_4008c(&ctrl);
4022
4023 write_controller_mr(&ctrl);
4024
4025 if (!s3resume) {
4026 channel_test(&ctrl);
4027 }
4028
4029 /* FIXME: should be hardware revision-dependent. */
4030 write32(DEFAULT_MCHBAR + 0x5024, 0x00a030ce);
4031
4032 set_scrambling_seed(&ctrl);
4033
4034 set_42a0(&ctrl);
4035
4036 final_registers(&ctrl);
4037
4038 /* Zone config */
4039 dram_zones(&ctrl, 0);
4040
4041 if (!s3resume)
4042 quick_ram_check();
4043
4044 intel_early_me_status();
4045 intel_early_me_init_done(ME_INIT_STATUS_SUCCESS);
4046 intel_early_me_status();
4047
4048	report_memory_config();
4049
4050 cbmem_was_inited = !cbmem_recovery(s3resume);
4051 if (!s3resume)
4052 save_timings(&ctrl);
4053 if (s3resume && !cbmem_was_inited) {
4054 /* Failed S3 resume, reset to come up cleanly */
4055 outb(0x6, 0xcf9);
4056 halt();
4057 }
4058}