blob: ae1863bddaedbcb5ad354698a66c0fa04407fdf2 [file] [log] [blame]
Patrick Georgi2efc8802012-11-06 11:03:53 +01001/*
2 * This file is part of the coreboot project.
3 *
4 * Copyright (C) 2007-2008 coresystems GmbH
5 * 2012 secunet Security Networks AG
6 *
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation; version 2 of the License.
10 *
11 * This program is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 * GNU General Public License for more details.
Patrick Georgi2efc8802012-11-06 11:03:53 +010015 */
16
17#ifndef __NORTHBRIDGE_INTEL_GM45_GM45_H__
Edward O'Callaghan089a5102015-01-06 02:48:57 +110018#define __NORTHBRIDGE_INTEL_GM45_GM45_H__
Patrick Georgi2efc8802012-11-06 11:03:53 +010019
20#include "southbridge/intel/i82801ix/i82801ix.h"
21
22#ifndef __ACPI__
23
Alexander Couzens83fc32f2015-04-12 22:28:37 +020024#include <rules.h>
Patrick Georgi2efc8802012-11-06 11:03:53 +010025#include <stdint.h>
26
27typedef enum {
28 FSB_CLOCK_1067MHz = 0,
29 FSB_CLOCK_800MHz = 1,
30 FSB_CLOCK_667MHz = 2,
31} fsb_clock_t;
32
33typedef enum { /* Steppings below B1 were pre-production,
34 conversion stepping A1 is... ?
35 We'll support B1, B2, B3, and conversion stepping A1. */
36 STEPPING_A0 = 0,
37 STEPPING_A1 = 1,
38 STEPPING_A2 = 2,
39 STEPPING_A3 = 3,
40 STEPPING_B0 = 4,
41 STEPPING_B1 = 5,
42 STEPPING_B2 = 6,
43 STEPPING_B3 = 7,
44 STEPPING_CONVERSION_A1 = 9,
45} stepping_t;
46
47typedef enum {
48 GMCH_GM45 = 0,
49 GMCH_GM47,
50 GMCH_GM49,
51 GMCH_GE45,
52 GMCH_GL40,
53 GMCH_GL43,
54 GMCH_GS40,
55 GMCH_GS45,
56 GMCH_PM45,
57 GMCH_UNKNOWN
58} gmch_gfx_t;
59
60typedef enum {
61 MEM_CLOCK_533MHz = 0,
62 MEM_CLOCK_400MHz = 1,
63 MEM_CLOCK_333MHz = 2,
64 MEM_CLOCK_1067MT = 0,
65 MEM_CLOCK_800MT = 1,
66 MEM_CLOCK_667MT = 2,
67} mem_clock_t;
68
69typedef enum {
70 DDR1 = 1,
71 DDR2 = 2,
72 DDR3 = 3,
73} ddr_t;
74
75typedef enum {
76 CHANNEL_MODE_SINGLE,
77 CHANNEL_MODE_DUAL_ASYNC,
78 CHANNEL_MODE_DUAL_INTERLEAVED,
79} channel_mode_t;
80
81typedef enum { /* as in DDR3 spd */
82 CHIP_WIDTH_x4 = 0,
83 CHIP_WIDTH_x8 = 1,
84 CHIP_WIDTH_x16 = 2,
85 CHIP_WIDTH_x32 = 3,
86} chip_width_t;
87
88typedef enum { /* as in DDR3 spd */
89 CHIP_CAP_256M = 0,
90 CHIP_CAP_512M = 1,
91 CHIP_CAP_1G = 2,
92 CHIP_CAP_2G = 3,
93 CHIP_CAP_4G = 4,
94 CHIP_CAP_8G = 5,
95 CHIP_CAP_16G = 6,
96} chip_capacity_t;
97
98typedef struct {
99 unsigned int CAS;
100 fsb_clock_t fsb_clock;
101 mem_clock_t mem_clock;
102 channel_mode_t channel_mode;
103 unsigned int tRAS;
104 unsigned int tRP;
105 unsigned int tRCD;
106 unsigned int tRFC;
107 unsigned int tWR;
108 unsigned int tRD;
109 unsigned int tRRD;
110 unsigned int tFAW;
111 unsigned int tWL;
112} timings_t;
113
114typedef struct {
115 unsigned int card_type; /* 0x0: unpopulated,
116 0xa - 0xf: raw card type A - F */
117 chip_width_t chip_width;
118 chip_capacity_t chip_capacity;
119 unsigned int page_size; /* of whole DIMM in Bytes (4096 or 8192) */
120 unsigned int banks;
121 unsigned int ranks;
Martin Roth128c1042016-11-18 09:29:03 -0700122 unsigned int rank_capacity_mb; /* per rank in Megabytes */
Patrick Georgi2efc8802012-11-06 11:03:53 +0100123} dimminfo_t;
124
125/* The setup is one DIMM per channel, so there's no need to find a
126 common timing setup between multiple chips (but chip and controller
127 still need to be coordinated */
128typedef struct {
129 stepping_t stepping;
130 int txt_enabled;
131 int cores;
132 gmch_gfx_t gfx_type;
Patrick Georgi2efc8802012-11-06 11:03:53 +0100133 int max_ddr2_mhz;
134 int max_ddr3_mt;
135 fsb_clock_t max_fsb;
136 int max_fsb_mhz;
137 int max_render_mhz;
Vladimir Serbinenko56ae8a02014-08-16 10:59:02 +0200138 int enable_igd;
139 int enable_peg;
140 u16 ggc;
Patrick Georgi2efc8802012-11-06 11:03:53 +0100141
Nico Huber5aaeb272015-12-30 00:17:27 +0100142 /* to be filled in romstage main: */
Patrick Georgi2efc8802012-11-06 11:03:53 +0100143 int spd_type;
144 timings_t selected_timings;
145 dimminfo_t dimms[2];
Vladimir Serbinenkoc4d89482014-06-05 09:14:48 +0200146 u8 spd_map[4];
Nico Huber5aaeb272015-12-30 00:17:27 +0100147 int gs45_low_power_mode; /* low power mode of GMCH_GS45 */
148 int sff; /* small form factor option (soldered down DIMM) */
Patrick Georgi2efc8802012-11-06 11:03:53 +0100149} sysinfo_t;
150#define TOTAL_CHANNELS 2
151#define CHANNEL_IS_POPULATED(dimms, idx) (dimms[idx].card_type != 0)
152#define CHANNEL_IS_CARDF(dimms, idx) (dimms[idx].card_type == 0xf)
153#define IF_CHANNEL_POPULATED(dimms, idx) if (dimms[idx].card_type != 0)
154#define FOR_EACH_CHANNEL(idx) \
155 for (idx = 0; idx < TOTAL_CHANNELS; ++idx)
156#define FOR_EACH_POPULATED_CHANNEL(dimms, idx) \
157 FOR_EACH_CHANNEL(idx) IF_CHANNEL_POPULATED(dimms, idx)
158
159#define RANKS_PER_CHANNEL 4 /* Only two may be populated */
160#define IF_RANK_POPULATED(dimms, ch, r) \
161 if (dimms[ch].card_type && ((r) < dimms[ch].ranks))
162#define FOR_EACH_RANK_IN_CHANNEL(r) \
163 for (r = 0; r < RANKS_PER_CHANNEL; ++r)
164#define FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms, ch, r) \
165 FOR_EACH_RANK_IN_CHANNEL(r) IF_RANK_POPULATED(dimms, ch, r)
166#define FOR_EACH_RANK(ch, r) \
167 FOR_EACH_CHANNEL(ch) FOR_EACH_RANK_IN_CHANNEL(r)
168#define FOR_EACH_POPULATED_RANK(dimms, ch, r) \
169 FOR_EACH_RANK(ch, r) IF_RANK_POPULATED(dimms, ch, r)
170
171#define DDR3_MAX_CAS 18
172
173enum {
174 VCO_2666 = 4,
175 VCO_3200 = 0,
176 VCO_4000 = 1,
177 VCO_5333 = 2,
178};
179
180#endif
181
182/* Offsets of read/write training results in CMOS.
183 They will be restored upon S3 resumes. */
184#define CMOS_READ_TRAINING 0x80 /* 16 bytes */
185#define CMOS_WRITE_TRAINING 0x90 /* 16 bytes
186 (could be reduced to 10 bytes) */
187
188
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -0800189#ifndef __ACPI__
190#define DEFAULT_MCHBAR ((u8 *)0xfed14000)
191#define DEFAULT_DMIBAR ((u8 *)0xfed18000)
192#else
Patrick Georgi2efc8802012-11-06 11:03:53 +0100193#define DEFAULT_MCHBAR 0xfed14000
194#define DEFAULT_DMIBAR 0xfed18000
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -0800195#endif
Patrick Georgi2efc8802012-11-06 11:03:53 +0100196#define DEFAULT_EPBAR 0xfed19000
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -0800197#define DEFAULT_HECIBAR ((u8 *)0xfed1a000)
Patrick Georgi2efc8802012-11-06 11:03:53 +0100198
199 /* 4 KB per PCIe device */
200#define DEFAULT_PCIEXBAR CONFIG_MMCONF_BASE_ADDRESS
201
202#define IOMMU_BASE1 0xfed90000
203#define IOMMU_BASE2 0xfed91000
204#define IOMMU_BASE3 0xfed92000
205#define IOMMU_BASE4 0xfed93000
206
207/*
208 * D0:F0
209 */
210#define D0F0_EPBAR_LO 0x40
211#define D0F0_EPBAR_HI 0x44
212#define D0F0_MCHBAR_LO 0x48
213#define D0F0_MCHBAR_HI 0x4c
214#define D0F0_GGC 0x52
215#define D0F0_DEVEN 0x54
216#define D0F0_PCIEXBAR_LO 0x60
217#define D0F0_PCIEXBAR_HI 0x64
218#define D0F0_DMIBAR_LO 0x68
219#define D0F0_DMIBAR_HI 0x6c
220#define D0F0_PMBASE 0x78
221#define D0F0_PAM(x) (0x90+(x)) /* 0-6*/
222#define D0F0_REMAPBASE 0x98
223#define D0F0_REMAPLIMIT 0x9a
224#define D0F0_SMRAM 0x9d
225#define D0F0_ESMRAMC 0x9e
226#define D0F0_TOM 0xa0
227#define D0F0_TOUUD 0xa2
228#define D0F0_TOLUD 0xb0
229#define D0F0_SKPD 0xdc /* Scratchpad Data */
230#define D0F0_CAPID0 0xe0
231
232/*
233 * D1:F0 PEG
234 */
235#define PEG_CAP 0xa2
236#define SLOTCAP 0xb4
237#define PEGLC 0xec
238#define D1F0_VCCAP 0x104
239#define D1F0_VC0RCTL 0x114
240
241/*
242 * Graphics frequencies
243 */
244#define GCFGC_PCIDEV PCI_DEV(0, 2, 0)
245#define GCFGC_OFFSET 0xf0
246#define GCFGC_CR_SHIFT 0
247#define GCFGC_CR_MASK (0xf << GCFGC_CR_SHIFT)
248#define GCFGC_CS_SHIFT 8
249#define GCFGC_CS_MASK (0xf << GCFGC_CS_SHIFT)
250#define GCFGC_CD_SHIFT 12
251#define GCFGC_CD_MASK (0x1 << GCFGC_CD_SHIFT)
252#define GCFGC_UPDATE_SHIFT 5
253#define GCFGC_UPDATE (0x1 << GCFGC_UPDATE_SHIFT)
254
255/*
256 * MCHBAR
257 */
258
259#define MCHBAR8(x) *((volatile u8 *)(DEFAULT_MCHBAR + x))
260#define MCHBAR16(x) *((volatile u16 *)(DEFAULT_MCHBAR + x))
261#define MCHBAR32(x) *((volatile u32 *)(DEFAULT_MCHBAR + x))
262
263#define PMSTS_MCHBAR 0x0f14 /* Self refresh channel status */
264#define PMSTS_WARM_RESET (1 << 1)
265#define PMSTS_BOTH_SELFREFRESH (1 << 0)
266
267#define CLKCFG_MCHBAR 0x0c00
268#define CLKCFG_FSBCLK_SHIFT 0
269#define CLKCFG_FSBCLK_MASK (7 << CLKCFG_FSBCLK_SHIFT)
270#define CLKCFG_MEMCLK_SHIFT 4
271#define CLKCFG_MEMCLK_MASK (7 << CLKCFG_MEMCLK_SHIFT)
272#define CLKCFG_UPDATE (1 << 12)
273
274#define SSKPD_MCHBAR 0x0c1c
275#define SSKPD_CLK_SHIFT 0
276#define SSKPD_CLK_MASK (7 << SSKPD_CLK_SHIFT)
277
278#define DCC_MCHBAR 0x200
279#define DCC_NO_CHANXOR (1 << 10)
280#define DCC_INTERLEAVED (1 << 1)
281#define DCC_CMD_SHIFT 16
282#define DCC_CMD_MASK (7 << DCC_CMD_SHIFT)
283#define DCC_CMD_NOP (1 << DCC_CMD_SHIFT)
284 /* For mode register mr0: */
285#define DCC_SET_MREG (3 << DCC_CMD_SHIFT)
286 /* For extended mode registers mr1 to mr3: */
287#define DCC_SET_EREG (4 << DCC_CMD_SHIFT)
288#define DCC_SET_EREG_SHIFT 21
289#define DCC_SET_EREG_MASK (DCC_CMD_MASK | (3 << DCC_SET_EREG_SHIFT))
290#define DCC_SET_EREGx(x) ((DCC_SET_EREG | \
291 ((x - 1) << DCC_SET_EREG_SHIFT)) & \
292 DCC_SET_EREG_MASK)
293
294/* Per channel DRAM Row Attribute registers (32-bit) */
295#define CxDRA_MCHBAR(x) (0x1208 + (x * 0x0100))
296#define CxDRA_PAGESIZE_SHIFT(r) (r * 4) /* Per rank r */
297#define CxDRA_PAGESIZE_MASKr(r) (0x7 << CxDRA_PAGESIZE_SHIFT(r))
298#define CxDRA_PAGESIZE_MASK 0x0000ffff
299#define CxDRA_PAGESIZE(r, p) /* for log2(dimm page size in bytes) p */ \
300 (((p - 10) << CxDRA_PAGESIZE_SHIFT(r)) & CxDRA_PAGESIZE_MASKr(r))
301#define CxDRA_BANKS_SHIFT(r) ((r * 3) + 16)
302#define CxDRA_BANKS_MASKr(r) (0x3 << CxDRA_BANKS_SHIFT(r))
303#define CxDRA_BANKS_MASK 0x07ff0000
304#define CxDRA_BANKS(r, b) /* for number of banks b */ \
305 ((b << (CxDRA_BANKS_SHIFT(r) - 3)) & CxDRA_BANKS_MASKr(r))
306
307/*
308 * Per channel DRAM Row Boundary registers (32-bit)
309 * Every two ranks share one register and must be programmed at the same time.
310 * All registers (4 ranks per channel) have to be set.
311 */
312#define CxDRBy_MCHBAR(x, r) (0x1200 + (x * 0x0100) + ((r/2) * 4))
313#define CxDRBy_BOUND_SHIFT(r) ((r % 2) * 16)
314#define CxDRBy_BOUND_MASK(r) (0x1fc << CxDRBy_BOUND_SHIFT(r))
315#define CxDRBy_BOUND_MB(r, b) /* for boundary in MB b */ \
316 (((b >> 5) << CxDRBy_BOUND_SHIFT(r)) & CxDRBy_BOUND_MASK(r))
317
318#define CxDRC0_MCHBAR(x) (0x1230 + (x * 0x0100))
319#define CxDRC0_RANKEN0 (1 << 24) /* Rank Enable */
320#define CxDRC0_RANKEN1 (1 << 25)
321#define CxDRC0_RANKEN2 (1 << 26)
322#define CxDRC0_RANKEN3 (1 << 27)
323#define CxDRC0_RANKEN(r) (1 << (24 + r))
324#define CxDRC0_RANKEN_MASK (0xf << 24)
325#define CxDRC0_RMS_SHIFT 8 /* Refresh Mode Select */
326#define CxDRC0_RMS_MASK (7 << CxDRC0_RMS_SHIFT)
327#define CxDRC0_RMS_78US (2 << CxDRC0_RMS_SHIFT)
328#define CxDRC0_RMS_39US (3 << CxDRC0_RMS_SHIFT)
329
330#define CxDRC1_MCHBAR(x) (0x1234 + (x * 0x0100))
331#define CxDRC1_SSDS_SHIFT 24
332#define CxDRC1_SSDS_MASK (0xff << CxDRC1_SSDS_SHIFT)
333#define CxDRC1_DS (0x91 << CxDRC1_SSDS_SHIFT)
334#define CxDRC1_SS (0xb1 << CxDRC1_SSDS_SHIFT)
335#define CxDRC1_NOTPOP(r) (1 << (16 + r)) /* Write 1 for Not Populated */
336#define CxDRC1_NOTPOP_MASK (0xf << 16)
337#define CxDRC1_MUSTWR (3 << 11)
338
339#define CxDRC2_MCHBAR(x) (0x1238 + (x * 0x0100))
340#define CxDRC2_NOTPOP(r) (1 << (24 + r)) /* Write 1 for Not Populated */
341#define CxDRC2_NOTPOP_MASK (0xf << 24)
342#define CxDRC2_MUSTWR (1 << 12)
343#define CxDRC2_CLK1067MT (1 << 0)
344
345/* DRAM Timing registers (32-bit each) */
346#define CxDRT0_MCHBAR(x) (0x1210 + (x * 0x0100))
347#define CxDRT0_BtB_WtP_SHIFT 26
348#define CxDRT0_BtB_WtP_MASK (0x1f << CxDRT0_BtB_WtP_SHIFT)
349#define CxDRT0_BtB_WtR_SHIFT 20
350#define CxDRT0_BtB_WtR_MASK (0x1f << CxDRT0_BtB_WtR_SHIFT)
351#define CxDRT1_MCHBAR(x) (0x1214 + (x * 0x0100))
352#define CxDRT2_MCHBAR(x) (0x1218 + (x * 0x0100))
353#define CxDRT3_MCHBAR(x) (0x121c + (x * 0x0100))
354#define CxDRT4_MCHBAR(x) (0x1220 + (x * 0x0100))
355#define CxDRT5_MCHBAR(x) (0x1224 + (x * 0x0100))
356#define CxDRT6_MCHBAR(x) (0x1228 + (x * 0x0100))
357
358/* Clock disable registers (32-bit each) */
359#define CxDCLKDIS_MCHBAR(x) (0x120c + (x * 0x0100))
360#define CxDCLKDIS_MASK 3
361#define CxDCLKDIS_ENABLE 3 /* Always enable both clock pairs. */
362
363/* On-Die-Termination registers (2x 32-bit per channel) */
364#define CxODT_HIGH(x) (0x124c + (x * 0x0100))
365#define CxODT_LOW(x) (0x1248 + (x * 0x0100))
366
367/* Write Training registers. */
368#define CxWRTy_MCHBAR(ch, s) (0x1470 + (ch * 0x0100) + ((3 - s) * 4))
369
370#define CxGTEW(x) (0x1270+(x*0x100))
371#define CxGTC(x) (0x1274+(x*0x100))
372#define CxDTPEW(x) (0x1278+(x*0x100))
373#define CxDTAEW(x) (0x1280+(x*0x100))
374#define CxDTC(x) (0x1288+(x*0x100))
375
376
377/*
378 * DMIBAR
379 */
380
381#define DMIBAR8(x) *((volatile u8 *)(DEFAULT_DMIBAR + x))
382#define DMIBAR16(x) *((volatile u16 *)(DEFAULT_DMIBAR + x))
383#define DMIBAR32(x) *((volatile u32 *)(DEFAULT_DMIBAR + x))
384
385#define DMIVC0RCTL 0x14
386#define DMIVC1RCTL 0x20
387#define DMIVC1RSTS 0x26
388#define DMIESD 0x44
389#define DMILE1D 0x50
390#define DMILE1A 0x58
391#define DMILE2D 0x60
392#define DMILE2A 0x68
393
394
395/*
396 * EPBAR
397 */
398
399#define EPBAR8(x) *((volatile u8 *)(DEFAULT_EPBAR + x))
400#define EPBAR16(x) *((volatile u16 *)(DEFAULT_EPBAR + x))
401#define EPBAR32(x) *((volatile u32 *)(DEFAULT_EPBAR + x))
402
403#define EPESD 0x44
404#define EPLE1D 0x50
405#define EPLE1A 0x58
406#define EPLE2D 0x60
407
408
409#ifndef __ACPI__
410void gm45_early_init(void);
411void gm45_early_reset(void);
412
413void enter_raminit_or_reset(void);
414void get_gmch_info(sysinfo_t *);
415void raminit(sysinfo_t *, int s3resume);
416void raminit_thermal(const sysinfo_t *);
Vladimir Serbinenko56ae8a02014-08-16 10:59:02 +0200417void init_igd(const sysinfo_t *const);
Vladimir Serbinenko020dc0e2014-08-12 22:50:40 +0200418void init_pm(const sysinfo_t *, int do_freq_scaling_cfg);
Vladimir Serbinenko56ae8a02014-08-16 10:59:02 +0200419void igd_compute_ggc(sysinfo_t *const sysinfo);
Patrick Georgi2efc8802012-11-06 11:03:53 +0100420
421int raminit_read_vco_index(void);
422u32 raminit_get_rank_addr(unsigned int channel, unsigned int rank);
423
424void raminit_rcomp_calibration(stepping_t stepping);
425void raminit_reset_readwrite_pointers(void);
426void raminit_receive_enable_calibration(const timings_t *, const dimminfo_t *);
427void raminit_write_training(const mem_clock_t, const dimminfo_t *, int s3resume);
428void raminit_read_training(const dimminfo_t *, int s3resume);
429
430void gm45_late_init(stepping_t);
431
432u32 decode_igd_memory_size(u32 gms);
433u32 decode_igd_gtt_size(u32 gsm);
Patrick Georgi2efc8802012-11-06 11:03:53 +0100434
435void init_iommu(void);
Vladimir Serbinenko33769a52014-08-30 22:39:20 +0200436
Furquan Shaikh25f75b22016-08-29 22:51:41 -0700437#if ENV_RAMSTAGE && !defined(__SIMPLE_DEVICE__)
Alexander Couzens83fc32f2015-04-12 22:28:37 +0200438#include <device/device.h>
439
Vladimir Serbinenko33769a52014-08-30 22:39:20 +0200440struct acpi_rsdp;
Alexander Couzens83fc32f2015-04-12 22:28:37 +0200441unsigned long northbridge_write_acpi_tables(device_t device, unsigned long start, struct acpi_rsdp *rsdp);
Patrick Georgi2efc8802012-11-06 11:03:53 +0100442#endif
443
Alexander Couzens83fc32f2015-04-12 22:28:37 +0200444
445#endif /* !__ACPI__ */
Edward O'Callaghan089a5102015-01-06 02:48:57 +1100446#endif /* __NORTHBRIDGE_INTEL_GM45_GM45_H__ */