/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2007-2008 coresystems GmbH
 *               2012 secunet Security Networks AG
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */
20
21#ifndef __NORTHBRIDGE_INTEL_GM45_GM45_H__
22#define __NORTHBRIDGE_INTEL_GM45_GM45_H__ 1
23
24#include "southbridge/intel/i82801ix/i82801ix.h"
25
26#ifndef __ACPI__
27
28#include <stdint.h>
29
/* Front-side-bus frequency encoding. The values are register encodings
   (presumably the 3-bit CLKCFG_FSBCLK field below — confirm in raminit),
   not ordered by frequency. */
typedef enum {
	FSB_CLOCK_1067MHz = 0,
	FSB_CLOCK_800MHz = 1,
	FSB_CLOCK_667MHz = 2,
} fsb_clock_t;

/* GMCH silicon stepping. */
typedef enum { /* Steppings below B1 were pre-production,
		  conversion stepping A1 is... ?
		  We'll support B1, B2, B3, and conversion stepping A1. */
	STEPPING_A0 = 0,
	STEPPING_A1 = 1,
	STEPPING_A2 = 2,
	STEPPING_A3 = 3,
	STEPPING_B0 = 4,
	STEPPING_B1 = 5,
	STEPPING_B2 = 6,
	STEPPING_B3 = 7,
	/* Gap at 8: value 9 is the conversion stepping (see note above). */
	STEPPING_CONVERSION_A1 = 9,
} stepping_t;
49
/* GMCH (northbridge) variant — presumably detected at runtime and
   stored in sysinfo_t.gfx_type (see get_gmch_info() below). */
typedef enum {
	GMCH_GM45 = 0,
	GMCH_GM47,
	GMCH_GM49,
	GMCH_GE45,
	GMCH_GL40,
	GMCH_GL43,
	GMCH_GS40,
	GMCH_GS45,
	GMCH_PM45,
	GMCH_UNKNOWN
} gmch_gfx_t;

/* Memory clock encoding. The MHz names (DDR2 clock) and the MT names
   (DDR3 mega-transfer rate) deliberately alias the same register
   encodings: 533MHz/1067MT = 0, 400MHz/800MT = 1, 333MHz/667MT = 2. */
typedef enum {
	MEM_CLOCK_533MHz = 0,
	MEM_CLOCK_400MHz = 1,
	MEM_CLOCK_333MHz = 2,
	MEM_CLOCK_1067MT = 0,
	MEM_CLOCK_800MT = 1,
	MEM_CLOCK_667MT = 2,
} mem_clock_t;

/* SDRAM generation; the numeric value equals the DDR generation. */
typedef enum {
	DDR1 = 1,
	DDR2 = 2,
	DDR3 = 3,
} ddr_t;
77
/* DRAM channel organization selected by raminit. */
typedef enum {
	CHANNEL_MODE_SINGLE,
	CHANNEL_MODE_DUAL_ASYNC,
	CHANNEL_MODE_DUAL_INTERLEAVED,
} channel_mode_t;

/* SDRAM device (chip) data width encoding, as in DDR3 spd. */
typedef enum { /* as in DDR3 spd */
	CHIP_WIDTH_x4 = 0,
	CHIP_WIDTH_x8 = 1,
	CHIP_WIDTH_x16 = 2,
	CHIP_WIDTH_x32 = 3,
} chip_width_t;

/* SDRAM device (chip) density encoding, as in DDR3 spd
   (presumably in bits per die, per the SPD convention — confirm). */
typedef enum { /* as in DDR3 spd */
	CHIP_CAP_256M = 0,
	CHIP_CAP_512M = 1,
	CHIP_CAP_1G = 2,
	CHIP_CAP_2G = 3,
	CHIP_CAP_4G = 4,
	CHIP_CAP_8G = 5,
	CHIP_CAP_16G = 6,
} chip_capacity_t;
100
/* Selected FSB/memory clocking and DRAM timing parameters that the
   controller will be programmed with. Units of the t* fields are not
   stated here — presumably memory-clock cycles; confirm in raminit. */
typedef struct {
	unsigned int CAS;		/* selected CAS latency */
	fsb_clock_t fsb_clock;
	mem_clock_t mem_clock;
	channel_mode_t channel_mode;
	unsigned int tRAS;
	unsigned int tRP;
	unsigned int tRCD;
	unsigned int tRFC;
	unsigned int tWR;
	unsigned int tRD;
	unsigned int tRRD;
	unsigned int tFAW;
	unsigned int tWL;
} timings_t;

/* Geometry of one DIMM — presumably decoded from its SPD. */
typedef struct {
	unsigned int card_type; /* 0x0: unpopulated,
				   0xa - 0xf: raw card type A - F */
	chip_width_t chip_width;
	chip_capacity_t chip_capacity;
	unsigned int page_size; /* of whole DIMM in Bytes (4096 or 8192) */
	unsigned int banks;
	unsigned int ranks;
	unsigned int rank_capacity_mb; /* per rank in Mega Bytes */
} dimminfo_t;
127
/* The setup is one DIMM per channel, so there's no need to find a
   common timing setup between multiple chips (but chip and controller
   still need to be coordinated */
/* Central state shared across the GM45 northbridge/raminit code;
   presumably filled by get_gmch_info() and raminit() (see the
   prototypes at the end of this file). */
typedef struct {
	stepping_t stepping;
	int txt_enabled;	/* NOTE(review): looks like a TXT-support flag — confirm */
	int cores;		/* CPU core count */
	gmch_gfx_t gfx_type;
	int gs45_low_power_mode; /* low power mode of GMCH_GS45 */
	int max_ddr2_mhz;	/* platform maximum DDR2 clock in MHz */
	int max_ddr3_mt;	/* platform maximum DDR3 rate in MT/s */
	fsb_clock_t max_fsb;
	int max_fsb_mhz;
	int max_render_mhz;

	int spd_type;		/* presumably a ddr_t value — confirm */
	timings_t selected_timings;
	dimminfo_t dimms[2];	/* one entry per channel */
} sysinfo_t;
/* Channel/rank iteration helpers. One DIMM per channel, two channels,
   up to four ranks addressable per channel. The `dimms` argument is a
   dimminfo_t array indexed by channel; the index/loop-variable
   arguments must be assignable ints. All value arguments are
   parenthesized so the macros stay well-formed when called with
   expressions (CERT PRE01-C). */
#define TOTAL_CHANNELS 2
#define CHANNEL_IS_POPULATED(dimms, idx) ((dimms)[(idx)].card_type != 0)
#define CHANNEL_IS_CARDF(dimms, idx) ((dimms)[(idx)].card_type == 0xf)
#define IF_CHANNEL_POPULATED(dimms, idx) if ((dimms)[(idx)].card_type != 0)
#define FOR_EACH_CHANNEL(idx) \
	for (idx = 0; idx < TOTAL_CHANNELS; ++idx)
#define FOR_EACH_POPULATED_CHANNEL(dimms, idx) \
	FOR_EACH_CHANNEL(idx) IF_CHANNEL_POPULATED(dimms, idx)

#define RANKS_PER_CHANNEL 4 /* Only two may be populated */
#define IF_RANK_POPULATED(dimms, ch, r) \
	if ((dimms)[(ch)].card_type && ((r) < (dimms)[(ch)].ranks))
#define FOR_EACH_RANK_IN_CHANNEL(r) \
	for (r = 0; r < RANKS_PER_CHANNEL; ++r)
#define FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms, ch, r) \
	FOR_EACH_RANK_IN_CHANNEL(r) IF_RANK_POPULATED(dimms, ch, r)
#define FOR_EACH_RANK(ch, r) \
	FOR_EACH_CHANNEL(ch) FOR_EACH_RANK_IN_CHANNEL(r)
#define FOR_EACH_POPULATED_RANK(dimms, ch, r) \
	FOR_EACH_RANK(ch, r) IF_RANK_POPULATED(dimms, ch, r)

/* Maximum CAS latency considered for DDR3. */
#define DDR3_MAX_CAS 18
169
/* VCO frequency encodings — presumably the values read back via
   raminit_read_vco_index(); register encodings, not ordered by
   frequency (note VCO_2666 = 4). */
enum {
	VCO_2666 = 4,
	VCO_3200 = 0,
	VCO_4000 = 1,
	VCO_5333 = 2,
};

#endif /* !__ACPI__ */
178
/* Offsets of read/write training results in CMOS.
   They will be restored upon S3 resumes. */
#define CMOS_READ_TRAINING 0x80 /* 16 bytes */
#define CMOS_WRITE_TRAINING 0x90 /* 16 bytes
				    (could be reduced to 10 bytes) */


/* Default MMIO base addresses of the chipset register windows,
   all inside the fixed 0xfed1xxxx area. */
#define DEFAULT_MCHBAR 0xfed14000	/* memory controller hub */
#define DEFAULT_DMIBAR 0xfed18000	/* direct media interface */
#define DEFAULT_EPBAR 0xfed19000	/* egress port */
#define DEFAULT_HECIBAR 0xfed1a000	/* HECI (presumably the ME interface — confirm) */

 /* 4 KB per PCIe device */
#define DEFAULT_PCIEXBAR CONFIG_MMCONF_BASE_ADDRESS

/* IOMMU register blocks (see init_iommu() below);
   NOTE(review): per-unit mapping not documented here — confirm. */
#define IOMMU_BASE1 0xfed90000
#define IOMMU_BASE2 0xfed91000
#define IOMMU_BASE3 0xfed92000
#define IOMMU_BASE4 0xfed93000
198
/*
 * D0:F0 — host bridge / DRAM controller PCI configuration registers.
 */
#define D0F0_EPBAR_LO 0x40
#define D0F0_EPBAR_HI 0x44
#define D0F0_MCHBAR_LO 0x48
#define D0F0_MCHBAR_HI 0x4c
#define D0F0_GGC 0x52		/* GMCH graphics control */
#define D0F0_DEVEN 0x54		/* device enable */
#define D0F0_PCIEXBAR_LO 0x60
#define D0F0_PCIEXBAR_HI 0x64
#define D0F0_DMIBAR_LO 0x68
#define D0F0_DMIBAR_HI 0x6c
#define D0F0_PMBASE 0x78
#define D0F0_PAM(x) (0x90+(x)) /* 0-6*/
#define D0F0_REMAPBASE 0x98	/* DRAM remap window base */
#define D0F0_REMAPLIMIT 0x9a	/* DRAM remap window limit */
#define D0F0_SMRAM 0x9d		/* system management RAM control */
#define D0F0_ESMRAMC 0x9e	/* extended SMRAM control */
#define D0F0_TOM 0xa0		/* top of memory */
#define D0F0_TOUUD 0xa2		/* top of upper usable DRAM */
#define D0F0_TOLUD 0xb0		/* top of low usable DRAM */
#define D0F0_SKPD 0xdc /* Scratchpad Data */
#define D0F0_CAPID0 0xe0	/* capability identifier */

/*
 * D1:F0 PEG — PCI Express graphics port configuration registers.
 */
#define PEG_CAP 0xa2
#define SLOTCAP 0xb4
#define PEGLC 0xec
#define D1F0_VCCAP 0x104
#define D1F0_VC0RCTL 0x114
232
/*
 * Graphics frequencies
 */
/* GCFGC: clock-frequency control register in the config space of the
   internal graphics device (bus 0, device 2, function 0), at offset
   0xf0. NOTE(review): field meanings inferred from names — CR/CS look
   like core/sampler clock selects, CD a divider bit, UPDATE a latch
   strobe; confirm against the chipset datasheet. */
#define GCFGC_PCIDEV PCI_DEV(0, 2, 0)
#define GCFGC_OFFSET 0xf0
#define GCFGC_CR_SHIFT 0
#define GCFGC_CR_MASK (0xf << GCFGC_CR_SHIFT)
#define GCFGC_CS_SHIFT 8
#define GCFGC_CS_MASK (0xf << GCFGC_CS_SHIFT)
#define GCFGC_CD_SHIFT 12
#define GCFGC_CD_MASK (0x1 << GCFGC_CD_SHIFT)
#define GCFGC_UPDATE_SHIFT 5
#define GCFGC_UPDATE (0x1 << GCFGC_UPDATE_SHIFT)
246
/*
 * MCHBAR — memory controller hub register window (MMIO at
 * DEFAULT_MCHBAR).
 */

/* MMIO accessors; each expands to an lvalue of the given width.
   Argument and expansion are parenthesized so calls like
   MCHBAR32(base + off) remain well-formed (CERT PRE01-C/PRE02-C). */
#define MCHBAR8(x) (*((volatile u8 *)(DEFAULT_MCHBAR + (x))))
#define MCHBAR16(x) (*((volatile u16 *)(DEFAULT_MCHBAR + (x))))
#define MCHBAR32(x) (*((volatile u32 *)(DEFAULT_MCHBAR + (x))))

#define PMSTS_MCHBAR 0x0f14 /* Self refresh channel status */
#define PMSTS_WARM_RESET (1 << 1)
#define PMSTS_BOTH_SELFREFRESH (1 << 0)

/* Clock configuration: 3-bit FSB and memory clock selects plus an
   update strobe. */
#define CLKCFG_MCHBAR 0x0c00
#define CLKCFG_FSBCLK_SHIFT 0
#define CLKCFG_FSBCLK_MASK (7 << CLKCFG_FSBCLK_SHIFT)
#define CLKCFG_MEMCLK_SHIFT 4
#define CLKCFG_MEMCLK_MASK (7 << CLKCFG_MEMCLK_SHIFT)
#define CLKCFG_UPDATE (1 << 12)

/* Sticky scratchpad; SSKPD_CLK_* decode a clock field stored there —
   NOTE(review): confirm in raminit. */
#define SSKPD_MCHBAR 0x0c1c
#define SSKPD_CLK_SHIFT 0
#define SSKPD_CLK_MASK (7 << SSKPD_CLK_SHIFT)
269
/* DRAM controller mode/command register. DCC_CMD_* select the command
   issued to the DIMMs. */
#define DCC_MCHBAR 0x200
#define DCC_NO_CHANXOR (1 << 10)
#define DCC_INTERLEAVED (1 << 1)
#define DCC_CMD_SHIFT 16
#define DCC_CMD_MASK (7 << DCC_CMD_SHIFT)
#define DCC_CMD_NOP (1 << DCC_CMD_SHIFT)
	/* For mode register mr0: */
#define DCC_SET_MREG (3 << DCC_CMD_SHIFT)
	/* For extended mode registers mr1 to mr3: */
#define DCC_SET_EREG (4 << DCC_CMD_SHIFT)
#define DCC_SET_EREG_SHIFT 21
#define DCC_SET_EREG_MASK (DCC_CMD_MASK | (3 << DCC_SET_EREG_SHIFT))
/* Command word selecting extended mode register x (1..3); the argument
   is parenthesized so expressions are safe (CERT PRE01-C). */
#define DCC_SET_EREGx(x) ((DCC_SET_EREG | \
			   (((x) - 1) << DCC_SET_EREG_SHIFT)) & \
			  DCC_SET_EREG_MASK)
285
/* Per channel DRAM Row Attribute registers (32-bit); x is the channel,
   r the rank. All arguments are parenthesized for expression safety
   (CERT PRE01-C) — e.g. CxDRA_MCHBAR(1 + 1) now expands correctly. */
#define CxDRA_MCHBAR(x) (0x1208 + ((x) * 0x0100))
#define CxDRA_PAGESIZE_SHIFT(r) ((r) * 4) /* Per rank r */
#define CxDRA_PAGESIZE_MASKr(r) (0x7 << CxDRA_PAGESIZE_SHIFT(r))
#define CxDRA_PAGESIZE_MASK 0x0000ffff
#define CxDRA_PAGESIZE(r, p) /* for log2(dimm page size in bytes) p */ \
	((((p) - 10) << CxDRA_PAGESIZE_SHIFT(r)) & CxDRA_PAGESIZE_MASKr(r))
#define CxDRA_BANKS_SHIFT(r) (((r) * 3) + 16)
#define CxDRA_BANKS_MASKr(r) (0x3 << CxDRA_BANKS_SHIFT(r))
#define CxDRA_BANKS_MASK 0x07ff0000
#define CxDRA_BANKS(r, b) /* for number of banks b */ \
	(((b) << (CxDRA_BANKS_SHIFT(r) - 3)) & CxDRA_BANKS_MASKr(r))

/*
 * Per channel DRAM Row Boundary registers (32-bit)
 * Every two ranks share one register and must be programmed at the same time.
 * All registers (4 ranks per channel) have to be set.
 */
#define CxDRBy_MCHBAR(x, r) (0x1200 + ((x) * 0x0100) + (((r) / 2) * 4))
#define CxDRBy_BOUND_SHIFT(r) (((r) % 2) * 16)
#define CxDRBy_BOUND_MASK(r) (0x1fc << CxDRBy_BOUND_SHIFT(r))
#define CxDRBy_BOUND_MB(r, b) /* for boundary in MB b */ \
	((((b) >> 5) << CxDRBy_BOUND_SHIFT(r)) & CxDRBy_BOUND_MASK(r))
309
/* Per channel DRAM Controller registers 0..2 (32-bit each);
   x is the channel, r the rank. Arguments are parenthesized for
   expression safety (CERT PRE01-C). */
#define CxDRC0_MCHBAR(x) (0x1230 + ((x) * 0x0100))
#define CxDRC0_RANKEN0 (1 << 24) /* Rank Enable */
#define CxDRC0_RANKEN1 (1 << 25)
#define CxDRC0_RANKEN2 (1 << 26)
#define CxDRC0_RANKEN3 (1 << 27)
#define CxDRC0_RANKEN(r) (1 << (24 + (r)))
#define CxDRC0_RANKEN_MASK (0xf << 24)
#define CxDRC0_RMS_SHIFT 8 /* Refresh Mode Select */
#define CxDRC0_RMS_MASK (7 << CxDRC0_RMS_SHIFT)
#define CxDRC0_RMS_78US (2 << CxDRC0_RMS_SHIFT)
#define CxDRC0_RMS_39US (3 << CxDRC0_RMS_SHIFT)

#define CxDRC1_MCHBAR(x) (0x1234 + ((x) * 0x0100))
#define CxDRC1_SSDS_SHIFT 24
/* Unsigned constants: left-shifting 0xff/0x91/0xb1 into bits 31:24 of
   a signed int would be undefined behavior (C99 6.5.7); bit patterns
   are unchanged. */
#define CxDRC1_SSDS_MASK (0xffu << CxDRC1_SSDS_SHIFT)
#define CxDRC1_DS (0x91u << CxDRC1_SSDS_SHIFT)
#define CxDRC1_SS (0xb1u << CxDRC1_SSDS_SHIFT)
#define CxDRC1_NOTPOP(r) (1 << (16 + (r))) /* Write 1 for Not Populated */
#define CxDRC1_NOTPOP_MASK (0xf << 16)
#define CxDRC1_MUSTWR (3 << 11)

#define CxDRC2_MCHBAR(x) (0x1238 + ((x) * 0x0100))
#define CxDRC2_NOTPOP(r) (1 << (24 + (r))) /* Write 1 for Not Populated */
#define CxDRC2_NOTPOP_MASK (0xf << 24)
#define CxDRC2_MUSTWR (1 << 12)
#define CxDRC2_CLK1067MT (1 << 0)
336
/* DRAM Timing registers (32-bit each); one set per channel, channel
   stride 0x100. Arguments are parenthesized for expression safety
   (CERT PRE01-C). */
#define CxDRT0_MCHBAR(x) (0x1210 + ((x) * 0x0100))
#define CxDRT0_BtB_WtP_SHIFT 26 /* back-to-back write-to-? — NOTE(review): confirm */
#define CxDRT0_BtB_WtP_MASK (0x1f << CxDRT0_BtB_WtP_SHIFT)
#define CxDRT0_BtB_WtR_SHIFT 20 /* back-to-back write-to-read — NOTE(review): confirm */
#define CxDRT0_BtB_WtR_MASK (0x1f << CxDRT0_BtB_WtR_SHIFT)
#define CxDRT1_MCHBAR(x) (0x1214 + ((x) * 0x0100))
#define CxDRT2_MCHBAR(x) (0x1218 + ((x) * 0x0100))
#define CxDRT3_MCHBAR(x) (0x121c + ((x) * 0x0100))
#define CxDRT4_MCHBAR(x) (0x1220 + ((x) * 0x0100))
#define CxDRT5_MCHBAR(x) (0x1224 + ((x) * 0x0100))
#define CxDRT6_MCHBAR(x) (0x1228 + ((x) * 0x0100))

/* Clock disable registers (32-bit each) */
#define CxDCLKDIS_MCHBAR(x) (0x120c + ((x) * 0x0100))
#define CxDCLKDIS_MASK 3
#define CxDCLKDIS_ENABLE 3 /* Always enable both clock pairs. */

/* On-Die-Termination registers (2x 32-bit per channel) */
#define CxODT_HIGH(x) (0x124c + ((x) * 0x0100))
#define CxODT_LOW(x) (0x1248 + ((x) * 0x0100))

/* Write Training registers: four 32-bit registers per channel,
   selected by s = 0..3 in reverse address order. */
#define CxWRTy_MCHBAR(ch, s) (0x1470 + ((ch) * 0x0100) + ((3 - (s)) * 4))

#define CxGTEW(x) (0x1270 + ((x) * 0x100))
#define CxGTC(x) (0x1274 + ((x) * 0x100))
#define CxDTPEW(x) (0x1278 + ((x) * 0x100))
#define CxDTAEW(x) (0x1280 + ((x) * 0x100))
#define CxDTC(x) (0x1288 + ((x) * 0x100))
367
368
/*
 * DMIBAR — direct media interface register window (MMIO at
 * DEFAULT_DMIBAR).
 */

/* MMIO accessors; each expands to an lvalue of the given width.
   Argument and expansion are parenthesized for expression safety
   (CERT PRE01-C/PRE02-C). */
#define DMIBAR8(x) (*((volatile u8 *)(DEFAULT_DMIBAR + (x))))
#define DMIBAR16(x) (*((volatile u16 *)(DEFAULT_DMIBAR + (x))))
#define DMIBAR32(x) (*((volatile u32 *)(DEFAULT_DMIBAR + (x))))

/* Register names follow the PCIe virtual-channel/link registers —
   NOTE(review): exact semantics not documented here. */
#define DMIVC0RCTL 0x14
#define DMIVC1RCTL 0x20
#define DMIVC1RSTS 0x26
#define DMIESD 0x44
#define DMILE1D 0x50
#define DMILE1A 0x58
#define DMILE2D 0x60
#define DMILE2A 0x68


/*
 * EPBAR — egress port register window (MMIO at DEFAULT_EPBAR).
 */

#define EPBAR8(x) (*((volatile u8 *)(DEFAULT_EPBAR + (x))))
#define EPBAR16(x) (*((volatile u16 *)(DEFAULT_EPBAR + (x))))
#define EPBAR32(x) (*((volatile u32 *)(DEFAULT_EPBAR + (x))))

#define EPESD 0x44
#define EPLE1D 0x50
#define EPLE1A 0x58
#define EPLE2D 0x60
399
400
#ifndef __ACPI__
/* Early (pre-RAM) northbridge setup and reset. */
void gm45_early_init(void);
void gm45_early_reset(void);

/* Memory initialization. get_gmch_info() fills the sysinfo_t;
   raminit() programs the controller (s3resume presumably selects
   restoring the training results saved at CMOS_*_TRAINING above —
   confirm). */
void enter_raminit_or_reset(void);
void get_gmch_info(sysinfo_t *);
void raminit(sysinfo_t *, int s3resume);
void raminit_thermal(const sysinfo_t *);
void init_igd(const sysinfo_t *, int no_igd, int no_peg);
void init_pm(const sysinfo_t *);

/* Returns one of the VCO_* encodings above — confirm. */
int raminit_read_vco_index(void);
/* NOTE(review): presumably the base address of the given rank — confirm. */
u32 raminit_get_rank_addr(unsigned int channel, unsigned int rank);

/* Individual calibration/training steps used during raminit. */
void raminit_rcomp_calibration(stepping_t stepping);
void raminit_reset_readwrite_pointers(void);
void raminit_receive_enable_calibration(const timings_t *, const dimminfo_t *);
void raminit_write_training(const mem_clock_t, const dimminfo_t *, int s3resume);
void raminit_read_training(const dimminfo_t *, int s3resume);

/* Post-RAM (ramstage) northbridge init. */
void gm45_late_init(stepping_t);

/* Decode the GGC graphics/GTT memory size fields — units not stated
   here; confirm at the callers. */
u32 decode_igd_memory_size(u32 gms);
u32 decode_igd_gtt_size(u32 gsm);
u32 get_top_of_ram(void);

void init_iommu(void);
#endif /* !__ACPI__ */

#endif /* __NORTHBRIDGE_INTEL_GM45_GM45_H__ */