blob: d18b3d42c82a59d00006be43d9c4c9d1ef0dc451 [file] [log] [blame]
Angel Pons4b429832020-04-02 23:48:50 +02001/* SPDX-License-Identifier: GPL-2.0-only */
Patrick Georgi2efc8802012-11-06 11:03:53 +01002
3#ifndef __NORTHBRIDGE_INTEL_GM45_GM45_H__
Edward O'Callaghan089a5102015-01-06 02:48:57 +11004#define __NORTHBRIDGE_INTEL_GM45_GM45_H__
Patrick Georgi2efc8802012-11-06 11:03:53 +01005
Elyes HAOUAS21b71ce62018-06-16 18:43:52 +02006#include <southbridge/intel/i82801ix/i82801ix.h>
Patrick Georgi2efc8802012-11-06 11:03:53 +01007
8#ifndef __ACPI__
9
10#include <stdint.h>
11
/* Supported front-side bus speed grades.
   NOTE(review): encoding appears to match the CLKCFG FSBCLK field — confirm in raminit. */
typedef enum {
	FSB_CLOCK_1067MHz = 0,
	FSB_CLOCK_800MHz = 1,
	FSB_CLOCK_667MHz = 2,
} fsb_clock_t;

typedef enum { /* Steppings below B1 were pre-production,
		  conversion stepping A1 is... ?
		  We'll support B1, B2, B3, and conversion stepping A1. */
	STEPPING_A0 = 0,
	STEPPING_A1 = 1,
	STEPPING_A2 = 2,
	STEPPING_A3 = 3,
	STEPPING_B0 = 4,
	STEPPING_B1 = 5,
	STEPPING_B2 = 6,
	STEPPING_B3 = 7,
	/* Note: value 8 is intentionally skipped. */
	STEPPING_CONVERSION_A1 = 9,
} stepping_t;

/* GMCH (northbridge) variants of the GM45 family. */
typedef enum {
	GMCH_GM45 = 0,
	GMCH_GM47,
	GMCH_GM49,
	GMCH_GE45,
	GMCH_GL40,
	GMCH_GL43,
	GMCH_GS40,
	GMCH_GS45,
	GMCH_PM45,
	GMCH_UNKNOWN
} gmch_gfx_t;

/* Memory clock selection. The same hardware encoding can be spelled
   either as the DRAM clock in MHz or as the DDR transfer rate in MT/s
   (e.g. 533MHz clock == 1067MT/s): the value pairs below are aliases. */
typedef enum {
	MEM_CLOCK_533MHz = 0,
	MEM_CLOCK_400MHz = 1,
	MEM_CLOCK_333MHz = 2,
	MEM_CLOCK_1067MT = 0,
	MEM_CLOCK_800MT = 1,
	MEM_CLOCK_667MT = 2,
} mem_clock_t;

/* DRAM generation; values match the numeric DDR generation. */
typedef enum {
	DDR1 = 1,
	DDR2 = 2,
	DDR3 = 3,
} ddr_t;

/* Memory channel organization. */
typedef enum {
	CHANNEL_MODE_SINGLE,
	CHANNEL_MODE_DUAL_ASYNC,
	CHANNEL_MODE_DUAL_INTERLEAVED,
} channel_mode_t;

typedef enum { /* as in DDR3 spd */
	CHIP_WIDTH_x4 = 0,
	CHIP_WIDTH_x8 = 1,
	CHIP_WIDTH_x16 = 2,
	CHIP_WIDTH_x32 = 3,
} chip_width_t;

typedef enum { /* as in DDR3 spd */
	CHIP_CAP_256M = 0,
	CHIP_CAP_512M = 1,
	CHIP_CAP_1G = 2,
	CHIP_CAP_2G = 3,
	CHIP_CAP_4G = 4,
	CHIP_CAP_8G = 5,
	CHIP_CAP_16G = 6,
} chip_capacity_t;

83typedef struct {
84 unsigned int CAS;
85 fsb_clock_t fsb_clock;
86 mem_clock_t mem_clock;
87 channel_mode_t channel_mode;
88 unsigned int tRAS;
89 unsigned int tRP;
90 unsigned int tRCD;
91 unsigned int tRFC;
92 unsigned int tWR;
93 unsigned int tRD;
94 unsigned int tRRD;
95 unsigned int tFAW;
96 unsigned int tWL;
97} timings_t;
98
99typedef struct {
100 unsigned int card_type; /* 0x0: unpopulated,
101 0xa - 0xf: raw card type A - F */
102 chip_width_t chip_width;
103 chip_capacity_t chip_capacity;
104 unsigned int page_size; /* of whole DIMM in Bytes (4096 or 8192) */
105 unsigned int banks;
106 unsigned int ranks;
Martin Roth128c1042016-11-18 09:29:03 -0700107 unsigned int rank_capacity_mb; /* per rank in Megabytes */
Patrick Georgi2efc8802012-11-06 11:03:53 +0100108} dimminfo_t;
109
110/* The setup is one DIMM per channel, so there's no need to find a
111 common timing setup between multiple chips (but chip and controller
112 still need to be coordinated */
113typedef struct {
114 stepping_t stepping;
115 int txt_enabled;
116 int cores;
117 gmch_gfx_t gfx_type;
Patrick Georgi2efc8802012-11-06 11:03:53 +0100118 int max_ddr2_mhz;
119 int max_ddr3_mt;
120 fsb_clock_t max_fsb;
121 int max_fsb_mhz;
122 int max_render_mhz;
Vladimir Serbinenko56ae8a02014-08-16 10:59:02 +0200123 int enable_igd;
124 int enable_peg;
125 u16 ggc;
Patrick Georgi2efc8802012-11-06 11:03:53 +0100126
Nico Huber5aaeb272015-12-30 00:17:27 +0100127 /* to be filled in romstage main: */
Patrick Georgi2efc8802012-11-06 11:03:53 +0100128 int spd_type;
129 timings_t selected_timings;
130 dimminfo_t dimms[2];
Vladimir Serbinenkoc4d89482014-06-05 09:14:48 +0200131 u8 spd_map[4];
Nico Huber5aaeb272015-12-30 00:17:27 +0100132 int gs45_low_power_mode; /* low power mode of GMCH_GS45 */
133 int sff; /* small form factor option (soldered down DIMM) */
Patrick Georgi2efc8802012-11-06 11:03:53 +0100134} sysinfo_t;
135#define TOTAL_CHANNELS 2
136#define CHANNEL_IS_POPULATED(dimms, idx) (dimms[idx].card_type != 0)
137#define CHANNEL_IS_CARDF(dimms, idx) (dimms[idx].card_type == 0xf)
138#define IF_CHANNEL_POPULATED(dimms, idx) if (dimms[idx].card_type != 0)
139#define FOR_EACH_CHANNEL(idx) \
140 for (idx = 0; idx < TOTAL_CHANNELS; ++idx)
141#define FOR_EACH_POPULATED_CHANNEL(dimms, idx) \
142 FOR_EACH_CHANNEL(idx) IF_CHANNEL_POPULATED(dimms, idx)
143
144#define RANKS_PER_CHANNEL 4 /* Only two may be populated */
145#define IF_RANK_POPULATED(dimms, ch, r) \
146 if (dimms[ch].card_type && ((r) < dimms[ch].ranks))
147#define FOR_EACH_RANK_IN_CHANNEL(r) \
148 for (r = 0; r < RANKS_PER_CHANNEL; ++r)
149#define FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms, ch, r) \
150 FOR_EACH_RANK_IN_CHANNEL(r) IF_RANK_POPULATED(dimms, ch, r)
151#define FOR_EACH_RANK(ch, r) \
152 FOR_EACH_CHANNEL(ch) FOR_EACH_RANK_IN_CHANNEL(r)
153#define FOR_EACH_POPULATED_RANK(dimms, ch, r) \
154 FOR_EACH_RANK(ch, r) IF_RANK_POPULATED(dimms, ch, r)
155
#define DDR3_MAX_CAS 18

/* VCO frequency indices as used by raminit_read_vco_index(). */
enum {
	VCO_2666 = 4,
	VCO_3200 = 0,
	VCO_4000 = 1,
	VCO_5333 = 2,
};

165#endif
166
167/* Offsets of read/write training results in CMOS.
168 They will be restored upon S3 resumes. */
169#define CMOS_READ_TRAINING 0x80 /* 16 bytes */
170#define CMOS_WRITE_TRAINING 0x90 /* 16 bytes
171 (could be reduced to 10 bytes) */
172
173
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -0800174#ifndef __ACPI__
175#define DEFAULT_MCHBAR ((u8 *)0xfed14000)
176#define DEFAULT_DMIBAR ((u8 *)0xfed18000)
177#else
Patrick Georgi2efc8802012-11-06 11:03:53 +0100178#define DEFAULT_MCHBAR 0xfed14000
179#define DEFAULT_DMIBAR 0xfed18000
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -0800180#endif
Patrick Georgi2efc8802012-11-06 11:03:53 +0100181#define DEFAULT_EPBAR 0xfed19000
Kevin Paul Herbertbde6d302014-12-24 18:43:20 -0800182#define DEFAULT_HECIBAR ((u8 *)0xfed1a000)
Patrick Georgi2efc8802012-11-06 11:03:53 +0100183
Patrick Georgi2efc8802012-11-06 11:03:53 +0100184
185#define IOMMU_BASE1 0xfed90000
186#define IOMMU_BASE2 0xfed91000
187#define IOMMU_BASE3 0xfed92000
188#define IOMMU_BASE4 0xfed93000
189
/*
 * D0:F0 (host bridge) PCI configuration register offsets.
 */
#define D0F0_EPBAR_LO		0x40
#define D0F0_EPBAR_HI		0x44
#define D0F0_MCHBAR_LO		0x48
#define D0F0_MCHBAR_HI		0x4c
#define D0F0_GGC		0x52
#define D0F0_DEVEN		0x54
#define D0F0_PCIEXBAR_LO	0x60
#define D0F0_PCIEXBAR_HI	0x64
#define D0F0_DMIBAR_LO		0x68
#define D0F0_DMIBAR_HI		0x6c
#define D0F0_PMBASE		0x78
#define D0F0_PAM(x)		(0x90 + (x)) /* 0-6 */
#define D0F0_REMAPBASE		0x98
#define D0F0_REMAPLIMIT		0x9a
#define D0F0_SMRAM		0x9d
#define D0F0_ESMRAMC		0x9e
#define D0F0_TOM		0xa0
#define D0F0_TOUUD		0xa2
#define D0F0_TOLUD		0xb0
#define D0F0_SKPD		0xdc /* Scratchpad Data */
#define D0F0_CAPID0		0xe0

/*
 * D1:F0 PEG (PCI Express Graphics) configuration register offsets.
 */
#define PEG_CAP			0xa2
#define SLOTCAP			0xb4
#define PEGLC			0xec
#define D1F0_VCCAP		0x104
#define D1F0_VC0RCTL		0x114

/*
 * Graphics frequencies (GCFGC register in the IGD's config space).
 */
#define GCFGC_PCIDEV		PCI_DEV(0, 2, 0)
#define GCFGC_OFFSET		0xf0
#define GCFGC_CR_SHIFT		0
#define GCFGC_CR_MASK		(0xf << GCFGC_CR_SHIFT)
#define GCFGC_CS_SHIFT		8
#define GCFGC_CS_MASK		(0xf << GCFGC_CS_SHIFT)
#define GCFGC_CD_SHIFT		12
#define GCFGC_CD_MASK		(0x1 << GCFGC_CD_SHIFT)
#define GCFGC_UPDATE_SHIFT	5
#define GCFGC_UPDATE		(0x1 << GCFGC_UPDATE_SHIFT)

/*
 * MCHBAR (memory controller hub MMIO window) accessors.
 * Arguments are parenthesized to avoid precedence surprises when an
 * expression is passed as the offset.
 */

#define MCHBAR8(x)	(*((volatile u8 *)(DEFAULT_MCHBAR + (x))))
#define MCHBAR16(x)	(*((volatile u16 *)(DEFAULT_MCHBAR + (x))))
#define MCHBAR32(x)	(*((volatile u32 *)(DEFAULT_MCHBAR + (x))))

#define HPLLVCO_MCHBAR		0x0c0f

#define PMSTS_MCHBAR		0x0f14	/* Self refresh channel status */
#define PMSTS_WARM_RESET	(1 << 1)
#define PMSTS_BOTH_SELFREFRESH	(1 << 0)

#define CLKCFG_MCHBAR		0x0c00
#define CLKCFG_FSBCLK_SHIFT	0
#define CLKCFG_FSBCLK_MASK	(7 << CLKCFG_FSBCLK_SHIFT)
#define CLKCFG_MEMCLK_SHIFT	4
#define CLKCFG_MEMCLK_MASK	(7 << CLKCFG_MEMCLK_SHIFT)
#define CLKCFG_UPDATE		(1 << 12)

#define SSKPD_MCHBAR		0x0c1c	/* Sticky scratchpad */
#define SSKPD_CLK_SHIFT		0
#define SSKPD_CLK_MASK		(7 << SSKPD_CLK_SHIFT)

#define DCC_MCHBAR		0x200
#define DCC_NO_CHANXOR		(1 << 10)
#define DCC_INTERLEAVED		(1 << 1)
#define DCC_CMD_SHIFT		16
#define DCC_CMD_MASK		(7 << DCC_CMD_SHIFT)
#define DCC_CMD_NOP		(1 << DCC_CMD_SHIFT)
		/* For mode register mr0: */
#define DCC_SET_MREG		(3 << DCC_CMD_SHIFT)
		/* For extended mode registers mr1 to mr3: */
#define DCC_SET_EREG		(4 << DCC_CMD_SHIFT)
#define DCC_SET_EREG_SHIFT	21
#define DCC_SET_EREG_MASK	(DCC_CMD_MASK | (3 << DCC_SET_EREG_SHIFT))
/* Command to set extended mode register x (1..3); argument is
   parenthesized so expressions can be passed safely. */
#define DCC_SET_EREGx(x)	((DCC_SET_EREG | \
				  (((x) - 1) << DCC_SET_EREG_SHIFT)) & \
				 DCC_SET_EREG_MASK)

/* Per channel DRAM Row Attribute registers (32-bit) */
#define CxDRA_MCHBAR(x)		(0x1208 + ((x) * 0x0100))
#define CxDRA_PAGESIZE_SHIFT(r)	((r) * 4)	/* Per rank r */
#define CxDRA_PAGESIZE_MASKr(r)	(0x7 << CxDRA_PAGESIZE_SHIFT(r))
#define CxDRA_PAGESIZE_MASK	0x0000ffff
#define CxDRA_PAGESIZE(r, p)	/* for log2(dimm page size in bytes) p */ \
	((((p) - 10) << CxDRA_PAGESIZE_SHIFT(r)) & CxDRA_PAGESIZE_MASKr(r))
#define CxDRA_BANKS_SHIFT(r)	(((r) * 3) + 16)
#define CxDRA_BANKS_MASKr(r)	(0x3 << CxDRA_BANKS_SHIFT(r))
#define CxDRA_BANKS_MASK	0x07ff0000
#define CxDRA_BANKS(r, b)	/* for number of banks b */ \
	(((b) << (CxDRA_BANKS_SHIFT(r) - 3)) & CxDRA_BANKS_MASKr(r))

/*
 * Per channel DRAM Row Boundary registers (32-bit)
 * Every two ranks share one register and must be programmed at the same time.
 * All registers (4 ranks per channel) have to be set.
 */
#define CxDRBy_MCHBAR(x, r)	(0x1200 + ((x) * 0x0100) + (((r) / 2) * 4))
#define CxDRBy_BOUND_SHIFT(r)	(((r) % 2) * 16)
#define CxDRBy_BOUND_MASK(r)	(0x1fc << CxDRBy_BOUND_SHIFT(r))
#define CxDRBy_BOUND_MB(r, b)	/* for boundary in MB b */ \
	((((b) >> 5) << CxDRBy_BOUND_SHIFT(r)) & CxDRBy_BOUND_MASK(r))

#define CxDRC0_MCHBAR(x)	(0x1230 + ((x) * 0x0100))
#define CxDRC0_RANKEN0		(1 << 24)	/* Rank Enable */
#define CxDRC0_RANKEN1		(1 << 25)
#define CxDRC0_RANKEN2		(1 << 26)
#define CxDRC0_RANKEN3		(1 << 27)
#define CxDRC0_RANKEN(r)	(1 << (24 + (r)))
#define CxDRC0_RANKEN_MASK	(0xf << 24)
#define CxDRC0_RMS_SHIFT	8	/* Refresh Mode Select */
#define CxDRC0_RMS_MASK		(7 << CxDRC0_RMS_SHIFT)
#define CxDRC0_RMS_78US		(2 << CxDRC0_RMS_SHIFT)
#define CxDRC0_RMS_39US		(3 << CxDRC0_RMS_SHIFT)

#define CxDRC1_MCHBAR(x)	(0x1234 + ((x) * 0x0100))
#define CxDRC1_SSDS_SHIFT	24
#define CxDRC1_SSDS_MASK	(0xff << CxDRC1_SSDS_SHIFT)
#define CxDRC1_DS		(0x91 << CxDRC1_SSDS_SHIFT)
#define CxDRC1_SS		(0xb1 << CxDRC1_SSDS_SHIFT)
#define CxDRC1_NOTPOP(r)	(1 << (16 + (r)))	/* Write 1 for Not Populated */
#define CxDRC1_NOTPOP_MASK	(0xf << 16)
#define CxDRC1_MUSTWR		(3 << 11)

#define CxDRC2_MCHBAR(x)	(0x1238 + ((x) * 0x0100))
#define CxDRC2_NOTPOP(r)	(1 << (24 + (r)))	/* Write 1 for Not Populated */
#define CxDRC2_NOTPOP_MASK	(0xf << 24)
#define CxDRC2_MUSTWR		(1 << 12)
#define CxDRC2_CLK1067MT	(1 << 0)

/* DRAM Timing registers (32-bit each) */
#define CxDRT0_MCHBAR(x)	(0x1210 + ((x) * 0x0100))
#define CxDRT0_BtB_WtP_SHIFT	26
#define CxDRT0_BtB_WtP_MASK	(0x1f << CxDRT0_BtB_WtP_SHIFT)
#define CxDRT0_BtB_WtR_SHIFT	20
#define CxDRT0_BtB_WtR_MASK	(0x1f << CxDRT0_BtB_WtR_SHIFT)
#define CxDRT1_MCHBAR(x)	(0x1214 + ((x) * 0x0100))
#define CxDRT2_MCHBAR(x)	(0x1218 + ((x) * 0x0100))
#define CxDRT3_MCHBAR(x)	(0x121c + ((x) * 0x0100))
#define CxDRT4_MCHBAR(x)	(0x1220 + ((x) * 0x0100))
#define CxDRT5_MCHBAR(x)	(0x1224 + ((x) * 0x0100))
#define CxDRT6_MCHBAR(x)	(0x1228 + ((x) * 0x0100))

/* Clock disable registers (32-bit each) */
#define CxDCLKDIS_MCHBAR(x)	(0x120c + ((x) * 0x0100))
#define CxDCLKDIS_MASK		3
#define CxDCLKDIS_ENABLE	3	/* Always enable both clock pairs. */

/* On-Die-Termination registers (2x 32-bit per channel) */
#define CxODT_HIGH(x)		(0x124c + ((x) * 0x0100))
#define CxODT_LOW(x)		(0x1248 + ((x) * 0x0100))

/* Write Training registers. */
#define CxWRTy_MCHBAR(ch, s)	(0x1470 + ((ch) * 0x0100) + ((3 - (s)) * 4))

#define CxGTEW(x)		(0x1270 + ((x) * 0x100))
#define CxGTC(x)		(0x1274 + ((x) * 0x100))
#define CxDTPEW(x)		(0x1278 + ((x) * 0x100))
#define CxDTAEW(x)		(0x1280 + ((x) * 0x100))
#define CxDTC(x)		(0x1288 + ((x) * 0x100))


/*
 * DMIBAR (direct media interface MMIO window) accessors and offsets.
 */

#define DMIBAR8(x)	(*((volatile u8 *)(DEFAULT_DMIBAR + (x))))
#define DMIBAR16(x)	(*((volatile u16 *)(DEFAULT_DMIBAR + (x))))
#define DMIBAR32(x)	(*((volatile u32 *)(DEFAULT_DMIBAR + (x))))

#define DMIVC0RCTL	0x14
#define DMIVC1RCTL	0x20
#define DMIVC1RSTS	0x26
#define DMIESD		0x44
#define DMILE1D		0x50
#define DMILE1A		0x58
#define DMILE2D		0x60
#define DMILE2A		0x68


/*
 * EPBAR (egress port MMIO window) accessors and offsets.
 */

#define EPBAR8(x)	(*((volatile u8 *)(DEFAULT_EPBAR + (x))))
#define EPBAR16(x)	(*((volatile u16 *)(DEFAULT_EPBAR + (x))))
#define EPBAR32(x)	(*((volatile u32 *)(DEFAULT_EPBAR + (x))))

#define EPESD		0x44
#define EPLE1D		0x50
#define EPLE1A		0x58
#define EPLE2D		0x60


394#ifndef __ACPI__
395void gm45_early_init(void);
396void gm45_early_reset(void);
397
398void enter_raminit_or_reset(void);
399void get_gmch_info(sysinfo_t *);
400void raminit(sysinfo_t *, int s3resume);
401void raminit_thermal(const sysinfo_t *);
Vladimir Serbinenko56ae8a02014-08-16 10:59:02 +0200402void init_igd(const sysinfo_t *const);
Vladimir Serbinenko020dc0e2014-08-12 22:50:40 +0200403void init_pm(const sysinfo_t *, int do_freq_scaling_cfg);
Vladimir Serbinenko56ae8a02014-08-16 10:59:02 +0200404void igd_compute_ggc(sysinfo_t *const sysinfo);
Patrick Georgi2efc8802012-11-06 11:03:53 +0100405
406int raminit_read_vco_index(void);
407u32 raminit_get_rank_addr(unsigned int channel, unsigned int rank);
408
409void raminit_rcomp_calibration(stepping_t stepping);
410void raminit_reset_readwrite_pointers(void);
411void raminit_receive_enable_calibration(const timings_t *, const dimminfo_t *);
412void raminit_write_training(const mem_clock_t, const dimminfo_t *, int s3resume);
413void raminit_read_training(const dimminfo_t *, int s3resume);
414
415void gm45_late_init(stepping_t);
416
417u32 decode_igd_memory_size(u32 gms);
418u32 decode_igd_gtt_size(u32 gsm);
Arthur Heymans8b766052018-01-24 23:25:13 +0100419u32 decode_tseg_size(u8 esmramc);
Patrick Georgi2efc8802012-11-06 11:03:53 +0100420
421void init_iommu(void);
Vladimir Serbinenko33769a52014-08-30 22:39:20 +0200422
Arthur Heymans3b0eb602019-01-31 22:47:09 +0100423/* romstage mainboard hookups */
Arthur Heymans3b0eb602019-01-31 22:47:09 +0100424void mb_setup_superio(void); /* optional */
425void get_mb_spd_addrmap(u8 spd_addrmap[4]);
426void mb_pre_raminit_setup(sysinfo_t *); /* optional */
427void mb_post_raminit_setup(void); /* optional */
428
Arthur Heymans20cb85f2017-04-29 14:31:32 +0200429struct blc_pwm_t {
430 char ascii_string[13];
431 int pwm_freq; /* In Hz */
432};
433int get_blc_values(const struct blc_pwm_t **entries);
Arthur Heymans4d2d1712018-11-29 12:25:31 +0100434u16 get_blc_pwm_freq_value(const char *edid_ascii_string);
Arthur Heymans20cb85f2017-04-29 14:31:32 +0200435
Angel Ponsc0c95162020-08-03 13:55:18 +0200436int decode_pcie_bar(u32 *const base, u32 *const len);
Arthur Heymans20cb85f2017-04-29 14:31:32 +0200437
Alexander Couzens83fc32f2015-04-12 22:28:37 +0200438#include <device/device.h>
439
Vladimir Serbinenko33769a52014-08-30 22:39:20 +0200440struct acpi_rsdp;
Furquan Shaikh0f007d82020-04-24 06:41:18 -0700441unsigned long northbridge_write_acpi_tables(const struct device *device, unsigned long start,
442 struct acpi_rsdp *rsdp);
Patrick Georgi2efc8802012-11-06 11:03:53 +0100443
Alexander Couzens83fc32f2015-04-12 22:28:37 +0200444#endif /* !__ACPI__ */
Edward O'Callaghan089a5102015-01-06 02:48:57 +1100445#endif /* __NORTHBRIDGE_INTEL_GM45_GM45_H__ */