Angel Pons | 4b42983 | 2020-04-02 23:48:50 +0200 | [diff] [blame^] | 1 | /* SPDX-License-Identifier: GPL-2.0-only */ |
| 2 | /* This file is part of the coreboot project. */ |
Patrick Georgi | 2efc880 | 2012-11-06 11:03:53 +0100 | [diff] [blame] | 3 | |
| 4 | #ifndef __NORTHBRIDGE_INTEL_GM45_GM45_H__ |
Edward O'Callaghan | 089a510 | 2015-01-06 02:48:57 +1100 | [diff] [blame] | 5 | #define __NORTHBRIDGE_INTEL_GM45_GM45_H__ |
Patrick Georgi | 2efc880 | 2012-11-06 11:03:53 +0100 | [diff] [blame] | 6 | |
Elyes HAOUAS | 21b71ce6 | 2018-06-16 18:43:52 +0200 | [diff] [blame] | 7 | #include <southbridge/intel/i82801ix/i82801ix.h> |
Patrick Georgi | 2efc880 | 2012-11-06 11:03:53 +0100 | [diff] [blame] | 8 | |
| 9 | #ifndef __ACPI__ |
| 10 | |
| 11 | #include <stdint.h> |
| 12 | |
/* Front-side bus clock selectors. The raw values (0..2) are the
   hardware encoding, cf. the 3-bit CLKCFG FSBCLK field below. */
typedef enum {
	FSB_CLOCK_1067MHz = 0,
	FSB_CLOCK_800MHz = 1,
	FSB_CLOCK_667MHz = 2,
} fsb_clock_t;
| 18 | |
/* GMCH (northbridge) stepping, used for stepping-specific handling
   (cf. raminit_rcomp_calibration() and gm45_late_init() below). */
typedef enum { /* Steppings below B1 were pre-production,
		  conversion stepping A1 is... ?
		  We'll support B1, B2, B3, and conversion stepping A1. */
	STEPPING_A0 = 0,
	STEPPING_A1 = 1,
	STEPPING_A2 = 2,
	STEPPING_A3 = 3,
	STEPPING_B0 = 4,
	STEPPING_B1 = 5,
	STEPPING_B2 = 6,
	STEPPING_B3 = 7,
	STEPPING_CONVERSION_A1 = 9,
} stepping_t;
| 32 | |
/* GMCH variant within the GM45 family; GMCH_UNKNOWN serves as the
   fallback when the variant cannot be determined. */
typedef enum {
	GMCH_GM45 = 0,
	GMCH_GM47,
	GMCH_GM49,
	GMCH_GE45,
	GMCH_GL40,
	GMCH_GL43,
	GMCH_GS40,
	GMCH_GS45,
	GMCH_PM45,
	GMCH_UNKNOWN
} gmch_gfx_t;
| 45 | |
/* Memory clock selectors. The MHz names (clock rate) and the MT names
   (megatransfer rate) are numerically equal aliases for the same raw
   encoding: e.g. a 533 MHz clock equals 1067 MT/s at double data rate. */
typedef enum {
	MEM_CLOCK_533MHz = 0,
	MEM_CLOCK_400MHz = 1,
	MEM_CLOCK_333MHz = 2,
	MEM_CLOCK_1067MT = 0,
	MEM_CLOCK_800MT = 1,
	MEM_CLOCK_667MT = 2,
} mem_clock_t;
| 54 | |
/* DRAM generation; the enumerator value equals the "n" in DDRn. */
typedef enum {
	DDR1 = 1,
	DDR2 = 2,
	DDR3 = 3,
} ddr_t;
| 60 | |
/* Operating mode of the two memory channels. */
typedef enum {
	CHANNEL_MODE_SINGLE,
	CHANNEL_MODE_DUAL_ASYNC,
	CHANNEL_MODE_DUAL_INTERLEAVED,
} channel_mode_t;
| 66 | |
/* SDRAM device data width, encoded as in DDR3 SPD. */
typedef enum { /* as in DDR3 spd */
	CHIP_WIDTH_x4 = 0,
	CHIP_WIDTH_x8 = 1,
	CHIP_WIDTH_x16 = 2,
	CHIP_WIDTH_x32 = 3,
} chip_width_t;
| 73 | |
/* Per-device capacity, encoded as in DDR3 SPD. */
typedef enum { /* as in DDR3 spd */
	CHIP_CAP_256M = 0,
	CHIP_CAP_512M = 1,
	CHIP_CAP_1G = 2,
	CHIP_CAP_2G = 3,
	CHIP_CAP_4G = 4,
	CHIP_CAP_8G = 5,
	CHIP_CAP_16G = 6,
} chip_capacity_t;
| 83 | |
/* Memory timing set selected for the platform. The tXX members hold
   the respective DRAM timing parameters; their exact units are defined
   by their use in the raminit code (not visible here). */
typedef struct {
	unsigned int CAS;		/* CAS latency */
	fsb_clock_t fsb_clock;
	mem_clock_t mem_clock;
	channel_mode_t channel_mode;
	unsigned int tRAS;
	unsigned int tRP;
	unsigned int tRCD;
	unsigned int tRFC;
	unsigned int tWR;
	unsigned int tRD;
	unsigned int tRRD;
	unsigned int tFAW;
	unsigned int tWL;
} timings_t;
| 99 | |
/* Attributes of a single DIMM (one per channel, see sysinfo_t). */
typedef struct {
	unsigned int card_type; /* 0x0: unpopulated,
				   0xa - 0xf: raw card type A - F */
	chip_width_t chip_width;
	chip_capacity_t chip_capacity;
	unsigned int page_size; /* of whole DIMM in Bytes (4096 or 8192) */
	unsigned int banks;
	unsigned int ranks;
	unsigned int rank_capacity_mb; /* per rank in Megabytes */
} dimminfo_t;
| 110 | |
/* The setup is one DIMM per channel, so there's no need to find a
   common timing setup between multiple chips (but chip and controller
   still need to be coordinated). */
typedef struct {
	stepping_t stepping;	/* GMCH stepping, see stepping_t. */
	int txt_enabled;	/* NOTE(review): presumably boolean for TXT
				   support — confirm in get_gmch_info(). */
	int cores;
	gmch_gfx_t gfx_type;	/* Detected GMCH variant. */
	int max_ddr2_mhz;
	int max_ddr3_mt;
	fsb_clock_t max_fsb;
	int max_fsb_mhz;
	int max_render_mhz;
	int enable_igd;		/* Integrated graphics device. */
	int enable_peg;		/* PCIe graphics port (see D1:F0 PEG below). */
	u16 ggc;		/* Value for the GGC register (cf. D0F0_GGC
				   and igd_compute_ggc()). */

	/* to be filled in romstage main: */
	int spd_type;
	timings_t selected_timings;
	dimminfo_t dimms[2];	/* One entry per channel. */
	u8 spd_map[4];
	int gs45_low_power_mode; /* low power mode of GMCH_GS45 */
	int sff; /* small form factor option (soldered down DIMM) */
} sysinfo_t;
#define TOTAL_CHANNELS 2
/* A channel counts as populated iff its raw card type is non-zero
   (see dimminfo_t::card_type). All macro parameters that may be
   expressions are parenthesized (loop variables must stay bare
   lvalue identifiers). */
#define CHANNEL_IS_POPULATED(dimms, idx) ((dimms)[idx].card_type != 0)
#define CHANNEL_IS_CARDF(dimms, idx) ((dimms)[idx].card_type == 0xf)
#define IF_CHANNEL_POPULATED(dimms, idx) if ((dimms)[idx].card_type != 0)
#define FOR_EACH_CHANNEL(idx) \
	for (idx = 0; idx < TOTAL_CHANNELS; ++idx)
#define FOR_EACH_POPULATED_CHANNEL(dimms, idx) \
	FOR_EACH_CHANNEL(idx) IF_CHANNEL_POPULATED(dimms, idx)

#define RANKS_PER_CHANNEL 4 /* Only two may be populated */
#define IF_RANK_POPULATED(dimms, ch, r) \
	if ((dimms)[ch].card_type && ((r) < (dimms)[ch].ranks))
#define FOR_EACH_RANK_IN_CHANNEL(r) \
	for (r = 0; r < RANKS_PER_CHANNEL; ++r)
#define FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms, ch, r) \
	FOR_EACH_RANK_IN_CHANNEL(r) IF_RANK_POPULATED(dimms, ch, r)
#define FOR_EACH_RANK(ch, r) \
	FOR_EACH_CHANNEL(ch) FOR_EACH_RANK_IN_CHANNEL(r)
#define FOR_EACH_POPULATED_RANK(dimms, ch, r) \
	FOR_EACH_RANK(ch, r) IF_RANK_POPULATED(dimms, ch, r)
| 156 | |
/* NOTE(review): presumably the highest CAS latency the DDR3 code
   considers — confirm against raminit. */
#define DDR3_MAX_CAS 18

/* Host PLL VCO frequency encodings (cf. HPLLVCO_MCHBAR and
   raminit_read_vco_index() below). */
enum {
	VCO_2666 = 4,
	VCO_3200 = 0,
	VCO_4000 = 1,
	VCO_5333 = 2,
};

#endif /* !__ACPI__ */

/* Offsets of read/write training results in CMOS.
   They will be restored upon S3 resumes. */
#define CMOS_READ_TRAINING 0x80 /* 16 bytes */
#define CMOS_WRITE_TRAINING 0x90 /* 16 bytes
				    (could be reduced to 10 bytes) */
| 173 | |
| 174 | |
/* Fixed MMIO base addresses. In C code the MCH/DMI bases are typed as
   byte pointers for the accessor macros below; in ASL (__ACPI__) they
   must be plain integers. */
#ifndef __ACPI__
#define DEFAULT_MCHBAR ((u8 *)0xfed14000)
#define DEFAULT_DMIBAR ((u8 *)0xfed18000)
#else
#define DEFAULT_MCHBAR 0xfed14000
#define DEFAULT_DMIBAR 0xfed18000
#endif
#define DEFAULT_EPBAR 0xfed19000
#define DEFAULT_HECIBAR ((u8 *)0xfed1a000)


/* IOMMU (VT-d) engine base addresses. */
#define IOMMU_BASE1 0xfed90000
#define IOMMU_BASE2 0xfed91000
#define IOMMU_BASE3 0xfed92000
#define IOMMU_BASE4 0xfed93000
| 190 | |
/*
 * D0:F0 — host bridge PCI configuration registers.
 * Names follow Intel datasheet naming; offsets are config-space bytes.
 */
#define D0F0_EPBAR_LO 0x40
#define D0F0_EPBAR_HI 0x44
#define D0F0_MCHBAR_LO 0x48
#define D0F0_MCHBAR_HI 0x4c
#define D0F0_GGC 0x52		/* Graphics control */
#define D0F0_DEVEN 0x54		/* Device enable */
#define D0F0_PCIEXBAR_LO 0x60
#define D0F0_PCIEXBAR_HI 0x64
#define D0F0_DMIBAR_LO 0x68
#define D0F0_DMIBAR_HI 0x6c
#define D0F0_PMBASE 0x78
#define D0F0_PAM(x) (0x90+(x)) /* 0-6 */
#define D0F0_REMAPBASE 0x98
#define D0F0_REMAPLIMIT 0x9a
#define D0F0_SMRAM 0x9d
#define D0F0_ESMRAMC 0x9e	/* Extended SMRAM control (cf. decode_tseg_size()). */
#define D0F0_TOM 0xa0		/* Top of Memory */
#define D0F0_TOUUD 0xa2		/* Top of Upper Usable DRAM */
#define D0F0_TOLUD 0xb0		/* Top of Low Usable DRAM */
#define D0F0_SKPD 0xdc /* Scratchpad Data */
#define D0F0_CAPID0 0xe0

/*
 * D1:F0 PEG — PCI Express graphics port.
 */
#define PEG_CAP 0xa2
#define SLOTCAP 0xb4
#define PEGLC 0xec
#define D1F0_VCCAP 0x104
#define D1F0_VC0RCTL 0x114
| 224 | |
/*
 * Graphics frequencies — GCFGC register in the IGD at PCI 00:02.0.
 * Field layout per the masks below: CR = bits 3:0, update trigger =
 * bit 5, CS = bits 11:8, CD = bit 12.
 */
#define GCFGC_PCIDEV PCI_DEV(0, 2, 0)
#define GCFGC_OFFSET 0xf0
#define GCFGC_CR_SHIFT 0
#define GCFGC_CR_MASK (0xf << GCFGC_CR_SHIFT)
#define GCFGC_CS_SHIFT 8
#define GCFGC_CS_MASK (0xf << GCFGC_CS_SHIFT)
#define GCFGC_CD_SHIFT 12
#define GCFGC_CD_MASK (0x1 << GCFGC_CD_SHIFT)
#define GCFGC_UPDATE_SHIFT 5
#define GCFGC_UPDATE (0x1 << GCFGC_UPDATE_SHIFT)
| 238 | |
/*
 * MCHBAR
 */

/* MCHBAR register accessors; the byte-offset argument is parenthesized
   so expression arguments (e.g. `base + off`, ternaries) expand
   correctly. */
#define MCHBAR8(x) (*((volatile u8 *)(DEFAULT_MCHBAR + (x))))
#define MCHBAR16(x) (*((volatile u16 *)(DEFAULT_MCHBAR + (x))))
#define MCHBAR32(x) (*((volatile u32 *)(DEFAULT_MCHBAR + (x))))
| 246 | |
#define HPLLVCO_MCHBAR 0x0c0f	/* Host PLL VCO; cf. the VCO_* enum above. */

#define PMSTS_MCHBAR 0x0f14 /* Self refresh channel status */
#define PMSTS_WARM_RESET (1 << 1)
#define PMSTS_BOTH_SELFREFRESH (1 << 0)

/* Clock configuration: FSB and memory clock selects; the 3-bit field
   encodings match fsb_clock_t and mem_clock_t. */
#define CLKCFG_MCHBAR 0x0c00
#define CLKCFG_FSBCLK_SHIFT 0
#define CLKCFG_FSBCLK_MASK (7 << CLKCFG_FSBCLK_SHIFT)
#define CLKCFG_MEMCLK_SHIFT 4
#define CLKCFG_MEMCLK_MASK (7 << CLKCFG_MEMCLK_SHIFT)
#define CLKCFG_UPDATE (1 << 12)

#define SSKPD_MCHBAR 0x0c1c	/* Sticky scratchpad (presumably, cf. D0F0_SKPD). */
#define SSKPD_CLK_SHIFT 0
#define SSKPD_CLK_MASK (7 << SSKPD_CLK_SHIFT)
| 263 | |
/* DRAM controller channel configuration / command register. */
#define DCC_MCHBAR 0x200
#define DCC_NO_CHANXOR (1 << 10)
#define DCC_INTERLEAVED (1 << 1)
#define DCC_CMD_SHIFT 16
#define DCC_CMD_MASK (7 << DCC_CMD_SHIFT)
#define DCC_CMD_NOP (1 << DCC_CMD_SHIFT)
/* For mode register mr0: */
#define DCC_SET_MREG (3 << DCC_CMD_SHIFT)
/* For extended mode registers mr1 to mr3: */
#define DCC_SET_EREG (4 << DCC_CMD_SHIFT)
#define DCC_SET_EREG_SHIFT 21
#define DCC_SET_EREG_MASK (DCC_CMD_MASK | (3 << DCC_SET_EREG_SHIFT))
/* Command encoding for extended mode register x (1..3). The argument
   is parenthesized so expression arguments expand correctly. */
#define DCC_SET_EREGx(x) ((DCC_SET_EREG | \
			   (((x) - 1) << DCC_SET_EREG_SHIFT)) & \
			  DCC_SET_EREG_MASK)
| 279 | |
/* Per channel DRAM Row Attribute registers (32-bit).
   All macro arguments are parenthesized so expression arguments
   expand correctly. */
#define CxDRA_MCHBAR(x) (0x1208 + ((x) * 0x0100))
#define CxDRA_PAGESIZE_SHIFT(r) ((r) * 4) /* Per rank r */
#define CxDRA_PAGESIZE_MASKr(r) (0x7 << CxDRA_PAGESIZE_SHIFT(r))
#define CxDRA_PAGESIZE_MASK 0x0000ffff
#define CxDRA_PAGESIZE(r, p) /* for log2(dimm page size in bytes) p */ \
	((((p) - 10) << CxDRA_PAGESIZE_SHIFT(r)) & CxDRA_PAGESIZE_MASKr(r))
#define CxDRA_BANKS_SHIFT(r) (((r) * 3) + 16)
#define CxDRA_BANKS_MASKr(r) (0x3 << CxDRA_BANKS_SHIFT(r))
#define CxDRA_BANKS_MASK 0x07ff0000
#define CxDRA_BANKS(r, b) /* for number of banks b */ \
	(((b) << (CxDRA_BANKS_SHIFT(r) - 3)) & CxDRA_BANKS_MASKr(r))
| 292 | |
/*
 * Per channel DRAM Row Boundary registers (32-bit)
 * Every two ranks share one register and must be programmed at the same time.
 * All registers (4 ranks per channel) have to be set.
 * Macro arguments are parenthesized so expression arguments expand
 * correctly.
 */
#define CxDRBy_MCHBAR(x, r) (0x1200 + ((x) * 0x0100) + (((r) / 2) * 4))
#define CxDRBy_BOUND_SHIFT(r) (((r) % 2) * 16)
#define CxDRBy_BOUND_MASK(r) (0x1fc << CxDRBy_BOUND_SHIFT(r))
#define CxDRBy_BOUND_MB(r, b) /* for boundary in MB b */ \
	((((b) >> 5) << CxDRBy_BOUND_SHIFT(r)) & CxDRBy_BOUND_MASK(r))
| 303 | |
/* Per channel DRAM controller register 0; arguments parenthesized. */
#define CxDRC0_MCHBAR(x) (0x1230 + ((x) * 0x0100))
#define CxDRC0_RANKEN0 (1 << 24) /* Rank Enable */
#define CxDRC0_RANKEN1 (1 << 25)
#define CxDRC0_RANKEN2 (1 << 26)
#define CxDRC0_RANKEN3 (1 << 27)
#define CxDRC0_RANKEN(r) (1 << (24 + (r)))
#define CxDRC0_RANKEN_MASK (0xf << 24)
#define CxDRC0_RMS_SHIFT 8 /* Refresh Mode Select */
#define CxDRC0_RMS_MASK (7 << CxDRC0_RMS_SHIFT)
#define CxDRC0_RMS_78US (2 << CxDRC0_RMS_SHIFT)
#define CxDRC0_RMS_39US (3 << CxDRC0_RMS_SHIFT)
| 315 | |
/* Per channel DRAM controller register 1; arguments parenthesized. */
#define CxDRC1_MCHBAR(x) (0x1234 + ((x) * 0x0100))
#define CxDRC1_SSDS_SHIFT 24
#define CxDRC1_SSDS_MASK (0xff << CxDRC1_SSDS_SHIFT)
#define CxDRC1_DS (0x91 << CxDRC1_SSDS_SHIFT)
#define CxDRC1_SS (0xb1 << CxDRC1_SSDS_SHIFT)
#define CxDRC1_NOTPOP(r) (1 << (16 + (r))) /* Write 1 for Not Populated */
#define CxDRC1_NOTPOP_MASK (0xf << 16)
#define CxDRC1_MUSTWR (3 << 11)
| 324 | |
/* Per channel DRAM controller register 2; arguments parenthesized. */
#define CxDRC2_MCHBAR(x) (0x1238 + ((x) * 0x0100))
#define CxDRC2_NOTPOP(r) (1 << (24 + (r))) /* Write 1 for Not Populated */
#define CxDRC2_NOTPOP_MASK (0xf << 24)
#define CxDRC2_MUSTWR (1 << 12)
#define CxDRC2_CLK1067MT (1 << 0)
| 330 | |
/* DRAM Timing registers (32-bit each); channel argument parenthesized. */
#define CxDRT0_MCHBAR(x) (0x1210 + ((x) * 0x0100))
#define CxDRT0_BtB_WtP_SHIFT 26
#define CxDRT0_BtB_WtP_MASK (0x1f << CxDRT0_BtB_WtP_SHIFT)
#define CxDRT0_BtB_WtR_SHIFT 20
#define CxDRT0_BtB_WtR_MASK (0x1f << CxDRT0_BtB_WtR_SHIFT)
#define CxDRT1_MCHBAR(x) (0x1214 + ((x) * 0x0100))
#define CxDRT2_MCHBAR(x) (0x1218 + ((x) * 0x0100))
#define CxDRT3_MCHBAR(x) (0x121c + ((x) * 0x0100))
#define CxDRT4_MCHBAR(x) (0x1220 + ((x) * 0x0100))
#define CxDRT5_MCHBAR(x) (0x1224 + ((x) * 0x0100))
#define CxDRT6_MCHBAR(x) (0x1228 + ((x) * 0x0100))
| 343 | |
/* Clock disable registers (32-bit each); channel argument parenthesized. */
#define CxDCLKDIS_MCHBAR(x) (0x120c + ((x) * 0x0100))
#define CxDCLKDIS_MASK 3
#define CxDCLKDIS_ENABLE 3 /* Always enable both clock pairs. */
| 348 | |
/* On-Die-Termination registers (2x 32-bit per channel); arguments
   parenthesized. */
#define CxODT_HIGH(x) (0x124c + ((x) * 0x0100))
#define CxODT_LOW(x) (0x1248 + ((x) * 0x0100))

/* Write Training registers; s selects one of four 32-bit slots,
   stored in reverse order. */
#define CxWRTy_MCHBAR(ch, s) (0x1470 + ((ch) * 0x0100) + ((3 - (s)) * 4))
| 355 | |
/* Per channel training/error window registers; the channel argument is
   parenthesized so expression arguments (e.g. `a + b`) expand
   correctly. */
#define CxGTEW(x) (0x1270 + ((x) * 0x100))
#define CxGTC(x) (0x1274 + ((x) * 0x100))
#define CxDTPEW(x) (0x1278 + ((x) * 0x100))
#define CxDTAEW(x) (0x1280 + ((x) * 0x100))
#define CxDTC(x) (0x1288 + ((x) * 0x100))
| 361 | |
| 362 | |
/*
 * DMIBAR
 */

/* DMI register accessors; the byte-offset argument is parenthesized so
   expression arguments expand correctly. */
#define DMIBAR8(x) (*((volatile u8 *)(DEFAULT_DMIBAR + (x))))
#define DMIBAR16(x) (*((volatile u16 *)(DEFAULT_DMIBAR + (x))))
#define DMIBAR32(x) (*((volatile u32 *)(DEFAULT_DMIBAR + (x))))
| 370 | |
/* DMI link register offsets (into DMIBAR space). */
#define DMIVC0RCTL 0x14		/* VC0 resource control */
#define DMIVC1RCTL 0x20		/* VC1 resource control */
#define DMIVC1RSTS 0x26		/* VC1 resource status */
#define DMIESD 0x44
#define DMILE1D 0x50
#define DMILE1A 0x58
#define DMILE2D 0x60
#define DMILE2A 0x68
| 379 | |
| 380 | |
/*
 * EPBAR
 */

/* Egress port register accessors; the byte-offset argument is
   parenthesized so expression arguments expand correctly. */
#define EPBAR8(x) (*((volatile u8 *)(DEFAULT_EPBAR + (x))))
#define EPBAR16(x) (*((volatile u16 *)(DEFAULT_EPBAR + (x))))
#define EPBAR32(x) (*((volatile u32 *)(DEFAULT_EPBAR + (x))))
| 388 | |
/* EPBAR register offsets; same layout as the corresponding
   DMIESD/DMILE1D/DMILE1A/DMILE2D offsets above. */
#define EPESD 0x44
#define EPLE1D 0x50
#define EPLE1A 0x58
#define EPLE2D 0x60
| 394 | |
#ifndef __ACPI__
/* Early (romstage) northbridge setup. */
void gm45_early_init(void);
void gm45_early_reset(void);

/* Memory init entry points. s3resume selects restoring the training
   results saved at CMOS_READ_TRAINING/CMOS_WRITE_TRAINING (see above). */
void enter_raminit_or_reset(void);
void get_gmch_info(sysinfo_t *);
void raminit(sysinfo_t *, int s3resume);
void raminit_thermal(const sysinfo_t *);
void init_igd(const sysinfo_t *const);
void init_pm(const sysinfo_t *, int do_freq_scaling_cfg);
void igd_compute_ggc(sysinfo_t *const sysinfo);

int raminit_read_vco_index(void);
u32 raminit_get_rank_addr(unsigned int channel, unsigned int rank);

/* Training/calibration helpers used by raminit. */
void raminit_rcomp_calibration(stepping_t stepping);
void raminit_reset_readwrite_pointers(void);
void raminit_receive_enable_calibration(const timings_t *, const dimminfo_t *);
void raminit_write_training(const mem_clock_t, const dimminfo_t *, int s3resume);
void raminit_read_training(const dimminfo_t *, int s3resume);

void gm45_late_init(stepping_t);

/* Decode raw register fields into sizes (cf. D0F0_GGC, D0F0_ESMRAMC). */
u32 decode_igd_memory_size(u32 gms);
u32 decode_igd_gtt_size(u32 gsm);
u32 decode_tseg_size(u8 esmramc);

void init_iommu(void);

/* romstage mainboard hookups */
void mb_setup_superio(void); /* optional */
void get_mb_spd_addrmap(u8 spd_addrmap[4]);
void mb_pre_raminit_setup(sysinfo_t *); /* optional */
void mb_post_raminit_setup(void); /* optional */
| 429 | |
/* Backlight PWM frequency lookup entry: maps a panel's EDID ASCII
   string to the PWM frequency (in Hz) to program. */
struct blc_pwm_t {
	char ascii_string[13];
	int pwm_freq; /* In Hz */
};
/* NOTE(review): presumably returns the entry count and points *entries
   at the board's table — confirm against mainboard implementations. */
int get_blc_values(const struct blc_pwm_t **entries);
u16 get_blc_pwm_freq_value(const char *edid_ascii_string);


#include <device/device.h>

struct acpi_rsdp;
/* Writes northbridge ACPI tables starting at `start`; returns the new
   end address. */
unsigned long northbridge_write_acpi_tables(struct device *device, unsigned long start, struct acpi_rsdp *rsdp);
Patrick Georgi | 2efc880 | 2012-11-06 11:03:53 +0100 | [diff] [blame] | 442 | |
Alexander Couzens | 83fc32f | 2015-04-12 22:28:37 +0200 | [diff] [blame] | 443 | #endif /* !__ACPI__ */ |
Edward O'Callaghan | 089a510 | 2015-01-06 02:48:57 +1100 | [diff] [blame] | 444 | #endif /* __NORTHBRIDGE_INTEL_GM45_GM45_H__ */ |