/* SPDX-License-Identifier: GPL-2.0-only */

#ifndef __NORTHBRIDGE_INTEL_X4X_H__
#define __NORTHBRIDGE_INTEL_X4X_H__

#include <stdint.h>
#include "memmap.h"

/*
 * D0:F0
 */
#define HOST_BRIDGE PCI_DEV(0, 0, 0)

#include "hostbridge_regs.h"

/*
 * D1:F0 PEG
 */
#define PEG_CAP 0xa2
#define SLOTCAP 0xb4
#define PEGLC 0xec
#define D1F0_VCCAP 0x104
#define D1F0_VC0RCTL 0x114

/*
 * Graphics frequencies
 */
#define GCFGC_PCIDEV PCI_DEV(0, 2, 0)
#define GCFGC_OFFSET 0xf0
#define GCFGC_CR_SHIFT 0
#define GCFGC_CR_MASK (0xf << GCFGC_CR_SHIFT)
#define GCFGC_CS_SHIFT 8
#define GCFGC_CS_MASK (0xf << GCFGC_CS_SHIFT)
#define GCFGC_CD_SHIFT 12
#define GCFGC_CD_MASK (0x1 << GCFGC_CD_SHIFT)
#define GCFGC_UPDATE_SHIFT 5
#define GCFGC_UPDATE (0x1 << GCFGC_UPDATE_SHIFT)
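
/*
 * Illustrative sketch only (not lifted from the raminit code): the GCFGC
 * fields live in the IGD's PCI config space, so a clock change would be a
 * read-modify-write followed by setting the update bit, e.g.:
 *
 *	u16 gcfgc = pci_read_config16(GCFGC_PCIDEV, GCFGC_OFFSET);
 *	gcfgc &= ~GCFGC_CR_MASK;
 *	gcfgc |= (render_clk << GCFGC_CR_SHIFT) & GCFGC_CR_MASK;
 *	pci_write_config16(GCFGC_PCIDEV, GCFGC_OFFSET, gcfgc | GCFGC_UPDATE);
 *
 * 'render_clk' is a hypothetical variable holding the desired core render
 * clock select value.
 */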

/*
 * MCHBAR
 */

#define MCHBAR8(x)  (*((volatile u8 *)(DEFAULT_MCHBAR + (x))))
#define MCHBAR16(x) (*((volatile u16 *)(DEFAULT_MCHBAR + (x))))
#define MCHBAR32(x) (*((volatile u32 *)(DEFAULT_MCHBAR + (x))))
#define MCHBAR8_AND(x, and) (MCHBAR8(x) = MCHBAR8(x) & (and))
#define MCHBAR8_OR(x, or) (MCHBAR8(x) = MCHBAR8(x) | (or))
#define MCHBAR8_AND_OR(x, and, or) \
	(MCHBAR8(x) = (MCHBAR8(x) & (and)) | (or))
#define MCHBAR16_AND(x, and) (MCHBAR16(x) = MCHBAR16(x) & (and))
#define MCHBAR16_OR(x, or) (MCHBAR16(x) = MCHBAR16(x) | (or))
#define MCHBAR16_AND_OR(x, and, or) \
	(MCHBAR16(x) = (MCHBAR16(x) & (and)) | (or))
#define MCHBAR32_AND(x, and) (MCHBAR32(x) = MCHBAR32(x) & (and))
#define MCHBAR32_OR(x, or) (MCHBAR32(x) = MCHBAR32(x) | (or))
#define MCHBAR32_AND_OR(x, and, or) \
	(MCHBAR32(x) = (MCHBAR32(x) & (and)) | (or))
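
/*
 * Usage note (illustrative, hypothetical names): each helper expands to a
 * single volatile read-modify-write at DEFAULT_MCHBAR + offset, e.g.
 *
 *	MCHBAR32_AND_OR(reg_offset, ~field_mask, new_field_value);
 *
 * clears 'field_mask' and ors in 'new_field_value' in one statement;
 * 'reg_offset', 'field_mask' and 'new_field_value' are placeholders.
 */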

#define CHDECMISC 0x111
#define STACKED_MEM (1 << 1)

#define C0DRB0 0x200
#define C0DRB1 0x202
#define C0DRB2 0x204
#define C0DRB3 0x206
#define C0DRA01 0x208
#define C0DRA23 0x20a
#define C0CKECTRL 0x260

#define C1DRB0 0x600
#define C1DRB1 0x602
#define C1DRB2 0x604
#define C1DRB3 0x606
#define C1DRA01 0x608
#define C1DRA23 0x60a
#define C1CKECTRL 0x660

#define PMSTS_MCHBAR 0x0f14 /* Self refresh channel status */
#define PMSTS_WARM_RESET (1 << 8)
#define PMSTS_BOTH_SELFREFRESH (3 << 0)

#define CLKCFG_MCHBAR 0x0c00
#define CLKCFG_FSBCLK_SHIFT 0
#define CLKCFG_FSBCLK_MASK (7 << CLKCFG_FSBCLK_SHIFT)
#define CLKCFG_MEMCLK_SHIFT 4
#define CLKCFG_MEMCLK_MASK (7 << CLKCFG_MEMCLK_SHIFT)
#define CLKCFG_UPDATE (1 << 12)
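
/*
 * Minimal sketch (an assumption, not copied from raminit.c): reprogramming
 * the memory clock would rewrite the MEMCLK field and set CLKCFG_UPDATE to
 * latch it, assuming the enum mem_clock values match the field encoding:
 *
 *	MCHBAR32_AND_OR(CLKCFG_MCHBAR, ~CLKCFG_MEMCLK_MASK,
 *			(MEM_CLOCK_800MHz << CLKCFG_MEMCLK_SHIFT) | CLKCFG_UPDATE);
 */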

#define SSKPD_MCHBAR 0x0c20 /* 64 bit */

/*
 * DMIBAR
 */

#define DMIBAR8(x) (*((volatile u8 *)(DEFAULT_DMIBAR + (x))))
#define DMIBAR16(x) (*((volatile u16 *)(DEFAULT_DMIBAR + (x))))
#define DMIBAR32(x) (*((volatile u32 *)(DEFAULT_DMIBAR + (x))))

#define DMIVC0RCTL 0x14
#define DMIVC1RCTL 0x20
#define DMIVC1RSTS 0x26
#define DMIESD 0x44
#define DMILE1D 0x50
#define DMILE1A 0x58
#define DMILE2D 0x60
#define DMILE2A 0x68

/*
 * EPBAR
 */

#define EPBAR8(x) (*((volatile u8 *)(DEFAULT_EPBAR + (x))))
#define EPBAR16(x) (*((volatile u16 *)(DEFAULT_EPBAR + (x))))
#define EPBAR32(x) (*((volatile u32 *)(DEFAULT_EPBAR + (x))))

#define EPESD 0x44
#define EPLE1D 0x50
#define EPLE1A 0x58
#define EPLE2D 0x60

#define NOP_CMD 0x2
#define PRECHARGE_CMD 0x4
#define MRS_CMD 0x6
#define EMRS_CMD 0x8
#define EMRS1_CMD (EMRS_CMD | 0x10)
#define EMRS2_CMD (EMRS_CMD | 0x20)
#define EMRS3_CMD (EMRS_CMD | 0x30)
#define ZQCAL_CMD 0xa
#define CBR_CMD 0xc
#define NORMALOP_CMD 0xe
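
/*
 * These command codes are presumably issued per rank through
 * send_jedec_cmd() (declared below) during JEDEC initialization; for
 * example (illustrative only):
 *
 *	send_jedec_cmd(s, rank, ch, PRECHARGE_CMD, 0);
 *	send_jedec_cmd(s, rank, ch, MRS_CMD, mr0_value);
 *	send_jedec_cmd(s, rank, ch, NORMALOP_CMD, 0);
 *
 * 'mr0_value' is a placeholder for the encoded mode register contents.
 */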

#define TOTAL_CHANNELS 2
#define TOTAL_DIMMS 4
#define TOTAL_BYTELANES 8
#define DIMMS_PER_CHANNEL (TOTAL_DIMMS / TOTAL_CHANNELS)
#define RAW_CARD_UNPOPULATED 0xff
#define RAW_CARD_POPULATED 0

#define DIMM_IS_POPULATED(dimms, idx) (dimms[idx].card_type != RAW_CARD_UNPOPULATED)
#define IF_DIMM_POPULATED(dimms, idx) if (dimms[idx].card_type != RAW_CARD_UNPOPULATED)
#define ONLY_DIMMA_IS_POPULATED(dimms, ch) ( \
	(DIMM_IS_POPULATED(dimms, (ch == 0) ? 0 : 2) && \
	!DIMM_IS_POPULATED(dimms, (ch == 0) ? 1 : 3)))
#define ONLY_DIMMB_IS_POPULATED(dimms, ch) ( \
	(DIMM_IS_POPULATED(dimms, (ch == 0) ? 1 : 3) && \
	!DIMM_IS_POPULATED(dimms, (ch == 0) ? 0 : 2)))
#define BOTH_DIMMS_ARE_POPULATED(dimms, ch) ( \
	(DIMM_IS_POPULATED(dimms, (ch == 0) ? 0 : 2) && \
	(DIMM_IS_POPULATED(dimms, (ch == 0) ? 1 : 3))))
#define FOR_EACH_DIMM(idx) \
	for (idx = 0; idx < TOTAL_DIMMS; ++idx)
#define FOR_EACH_POPULATED_DIMM(dimms, idx) \
	FOR_EACH_DIMM(idx) IF_DIMM_POPULATED(dimms, idx)
#define FOR_EACH_DIMM_IN_CHANNEL(ch, idx) \
	for (idx = (ch) << 1; idx < ((ch) << 1) + DIMMS_PER_CHANNEL; ++idx)
#define FOR_EACH_POPULATED_DIMM_IN_CHANNEL(dimms, ch, idx) \
	FOR_EACH_DIMM_IN_CHANNEL(ch, idx) IF_DIMM_POPULATED(dimms, idx)
#define CHANNEL_IS_POPULATED(dimms, idx) \
	((dimms[idx<<1].card_type != RAW_CARD_UNPOPULATED) \
	|| (dimms[(idx<<1) + 1].card_type != RAW_CARD_UNPOPULATED))
#define CHANNEL_IS_CARDF(dimms, idx) \
	((dimms[idx<<1].card_type == 0xf) \
	|| (dimms[(idx<<1) + 1].card_type == 0xf))
#define IF_CHANNEL_POPULATED(dimms, idx) \
	if ((dimms[idx<<1].card_type != RAW_CARD_UNPOPULATED) \
	|| (dimms[(idx<<1) + 1].card_type != RAW_CARD_UNPOPULATED))
#define FOR_EACH_CHANNEL(idx) \
	for (idx = 0; idx < TOTAL_CHANNELS; ++idx)
#define FOR_EACH_POPULATED_CHANNEL(dimms, idx) \
	FOR_EACH_CHANNEL(idx) IF_CHANNEL_POPULATED(dimms, idx)

#define RANKS_PER_CHANNEL 4
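/*
 * Ranks 0 and 1 belong to the first DIMM of a channel and ranks 2 and 3 to
 * the second one, which is what the population checks below rely on.
 */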
#define RANK_IS_POPULATED(dimms, ch, r) \
	(((dimms[ch<<1].card_type != RAW_CARD_UNPOPULATED) && ((r) < dimms[ch<<1].ranks)) || \
	((dimms[(ch<<1) + 1].card_type != RAW_CARD_UNPOPULATED) && ((r) >= 2) && ((r) < (dimms[(ch<<1) + 1].ranks + 2))))
#define IF_RANK_POPULATED(dimms, ch, r) \
	if (((dimms[ch<<1].card_type != RAW_CARD_UNPOPULATED) \
	&& ((r) < dimms[ch<<1].ranks)) \
	|| ((dimms[(ch<<1) + 1].card_type != RAW_CARD_UNPOPULATED) \
	&& ((r) >= 2) && ((r) < (dimms[(ch<<1) + 1].ranks + 2))))
#define FOR_EACH_RANK_IN_CHANNEL(r) \
	for (r = 0; r < RANKS_PER_CHANNEL; ++r)
#define FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms, ch, r) \
	FOR_EACH_RANK_IN_CHANNEL(r) IF_RANK_POPULATED(dimms, ch, r)
#define FOR_EACH_RANK(ch, r) \
	FOR_EACH_CHANNEL(ch) FOR_EACH_RANK_IN_CHANNEL(r)
#define FOR_EACH_POPULATED_RANK(dimms, ch, r) \
	FOR_EACH_RANK(ch, r) IF_RANK_POPULATED(dimms, ch, r)
#define FOR_EACH_BYTELANE(l) \
	for (l = 0; l < TOTAL_BYTELANES; l++)
#define FOR_EACH_POPULATED_CHANNEL_AND_BYTELANE(dimms, ch, l) \
	FOR_EACH_POPULATED_CHANNEL (dimms, ch) FOR_EACH_BYTELANE(l)
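
/*
 * Example (illustrative): the iterators compose into nested loops plus a
 * population check, so walking every populated rank looks like:
 *
 *	u8 ch, r;
 *	FOR_EACH_POPULATED_RANK(s->dimms, ch, r)
 *		program_rank(s, ch, r);
 *
 * where 's' is a struct sysinfo * and 'program_rank' a hypothetical helper.
 */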

#define DDR3_MAX_CAS 18

enum fsb_clock {
	FSB_CLOCK_800MHz = 0,
	FSB_CLOCK_1066MHz = 1,
	FSB_CLOCK_1333MHz = 2,
};

enum mem_clock {
	MEM_CLOCK_400MHz = 0,
	MEM_CLOCK_533MHz = 1,
	MEM_CLOCK_667MHz = 2,
	MEM_CLOCK_800MHz = 3,
	MEM_CLOCK_1066MHz = 4,
	MEM_CLOCK_1333MHz = 5,
};

enum ddr {
	DDR2 = 2,
	DDR3 = 3,
};

enum ddrxspd {
	DDR2SPD = 0x8,
	DDR3SPD = 0xb,
};

enum chip_width { /* as in DDR3 spd */
	CHIP_WIDTH_x4 = 0,
	CHIP_WIDTH_x8 = 1,
	CHIP_WIDTH_x16 = 2,
	CHIP_WIDTH_x32 = 3,
};

enum chip_cap { /* as in DDR3 spd */
	CHIP_CAP_256M = 0,
	CHIP_CAP_512M = 1,
	CHIP_CAP_1G = 2,
	CHIP_CAP_2G = 3,
	CHIP_CAP_4G = 4,
	CHIP_CAP_8G = 5,
	CHIP_CAP_16G = 6,
};

struct dll_setting {
	u8 tap;
	u8 pi;
	u8 db_en;
	u8 db_sel;
	u8 clk_delay;
	u8 coarse;
};

struct rt_dqs_setting {
	u8 tap;
	u8 pi;
};

enum n_banks {
	N_BANKS_4 = 0,
	N_BANKS_8 = 1,
};

struct timings {
	unsigned int CAS;
	unsigned int tclk;
	enum fsb_clock fsb_clk;
	enum mem_clock mem_clk;
	unsigned int tRAS;
	unsigned int tRP;
	unsigned int tRCD;
	unsigned int tWR;
	unsigned int tRFC;
	unsigned int tWTR;
	unsigned int tRRD;
	unsigned int tRTP;
};

struct dimminfo {
	unsigned int card_type; /* 0xff: unpopulated,
				   0xa - 0xf: raw card type A - F */
	enum chip_width width;
	unsigned int page_size; /* of whole DIMM in bytes (4096 or 8192) */
	enum n_banks n_banks;
	unsigned int ranks;
	unsigned int rows;
	unsigned int cols;
	u16 spd_crc;
	u8 mirrored;
};

struct rcven_timings {
	u8 min_common_coarse;
	u8 coarse_offset[TOTAL_BYTELANES];
	u8 medium[TOTAL_BYTELANES];
	u8 tap[TOTAL_BYTELANES];
	u8 pi[TOTAL_BYTELANES];
};

/* The setup is up to two DIMMs per channel */
struct sysinfo {
	int boot_path;
	enum fsb_clock max_fsb;

	int dimm_config[2];
	int spd_type;
	int channel_capacity[2];
	struct timings selected_timings;
	struct dimminfo dimms[4];
	u8 spd_map[4];
	struct rcven_timings rcven_t[TOTAL_CHANNELS];
	/*
	 * The rt_dqs delay register for rank 0 seems to be used
	 * for all other ranks on the channel, so only save that one.
	 */
	struct rt_dqs_setting rt_dqs[TOTAL_CHANNELS][TOTAL_BYTELANES];
	struct dll_setting dqs_settings[TOTAL_CHANNELS][TOTAL_BYTELANES];
	struct dll_setting dq_settings[TOTAL_CHANNELS][TOTAL_BYTELANES];
	u8 nmode;
	u8 stacked_mode;
};
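
/* Values for sysinfo.boot_path and the boot_path argument of sdram_initialize() */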
#define BOOT_PATH_NORMAL 0
#define BOOT_PATH_WARM_RESET 1
#define BOOT_PATH_RESUME 2

enum ddr2_signals {
	CLKSET0 = 0,
	CTRL0,
	CLKSET1,
	CMD,
	CTRL1,
	CTRL2,
	CTRL3,
};

void x4x_early_init(void);
void x4x_late_init(int s3resume);
void mb_get_spd_map(u8 spd_map[4]);
void mb_pre_raminit_setup(int s3_resume);
u32 decode_igd_memory_size(u32 gms);
u32 decode_igd_gtt_size(u32 gsm);
u32 decode_tseg_size(const u32 esmramc);
int decode_pcie_bar(u32 *const base, u32 *const len);
void sdram_initialize(int boot_path, const u8 *spd_map);
void do_raminit(struct sysinfo *, int fast_boot);
void rcven(struct sysinfo *s);
u32 fsb_to_mhz(u32 speed);
u32 ddr_to_mhz(u32 speed);
u32 test_address(int channel, int rank);
void dqsset(u8 ch, u8 lane, const struct dll_setting *setting);
void dqset(u8 ch, u8 lane, const struct dll_setting *setting);
void rt_set_dqs(u8 channel, u8 lane, u8 rank,
		struct rt_dqs_setting *dqs_setting);
int do_write_training(struct sysinfo *s);
int do_read_training(struct sysinfo *s);
void search_write_leveling(struct sysinfo *s);
void send_jedec_cmd(const struct sysinfo *s, u8 r, u8 ch, u8 cmd, u32 val);

extern const struct dll_setting default_ddr2_667_ctrl[7];
extern const struct dll_setting default_ddr2_800_ctrl[7];
extern const struct dll_setting default_ddr3_800_ctrl[2][7];
extern const struct dll_setting default_ddr3_1067_ctrl[2][7];
extern const struct dll_setting default_ddr3_1333_ctrl[2][7];
extern const struct dll_setting default_ddr2_667_dqs[TOTAL_BYTELANES];
extern const struct dll_setting default_ddr2_800_dqs[TOTAL_BYTELANES];
extern const struct dll_setting default_ddr3_800_dqs[2][TOTAL_BYTELANES];
extern const struct dll_setting default_ddr3_1067_dqs[2][TOTAL_BYTELANES];
extern const struct dll_setting default_ddr3_1333_dqs[2][TOTAL_BYTELANES];
extern const struct dll_setting default_ddr2_667_dq[TOTAL_BYTELANES];
extern const struct dll_setting default_ddr2_800_dq[TOTAL_BYTELANES];
extern const struct dll_setting default_ddr3_800_dq[2][TOTAL_BYTELANES];
extern const struct dll_setting default_ddr3_1067_dq[2][TOTAL_BYTELANES];
extern const struct dll_setting default_ddr3_1333_dq[2][TOTAL_BYTELANES];
extern const u8 ddr3_emrs1_rtt_nom_config[16][4];
extern const u8 post_jedec_tab[3][4][2];
extern const u32 ddr3_c2_tab[2][3][6][2];
extern const u8 ddr3_c2_x264[3][6];
extern const u16 ddr3_c2_x23c[3][6];

#include <device/device.h>
struct acpi_rsdp;
unsigned long northbridge_write_acpi_tables(const struct device *device,
					    unsigned long start, struct acpi_rsdp *rsdp);

#endif /* __NORTHBRIDGE_INTEL_X4X_H__ */