/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2015 Timothy Pearson <tpearson@raptorengineeringinc.com>, Raptor Engineering
 * Copyright (C) 2010 Advanced Micro Devices, Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

/* Description: Main memory controller system configuration for DDR 3 */

/* KNOWN ISSUES - ERRATA
 *
 * Trtp is not calculated correctly when the controller is in 64-bit mode; it
 * is 1 busclock off. No fix planned. The controller is not ordinarily in
 * 64-bit mode.
 *
 * 32 Byte burst not supported. No fix planned. The controller is not
 * ordinarily in 64-bit mode.
 *
 * Trc precision does not use the extra JEDEC-defined fractional component.
 * Instead, Trc (coarse) is rounded up to the nearest 1 ns.
 *
 * Mini and Micro DIMM not supported. Only RDIMM, UDIMM, SO-DIMM defined types
 * supported.
 */

static u8 ReconfigureDIMMspare_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void DQSTiming_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void LoadDQSSigTmgRegs_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void HTMemMapInit_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void MCTMemClr_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void DCTMemClr_Init_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void DCTMemClr_Sync_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void MCTMemClrSync_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static u8 NodePresent_D(u8 Node);
static void SyncDCTsReady_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void StartupDCT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void ClearDCT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u8 AutoCycTiming_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void GetPresetmaxF_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void SPDGetTCL_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u8 AutoConfig_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u8 PlatformSpec_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void SPDSetBanks_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void StitchMemory_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u16 Get_Fk_D(u8 k);
static u8 Get_DIMMAddress_D(struct DCTStatStruc *pDCTstat, u8 i);
static void mct_preInitDCT(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void mct_initDCT(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void mct_DramInit(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u8 mct_PlatformSpec(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u8 mct_BeforePlatformSpec(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void mct_SyncDCTsReady(struct DCTStatStruc *pDCTstat);
static void Get_Trdrd(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void mct_AfterGetCLT(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u8 mct_SPDCalcWidth(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void mct_AfterStitchMemory(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u8 mct_DIMMPresence(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void Set_OtherTiming(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void Get_Twrwr(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void Get_Twrrd(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void Get_TrwtTO(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void Get_TrwtWB(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void Get_DqsRcvEnGross_Diff(struct DCTStatStruc *pDCTstat,
			u32 dev, uint8_t dct, u32 index_reg);
static void Get_WrDatGross_Diff(struct DCTStatStruc *pDCTstat, u8 dct,
			u32 dev, u32 index_reg);
static u16 Get_DqsRcvEnGross_MaxMin(struct DCTStatStruc *pDCTstat,
			u32 dev, uint8_t dct, u32 index_reg, u32 index);
static void mct_FinalMCT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static u16 Get_WrDatGross_MaxMin(struct DCTStatStruc *pDCTstat, u8 dct,
			u32 dev, u32 index_reg, u32 index);
static void mct_InitialMCT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void mct_init(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void clear_legacy_Mode(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void mct_HTMemMapExt(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void SetCSTriState(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void SetCKETriState(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void SetODTTriState(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void InitDDRPhy(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void InitPhyCompensation(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u32 mct_NodePresent_D(void);
static void mct_OtherTiming(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void mct_ResetDataStruct_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void mct_EarlyArbEn_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void mct_BeforeDramInit_Prod_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void mct_ProgramODT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
void mct_ClrClToNB_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static u8 CheckNBCOFEarlyArbEn(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
void mct_ClrWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void mct_BeforeDQSTrain_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void AfterDramInit_D(struct DCTStatStruc *pDCTstat, u8 dct);
static void mct_ResetDLL_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void ProgDramMRSReg_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void mct_DramInit_Sw_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u32 mct_DisDllShutdownSR(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u32 DramConfigLo, u8 dct);
static void mct_EnDllShutdownSR(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void ChangeMemClk(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
void SetTargetFreq(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);

static u32 mct_MR1Odt_RDimm(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct, u32 MrsChipSel);
static u32 mct_DramTermDyn_RDimm(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dimm);
static u32 mct_SetDramConfigMisc2(struct DCTStatStruc *pDCTstat, u8 dct, u32 misc2);
static void mct_BeforeDQSTrainSamp(struct DCTStatStruc *pDCTstat);
static void mct_WriteLevelization_HW(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA, uint8_t Pass);
static u8 Get_Latency_Diff(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void SyncSetting(struct DCTStatStruc *pDCTstat);
static u8 crcCheck(u8 smbaddr);
static void mct_ExtMCTConfig_Bx(struct DCTStatStruc *pDCTstat);
static void mct_ExtMCTConfig_Cx(struct DCTStatStruc *pDCTstat);

static void read_dqs_receiver_enable_control_registers(uint16_t* current_total_delay,
			uint32_t dev, uint8_t dct, uint8_t dimm, uint32_t index_reg);

static void read_dqs_write_timing_control_registers(uint16_t* current_total_delay,
			uint32_t dev, uint8_t dct, uint8_t dimm, uint32_t index_reg);

/*See mctAutoInitMCT header for index relationships to CL and T*/
static const u16 Table_F_k[] = {00,200,266,333,400,533 };
static const u8 Tab_BankAddr[] = {0x3F,0x01,0x09,0x3F,0x3F,0x11,0x0A,0x19,0x12,0x1A,0x21,0x22,0x23};
static const u8 Table_DQSRcvEn_Offset[] = {0x00,0x01,0x10,0x11,0x2};

/****************************************************************************
 Describe how platform maps MemClk pins to logical DIMMs. The MemClk pins
 are identified based on BKDG definition of Fn2x88[MemClkDis] bitmap.
 AGESA will base on this value to disable unused MemClk to save power.

 If MEMCLK_MAPPING contains all zeroes, AGESA will use the default
 MemClkDis setting based on package type.

 Example:
 BKDG definition of Fn2x88[MemClkDis] bitmap for AM3 package is like below:
	Bit	AM3/S1g3 pin name
	0	M[B,A]_CLK_H/L[0]
	1	M[B,A]_CLK_H/L[1]
	2	M[B,A]_CLK_H/L[2]
	3	M[B,A]_CLK_H/L[3]
	4	M[B,A]_CLK_H/L[4]
	5	M[B,A]_CLK_H/L[5]
	6	M[B,A]_CLK_H/L[6]
	7	M[B,A]_CLK_H/L[7]

 And platform has the following routing:
	CS0	M[B,A]_CLK_H/L[4]
	CS1	M[B,A]_CLK_H/L[2]
	CS2	M[B,A]_CLK_H/L[3]
	CS3	M[B,A]_CLK_H/L[5]

 Then:
			;	CS0	CS1	CS2	CS3	CS4	CS5	CS6	CS7
 MEMCLK_MAPPING	EQU	00010000b, 00000100b, 00001000b, 00100000b, 00000000b, 00000000b, 00000000b, 00000000b
*/
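
/* For reference only: a C-style rendering of the routing example above.
 * This is an illustrative sketch, not code used by the driver; the bit
 * values are taken directly from the MEMCLK_MAPPING example and happen to
 * match Tab_ManualCLKDis below.
 *
 *	// Fn2x88[MemClkDis] bitmap per chip select:
 *	// CS0 -> MemClk 4 (bit 4 = 0x10), CS1 -> MemClk 2 (0x04),
 *	// CS2 -> MemClk 3 (0x08), CS3 -> MemClk 5 (0x20), remaining CS unused.
 *	static const u8 example_memclk_mapping[8] = {
 *		0x10, 0x04, 0x08, 0x20, 0x00, 0x00, 0x00, 0x00
 *	};
 */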

/* ==========================================================================================
 * Set up clock pin to DIMM mappings,
 * NOTE: If you are not sure about the pin mappings, you can keep all MemClk signals active,
 * just set all entries in the relevant table(s) to 0xff.
 * ==========================================================================================
 */
static const u8 Tab_L1CLKDis[] = {0x20, 0x20, 0x10, 0x10, 0x08, 0x08, 0x04, 0x04};
static const u8 Tab_AM3CLKDis[] = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00};
static const u8 Tab_S1CLKDis[] = {0xA2, 0xA2, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};

/* C32: Enable CS0 - CS3 clocks (DIMM0 - DIMM1) */
static const u8 Tab_C32CLKDis[] = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00};

/* G34: Enable CS0 - CS3 clocks (DIMM0 - DIMM1) */
static const u8 Tab_G34CLKDis[] = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00};

static const u8 Tab_ManualCLKDis[] = {0x10, 0x04, 0x08, 0x20, 0x00, 0x00, 0x00, 0x00};
/* ========================================================================================== */

static const u8 Table_Comp_Rise_Slew_20x[] = {7, 3, 2, 2, 0xFF};
static const u8 Table_Comp_Rise_Slew_15x[] = {7, 7, 3, 2, 0xFF};
static const u8 Table_Comp_Fall_Slew_20x[] = {7, 5, 3, 2, 0xFF};
static const u8 Table_Comp_Fall_Slew_15x[] = {7, 7, 5, 3, 0xFF};

static uint8_t dct_ddr_voltage_index(struct DCTStatStruc *pDCTstat, uint8_t dct)
{
	uint8_t dimm;
	uint8_t ddr_voltage_index = 0;

	/* Find current DDR supply voltage for this DCT */
	for (dimm = 0; dimm < MAX_DIMMS_SUPPORTED; dimm++) {
		if (pDCTstat->DIMMValidDCT[dct] & (1 << dimm))
			ddr_voltage_index |= pDCTstat->DimmConfiguredVoltage[dimm];
	}
	if (ddr_voltage_index > 0x7) {
		printk(BIOS_DEBUG, "%s: Insufficient DDR supply voltage indicated! Configuring processor for 1.25V operation, but this attempt may fail...\n", __func__);
		ddr_voltage_index = 0x4;
	}
	if (ddr_voltage_index == 0x0) {
		printk(BIOS_DEBUG, "%s: No DDR supply voltage indicated! Configuring processor for 1.5V operation, but this attempt may fail...\n", __func__);
		ddr_voltage_index = 0x1;
	}

	return ddr_voltage_index;
}
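
/* Illustrative note (not part of the original source): ddr_voltage_index is a
 * bitmask OR'd together from the per-DIMM configured voltages, and the
 * calibration helpers below test bit 2 (1.25V) first, then bit 1 (1.35V),
 * then bit 0 (1.5V).
 *
 *	// Hypothetical example: DIMM0 configured for 1.5V (0x1) and DIMM1
 *	// for 1.35V (0x2) on the same DCT give ddr_voltage_index == 0x3,
 *	// so the 1.35V calibration tables are selected.
 */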

static uint16_t fam15h_mhz_to_memclk_config(uint16_t freq)
{
	uint16_t fam15h_freq_tab[] = {0, 0, 0, 0, 333, 0, 400, 0, 0, 0, 533, 0, 0, 0, 667, 0, 0, 0, 800, 0, 0, 0, 933};
	uint16_t iter;

	/* Compute the index value for the given frequency */
	for (iter = 0; iter <= 0x16; iter++) {
		if (fam15h_freq_tab[iter] == freq)
			break;
	}
	if ((iter <= 0x16) && (fam15h_freq_tab[iter] == freq))
		freq = iter;
	if (freq == 0)
		freq = 0x4;

	return freq;
}

static uint16_t fam10h_mhz_to_memclk_config(uint16_t freq)
{
	uint16_t fam10h_freq_tab[] = {0, 0, 0, 400, 533, 667, 800};
	uint16_t iter;

	/* Compute the index value for the given frequency */
	for (iter = 0; iter <= 0x6; iter++) {
		if (fam10h_freq_tab[iter] == freq)
			break;
	}
	if ((iter <= 0x6) && (fam10h_freq_tab[iter] == freq))
		freq = iter;
	if (freq == 0)
		freq = 0x3;

	return freq;
}

static uint16_t mhz_to_memclk_config(uint16_t freq)
{
	if (is_fam15h())
		return fam15h_mhz_to_memclk_config(freq);
	else
		return fam10h_mhz_to_memclk_config(freq) + 1;
}
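
/* Worked example (derived from the tables above, for illustration only):
 * on Fam15h, 800 MHz matches fam15h_freq_tab[0x12], so
 * mhz_to_memclk_config(800) returns 0x12; on Fam10h, 800 MHz matches
 * fam10h_freq_tab[0x6] and the +1 adjustment makes the wrapper return 0x7.
 * A zero/unset input frequency falls back to 0x4 (333 MHz) on Fam15h and to
 * 0x3 (400 MHz, returned as 0x4 by the wrapper) on Fam10h.
 */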

static uint32_t fam15h_phy_predriver_calibration_code(struct DCTStatStruc *pDCTstat, uint8_t dct, uint8_t drive_strength)
{
	uint8_t lrdimm = 0;
	uint8_t package_type;
	uint8_t ddr_voltage_index;
	uint32_t calibration_code = 0;
	uint16_t MemClkFreq = Get_NB32_DCT(pDCTstat->dev_dct, dct, 0x94) & 0x1f;

	ddr_voltage_index = dct_ddr_voltage_index(pDCTstat, dct);
	package_type = mctGet_NVbits(NV_PACK_TYPE);

	if (!lrdimm) {
		/* Not an LRDIMM */
		if ((package_type == PT_M2) || (package_type == PT_GR)) {
			/* Socket AM3 or G34 */
			if (ddr_voltage_index & 0x4) {
				/* 1.25V */
				/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 43 */
				if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)) {
					/* DDR3-667 - DDR3-800 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0xb6d;
					else if (drive_strength == 0x2)
						calibration_code = 0x924;
					else if (drive_strength == 0x3)
						calibration_code = 0x6db;
				} else if ((MemClkFreq == 0xa) || (MemClkFreq == 0xe)) {
					/* DDR3-1066 - DDR3-1333 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0xfff;
					else if (drive_strength == 0x2)
						calibration_code = 0xdb6;
					else if (drive_strength == 0x3)
						calibration_code = 0x924;
				} else if ((MemClkFreq == 0x12) || (MemClkFreq == 0x16)) {
					/* DDR3-1600 - DDR3-1866 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0xfff;
					else if (drive_strength == 0x2)
						calibration_code = 0xfff;
					else if (drive_strength == 0x3)
						calibration_code = 0xfff;
				}
			}
			else if (ddr_voltage_index & 0x2) {
				/* 1.35V */
				/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 42 */
				if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)) {
					/* DDR3-667 - DDR3-800 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0x924;
					else if (drive_strength == 0x2)
						calibration_code = 0x6db;
					else if (drive_strength == 0x3)
						calibration_code = 0x492;
				} else if ((MemClkFreq == 0xa) || (MemClkFreq == 0xe)) {
					/* DDR3-1066 - DDR3-1333 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0xdb6;
					else if (drive_strength == 0x2)
						calibration_code = 0xbd6;
					else if (drive_strength == 0x3)
						calibration_code = 0x6db;
				} else if ((MemClkFreq == 0x12) || (MemClkFreq == 0x16)) {
					/* DDR3-1600 - DDR3-1866 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0xfff;
					else if (drive_strength == 0x2)
						calibration_code = 0xfff;
					else if (drive_strength == 0x3)
						calibration_code = 0xdb6;
				}
			}
			else if (ddr_voltage_index & 0x1) {
				/* 1.5V */
				/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 41 */
				if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)) {
					/* DDR3-667 - DDR3-800 */
					if (drive_strength == 0x0)
						calibration_code = 0xb6d;
					else if (drive_strength == 0x1)
						calibration_code = 0x6db;
					else if (drive_strength == 0x2)
						calibration_code = 0x492;
					else if (drive_strength == 0x3)
						calibration_code = 0x492;
				} else if ((MemClkFreq == 0xa) || (MemClkFreq == 0xe)) {
					/* DDR3-1066 - DDR3-1333 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0x924;
					else if (drive_strength == 0x2)
						calibration_code = 0x6db;
					else if (drive_strength == 0x3)
						calibration_code = 0x6db;
				} else if ((MemClkFreq == 0x12) || (MemClkFreq == 0x16)) {
					/* DDR3-1600 - DDR3-1866 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0xfff;
					else if (drive_strength == 0x2)
						calibration_code = 0xfff;
					else if (drive_strength == 0x3)
						calibration_code = 0xb6d;
				}
			}
		}
		else if (package_type == PT_C3) {
			/* Socket C32 */
			if (ddr_voltage_index & 0x4) {
				/* 1.25V */
				/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 46 */
				if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)) {
					/* DDR3-667 - DDR3-800 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0xb6d;
					else if (drive_strength == 0x2)
						calibration_code = 0x924;
					else if (drive_strength == 0x3)
						calibration_code = 0x6db;
				} else if (MemClkFreq == 0xa) {
					/* DDR3-1066 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0xfff;
					else if (drive_strength == 0x2)
						calibration_code = 0xdb6;
					else if (drive_strength == 0x3)
						calibration_code = 0x924;
				} else if (MemClkFreq == 0xe) {
					/* DDR3-1333 */
					if (drive_strength == 0x0)
						calibration_code = 0xb6d;
					else if (drive_strength == 0x1)
						calibration_code = 0x6db;
					else if (drive_strength == 0x2)
						calibration_code = 0x492;
					else if (drive_strength == 0x3)
						calibration_code = 0x492;
				} else if ((MemClkFreq == 0x12) || (MemClkFreq == 0x16)) {
					/* DDR3-1600 - DDR3-1866 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0xfff;
					else if (drive_strength == 0x2)
						calibration_code = 0xfff;
					else if (drive_strength == 0x3)
						calibration_code = 0xfff;
				}
			}
			else if (ddr_voltage_index & 0x2) {
				/* 1.35V */
				/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 45 */
				if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)) {
					/* DDR3-667 - DDR3-800 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0x924;
					else if (drive_strength == 0x2)
						calibration_code = 0x6db;
					else if (drive_strength == 0x3)
						calibration_code = 0x492;
				} else if (MemClkFreq == 0xa) {
					/* DDR3-1066 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0xdb6;
					else if (drive_strength == 0x2)
						calibration_code = 0xb6d;
					else if (drive_strength == 0x3)
						calibration_code = 0x6db;
				} else if (MemClkFreq == 0xe) {
					/* DDR3-1333 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0x924;
					else if (drive_strength == 0x2)
						calibration_code = 0x6db;
					else if (drive_strength == 0x3)
						calibration_code = 0x492;
				} else if ((MemClkFreq == 0x12) || (MemClkFreq == 0x16)) {
					/* DDR3-1600 - DDR3-1866 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0xfff;
					else if (drive_strength == 0x2)
						calibration_code = 0xfff;
					else if (drive_strength == 0x3)
						calibration_code = 0xdb6;
				}
			}
			else if (ddr_voltage_index & 0x1) {
				/* 1.5V */
				/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 44 */
				if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)) {
					/* DDR3-667 - DDR3-800 */
					if (drive_strength == 0x0)
						calibration_code = 0xb6d;
					else if (drive_strength == 0x1)
						calibration_code = 0x6db;
					else if (drive_strength == 0x2)
						calibration_code = 0x492;
					else if (drive_strength == 0x3)
						calibration_code = 0x492;
				} else if (MemClkFreq == 0xa) {
					/* DDR3-1066 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0x924;
					else if (drive_strength == 0x2)
						calibration_code = 0x6db;
					else if (drive_strength == 0x3)
						calibration_code = 0x6db;
				} else if (MemClkFreq == 0xe) {
					/* DDR3-1333 */
					if (drive_strength == 0x0)
						calibration_code = 0xb6d;
					else if (drive_strength == 0x1)
						calibration_code = 0x6db;
					else if (drive_strength == 0x2)
						calibration_code = 0x492;
					else if (drive_strength == 0x3)
						calibration_code = 0x492;
				} else if ((MemClkFreq == 0x12) || (MemClkFreq == 0x16)) {
					/* DDR3-1600 - DDR3-1866 */
					if (drive_strength == 0x0)
						calibration_code = 0xfff;
					else if (drive_strength == 0x1)
						calibration_code = 0xfff;
					else if (drive_strength == 0x2)
						calibration_code = 0xfff;
					else if (drive_strength == 0x3)
						calibration_code = 0xb6d;
				}
			}
		}
	} else {
		/* LRDIMM */

		/* TODO
		 * Implement LRDIMM support
		 * See Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Tables 47 - 49
		 */
	}

	return calibration_code;
}
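
/* Worked example (a reading of the lookup above, illustrative only): for a
 * G34 package (PT_GR) running at 1.5V with MemClkFreq == 0xe (DDR3-1333) and
 * drive_strength == 0x1, the function returns 0x924, i.e. the value
 * transcribed above from Fam15h BKDG Rev. 3.14 Table 41.
 */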

static uint32_t fam15h_phy_predriver_cmd_addr_calibration_code(struct DCTStatStruc *pDCTstat, uint8_t dct, uint8_t drive_strength)
{
	uint8_t ddr_voltage_index;
	uint32_t calibration_code = 0;
	uint16_t MemClkFreq = Get_NB32_DCT(pDCTstat->dev_dct, dct, 0x94) & 0x1f;

	ddr_voltage_index = dct_ddr_voltage_index(pDCTstat, dct);

	if (ddr_voltage_index & 0x4) {
		/* 1.25V */
		/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 52 */
		if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)) {
			/* DDR3-667 - DDR3-800 */
			if (drive_strength == 0x0)
				calibration_code = 0x492;
			else if (drive_strength == 0x1)
				calibration_code = 0x492;
			else if (drive_strength == 0x2)
				calibration_code = 0x492;
			else if (drive_strength == 0x3)
				calibration_code = 0x492;
		} else if ((MemClkFreq == 0xa) || (MemClkFreq == 0xe)) {
			/* DDR3-1066 - DDR3-1333 */
			if (drive_strength == 0x0)
				calibration_code = 0xdad;
			else if (drive_strength == 0x1)
				calibration_code = 0x924;
			else if (drive_strength == 0x2)
				calibration_code = 0x6db;
			else if (drive_strength == 0x3)
				calibration_code = 0x492;
		} else if ((MemClkFreq == 0x12) || (MemClkFreq == 0x16)) {
			/* DDR3-1600 - DDR3-1866 */
			if (drive_strength == 0x0)
				calibration_code = 0xff6;
			else if (drive_strength == 0x1)
				calibration_code = 0xdad;
			else if (drive_strength == 0x2)
				calibration_code = 0xb64;
			else if (drive_strength == 0x3)
				calibration_code = 0xb64;
		}
	}
	else if (ddr_voltage_index & 0x2) {
		/* 1.35V */
		/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 51 */
		if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)) {
			/* DDR3-667 - DDR3-800 */
			if (drive_strength == 0x0)
				calibration_code = 0x492;
			else if (drive_strength == 0x1)
				calibration_code = 0x492;
			else if (drive_strength == 0x2)
				calibration_code = 0x492;
			else if (drive_strength == 0x3)
				calibration_code = 0x492;
		} else if ((MemClkFreq == 0xa) || (MemClkFreq == 0xe)) {
			/* DDR3-1066 - DDR3-1333 */
			if (drive_strength == 0x0)
				calibration_code = 0x924;
			else if (drive_strength == 0x1)
				calibration_code = 0x6db;
			else if (drive_strength == 0x2)
				calibration_code = 0x6db;
			else if (drive_strength == 0x3)
				calibration_code = 0x6db;
		} else if ((MemClkFreq == 0x12) || (MemClkFreq == 0x16)) {
			/* DDR3-1600 - DDR3-1866 */
			if (drive_strength == 0x0)
				calibration_code = 0xb6d;
			else if (drive_strength == 0x1)
				calibration_code = 0xb6d;
			else if (drive_strength == 0x2)
				calibration_code = 0x924;
			else if (drive_strength == 0x3)
				calibration_code = 0x924;
		}
	}
	else if (ddr_voltage_index & 0x1) {
		/* 1.5V */
		/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 50 */
		if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)) {
			/* DDR3-667 - DDR3-800 */
			if (drive_strength == 0x0)
				calibration_code = 0x492;
			else if (drive_strength == 0x1)
				calibration_code = 0x492;
			else if (drive_strength == 0x2)
				calibration_code = 0x492;
			else if (drive_strength == 0x3)
				calibration_code = 0x492;
		} else if ((MemClkFreq == 0xa) || (MemClkFreq == 0xe)) {
			/* DDR3-1066 - DDR3-1333 */
			if (drive_strength == 0x0)
				calibration_code = 0x6db;
			else if (drive_strength == 0x1)
				calibration_code = 0x6db;
			else if (drive_strength == 0x2)
				calibration_code = 0x6db;
			else if (drive_strength == 0x3)
				calibration_code = 0x6db;
		} else if ((MemClkFreq == 0x12) || (MemClkFreq == 0x16)) {
			/* DDR3-1600 - DDR3-1866 */
			if (drive_strength == 0x0)
				calibration_code = 0xb6d;
			else if (drive_strength == 0x1)
				calibration_code = 0xb6d;
			else if (drive_strength == 0x2)
				calibration_code = 0xb6d;
			else if (drive_strength == 0x3)
				calibration_code = 0xb6d;
		}
	}

	return calibration_code;
}

static uint32_t fam15h_phy_predriver_clk_calibration_code(struct DCTStatStruc *pDCTstat, uint8_t dct, uint8_t drive_strength)
{
	uint8_t ddr_voltage_index;
	uint32_t calibration_code = 0;
	uint16_t MemClkFreq = Get_NB32_DCT(pDCTstat->dev_dct, dct, 0x94) & 0x1f;

	ddr_voltage_index = dct_ddr_voltage_index(pDCTstat, dct);

	if (ddr_voltage_index & 0x4) {
		/* 1.25V */
		/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 55 */
		if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)) {
			/* DDR3-667 - DDR3-800 */
			if (drive_strength == 0x0)
				calibration_code = 0xdad;
			else if (drive_strength == 0x1)
				calibration_code = 0xdad;
			else if (drive_strength == 0x2)
				calibration_code = 0x924;
			else if (drive_strength == 0x3)
				calibration_code = 0x924;
		} else if ((MemClkFreq == 0xa) || (MemClkFreq == 0xe)) {
			/* DDR3-1066 - DDR3-1333 */
			if (drive_strength == 0x0)
				calibration_code = 0xff6;
			else if (drive_strength == 0x1)
				calibration_code = 0xff6;
			else if (drive_strength == 0x2)
				calibration_code = 0xff6;
			else if (drive_strength == 0x3)
				calibration_code = 0xff6;
		} else if ((MemClkFreq == 0x12) || (MemClkFreq == 0x16)) {
			/* DDR3-1600 - DDR3-1866 */
			if (drive_strength == 0x0)
				calibration_code = 0xff6;
			else if (drive_strength == 0x1)
				calibration_code = 0xff6;
			else if (drive_strength == 0x2)
				calibration_code = 0xff6;
			else if (drive_strength == 0x3)
				calibration_code = 0xff6;
		}
	}
	else if (ddr_voltage_index & 0x2) {
		/* 1.35V */
		/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 54 */
		if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)) {
			/* DDR3-667 - DDR3-800 */
			if (drive_strength == 0x0)
				calibration_code = 0xdad;
			else if (drive_strength == 0x1)
				calibration_code = 0xdad;
			else if (drive_strength == 0x2)
				calibration_code = 0x924;
			else if (drive_strength == 0x3)
				calibration_code = 0x924;
		} else if ((MemClkFreq == 0xa) || (MemClkFreq == 0xe)) {
			/* DDR3-1066 - DDR3-1333 */
			if (drive_strength == 0x0)
				calibration_code = 0xff6;
			else if (drive_strength == 0x1)
				calibration_code = 0xff6;
			else if (drive_strength == 0x2)
				calibration_code = 0xff6;
			else if (drive_strength == 0x3)
				calibration_code = 0xdad;
		} else if ((MemClkFreq == 0x12) || (MemClkFreq == 0x16)) {
			/* DDR3-1600 - DDR3-1866 */
			if (drive_strength == 0x0)
				calibration_code = 0xff6;
			else if (drive_strength == 0x1)
				calibration_code = 0xff6;
			else if (drive_strength == 0x2)
				calibration_code = 0xff6;
			else if (drive_strength == 0x3)
				calibration_code = 0xdad;
		}
	}
	else if (ddr_voltage_index & 0x1) {
		/* 1.5V */
		/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 53 */
		if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)) {
			/* DDR3-667 - DDR3-800 */
			if (drive_strength == 0x0)
				calibration_code = 0x924;
			else if (drive_strength == 0x1)
				calibration_code = 0x924;
			else if (drive_strength == 0x2)
				calibration_code = 0x924;
			else if (drive_strength == 0x3)
				calibration_code = 0x924;
		} else if ((MemClkFreq == 0xa) || (MemClkFreq == 0xe)) {
			/* DDR3-1066 - DDR3-1333 */
			if (drive_strength == 0x0)
				calibration_code = 0xff6;
			else if (drive_strength == 0x1)
				calibration_code = 0xff6;
			else if (drive_strength == 0x2)
				calibration_code = 0xff6;
			else if (drive_strength == 0x3)
				calibration_code = 0xb6d;
		} else if ((MemClkFreq == 0x12) || (MemClkFreq == 0x16)) {
			/* DDR3-1600 - DDR3-1866 */
			if (drive_strength == 0x0)
				calibration_code = 0xff6;
			else if (drive_strength == 0x1)
				calibration_code = 0xff6;
			else if (drive_strength == 0x2)
				calibration_code = 0xff6;
			else if (drive_strength == 0x3)
				calibration_code = 0xff6;
		}
	}

	return calibration_code;
}

static uint32_t fam15h_output_driver_compensation_code(struct DCTStatStruc *pDCTstat, uint8_t dct)
{
	/* FIXME
	 * Mainboards need to be able to specify the maximum number of DIMMs installable per channel
	 * For now assume a maximum of 2 DIMMs per channel can be installed
	 */
	uint8_t MaxDimmsInstallable = 2;

	uint8_t package_type;
	uint32_t calibration_code = 0;

	package_type = mctGet_NVbits(NV_PACK_TYPE);
	uint16_t MemClkFreq = Get_NB32_DCT(pDCTstat->dev_dct, dct, 0x94) & 0x1f;

	/* Obtain number of DIMMs on channel */
	uint8_t dimm_count = pDCTstat->MAdimms[dct];
	uint8_t rank_count_dimm0;
	uint8_t rank_count_dimm1;

	if (package_type == PT_GR) {
		/* Socket G34 */
		/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 73 */
		if (MaxDimmsInstallable == 1) {
			if (MemClkFreq == 0x4) {
				/* DDR3-667 */
				calibration_code = 0x00112222;
			}
			else if (MemClkFreq == 0x6) {
				/* DDR3-800 */
				calibration_code = 0x10112222;
			}
			else if (MemClkFreq == 0xa) {
				/* DDR3-1066 */
				calibration_code = 0x20112222;
			}
			else if ((MemClkFreq == 0xe) || (MemClkFreq == 0x12)) {
				/* DDR3-1333 - DDR3-1600 */
				calibration_code = 0x30112222;
			}
			else if (MemClkFreq == 0x16) {
				/* DDR3-1866 */
				calibration_code = 0x30332222;
			}
		} else if (MaxDimmsInstallable == 2) {
			if (dimm_count == 1) {
				/* 1 DIMM detected */
				if (MemClkFreq == 0x4) {
					/* DDR3-667 */
					calibration_code = 0x00112222;
				}
				else if (MemClkFreq == 0x6) {
					/* DDR3-800 */
					calibration_code = 0x10112222;
				}
				else if (MemClkFreq == 0xa) {
					/* DDR3-1066 */
					calibration_code = 0x20112222;
				}
				else if ((MemClkFreq == 0xe) || (MemClkFreq == 0x12)) {
					/* DDR3-1333 - DDR3-1600 */
					calibration_code = 0x30112222;
				}
			} else if (dimm_count == 2) {
				/* 2 DIMMs detected */
				rank_count_dimm0 = pDCTstat->C_DCTPtr[dct]->DimmRanks[0];
				rank_count_dimm1 = pDCTstat->C_DCTPtr[dct]->DimmRanks[1];

				if (MemClkFreq == 0x4) {
					/* DDR3-667 */
					calibration_code = 0x10222222;
				}
				else if (MemClkFreq == 0x6) {
					/* DDR3-800 */
					calibration_code = 0x20222222;
				}
				else if (MemClkFreq == 0xa) {
					/* DDR3-1066 */
					calibration_code = 0x30222222;
				}
				else if (MemClkFreq == 0xe) {
					/* DDR3-1333 */
					calibration_code = 0x30222222;
				}
				else if (MemClkFreq == 0x12) {
					/* DDR3-1600 */
					if ((rank_count_dimm0 == 1) && (rank_count_dimm1 == 1))
						calibration_code = 0x30222222;
					else
						calibration_code = 0x30112222;
				}
			}
		} else if (MaxDimmsInstallable == 3) {
			/* TODO
			 * 3 DIMM/channel support unimplemented
			 */
		}
	} else {
		/* TODO
		 * Other socket support unimplemented
		 */
	}

	return calibration_code;
}

static uint32_t fam15h_address_timing_compensation_code(struct DCTStatStruc *pDCTstat, uint8_t dct)
{
	/* FIXME
	 * Mainboards need to be able to specify the maximum number of DIMMs installable per channel
	 * For now assume a maximum of 2 DIMMs per channel can be installed
	 */
	uint8_t MaxDimmsInstallable = 2;

	uint8_t package_type;
	uint32_t calibration_code = 0;

	package_type = mctGet_NVbits(NV_PACK_TYPE);
	uint16_t MemClkFreq = Get_NB32_DCT(pDCTstat->dev_dct, dct, 0x94) & 0x1f;

	/* Obtain number of DIMMs on channel */
	uint8_t dimm_count = pDCTstat->MAdimms[dct];
	uint8_t rank_count_dimm0;
	uint8_t rank_count_dimm1;

	if (package_type == PT_GR) {
		/* Socket G34 */
		/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 73 */
		if (MaxDimmsInstallable == 1) {
			rank_count_dimm0 = pDCTstat->C_DCTPtr[dct]->DimmRanks[1];

			if (MemClkFreq == 0x4) {
				/* DDR3-667 */
				if (rank_count_dimm0 == 1)
					calibration_code = 0x00000000;
				else
					calibration_code = 0x003b0000;
			} else if (MemClkFreq == 0x6) {
				/* DDR3-800 */
				if (rank_count_dimm0 == 1)
					calibration_code = 0x00000000;
				else
					calibration_code = 0x003b0000;
			} else if (MemClkFreq == 0xa) {
				/* DDR3-1066 */
				calibration_code = 0x00383837;
			} else if (MemClkFreq == 0xe) {
				/* DDR3-1333 */
				calibration_code = 0x00363635;
			} else if (MemClkFreq == 0x12) {
				/* DDR3-1600 */
				if (rank_count_dimm0 == 1)
					calibration_code = 0x00353533;
				else
					calibration_code = 0x00003533;
			} else if (MemClkFreq == 0x16) {
				/* DDR3-1866 */
				calibration_code = 0x00333330;
			}
		} else if (MaxDimmsInstallable == 2) {
			if (dimm_count == 1) {
				/* 1 DIMM detected */
				rank_count_dimm0 = pDCTstat->C_DCTPtr[dct]->DimmRanks[1];

				if (MemClkFreq == 0x4) {
					/* DDR3-667 */
					if (rank_count_dimm0 == 1)
						calibration_code = 0x00000000;
					else
						calibration_code = 0x003b0000;
				} else if (MemClkFreq == 0x6) {
					/* DDR3-800 */
					if (rank_count_dimm0 == 1)
						calibration_code = 0x00000000;
					else
						calibration_code = 0x003b0000;
				} else if (MemClkFreq == 0xa) {
					/* DDR3-1066 */
					calibration_code = 0x00383837;
				} else if (MemClkFreq == 0xe) {
					/* DDR3-1333 */
					calibration_code = 0x00363635;
				} else if (MemClkFreq == 0x12) {
					/* DDR3-1600 */
					if (rank_count_dimm0 == 1)
						calibration_code = 0x00353533;
					else
						calibration_code = 0x00003533;
				}
			} else if (dimm_count == 2) {
				/* 2 DIMMs detected */
				rank_count_dimm0 = pDCTstat->C_DCTPtr[dct]->DimmRanks[0];
				rank_count_dimm1 = pDCTstat->C_DCTPtr[dct]->DimmRanks[1];

				if (MemClkFreq == 0x4) {
					/* DDR3-667 */
					calibration_code = 0x00390039;
				} else if (MemClkFreq == 0x6) {
					/* DDR3-800 */
					calibration_code = 0x00390039;
				} else if (MemClkFreq == 0xa) {
					/* DDR3-1066 */
					calibration_code = 0x003a3a3a;
				} else if (MemClkFreq == 0xe) {
					/* DDR3-1333 */
					calibration_code = 0x00003939;
				} else if (MemClkFreq == 0x12) {
					/* DDR3-1600 */
					if ((rank_count_dimm0 == 1) && (rank_count_dimm1 == 1))
						calibration_code = 0x00003738;
				}
			}
		} else if (MaxDimmsInstallable == 3) {
			/* TODO
			 * 3 DIMM/channel support unimplemented
			 */
		}
	} else {
		/* TODO
		 * Other socket support unimplemented
		 */
	}

	return calibration_code;
}

static uint8_t fam15h_slow_access_mode(struct DCTStatStruc *pDCTstat, uint8_t dct)
{
	/* FIXME
	 * Mainboards need to be able to specify the maximum number of DIMMs installable per channel
	 * For now assume a maximum of 2 DIMMs per channel can be installed
	 */
	uint8_t MaxDimmsInstallable = 2;

	uint8_t package_type;
	uint32_t slow_access = 0;

	package_type = mctGet_NVbits(NV_PACK_TYPE);
	uint16_t MemClkFreq = Get_NB32_DCT(pDCTstat->dev_dct, dct, 0x94) & 0x1f;

	/* Obtain number of DIMMs on channel */
	uint8_t dimm_count = pDCTstat->MAdimms[dct];
	uint8_t rank_count_dimm0;
	uint8_t rank_count_dimm1;

	if (package_type == PT_GR) {
		/* Socket G34 */
		/* Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 Table 73 */
		if (MaxDimmsInstallable == 1) {
			rank_count_dimm0 = pDCTstat->C_DCTPtr[dct]->DimmRanks[1];

			if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)
				|| (MemClkFreq == 0xa) || (MemClkFreq == 0xe)) {
				/* DDR3-667 - DDR3-1333 */
				slow_access = 0;
			} else if ((MemClkFreq == 0x12) || (MemClkFreq == 0x16)) {
				/* DDR3-1600 - DDR3-1866 */
				if (rank_count_dimm0 == 1)
					slow_access = 0;
				else
					slow_access = 1;
			}
		} else if (MaxDimmsInstallable == 2) {
			if (dimm_count == 1) {
				/* 1 DIMM detected */
				rank_count_dimm0 = pDCTstat->C_DCTPtr[dct]->DimmRanks[1];

				if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)
					|| (MemClkFreq == 0xa) || (MemClkFreq == 0xe)) {
					/* DDR3-667 - DDR3-1333 */
					slow_access = 0;
				}
				else if (MemClkFreq == 0x12) {
					/* DDR3-1600 */
					if (rank_count_dimm0 == 1)
						slow_access = 0;
					else
						slow_access = 1;
				}
			} else if (dimm_count == 2) {
				/* 2 DIMMs detected */
				rank_count_dimm0 = pDCTstat->C_DCTPtr[dct]->DimmRanks[0];
				rank_count_dimm1 = pDCTstat->C_DCTPtr[dct]->DimmRanks[1];

				if ((MemClkFreq == 0x4) || (MemClkFreq == 0x6)
					|| (MemClkFreq == 0xa)) {
					/* DDR3-667 - DDR3-1066 */
					slow_access = 0;
				}
				else if ((MemClkFreq == 0xe) || (MemClkFreq == 0x12)) {
					/* DDR3-1333 - DDR3-1600 */
					slow_access = 1;
				}
			}
		} else if (MaxDimmsInstallable == 3) {
			/* TODO
			 * 3 DIMM/channel support unimplemented
			 */
		}
	} else {
		/* TODO
		 * Other socket support unimplemented
		 */
	}

	return slow_access;
}

static void set_2t_configuration(struct MCTStatStruc *pMCTstat,
				struct DCTStatStruc *pDCTstat, u8 dct)
{
	uint32_t dev;
	uint32_t reg;
	uint32_t dword;

	uint8_t enable_slow_access_mode = 0;
	dev = pDCTstat->dev_dct;

	if (is_fam15h()) {
		if (pDCTstat->_2Tmode)
			enable_slow_access_mode = 1;
	} else {
		if (pDCTstat->_2Tmode == 2)
			enable_slow_access_mode = 1;
	}

	reg = 0x94; /* DRAM Configuration High */
	dword = Get_NB32_DCT(dev, dct, reg);
	if (enable_slow_access_mode)
		dword |= (0x1 << 20); /* Set 2T CMD mode */
	else
		dword &= ~(0x1 << 20); /* Clear 2T CMD mode */
	Set_NB32_DCT(dev, dct, reg, dword);
}

static void precise_ndelay_fam15(struct MCTStatStruc *pMCTstat, uint32_t nanoseconds) {
	msr_t tsc_msr;
	uint64_t cycle_count = (((uint64_t)pMCTstat->TSCFreq) * nanoseconds) / 1000;
	uint64_t start_timestamp;
	uint64_t current_timestamp;

	tsc_msr = rdmsr(0x00000010);
	start_timestamp = (((uint64_t)tsc_msr.hi) << 32) | tsc_msr.lo;
	do {
		tsc_msr = rdmsr(0x00000010);
		current_timestamp = (((uint64_t)tsc_msr.hi) << 32) | tsc_msr.lo;
	} while ((current_timestamp - start_timestamp) < cycle_count);
}

static void precise_memclk_delay_fam15(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat, uint8_t dct, uint32_t clocks) {
	uint16_t memclk_freq;
	uint32_t delay_ns;
	uint16_t fam15h_freq_tab[] = {0, 0, 0, 0, 333, 0, 400, 0, 0, 0, 533, 0, 0, 0, 667, 0, 0, 0, 800, 0, 0, 0, 933};

	memclk_freq = Get_NB32_DCT(pDCTstat->dev_dct, dct, 0x94) & 0x1f;

	delay_ns = (((uint64_t)clocks * 1000) / fam15h_freq_tab[memclk_freq]);
	precise_ndelay_fam15(pMCTstat, delay_ns);
}
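
/* Worked example (illustrative, based on fam15h_freq_tab above): a delay of
 * 100 MEMCLKs with MemClkFreq encoded as 0x12 (800 MHz) becomes
 * (100 * 1000) / 800 = 125 ns, and precise_ndelay_fam15() then busy-waits on
 * the TSC for (TSCFreq * 125) / 1000 cycles, which appears to assume that
 * TSCFreq is stored in MHz.
 */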

static void mctAutoInitMCT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
{
	/*
	 * Memory may be mapped contiguously all the way up to 4GB (depending on setup
	 * options). It is the responsibility of PCI subsystem to create an uncacheable
	 * IO region below 4GB and to adjust TOP_MEM downward prior to any IO mapping or
	 * accesses. It is the same responsibility of the CPU sub-system prior to
	 * accessing LAPIC.
	 *
	 * Slot Number is an external convention, and is determined by OEM with accompanying
	 * silk screening. OEM may choose to use Slot number convention which is consistent
	 * with DIMM number conventions. All AMD engineering platforms do.
	 *
	 * Build Requirements:
	 * 1. MCT_SEG0_START and MCT_SEG0_END macros to begin and end the code segment,
	 *    defined in mcti.inc.
	 *
	 * Run-Time Requirements:
	 * 1. Complete Hypertransport Bus Configuration
	 * 2. SMBus Controller Initialized
	 * 3. BSP in Big Real Mode
	 * 4. Stack at SS:SP, located somewhere between A000:0000 and F000:FFFF
	 * 5. Checksummed or Valid NVRAM bits
	 * 6. MCG_CTL=-1, MC4_CTL_EN=0 for all CPUs
	 * 7. MCi_STS from shutdown/warm reset recorded (if desired) prior to entry
	 * 8. All var MTRRs reset to zero
	 * 9. State of NB_CFG.DisDatMsk set properly on all CPUs
	 * 10. All CPUs at 2GHz Speed (unless DQS training is not installed).
	 * 11. All cHT links at max Speed/Width (unless DQS training is not installed).
	 *
	 *
	 * Global relationship between index values and item values:
	 *
	 *	  pDCTstat.CASL	pDCTstat.Speed
	 *	j   CL(j)	k   F(k)
	 *	--------------------------
	 *	0   2.0		-    -
	 *	1   3.0		1    200 MHz
	 *	2   4.0		2    266 MHz
	 *	3   5.0		3    333 MHz
	 *	4   6.0		4    400 MHz
	 *	5   7.0		5    533 MHz
	 *	6   8.0		6    667 MHz
	 *	7   9.0		7    800 MHz
	 */
	u8 Node, NodesWmem;
	u32 node_sys_base;

	uint8_t s3resume = acpi_is_wakeup_s3();

restartinit:
	mctInitMemGPIOs_A_D();		/* Set any required GPIOs*/
	if (s3resume) {
		printk(BIOS_DEBUG, "mctAutoInitMCT_D: mct_ForceNBPState0_En_Fam15\n");
		for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
			struct DCTStatStruc *pDCTstat;
			pDCTstat = pDCTstatA + Node;

			mct_ForceNBPState0_En_Fam15(pMCTstat, pDCTstat);
		}

#if IS_ENABLED(CONFIG_HAVE_ACPI_RESUME)
		printk(BIOS_DEBUG, "mctAutoInitMCT_D: Restoring DCT configuration from NVRAM\n");
		restore_mct_information_from_nvram();
#endif

		printk(BIOS_DEBUG, "mctAutoInitMCT_D: mct_ForceNBPState0_Dis_Fam15\n");
		for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
			struct DCTStatStruc *pDCTstat;
			pDCTstat = pDCTstatA + Node;

			mct_ForceNBPState0_Dis_Fam15(pMCTstat, pDCTstat);
		}
	} else {
		NodesWmem = 0;
		node_sys_base = 0;
		for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
			struct DCTStatStruc *pDCTstat;
			pDCTstat = pDCTstatA + Node;

			/* Zero out data structures to avoid false detection of DIMMs */
			memset(pDCTstat, 0, sizeof(struct DCTStatStruc));

			/* Initialize data structures */
			pDCTstat->Node_ID = Node;
			pDCTstat->dev_host = PA_HOST(Node);
			pDCTstat->dev_map = PA_MAP(Node);
			pDCTstat->dev_dct = PA_DCT(Node);
			pDCTstat->dev_nbmisc = PA_NBMISC(Node);
			pDCTstat->dev_link = PA_LINK(Node);
			pDCTstat->dev_nbctl = PA_NBCTL(Node);
			pDCTstat->NodeSysBase = node_sys_base;

			printk(BIOS_DEBUG, "%s: mct_init Node %d\n", __func__, Node);
			mct_init(pMCTstat, pDCTstat);
			mctNodeIDDebugPort_D();
			pDCTstat->NodePresent = NodePresent_D(Node);
			if (pDCTstat->NodePresent) {
				pDCTstat->LogicalCPUID = mctGetLogicalCPUID_D(Node);

				printk(BIOS_DEBUG, "%s: mct_InitialMCT_D\n", __func__);
				mct_InitialMCT_D(pMCTstat, pDCTstat);

				printk(BIOS_DEBUG, "%s: mctSMBhub_Init\n", __func__);
				mctSMBhub_Init(Node);	/* Switch SMBUS crossbar to proper node*/

				printk(BIOS_DEBUG, "%s: mct_preInitDCT\n", __func__);
				mct_preInitDCT(pMCTstat, pDCTstat);
			}
			node_sys_base = pDCTstat->NodeSysBase;
			node_sys_base += (pDCTstat->NodeSysLimit + 2) & ~0x0F;
		}

#if IS_ENABLED(DIMM_VOLTAGE_SET_SUPPORT)
		printk(BIOS_DEBUG, "%s: DIMMSetVoltage\n", __func__);
		DIMMSetVoltages(pMCTstat, pDCTstatA);	/* Set the DIMM voltages (mainboard specific) */
#endif

		for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
			struct DCTStatStruc *pDCTstat;
			pDCTstat = pDCTstatA + Node;

			if (pDCTstat->NodePresent) {
				printk(BIOS_DEBUG, "%s: mctSMBhub_Init\n", __func__);
				mctSMBhub_Init(Node);	/* Switch SMBUS crossbar to proper node*/

				printk(BIOS_DEBUG, "%s: mct_initDCT\n", __func__);
				mct_initDCT(pMCTstat, pDCTstat);
				if (pDCTstat->ErrCode == SC_FatalErr) {
					goto fatalexit;		/* any fatal errors?*/
				} else if (pDCTstat->ErrCode < SC_StopError) {
					NodesWmem++;
				}
			}
		}
		if (NodesWmem == 0) {
			printk(BIOS_DEBUG, "No Nodes?!\n");
			goto fatalexit;
		}

		printk(BIOS_DEBUG, "mctAutoInitMCT_D: SyncDCTsReady_D\n");
		SyncDCTsReady_D(pMCTstat, pDCTstatA);	/* Make sure DCTs are ready for accesses.*/

		printk(BIOS_DEBUG, "mctAutoInitMCT_D: HTMemMapInit_D\n");
		HTMemMapInit_D(pMCTstat, pDCTstatA);	/* Map local memory into system address space.*/
		mctHookAfterHTMap();

		printk(BIOS_DEBUG, "mctAutoInitMCT_D: CPUMemTyping_D\n");
		CPUMemTyping_D(pMCTstat, pDCTstatA);	/* Map dram into WB/UC CPU cacheability */
		mctHookAfterCPU();			/* Setup external northbridge(s) */

		printk(BIOS_DEBUG, "mctAutoInitMCT_D: DQSTiming_D\n");
		DQSTiming_D(pMCTstat, pDCTstatA);	/* Get Receiver Enable and DQS signal timing*/

		printk(BIOS_DEBUG, "mctAutoInitMCT_D: UMAMemTyping_D\n");
		UMAMemTyping_D(pMCTstat, pDCTstatA);	/* Fix up for UMA sizing */

		printk(BIOS_DEBUG, "mctAutoInitMCT_D: mct_OtherTiming\n");
		mct_OtherTiming(pMCTstat, pDCTstatA);

		if (ReconfigureDIMMspare_D(pMCTstat, pDCTstatA)) { /* RESET# if 1st pass of DIMM spare enabled*/
			goto restartinit;
		}

		InterleaveNodes_D(pMCTstat, pDCTstatA);
		InterleaveChannels_D(pMCTstat, pDCTstatA);

		printk(BIOS_DEBUG, "mctAutoInitMCT_D: ECCInit_D\n");
		if (ECCInit_D(pMCTstat, pDCTstatA)) {		/* Setup ECC control and ECC check-bits*/
			printk(BIOS_DEBUG, "mctAutoInitMCT_D: MCTMemClr_D\n");
			MCTMemClr_D(pMCTstat,pDCTstatA);
		}

		printk(BIOS_DEBUG, "mctAutoInitMCT_D: mct_ForceNBPState0_Dis_Fam15\n");
		for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
			struct DCTStatStruc *pDCTstat;
			pDCTstat = pDCTstatA + Node;

			mct_ForceNBPState0_Dis_Fam15(pMCTstat, pDCTstat);
		}

		mct_FinalMCT_D(pMCTstat, pDCTstatA);
		printk(BIOS_DEBUG, "mctAutoInitMCT_D Done: Global Status: %x\n", pMCTstat->GStatus);
	}

	return;

fatalexit:
	die("mct_d: fatalexit");
}

static u8 ReconfigureDIMMspare_D(struct MCTStatStruc *pMCTstat,
				struct DCTStatStruc *pDCTstatA)
{
	u8 ret;

	if (mctGet_NVbits(NV_CS_SpareCTL)) {
		if (MCT_DIMM_SPARE_NO_WARM) {
			/* Do not warm-reset DIMM spare */
			if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
				LoadDQSSigTmgRegs_D(pMCTstat, pDCTstatA);
				ret = 0;
			} else {
				mct_ResetDataStruct_D(pMCTstat, pDCTstatA);
				pMCTstat->GStatus |= 1 << GSB_EnDIMMSpareNW;
				ret = 1;
			}
		} else {
			/* Do warm-reset DIMM spare */
			if (mctGet_NVbits(NV_DQSTrainCTL))
				mctWarmReset_D();
			ret = 0;
		}
	} else {
		ret = 0;
	}

	return ret;
}

/* Enable or disable phy-assisted training mode
 * Phy-assisted training mode applies to the following DRAM training procedures:
 * Write Levelization Training (2.10.5.8.1)
 * DQS Receiver Enable Training (2.10.5.8.2)
 */
1405static void fam15EnableTrainingMode(struct MCTStatStruc *pMCTstat,
1406 struct DCTStatStruc *pDCTstat, uint8_t dct, uint8_t enable)
1407{
1408 uint8_t index;
1409 uint32_t dword;
1410 uint32_t index_reg = 0x98;
1411 uint32_t dev = pDCTstat->dev_dct;
1412
1413 if (enable) {
1414 /* Enable training mode */
1415 dword = Get_NB32_DCT(dev, dct, 0x78); /* DRAM Control */
1416 dword &= ~(0x1 << 17); /* AddrCmdTriEn = 0 */
1417 Set_NB32_DCT(dev, dct, 0x78, dword); /* DRAM Control */
1418
1419 dword = Get_NB32_DCT(dev, dct, 0x8c); /* DRAM Timing High */
1420 dword |= (0x1 << 18); /* DisAutoRefresh = 1 */
1421 Set_NB32_DCT(dev, dct, 0x8c, dword); /* DRAM Timing High */
1422
1423 dword = Get_NB32_DCT(dev, dct, 0x94); /* DRAM Configuration High */
1424 dword &= ~(0xf << 24); /* DcqBypassMax = 0 */
1425 dword &= ~(0x1 << 22); /* BankSwizzleMode = 0 */
1426 dword &= ~(0x1 << 15); /* PowerDownEn = 0 */
1427 dword &= ~(0x3 << 10); /* ZqcsInterval = 0 */
1428 Set_NB32_DCT(dev, dct, 0x94, dword); /* DRAM Configuration High */
1429
1430 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0000000d);
1431 dword &= ~(0xf << 16); /* RxMaxDurDllNoLock = 0 */
1432 dword &= ~(0xf); /* TxMaxDurDllNoLock = 0 */
1433 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0000000d, dword);
1434
1435 for (index = 0; index < 0x9; index++) {
1436 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f0010 | (index << 8));
1437 dword &= ~(0x1 << 12); /* EnRxPadStandby = 0 */
1438 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f0010 | (index << 8), dword);
1439 }
1440
1441 dword = Get_NB32_DCT(dev, dct, 0xa4); /* DRAM Controller Temperature Throttle */
1442 dword &= ~(0x1 << 11); /* BwCapEn = 0 */
1443 dword &= ~(0x1 << 8); /* ODTSEn = 0 */
1444 Set_NB32_DCT(dev, dct, 0xa4, dword); /* DRAM Controller Temperature Throttle */
1445
1446 dword = Get_NB32_DCT(dev, dct, 0x110); /* DRAM Controller Select Low */
1447 dword &= ~(0x1 << 2); /* DctSelIntLvEn = 0 */
1448 Set_NB32_DCT(dev, dct, 0x110, dword); /* DRAM Controller Select Low */
1449
1450 dword = Get_NB32_DCT(pDCTstat->dev_nbmisc, dct, 0x58); /* Scrub Rate Control */
1451 dword &= ~(0x1f << 24); /* L3Scrub = 0 */
1452 dword &= ~(0x1f); /* DramScrub = 0 */
1453 Set_NB32_DCT(pDCTstat->dev_nbmisc, dct, 0x58, dword); /* Scrub Rate Control */
1454
1455 dword = Get_NB32_DCT(pDCTstat->dev_nbmisc, dct, 0x5c); /* DRAM Scrub Address Low */
1456 dword &= ~(0x1); /* ScrubReDirEn = 0 */
1457 Set_NB32_DCT(pDCTstat->dev_nbmisc, dct, 0x5c, dword); /* DRAM Scrub Address Low */
1458
1459 dword = Get_NB32_DCT(pDCTstat->dev_nbmisc, dct, 0x1b8); /* L3 Control 1 */
1460 dword |= (0x1 << 4); /* L3ScrbRedirDis = 1 */
1461 Set_NB32_DCT(pDCTstat->dev_nbmisc, dct, 0x1b8, dword); /* L3 Control 1 */
1462
1463 /* Fam15h BKDG section 2.10.5.5.1 */
1464 dword = Get_NB32_DCT(dev, dct, 0x218); /* DRAM Timing 5 */
1465 dword &= ~(0xf << 24); /* TrdrdSdSc = 0xb */
1466 dword |= (0xb << 24);
1467 dword &= ~(0xf << 16); /* TrdrdSdDc = 0xb */
1468 dword |= (0xb << 16);
1469 dword &= ~(0xf); /* TrdrdDd = 0xb */
1470 dword |= 0xb;
1471 Set_NB32_DCT(dev, dct, 0x218, dword); /* DRAM Timing 5 */
1472
1473 /* Fam15h BKDG section 2.10.5.5.2 */
1474 dword = Get_NB32_DCT(dev, dct, 0x214); /* DRAM Timing 4 */
1475 dword &= ~(0xf << 16); /* TwrwrSdSc = 0xb */
1476 dword |= (0xb << 16);
1477 dword &= ~(0xf << 8); /* TwrwrSdDc = 0xb */
1478 dword |= (0xb << 8);
1479 dword &= ~(0xf); /* TwrwrDd = 0xb */
1480 dword |= 0xb;
1481 Set_NB32_DCT(dev, dct, 0x214, dword); /* DRAM Timing 4 */
1482
1483 /* Fam15h BKDG section 2.10.5.5.3 */
1484 dword = Get_NB32_DCT(dev, dct, 0x218); /* DRAM Timing 5 */
1485 dword &= ~(0xf << 8); /* Twrrd = 0xb */
1486 dword |= (0xb << 8);
1487 Set_NB32_DCT(dev, dct, 0x218, dword); /* DRAM Timing 5 */
1488
1489 /* Fam15h BKDG section 2.10.5.5.4 */
1490 dword = Get_NB32_DCT(dev, dct, 0x21c); /* DRAM Timing 6 */
1491 dword &= ~(0x1f << 8); /* TrwtTO = 0x16 */
1492 dword |= (0x16 << 8);
1493 dword &= ~(0x1f << 16); /* TrwtWB = TrwtTO + 1 */
1494 dword |= ((((dword >> 8) & 0x1f) + 1) << 16);
1495 Set_NB32_DCT(dev, dct, 0x21c, dword); /* DRAM Timing 6 */
1496 } else {
1497 /* Disable training mode */
1498 uint8_t lane;
1499 uint8_t dimm;
1500 uint8_t receiver;
1501 uint8_t max_lane;
1502 uint8_t ecc_enabled;
1503 uint8_t x4_present = 0;
1504 uint8_t x8_present = 0;
1505 uint8_t memclk_index;
1506 uint8_t interleave_channels = 0;
1507 uint8_t redirect_ecc_scrub = 0;
1508 uint16_t trdrdsddc;
1509 uint16_t trdrddd;
1510 uint16_t cdd_trdrddd;
1511 uint16_t twrwrsddc;
1512 uint16_t twrwrdd;
1513 uint16_t cdd_twrwrdd;
1514 uint16_t twrrd;
1515 uint16_t trwtto;
1516 uint8_t first_dimm;
1517 uint16_t delay;
1518 uint16_t delay2;
1519 uint8_t read_odt_delay;
1520 uint8_t write_odt_delay;
1521 uint16_t difference;
1522 uint16_t current_total_delay_1[MAX_BYTE_LANES];
1523 uint16_t current_total_delay_2[MAX_BYTE_LANES];
1524
1525 /* FIXME
1526 * This should be platform configurable
1527 */
1528 uint8_t dimm_event_l_pin_support = 0;
1529
1530 ecc_enabled = !!(pMCTstat->GStatus & 1 << GSB_ECCDIMMs);
1531 if (ecc_enabled)
1532 max_lane = 9;
1533 else
1534 max_lane = 8;
1535
1536 if (pDCTstat->Dimmx4Present & ((dct)?0xaa:0x55))
1537 x4_present = 1;
1538 if (pDCTstat->Dimmx8Present & ((dct)?0xaa:0x55))
1539 x8_present = 1;
1540 memclk_index = Get_NB32_DCT(dev, dct, 0x94) & 0x1f;
1541
1542 if (pDCTstat->DIMMValidDCT[0] && pDCTstat->DIMMValidDCT[1] && mctGet_NVbits(NV_Unganged))
1543 interleave_channels = 1;
1544
1545 if ((pMCTstat->GStatus & 1 << GSB_ECCDIMMs) && mctGet_NVbits(NV_ECCRedir))
1546 redirect_ecc_scrub = 1;
1547
1554 dword = Get_NB32_DCT(dev, dct, 0x240);
1555 delay = (dword >> 4) & 0xf;
1556 if (delay > 6)
1557 read_odt_delay = delay - 6;
1558 else
1559 read_odt_delay = 0;
1560 delay = (dword >> 12) & 0x7;
1561 if (delay > 6)
1562 write_odt_delay = delay - 6;
1563 else
1564 write_odt_delay = 0;
1565
1566 /* TODO:
1567 * Adjust trdrdsddc if four-rank DIMMs are installed per
1568 * section 2.10.5.5.1 of the Family 15h BKDG.
1569 * cdd_trdrdsddc will also need to be calculated in that process.
1570 */
1571 trdrdsddc = 3;
1572
1573 /* Calculate the Critical Delay Difference for TrdrdDd */
1574 cdd_trdrddd = 0;
1575 first_dimm = 1;
1576 for (receiver = 0; receiver < 8; receiver += 2) {
1577 dimm = (receiver >> 1);
1578
1579 if (!mct_RcvrRankEnabled_D(pMCTstat, pDCTstat, dct, receiver))
1580 continue;
1581
1582 read_dqs_receiver_enable_control_registers(current_total_delay_2, dev, dct, dimm, index_reg);
1583
1584 if (first_dimm) {
1585 memcpy(current_total_delay_1, current_total_delay_2, sizeof(current_total_delay_1));
1586 first_dimm = 0;
1587 }
1588
1589 for (lane = 0; lane < max_lane; lane++) {
1590 if (current_total_delay_1[lane] > current_total_delay_2[lane])
1591 difference = current_total_delay_1[lane] - current_total_delay_2[lane];
1592 else
1593 difference = current_total_delay_2[lane] - current_total_delay_1[lane];
1594
1595 if (difference > cdd_trdrddd)
1596 cdd_trdrddd = difference;
1597 }
1598 }
1599
1600 /* Convert the difference to MEMCLKs */
1601 cdd_trdrddd = (((cdd_trdrddd >> 5) & 0x1f) + 1) / 2;
1602
1603 /* Calculate Trdrddd */
1604 delay = (read_odt_delay + 3) * 2;
1605 delay2 = cdd_trdrddd + 7;
1606 if (delay2 > delay)
1607 delay = delay2;
1608 trdrddd = (delay + 1) / 2; /* + 1 is equivalent to ceiling function here */
1609 if (trdrdsddc > trdrddd)
1610 trdrddd = trdrdsddc;
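		/* Worked example with illustrative numbers (not measured values):
		 * if the largest lane-to-lane receiver enable difference above is
		 * 0x48 in raw register units, the conversion gives
		 * ((0x48 >> 5) + 1) / 2 = 1.  With read_odt_delay = 1 this yields
		 * delay = (1 + 3) * 2 = 8 and delay2 = 1 + 7 = 8, so
		 * trdrddd = (8 + 1) / 2 = 4; since trdrdsddc = 3 does not exceed it,
		 * trdrddd is used unchanged.
		 */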
1611
1612 /* TODO:
1613 * Adjust twrwrsddc if four-rank DIMMs are installed per
1614 * section 2.10.5.5.1 of the Family 15h BKDG.
1615 * cdd_twrwrsddc will also need to be calculated in that process.
1616 */
1617 twrwrsddc = 4;
1618
1619 /* Calculate the Critical Delay Difference for TwrwrDd */
1620 cdd_twrwrdd = 0;
1621 first_dimm = 1;
1622 for (receiver = 0; receiver < 8; receiver += 2) {
1623 dimm = (receiver >> 1);
1624
1625 if (!mct_RcvrRankEnabled_D(pMCTstat, pDCTstat, dct, receiver))
1626 continue;
1627
1628 read_dqs_write_timing_control_registers(current_total_delay_2, dev, dct, dimm, index_reg);
1629
1630 if (first_dimm) {
1631 memcpy(current_total_delay_1, current_total_delay_2, sizeof(current_total_delay_1));
1632 first_dimm = 0;
1633 }
1634
1635 for (lane = 0; lane < max_lane; lane++) {
1636 if (current_total_delay_1[lane] > current_total_delay_2[lane])
1637 difference = current_total_delay_1[lane] - current_total_delay_2[lane];
1638 else
1639 difference = current_total_delay_2[lane] - current_total_delay_1[lane];
1640
1641 if (difference > cdd_twrwrdd)
1642 cdd_twrwrdd = difference;
1643 }
1644 }
1645
1646 /* Convert the difference to MEMCLKs */
1647 cdd_twrwrdd = (((cdd_twrwrdd >> 5) & 0x1f) + 1) / 2;
1648
1649 /* Calculate Twrwrdd */
1650 delay = (write_odt_delay + 3) * 2;
1651 delay2 = cdd_twrwrdd + 7;
1652 if (delay2 > delay)
1653 delay = delay2;
1654 twrwrdd = (delay + 1) / 2; /* + 1 is equivalent to ceiling function here */
1655 if (twrwrsddc > twrwrdd)
1656 twrwrdd = twrwrsddc;
1657
1658 dword = Get_NB32_DCT(dev, dct, 0x78); /* DRAM Control */
1659 dword |= (0x1 << 17); /* AddrCmdTriEn = 1 */
1660 Set_NB32_DCT(dev, dct, 0x78, dword); /* DRAM Control */
1661
1662 dword = Get_NB32_DCT(dev, dct, 0x8c); /* DRAM Timing High */
1663 dword &= ~(0x1 << 18); /* DisAutoRefresh = 0 */
1664 Set_NB32_DCT(dev, dct, 0x8c, dword); /* DRAM Timing High */
1665
1666 dword = Get_NB32_DCT(dev, dct, 0x94); /* DRAM Configuration High */
1667 dword |= (0xf << 24); /* DcqBypassMax = 0xf */
1668 dword |= (0x1 << 22); /* BankSwizzleMode = 1 */
1669 dword |= (0x1 << 15); /* PowerDownEn = 1 */
1670 dword &= ~(0x3 << 10); /* ZqcsInterval = 0x2 */
1671 dword |= (0x2 << 10);
1672 Set_NB32_DCT(dev, dct, 0x94, dword); /* DRAM Configuration High */
1673
1674 if (x4_present && x8_present) {
1675 /* Mixed channel of 4x and 8x DIMMs */
1676 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0000000d);
1677 dword &= ~(0x3 << 24); /* RxDLLWakeupTime = 0 */
1678 dword &= ~(0x7 << 20); /* RxCPUpdPeriod = 0 */
1679 dword &= ~(0xf << 16); /* RxMaxDurDllNoLock = 0 */
1680 dword &= ~(0x3 << 8); /* TxDLLWakeupTime = 0 */
1681 dword &= ~(0x7 << 4); /* TxCPUpdPeriod = 0 */
1682 dword &= ~(0xf); /* TxMaxDurDllNoLock = 0 */
1683 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0000000d, dword);
1684 } else {
1685 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0000000d);
1686 dword &= ~(0x3 << 24); /* RxDLLWakeupTime = 3 */
1687 dword |= (0x3 << 24);
1688 dword &= ~(0x7 << 20); /* RxCPUpdPeriod = 3 */
1689 dword |= (0x3 << 20);
1690 dword &= ~(0xf << 16); /* RxMaxDurDllNoLock = 7 */
1691 dword |= (0x7 << 16);
1692 dword &= ~(0x3 << 8); /* TxDLLWakeupTime = 3 */
1693 dword |= (0x3 << 8);
1694 dword &= ~(0x7 << 4); /* TxCPUpdPeriod = 3 */
1695 dword |= (0x3 << 4);
1696 dword &= ~(0xf); /* TxMaxDurDllNoLock = 7 */
1697 dword |= 0x7;
1698 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0000000d, dword);
1699 }
1700
1701 if ((memclk_index <= 0x12) && (x4_present != x8_present)) {
1702 /* MemClkFreq <= 800MHz
1703 * Not a mixed channel of x4 and x8 DIMMs
1704 */
1705 for (index = 0; index < 0x9; index++) {
1706 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f0010 | (index << 8));
1707 dword |= (0x1 << 12); /* EnRxPadStandby = 1 */
1708 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f0010 | (index << 8), dword);
1709 }
1710 } else {
1711 for (index = 0; index < 0x9; index++) {
1712 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f0010 | (index << 8));
1713 dword &= ~(0x1 << 12); /* EnRxPadStandby = 0 */
1714 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f0010 | (index << 8), dword);
1715 }
1716 }
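		/* Note: memclk_index is the low five bits of DRAM Configuration High
		 * (0x94) read earlier; 0x12 is the same encoding this file later
		 * assigns to TargetFreq for a MEMCLK of 800MHz, so receiver pad
		 * standby is only left enabled at 800MHz and below and, per the
		 * condition above, only when the channel does not mix x4 and x8 DIMMs.
		 */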
1717
1718 /* TODO
1719 * Calculate Twrrd per section 2.10.5.5.3 of the Family 15h BKDG
1720 */
1721 twrrd = 0xb;
1722
1723 /* TODO
1724 * Calculate TrwtTO per section 2.10.5.5.4 of the Family 15h BKDG
1725 */
1726 trwtto = 0x16;
1727
1728 dword = Get_NB32_DCT(dev, dct, 0xa4); /* DRAM Controller Temperature Throttle */
1729 dword &= ~(0x1 << 11); /* BwCapEn = 0 */
1730 dword &= ~(0x1 << 8); /* ODTSEn = dimm_event_l_pin_support */
1731 dword |= (dimm_event_l_pin_support & 0x1) << 8;
1732 Set_NB32_DCT(dev, dct, 0xa4, dword); /* DRAM Controller Temperature Throttle */
1733
1734 dword = Get_NB32_DCT(dev, dct, 0x110); /* DRAM Controller Select Low */
1735 dword &= ~(0x1 << 2); /* DctSelIntLvEn = interleave_channels */
1736 dword |= (interleave_channels & 0x1) << 2;
1737 Set_NB32_DCT(dev, dct, 0x110, dword); /* DRAM Controller Select Low */
1738
1739 dword = Get_NB32_DCT(pDCTstat->dev_nbmisc, dct, 0x58); /* Scrub Rate Control */
1740 dword &= ~(0x1f << 24); /* L3Scrub = NV_L3BKScrub */
1741 dword |= (mctGet_NVbits(NV_L3BKScrub) & 0x1f) << 24;
1742 dword &= ~(0x1f); /* DramScrub = NV_DramBKScrub */
1743 dword |= mctGet_NVbits(NV_DramBKScrub) & 0x1f;
1744 Set_NB32_DCT(pDCTstat->dev_nbmisc, dct, 0x58, dword); /* Scrub Rate Control */
1745
1746 dword = Get_NB32_DCT(pDCTstat->dev_nbmisc, dct, 0x5c); /* DRAM Scrub Address Low */
1747 dword &= ~(0x1); /* ScrubReDirEn = redirect_ecc_scrub */
1748 dword |= redirect_ecc_scrub & 0x1;
1749 Set_NB32_DCT(pDCTstat->dev_nbmisc, dct, 0x5c, dword); /* DRAM Scrub Address Low */
1750
1751 dword = Get_NB32_DCT(pDCTstat->dev_nbmisc, dct, 0x1b8); /* L3 Control 1 */
1752 dword &= ~(0x1 << 4); /* L3ScrbRedirDis = 0 */
1753 Set_NB32_DCT(pDCTstat->dev_nbmisc, dct, 0x1b8, dword); /* L3 Control 1 */
1754
1755 /* FIXME
1756 * The BKDG-recommended settings cause memory corruption on the ASUS KGPE-D16.
1757 * Investigate and fix...
1758 */
1759#if 0
1760 /* Fam15h BKDG section 2.10.5.5.1 */
1761 dword = Get_NB32_DCT(dev, dct, 0x218); /* DRAM Timing 5 */
1762 dword &= ~(0xf << 24); /* TrdrdSdSc = 0x1 */
1763 dword |= (0x1 << 24);
1764 dword &= ~(0xf << 16); /* TrdrdSdDc = trdrdsddc */
1765 dword |= ((trdrdsddc & 0xf) << 16);
1766 dword &= ~(0xf); /* TrdrdDd = trdrddd */
1767 dword |= (trdrddd & 0xf);
1768 Set_NB32_DCT(dev, dct, 0x218, dword); /* DRAM Timing 5 */
1769#endif
1770
1771 /* Fam15h BKDG section 2.10.5.5.2 */
1772 dword = Get_NB32_DCT(dev, dct, 0x214); /* DRAM Timing 4 */
1773 dword &= ~(0xf << 16); /* TwrwrSdSc = 0x1 */
1774 dword |= (0x1 << 16);
1775 dword &= ~(0xf << 8); /* TwrwrSdDc = twrwrsddc */
1776 dword |= ((twrwrsddc & 0xf) << 8);
1777 dword &= ~(0xf); /* TwrwrDd = twrwrdd */
1778 dword |= (twrwrdd & 0xf);
1779 Set_NB32_DCT(dev, dct, 0x214, dword); /* DRAM Timing 4 */
1780
1781 /* Fam15h BKDG section 2.10.5.5.3 */
1782 dword = Get_NB32_DCT(dev, dct, 0x218); /* DRAM Timing 5 */
1783 dword &= ~(0xf << 8); /* Twrrd = twrrd */
1784 dword |= ((twrrd & 0xf) << 8);
1785 Set_NB32_DCT(dev, dct, 0x218, dword); /* DRAM Timing 5 */
1786
1787 /* Fam15h BKDG section 2.10.5.5.4 */
1788 dword = Get_NB32_DCT(dev, dct, 0x21c); /* DRAM Timing 6 */
1789 dword &= ~(0x1f << 8); /* TrwtTO = trwtto */
1790 dword |= ((trwtto & 0x1f) << 8);
1791 dword &= ~(0x1f << 16); /* TrwtWB = TrwtTO + 1 */
1792 dword |= ((((dword >> 8) & 0x1f) + 1) << 16);
1793 Set_NB32_DCT(dev, dct, 0x21c, dword); /* DRAM Timing 6 */
1794
1795 /* Enable prefetchers */
1796 dword = Get_NB32_DCT(dev, dct, 0x110); /* Memory Controller Configuration High */
1797 dword &= ~(0x1 << 13); /* PrefIoDis = 0 */
1798 dword &= ~(0x1 << 12); /* PrefCpuDis = 0 */
1799 Set_NB32_DCT(dev, dct, 0x110, dword); /* Memory Controller Configuration High */
1800 }
1801}
1802
1803static void exit_training_mode_fam15(struct MCTStatStruc *pMCTstat,
1804 struct DCTStatStruc *pDCTstatA)
1805{
1806 uint8_t node;
1807 uint8_t dct;
1808
1809 for (node = 0; node < MAX_NODES_SUPPORTED; node++) {
1810 struct DCTStatStruc *pDCTstat;
1811 pDCTstat = pDCTstatA + node;
1812
1813 if (pDCTstat->NodePresent)
1814 for (dct = 0; dct < 2; dct++)
1815 fam15EnableTrainingMode(pMCTstat, pDCTstat, dct, 0);
1816 }
1817}
1818
Zheng Baoeb75f652010-04-23 17:32:48 +00001819static void DQSTiming_D(struct MCTStatStruc *pMCTstat,
1820 struct DCTStatStruc *pDCTstatA)
1821{
1822 u8 nv_DQSTrainCTL;
1823
1824 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
1825 return;
1826 }
1827
1828 nv_DQSTrainCTL = mctGet_NVbits(NV_DQSTrainCTL);
1829	/* FIXME: BOZO - DQS training every time */
1830 nv_DQSTrainCTL = 1;
1831
1832 mct_BeforeDQSTrain_D(pMCTstat, pDCTstatA);
1833 phyAssistedMemFnceTraining(pMCTstat, pDCTstatA);
1834
Timothy Pearson730a0432015-10-16 13:51:51 -05001835 if (is_fam15h()) {
1836 uint8_t Node;
1837 struct DCTStatStruc *pDCTstat;
1838 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
1839 pDCTstat = pDCTstatA + Node;
1840 if (pDCTstat->NodePresent) {
1841 if (pDCTstat->DIMMValidDCT[0])
1842 InitPhyCompensation(pMCTstat, pDCTstat, 0);
1843 if (pDCTstat->DIMMValidDCT[1])
1844 InitPhyCompensation(pMCTstat, pDCTstat, 1);
1845 }
1846 }
1847 }
1848
Zheng Baoeb75f652010-04-23 17:32:48 +00001849 if (nv_DQSTrainCTL) {
1850 mctHookBeforeAnyTraining(pMCTstat, pDCTstatA);
1851 /* TODO: should be in mctHookBeforeAnyTraining */
1852 _WRMSR(0x26C, 0x04040404, 0x04040404);
1853 _WRMSR(0x26D, 0x04040404, 0x04040404);
1854 _WRMSR(0x26E, 0x04040404, 0x04040404);
1855 _WRMSR(0x26F, 0x04040404, 0x04040404);
Timothy Pearson730a0432015-10-16 13:51:51 -05001856 mct_WriteLevelization_HW(pMCTstat, pDCTstatA, FirstPass);
Zheng Baoeb75f652010-04-23 17:32:48 +00001857
Timothy Pearson730a0432015-10-16 13:51:51 -05001858 if (is_fam15h()) {
1859 /* Receiver Enable Training Pass 1 */
1860 TrainReceiverEn_D(pMCTstat, pDCTstatA, FirstPass);
1861 }
1862
1863 mct_WriteLevelization_HW(pMCTstat, pDCTstatA, SecondPass);
1864
1865 if (is_fam15h()) {
1866 /* Receiver Enable Training Pass 2 */
1867 // TrainReceiverEn_D(pMCTstat, pDCTstatA, SecondPass);
1868
1869 /* TODO:
1870 * Determine why running TrainReceiverEn_D in SecondPass
1871 * mode yields less stable training values than when run
1872 * in FirstPass mode as in the HACK below.
1873 */
1874 TrainReceiverEn_D(pMCTstat, pDCTstatA, FirstPass);
1875 } else {
1876 TrainReceiverEn_D(pMCTstat, pDCTstatA, FirstPass);
1877 }
Zheng Baoeb75f652010-04-23 17:32:48 +00001878
1879 mct_TrainDQSPos_D(pMCTstat, pDCTstatA);
1880
Timothy Pearson730a0432015-10-16 13:51:51 -05001881 if (is_fam15h())
1882 exit_training_mode_fam15(pMCTstat, pDCTstatA);
1883 else
1884 mctSetEccDQSRcvrEn_D(pMCTstat, pDCTstatA);
Zheng Baoeb75f652010-04-23 17:32:48 +00001885
1886 /* FIXME - currently uses calculated value TrainMaxReadLatency_D(pMCTstat, pDCTstatA); */
1887 mctHookAfterAnyTraining();
1888 mctSaveDQSSigTmg_D();
1889
1890 MCTMemClr_D(pMCTstat, pDCTstatA);
1891 } else {
1892 mctGetDQSSigTmg_D(); /* get values into data structure */
1893 LoadDQSSigTmgRegs_D(pMCTstat, pDCTstatA); /* load values into registers.*/
1894 /* mctDoWarmResetMemClr_D(); */
1895 MCTMemClr_D(pMCTstat, pDCTstatA);
1896 }
1897}
1898
1899static void LoadDQSSigTmgRegs_D(struct MCTStatStruc *pMCTstat,
1900 struct DCTStatStruc *pDCTstatA)
1901{
1902 u8 Node, Receiver, Channel, Dir, DIMM;
1903 u32 dev;
1904 u32 index_reg;
1905 u32 reg;
1906 u32 index;
1907 u32 val;
1908 u8 ByteLane;
1909 u8 txdqs;
1910
1911 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
1912 struct DCTStatStruc *pDCTstat;
1913 pDCTstat = pDCTstatA + Node;
1914
1915 if (pDCTstat->DCTSysLimit) {
1916 dev = pDCTstat->dev_dct;
1917 for (Channel = 0;Channel < 2; Channel++) {
1918 /* there are four receiver pairs,
1919 loosely associated with chipselects.*/
Timothy Pearson730a0432015-10-16 13:51:51 -05001920 index_reg = 0x98;
Zheng Baoeb75f652010-04-23 17:32:48 +00001921 for (Receiver = 0; Receiver < 8; Receiver += 2) {
1922 /* Set Receiver Enable Values */
1923 mct_SetRcvrEnDly_D(pDCTstat,
1924 0, /* RcvrEnDly */
1925 1, /* FinalValue, From stack */
1926 Channel,
1927 Receiver,
1928 dev, index_reg,
1929 (Receiver >> 1) * 3 + 0x10, /* Addl_Index */
1930 2); /* Pass Second Pass ? */
1931 /* Restore Write levelization training data */
1932 for (ByteLane = 0; ByteLane < 9; ByteLane ++) {
1933 txdqs = pDCTstat->CH_D_B_TxDqs[Channel][Receiver >> 1][ByteLane];
1934 index = Table_DQSRcvEn_Offset[ByteLane >> 1];
1935 index += (Receiver >> 1) * 3 + 0x10 + 0x20; /* Addl_Index */
Timothy Pearson730a0432015-10-16 13:51:51 -05001936 val = Get_NB32_index_wait_DCT(dev, Channel, 0x98, index);
Zheng Baoeb75f652010-04-23 17:32:48 +00001937 if (ByteLane & 1) { /* odd byte lane */
1938 val &= ~(0xFF << 16);
1939 val |= txdqs << 16;
1940 } else {
1941 val &= ~0xFF;
1942 val |= txdqs;
1943 }
Timothy Pearson730a0432015-10-16 13:51:51 -05001944 Set_NB32_index_wait_DCT(dev, Channel, 0x98, index, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00001945 }
1946 }
1947 }
1948 for (Channel = 0; Channel<2; Channel++) {
1949 SetEccDQSRcvrEn_D(pDCTstat, Channel);
1950 }
1951
1952 for (Channel = 0; Channel < 2; Channel++) {
1953 u8 *p;
Timothy Pearson730a0432015-10-16 13:51:51 -05001954 index_reg = 0x98;
Zheng Baoeb75f652010-04-23 17:32:48 +00001955
1956 /* NOTE:
1957				 * At 400, 533 and 667MHz all of DIMM0/1/2/3 are supported:
1958				 * configuration set for DIMM0 is copied by hw to DIMM1/2/3,
1959				 * and configuration set for DIMM1 is copied to DIMM3.
Elyes HAOUAS0f92f632014-07-27 19:37:31 +02001960 * Rev A/B only support DIMM0/1 when 800MHz and above
Zheng Baoeb75f652010-04-23 17:32:48 +00001961 * + 0x100 to next dimm
Elyes HAOUAS0f92f632014-07-27 19:37:31 +02001962 * Rev C support DIMM0/1/2/3 when 800MHz and above
Zheng Baoeb75f652010-04-23 17:32:48 +00001963 * + 0x100 to next dimm
1964 */
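				/* Resulting index layout, e.g. for DIMM 1 (when its speed
				 * check below passes): the first Dir pass programs indices
				 * 0x101-0x103 and the second pass programs 0x105-0x107;
				 * each DIMM occupies a 0x100 block and each direction
				 * advances the index by 4.
				 */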
1965 for (DIMM = 0; DIMM < 4; DIMM++) {
1966 if (DIMM == 0) {
1967 index = 0; /* CHA Write Data Timing Low */
1968 } else {
Timothy Pearson730a0432015-10-16 13:51:51 -05001969 if (pDCTstat->Speed >= mhz_to_memclk_config(mctGet_NVbits(NV_MIN_MEMCLK))) {
Zheng Baoeb75f652010-04-23 17:32:48 +00001970 index = 0x100 * DIMM;
1971 } else {
1972 break;
1973 }
1974 }
1975 for (Dir = 0; Dir < 2; Dir++) {/* RD/WR */
1976 p = pDCTstat->CH_D_DIR_B_DQS[Channel][DIMM][Dir];
1977 val = stream_to_int(p); /* CHA Read Data Timing High */
Timothy Pearson730a0432015-10-16 13:51:51 -05001978 Set_NB32_index_wait_DCT(dev, Channel, index_reg, index+1, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00001979 val = stream_to_int(p+4); /* CHA Write Data Timing High */
Timothy Pearson730a0432015-10-16 13:51:51 -05001980 Set_NB32_index_wait_DCT(dev, Channel, index_reg, index+2, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00001981 val = *(p+8); /* CHA Write ECC Timing */
Timothy Pearson730a0432015-10-16 13:51:51 -05001982 Set_NB32_index_wait_DCT(dev, Channel, index_reg, index+3, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00001983 index += 4;
1984 }
1985 }
1986 }
1987
1988 for (Channel = 0; Channel<2; Channel++) {
Timothy Pearson730a0432015-10-16 13:51:51 -05001989 reg = 0x78;
1990 val = Get_NB32_DCT(dev, Channel, reg);
Zheng Baoeb75f652010-04-23 17:32:48 +00001991 val &= ~(0x3ff<<22);
1992 val |= ((u32) pDCTstat->CH_MaxRdLat[Channel] << 22);
1993 val &= ~(1<<DqsRcvEnTrain);
Timothy Pearson730a0432015-10-16 13:51:51 -05001994 Set_NB32_DCT(dev, Channel, reg, val); /* program MaxRdLatency to correspond with current delay*/
Zheng Baoeb75f652010-04-23 17:32:48 +00001995 }
1996 }
1997 }
1998}
1999
2000static void HTMemMapInit_D(struct MCTStatStruc *pMCTstat,
2001 struct DCTStatStruc *pDCTstatA)
2002{
2003 u8 Node;
2004 u32 NextBase, BottomIO;
2005 u8 _MemHoleRemap, DramHoleBase, DramHoleOffset;
2006 u32 HoleSize, DramSelBaseAddr;
2007
2008 u32 val;
2009 u32 base;
2010 u32 limit;
2011 u32 dev, devx;
2012 struct DCTStatStruc *pDCTstat;
2013
2014 _MemHoleRemap = mctGet_NVbits(NV_MemHole);
2015
2016 if (pMCTstat->HoleBase == 0) {
2017 DramHoleBase = mctGet_NVbits(NV_BottomIO);
2018 } else {
2019 DramHoleBase = pMCTstat->HoleBase >> (24-8);
2020 }
2021
2022 BottomIO = DramHoleBase << (24-8);
2023
2024 NextBase = 0;
2025 pDCTstat = pDCTstatA + 0;
2026 dev = pDCTstat->dev_map;
2027
2028 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
2029 pDCTstat = pDCTstatA + Node;
2030 devx = pDCTstat->dev_map;
2031 DramSelBaseAddr = 0;
Zheng Baoeb75f652010-04-23 17:32:48 +00002032 if (!pDCTstat->GangedMode) {
2033 DramSelBaseAddr = pDCTstat->NodeSysLimit - pDCTstat->DCTSysLimit;
2034 /*In unganged mode, we must add DCT0 and DCT1 to DCTSysLimit */
2035 val = pDCTstat->NodeSysLimit;
2036 if ((val & 0xFF) == 0xFE) {
2037 DramSelBaseAddr++;
2038 val++;
2039 }
2040 pDCTstat->DCTSysLimit = val;
2041 }
2042
2043 base = pDCTstat->DCTSysBase;
2044 limit = pDCTstat->DCTSysLimit;
2045 if (limit > base) {
2046 base += NextBase;
2047 limit += NextBase;
2048 DramSelBaseAddr += NextBase;
2049 printk(BIOS_DEBUG, " Node: %02x base: %02x limit: %02x BottomIO: %02x\n", Node, base, limit, BottomIO);
2050
2051 if (_MemHoleRemap) {
2052 if ((base < BottomIO) && (limit >= BottomIO)) {
2053 /* HW Dram Remap */
2054 pDCTstat->Status |= 1 << SB_HWHole;
2055 pMCTstat->GStatus |= 1 << GSB_HWHole;
2056 pDCTstat->DCTSysBase = base;
2057 pDCTstat->DCTSysLimit = limit;
2058 pDCTstat->DCTHoleBase = BottomIO;
2059 pMCTstat->HoleBase = BottomIO;
2060 HoleSize = _4GB_RJ8 - BottomIO; /* HoleSize[39:8] */
2061 if ((DramSelBaseAddr > 0) && (DramSelBaseAddr < BottomIO))
2062 base = DramSelBaseAddr;
2063 val = ((base + HoleSize) >> (24-8)) & 0xFF;
2064 DramHoleOffset = val;
2065 val <<= 8; /* shl 16, rol 24 */
2066 val |= DramHoleBase << 24;
2067 val |= 1 << DramHoleValid;
2068 Set_NB32(devx, 0xF0, val); /* Dram Hole Address Reg */
2069 pDCTstat->DCTSysLimit += HoleSize;
2070 base = pDCTstat->DCTSysBase;
2071 limit = pDCTstat->DCTSysLimit;
2072 } else if (base == BottomIO) {
2073 /* SW Node Hoist */
2074 pMCTstat->GStatus |= 1<<GSB_SpIntRemapHole;
2075 pDCTstat->Status |= 1<<SB_SWNodeHole;
2076 pMCTstat->GStatus |= 1<<GSB_SoftHole;
2077 pMCTstat->HoleBase = base;
2078 limit -= base;
2079 base = _4GB_RJ8;
2080 limit += base;
2081 pDCTstat->DCTSysBase = base;
2082 pDCTstat->DCTSysLimit = limit;
2083 } else {
2084 /* No Remapping. Normal Contiguous mapping */
2085 pDCTstat->DCTSysBase = base;
2086 pDCTstat->DCTSysLimit = limit;
2087 }
2088 } else {
2089 /*No Remapping. Normal Contiguous mapping*/
2090 pDCTstat->DCTSysBase = base;
2091 pDCTstat->DCTSysLimit = limit;
2092 }
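			/* Worked example of the hardware remap above, assuming the
			 * right-justified-by-8 address scale implied by _4GB_RJ8: with
			 * NV_BottomIO = 0xE0 (3.5GB), BottomIO = 0x00E00000 and
			 * HoleSize = 0x01000000 - 0x00E00000 = 0x00200000 (512MB). For a
			 * node based at 0 the Dram Hole Address Reg is written with
			 * 0xE0002000 | (1 << DramHoleValid) (hole base 0xE0, offset 0x20)
			 * and DCTSysLimit is extended by the 512MB hole.
			 */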
2093 base |= 3; /* set WE,RE fields*/
2094 pMCTstat->SysLimit = limit;
2095 }
2096 Set_NB32(dev, 0x40 + (Node << 3), base); /* [Node] + Dram Base 0 */
2097
2098 val = limit & 0xFFFF0000;
2099 val |= Node;
2100 Set_NB32(dev, 0x44 + (Node << 3), val); /* set DstNode */
2101
2102 printk(BIOS_DEBUG, " Node: %02x base: %02x limit: %02x \n", Node, base, limit);
2103 limit = pDCTstat->DCTSysLimit;
2104 if (limit) {
2105 NextBase = (limit & 0xFFFF0000) + 0x10000;
2106 }
2107 }
2108
2109 /* Copy dram map from Node 0 to Node 1-7 */
2110 for (Node = 1; Node < MAX_NODES_SUPPORTED; Node++) {
2111 u32 reg;
2112 pDCTstat = pDCTstatA + Node;
2113 devx = pDCTstat->dev_map;
2114
2115 if (pDCTstat->NodePresent) {
Timothy Pearsonb8a355d2015-09-05 17:55:58 -05002116 printk(BIOS_DEBUG, " Copy dram map from Node 0 to Node %02x \n", Node);
Zheng Baoeb75f652010-04-23 17:32:48 +00002117 reg = 0x40; /*Dram Base 0*/
2118 do {
2119 val = Get_NB32(dev, reg);
2120 Set_NB32(devx, reg, val);
2121 reg += 4;
2122 } while ( reg < 0x80);
2123 } else {
2124 break; /* stop at first absent Node */
2125 }
2126 }
2127
2128 /*Copy dram map to F1x120/124*/
2129 mct_HTMemMapExt(pMCTstat, pDCTstatA);
2130}
2131
2132static void MCTMemClr_D(struct MCTStatStruc *pMCTstat,
2133 struct DCTStatStruc *pDCTstatA)
2134{
2135
2136	/* Initiates a memory clear operation for all nodes. The mem clr
Zheng Baoc3af12f2010-10-08 05:08:47 +00002137	 * is done in parallel. After the memclr is complete, the status of
Zheng Baoeb75f652010-04-23 17:32:48 +00002138	 * all processors is checked to ensure that memclr has completed.
2139 */
2140 u8 Node;
2141 struct DCTStatStruc *pDCTstat;
2142
2143 if (!mctGet_NVbits(NV_DQSTrainCTL)){
2144 /* FIXME: callback to wrapper: mctDoWarmResetMemClr_D */
2145 } else { /* NV_DQSTrainCTL == 1 */
2146 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
2147 pDCTstat = pDCTstatA + Node;
2148
2149 if (pDCTstat->NodePresent) {
2150 DCTMemClr_Init_D(pMCTstat, pDCTstat);
2151 }
2152 }
2153 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
2154 pDCTstat = pDCTstatA + Node;
2155
2156 if (pDCTstat->NodePresent) {
2157 DCTMemClr_Sync_D(pMCTstat, pDCTstat);
2158 }
2159 }
2160 }
2161}
2162
2163static void DCTMemClr_Init_D(struct MCTStatStruc *pMCTstat,
2164 struct DCTStatStruc *pDCTstat)
2165{
2166 u32 val;
2167 u32 dev;
2168 u32 reg;
2169
2170 /* Initiates a memory clear operation on one node */
2171 if (pDCTstat->DCTSysLimit) {
2172 dev = pDCTstat->dev_dct;
2173 reg = 0x110;
2174
2175 do {
2176 val = Get_NB32(dev, reg);
2177 } while (val & (1 << MemClrBusy));
2178
2179 val |= (1 << MemClrInit);
2180 Set_NB32(dev, reg, val);
2181 }
2182}
2183
2184static void MCTMemClrSync_D(struct MCTStatStruc *pMCTstat,
2185 struct DCTStatStruc *pDCTstatA)
2186{
2187	/* Ensures that memory clear has completed on all nodes. */
2188 u8 Node;
2189 struct DCTStatStruc *pDCTstat;
2190
2191 if (!mctGet_NVbits(NV_DQSTrainCTL)){
2192 /* callback to wrapper: mctDoWarmResetMemClr_D */
2193 } else { /* NV_DQSTrainCTL == 1 */
2194 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
2195 pDCTstat = pDCTstatA + Node;
2196
2197 if (pDCTstat->NodePresent) {
2198 DCTMemClr_Sync_D(pMCTstat, pDCTstat);
2199 }
2200 }
2201 }
2202}
2203
2204static void DCTMemClr_Sync_D(struct MCTStatStruc *pMCTstat,
2205 struct DCTStatStruc *pDCTstat)
2206{
2207 u32 val;
2208 u32 dev = pDCTstat->dev_dct;
2209 u32 reg;
2210
2211 /* Ensure that a memory clear operation has completed on one node */
2212 if (pDCTstat->DCTSysLimit){
2213 reg = 0x110;
2214
2215 do {
2216 val = Get_NB32(dev, reg);
2217 } while (val & (1 << MemClrBusy));
2218
2219 do {
2220 val = Get_NB32(dev, reg);
2221 } while (!(val & (1 << Dr_MemClrStatus)));
2222 }
2223
2224 val = 0x0FE40FC0; /* BKDG recommended */
2225 val |= MCCH_FlushWrOnStpGnt; /* Set for S3 */
2226 Set_NB32(dev, 0x11C, val);
2227}
2228
2229static u8 NodePresent_D(u8 Node)
2230{
2231 /*
2232 * Determine if a single Hammer Node exists within the network.
2233 */
2234 u32 dev;
2235 u32 val;
2236 u32 dword;
2237 u8 ret = 0;
2238
2239 dev = PA_HOST(Node); /*test device/vendor id at host bridge */
2240 val = Get_NB32(dev, 0);
2241 dword = mct_NodePresent_D(); /* FIXME: BOZO -11001022h rev for F */
2242 if (val == dword) { /* AMD Hammer Family CPU HT Configuration */
2243 if (oemNodePresent_D(Node, &ret))
2244 goto finish;
2245 /* Node ID register */
2246 val = Get_NB32(dev, 0x60);
2247 val &= 0x07;
2248 dword = Node;
2249 if (val == dword) /* current nodeID = requested nodeID ? */
2250 ret = 1;
2251 }
2252finish:
2253 return ret;
2254}
2255
Timothy Pearson730a0432015-10-16 13:51:51 -05002256static void DCTPreInit_D(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat, u8 dct)
2257{
2258 /*
2259 * Run DCT pre-initialization tasks
2260 */
2261 uint32_t dword;
2262
2263 /* Reset DCT registers */
2264 ClearDCT_D(pMCTstat, pDCTstat, dct);
2265 pDCTstat->stopDCT = 1; /*preload flag with 'disable' */
2266
2267 if (!is_fam15h()) {
2268 /* Enable DDR3 support */
2269 dword = Get_NB32_DCT(pDCTstat->dev_dct, dct, 0x94);
2270 dword |= 1 << Ddr3Mode;
2271 Set_NB32_DCT(pDCTstat->dev_dct, dct, 0x94, dword);
2272 }
2273
2274 /* Read the SPD information into the data structures */
2275 if (mct_DIMMPresence(pMCTstat, pDCTstat, dct) < SC_StopError) {
2276 printk(BIOS_DEBUG, "\t\tDCTInit_D: mct_DIMMPresence Done\n");
2277 }
2278}
2279
Zheng Baoeb75f652010-04-23 17:32:48 +00002280static void DCTInit_D(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat, u8 dct)
2281{
2282 /*
2283 * Initialize DRAM on single Athlon 64/Opteron Node.
2284 */
Timothy Pearson730a0432015-10-16 13:51:51 -05002285 uint32_t dword;
Zheng Baoeb75f652010-04-23 17:32:48 +00002286
Timothy Pearson730a0432015-10-16 13:51:51 -05002287 if (!is_fam15h()) {
2288 /* (Re)-enable DDR3 support */
2289 dword = Get_NB32_DCT(pDCTstat->dev_dct, dct, 0x94);
2290 dword |= 1 << Ddr3Mode;
2291 Set_NB32_DCT(pDCTstat->dev_dct, dct, 0x94, dword);
2292 }
2293
2294 if (mct_SPDCalcWidth(pMCTstat, pDCTstat, dct) < SC_StopError) {
2295 printk(BIOS_DEBUG, "\t\tDCTInit_D: mct_SPDCalcWidth Done\n");
2296 if (AutoCycTiming_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
2297 printk(BIOS_DEBUG, "\t\tDCTInit_D: AutoCycTiming_D Done\n");
2298 if (AutoConfig_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
2299 printk(BIOS_DEBUG, "\t\tDCTInit_D: AutoConfig_D Done\n");
2300 if (PlatformSpec_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
2301 printk(BIOS_DEBUG, "\t\tDCTInit_D: PlatformSpec_D Done\n");
2302 pDCTstat->stopDCT = 0;
2303 if (!(pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW))) {
2304 printk(BIOS_DEBUG, "\t\tDCTInit_D: StartupDCT_D\n");
2305 StartupDCT_D(pMCTstat, pDCTstat, dct); /*yeaahhh! */
Zheng Baoeb75f652010-04-23 17:32:48 +00002306 }
2307 }
2308 }
2309 }
2310 }
2311
Timothy Pearson730a0432015-10-16 13:51:51 -05002312 if (pDCTstat->stopDCT) {
2313 dword = 1 << DisDramInterface;
2314 Set_NB32_DCT(pDCTstat->dev_dct, dct, 0x94, dword);
2315
2316 /* To maximize power savings when DisDramInterface=1b,
2317 * all of the MemClkDis bits should also be set.
2318 */
2319 Set_NB32_DCT(pDCTstat->dev_dct, dct, 0x88, 0xff000000);
Zheng Baoeb75f652010-04-23 17:32:48 +00002320 } else {
Zheng Bao69436e12011-01-06 02:18:12 +00002321 mct_EnDllShutdownSR(pMCTstat, pDCTstat, dct);
Zheng Baoeb75f652010-04-23 17:32:48 +00002322 }
2323}
2324
2325static void SyncDCTsReady_D(struct MCTStatStruc *pMCTstat,
2326 struct DCTStatStruc *pDCTstatA)
2327{
2328 /* Wait (and block further access to dram) for all DCTs to be ready,
2329 * by polling all InitDram bits and waiting for possible memory clear
2330 * operations to be complete. Read MemClkFreqVal bit to see if
2331 * the DIMMs are present in this node.
2332 */
2333 u8 Node;
2334 u32 val;
2335
2336 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
2337 struct DCTStatStruc *pDCTstat;
2338 pDCTstat = pDCTstatA + Node;
2339 mct_SyncDCTsReady(pDCTstat);
2340 }
Timothy Pearson730a0432015-10-16 13:51:51 -05002341
2342 if (!is_fam15h()) {
2343 /* v6.1.3 */
2344 /* re-enable phy compensation engine when dram init is completed on all nodes. */
2345 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
2346 struct DCTStatStruc *pDCTstat;
2347 pDCTstat = pDCTstatA + Node;
2348 if (pDCTstat->NodePresent) {
2349 if (pDCTstat->DIMMValidDCT[0] > 0 || pDCTstat->DIMMValidDCT[1] > 0) {
2350 /* re-enable phy compensation engine when dram init on both DCTs is completed. */
2351 val = Get_NB32_index_wait(pDCTstat->dev_dct, 0x98, 0x8);
2352 val &= ~(1 << DisAutoComp);
2353 Set_NB32_index_wait(pDCTstat->dev_dct, 0x98, 0x8, val);
2354 }
Zheng Baoeb75f652010-04-23 17:32:48 +00002355 }
2356 }
2357 }
Timothy Pearson730a0432015-10-16 13:51:51 -05002358
Zheng Baoeb75f652010-04-23 17:32:48 +00002359 /* wait 750us before any memory access can be made. */
2360 mct_Wait(15000);
2361}
2362
2363static void StartupDCT_D(struct MCTStatStruc *pMCTstat,
2364 struct DCTStatStruc *pDCTstat, u8 dct)
2365{
2366 /* Read MemClkFreqVal bit to see if the DIMMs are present in this node.
2367 * If the DIMMs are present then set the DRAM Enable bit for this node.
2368 *
2369 * Setting dram init starts up the DCT state machine, initializes the
2370 * dram devices with MRS commands, and kicks off any
2371 * HW memory clear process that the chip is capable of. The sooner
2372 * that dram init is set for all nodes, the faster the memory system
2373 * initialization can complete. Thus, the init loop is unrolled into
Zheng Baoc3af12f2010-10-08 05:08:47 +00002374 * two loops so as to start the processes for non BSP nodes sooner.
Zheng Baoeb75f652010-04-23 17:32:48 +00002375 * This procedure will not wait for the process to finish.
2376 * Synchronization is handled elsewhere.
2377 */
2378 u32 val;
2379 u32 dev;
Zheng Baoeb75f652010-04-23 17:32:48 +00002380
2381 dev = pDCTstat->dev_dct;
Timothy Pearson730a0432015-10-16 13:51:51 -05002382 val = Get_NB32_DCT(dev, dct, 0x94);
Zheng Baoeb75f652010-04-23 17:32:48 +00002383 if (val & (1<<MemClkFreqVal)) {
2384 mctHookBeforeDramInit(); /* generalized Hook */
2385 if (!(pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)))
2386 mct_DramInit(pMCTstat, pDCTstat, dct);
2387 AfterDramInit_D(pDCTstat, dct);
2388 mctHookAfterDramInit(); /* generalized Hook*/
2389 }
2390}
2391
2392static void ClearDCT_D(struct MCTStatStruc *pMCTstat,
2393 struct DCTStatStruc *pDCTstat, u8 dct)
2394{
2395 u32 reg_end;
2396 u32 dev = pDCTstat->dev_dct;
Timothy Pearson730a0432015-10-16 13:51:51 -05002397 u32 reg = 0x40;
Zheng Baoeb75f652010-04-23 17:32:48 +00002398 u32 val = 0;
2399
2400 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
Timothy Pearson730a0432015-10-16 13:51:51 -05002401 reg_end = 0x78;
Zheng Baoeb75f652010-04-23 17:32:48 +00002402 } else {
Timothy Pearson730a0432015-10-16 13:51:51 -05002403 reg_end = 0xA4;
Zheng Baoeb75f652010-04-23 17:32:48 +00002404 }
2405
2406 while(reg < reg_end) {
Zheng Bao69436e12011-01-06 02:18:12 +00002407 if ((reg & 0xFF) == 0x90) {
2408 if (pDCTstat->LogicalCPUID & AMD_DR_Dx) {
Timothy Pearson730a0432015-10-16 13:51:51 -05002409 val = Get_NB32_DCT(dev, dct, reg); /* get DRAMConfigLow */
Zheng Bao69436e12011-01-06 02:18:12 +00002410 val |= 0x08000000; /* preserve value of DisDllShutdownSR for only Rev.D */
2411 }
2412 }
Timothy Pearson730a0432015-10-16 13:51:51 -05002413 Set_NB32_DCT(dev, dct, reg, val);
Zheng Bao69436e12011-01-06 02:18:12 +00002414 val = 0;
Zheng Baoeb75f652010-04-23 17:32:48 +00002415 reg += 4;
2416 }
2417
2418 val = 0;
2419 dev = pDCTstat->dev_map;
2420 reg = 0xF0;
2421 Set_NB32(dev, reg, val);
2422}
2423
2424static void SPD2ndTiming(struct MCTStatStruc *pMCTstat,
2425 struct DCTStatStruc *pDCTstat, u8 dct)
2426{
2427 u8 i;
2428 u16 Twr, Trtp;
2429 u16 Trp, Trrd, Trcd, Tras, Trc;
2430 u8 Trfc[4];
2431 u16 Tfaw;
Timothy Pearson730a0432015-10-16 13:51:51 -05002432 u16 Tcwl; /* Fam15h only */
Zheng Baoeb75f652010-04-23 17:32:48 +00002433 u32 DramTimingLo, DramTimingHi;
2434 u8 tCK16x;
2435 u16 Twtr;
2436 u8 LDIMM;
2437 u8 MTB16x;
2438 u8 byte;
2439 u32 dword;
2440 u32 dev;
Zheng Baoeb75f652010-04-23 17:32:48 +00002441 u32 val;
2442 u16 smbaddr;
2443
Timothy Pearson730a0432015-10-16 13:51:51 -05002444 printk(BIOS_DEBUG, "%s: Start\n", __func__);
2445
Zheng Baoeb75f652010-04-23 17:32:48 +00002446 /* Gather all DIMM mini-max values for cycle timing data */
2447 Trp = 0;
2448 Trrd = 0;
2449 Trcd = 0;
2450 Trtp = 0;
2451 Tras = 0;
2452 Trc = 0;
2453 Twr = 0;
2454 Twtr = 0;
2455 for (i=0; i < 4; i++)
2456 Trfc[i] = 0;
2457 Tfaw = 0;
2458
2459	for (i = 0; i < MAX_DIMMS_SUPPORTED; i++) {
2460 LDIMM = i >> 1;
2461 if (pDCTstat->DIMMValid & (1 << i)) {
2462 smbaddr = Get_DIMMAddress_D(pDCTstat, (dct + i));
2463
2464 val = mctRead_SPD(smbaddr, SPD_MTBDivisor); /* MTB=Dividend/Divisor */
2465 MTB16x = ((mctRead_SPD(smbaddr, SPD_MTBDividend) & 0xFF)<<4);
2466 MTB16x /= val; /* transfer to MTB*16 */
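			/* e.g. the common DDR3 medium timebase of 0.125ns is encoded as
			 * dividend 1 / divisor 8, so MTB16x = (1 << 4) / 8 = 2; an SPD
			 * byte of 108 (13.5ns) then becomes 108 * 2 = 216 in the 1/16ns
			 * units used throughout this routine.
			 */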
2467
2468 byte = mctRead_SPD(smbaddr, SPD_tRPmin);
2469 val = byte * MTB16x;
2470 if (Trp < val)
2471 Trp = val;
2472
2473 byte = mctRead_SPD(smbaddr, SPD_tRRDmin);
2474 val = byte * MTB16x;
2475 if (Trrd < val)
2476 Trrd = val;
2477
2478 byte = mctRead_SPD(smbaddr, SPD_tRCDmin);
2479 val = byte * MTB16x;
2480 if (Trcd < val)
2481 Trcd = val;
2482
2483 byte = mctRead_SPD(smbaddr, SPD_tRTPmin);
2484 val = byte * MTB16x;
2485 if (Trtp < val)
2486 Trtp = val;
2487
2488 byte = mctRead_SPD(smbaddr, SPD_tWRmin);
2489 val = byte * MTB16x;
2490 if (Twr < val)
2491 Twr = val;
2492
2493 byte = mctRead_SPD(smbaddr, SPD_tWTRmin);
2494 val = byte * MTB16x;
2495 if (Twtr < val)
2496 Twtr = val;
2497
2498 val = mctRead_SPD(smbaddr, SPD_Upper_tRAS_tRC) & 0xFF;
2499 val >>= 4;
2500 val <<= 8;
2501 val |= mctRead_SPD(smbaddr, SPD_tRCmin) & 0xFF;
2502 val *= MTB16x;
2503 if (Trc < val)
2504 Trc = val;
2505
2506 byte = mctRead_SPD(smbaddr, SPD_Density) & 0xF;
2507 if (Trfc[LDIMM] < byte)
2508 Trfc[LDIMM] = byte;
2509
2510 val = mctRead_SPD(smbaddr, SPD_Upper_tRAS_tRC) & 0xF;
2511 val <<= 8;
2512 val |= (mctRead_SPD(smbaddr, SPD_tRASmin) & 0xFF);
2513 val *= MTB16x;
2514 if (Tras < val)
2515 Tras = val;
2516
2517 val = mctRead_SPD(smbaddr, SPD_Upper_tFAW) & 0xF;
2518 val <<= 8;
2519 val |= mctRead_SPD(smbaddr, SPD_tFAWmin) & 0xFF;
2520 val *= MTB16x;
2521 if (Tfaw < val)
2522 Tfaw = val;
2523 } /* Dimm Present */
2524 }
2525
2526 /* Convert DRAM CycleTiming values and store into DCT structure */
2527 byte = pDCTstat->DIMMAutoSpeed;
2528 if (byte == 7)
2529 tCK16x = 20;
2530 else if (byte == 6)
2531 tCK16x = 24;
2532 else if (byte == 5)
2533 tCK16x = 30;
2534 else
2535 tCK16x = 40;
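	/* These byte values are the pre-Fam15h speed encoding used in SPDGetTCL_D;
	 * MEMCLK MHz = 16000 / tCK16x, i.e. 7 -> 800MHz (DDR3-1600), 6 -> 667MHz
	 * (DDR3-1333), 5 -> 533MHz (DDR3-1066), otherwise 400MHz (DDR3-800).
	 */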
2536
2537 /* Notes:
2538	   1. Secondary time values in the SPD are encoded as integer multiples of the medium timebase (MTB), which is given in ns.
2539	   2. Within this code, time values are carried scaled by 16, giving a least count of 1/16 ns (0.0625 ns) for extra accuracy.
2540	      The JEDEC SPD spec defines the MTB encoding from which they are derived.
2541	   3. Internally to this SW, the cycle time, tCK16x, is likewise scaled by 16 to match those time values.
2542 */
2543
2544 /* Tras */
2545 pDCTstat->DIMMTras = (u16)Tras;
2546 val = Tras / tCK16x;
2547 if (Tras % tCK16x) { /* round up number of busclocks */
2548 val++;
2549 }
2550 if (val < Min_TrasT)
2551 val = Min_TrasT;
2552 else if (val > Max_TrasT)
2553 val = Max_TrasT;
2554 pDCTstat->Tras = val;
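	/* The divide/remainder pair above is an integer ceiling and could equally
	 * be written as val = (Tras + tCK16x - 1) / tCK16x.  For example
	 * Tras = 560 (35ns) at tCK16x = 24 (667MHz) gives 560 / 24 = 23 remainder
	 * 8, so the value rounds up to 24 busclocks before being clamped to the
	 * Min_TrasT/Max_TrasT range.  The same pattern is used for the remaining
	 * secondary timings below.
	 */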
2555
2556 /* Trp */
2557 pDCTstat->DIMMTrp = Trp;
2558 val = Trp / tCK16x;
2559 if (Trp % tCK16x) { /* round up number of busclocks */
2560 val++;
2561 }
2562 if (val < Min_TrpT)
2563 val = Min_TrpT;
2564 else if (val > Max_TrpT)
2565 val = Max_TrpT;
2566 pDCTstat->Trp = val;
2567
2568 /*Trrd*/
2569 pDCTstat->DIMMTrrd = Trrd;
2570 val = Trrd / tCK16x;
2571 if (Trrd % tCK16x) { /* round up number of busclocks */
2572 val++;
2573 }
2574 if (val < Min_TrrdT)
2575 val = Min_TrrdT;
2576 else if (val > Max_TrrdT)
2577 val = Max_TrrdT;
2578 pDCTstat->Trrd = val;
2579
2580 /* Trcd */
2581 pDCTstat->DIMMTrcd = Trcd;
2582 val = Trcd / tCK16x;
2583 if (Trcd % tCK16x) { /* round up number of busclocks */
2584 val++;
2585 }
2586 if (val < Min_TrcdT)
2587 val = Min_TrcdT;
2588 else if (val > Max_TrcdT)
2589 val = Max_TrcdT;
2590 pDCTstat->Trcd = val;
2591
2592 /* Trc */
2593 pDCTstat->DIMMTrc = Trc;
2594 val = Trc / tCK16x;
2595 if (Trc % tCK16x) { /* round up number of busclocks */
2596 val++;
2597 }
2598 if (val < Min_TrcT)
2599 val = Min_TrcT;
2600 else if (val > Max_TrcT)
2601 val = Max_TrcT;
2602 pDCTstat->Trc = val;
2603
2604 /* Trtp */
2605 pDCTstat->DIMMTrtp = Trtp;
2606 val = Trtp / tCK16x;
2607 if (Trtp % tCK16x) {
2608 val ++;
2609 }
2610 if (val < Min_TrtpT)
2611 val = Min_TrtpT;
2612 else if (val > Max_TrtpT)
2613 val = Max_TrtpT;
2614 pDCTstat->Trtp = val;
2615
2616 /* Twr */
2617 pDCTstat->DIMMTwr = Twr;
2618 val = Twr / tCK16x;
2619 if (Twr % tCK16x) { /* round up number of busclocks */
2620 val++;
2621 }
2622 if (val < Min_TwrT)
2623 val = Min_TwrT;
2624 else if (val > Max_TwrT)
2625 val = Max_TwrT;
2626 pDCTstat->Twr = val;
2627
2628 /* Twtr */
2629 pDCTstat->DIMMTwtr = Twtr;
2630 val = Twtr / tCK16x;
2631 if (Twtr % tCK16x) { /* round up number of busclocks */
2632 val++;
2633 }
2634 if (val < Min_TwtrT)
2635 val = Min_TwtrT;
2636 else if (val > Max_TwtrT)
2637 val = Max_TwtrT;
2638 pDCTstat->Twtr = val;
2639
2640 /* Trfc0-Trfc3 */
2641 for (i=0; i<4; i++)
2642 pDCTstat->Trfc[i] = Trfc[i];
2643
2644 /* Tfaw */
2645 pDCTstat->DIMMTfaw = Tfaw;
2646 val = Tfaw / tCK16x;
2647 if (Tfaw % tCK16x) { /* round up number of busclocks */
2648 val++;
2649 }
2650 if (val < Min_TfawT)
2651 val = Min_TfawT;
2652 else if (val > Max_TfawT)
2653 val = Max_TfawT;
2654 pDCTstat->Tfaw = val;
2655
2656 mctAdjustAutoCycTmg_D();
2657
Timothy Pearson730a0432015-10-16 13:51:51 -05002658 if (is_fam15h()) {
2659 /* Compute Tcwl (Fam15h BKDG v3.14 Table 203) */
2660 if (pDCTstat->Speed <= 0x6)
2661 Tcwl = 0x5;
2662 else if (pDCTstat->Speed == 0xa)
2663 Tcwl = 0x6;
2664 else if (pDCTstat->Speed == 0xe)
2665 Tcwl = 0x7;
2666 else if (pDCTstat->Speed == 0x12)
2667 Tcwl = 0x8;
2668 else if (pDCTstat->Speed == 0x16)
2669 Tcwl = 0x9;
2670 else
2671 Tcwl = 0x5; /* Power-on default */
2672 }
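	/* The table above follows the JEDEC DDR3 CAS write latency steps: CWL 5
	 * for DDR3-800 (Speed <= 0x6) up through CWL 9 for DDR3-1866 (Speed 0x16),
	 * with CWL 5 also used as the safe power-on default for unknown speeds.
	 */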
2673
Zheng Baoeb75f652010-04-23 17:32:48 +00002674 /* Program DRAM Timing values */
Timothy Pearson730a0432015-10-16 13:51:51 -05002675 if (is_fam15h()) {
2676 dev = pDCTstat->dev_dct;
Zheng Baoeb75f652010-04-23 17:32:48 +00002677
Timothy Pearson730a0432015-10-16 13:51:51 -05002678 dword = Get_NB32_DCT(dev, dct, 0x8c); /* DRAM Timing High */
2679 val = 2; /* Tref = 7.8us */
2680 dword &= ~(0x3 << 16);
2681 dword |= (val & 0x3) << 16;
2682 Set_NB32_DCT(dev, dct, 0x8c, dword); /* DRAM Timing High */
Zheng Baoeb75f652010-04-23 17:32:48 +00002683
Timothy Pearson730a0432015-10-16 13:51:51 -05002684 dword = Get_NB32_DCT(dev, dct, 0x200); /* DRAM Timing 0 */
2685 dword &= ~(0x3f1f1f1f);
2686 dword |= ((pDCTstat->Tras + 0xf) & 0x3f) << 24; /* Tras */
2687 dword |= ((pDCTstat->Trp + 0x5) & 0x1f) << 16; /* Trp */
2688 dword |= ((pDCTstat->Trcd + 0x5) & 0x1f) << 8; /* Trcd */
2689 dword |= (pDCTstat->CASL & 0x1f); /* Tcl */
2690 Set_NB32_DCT(dev, dct, 0x200, dword); /* DRAM Timing 0 */
Zheng Baoeb75f652010-04-23 17:32:48 +00002691
Timothy Pearson730a0432015-10-16 13:51:51 -05002692 dword = Get_NB32_DCT(dev, dct, 0x204); /* DRAM Timing 1 */
2693 dword &= ~(0x0f3f0f3f);
2694 dword |= ((pDCTstat->Trtp + 0x4) & 0xf) << 24; /* Trtp */
2695 if (pDCTstat->Tfaw != 0)
2696 dword |= ((((pDCTstat->Tfaw - 0x1) * 2) + 0x10) & 0x3f) << 16; /* FourActWindow */
2697 dword |= ((pDCTstat->Trrd + 0x4) & 0xf) << 8; /* Trrd */
2698 dword |= ((pDCTstat->Trc + 0xb) & 0x3f); /* Trc */
2699 Set_NB32_DCT(dev, dct, 0x204, dword); /* DRAM Timing 1 */
Zheng Baoeb75f652010-04-23 17:32:48 +00002700
Timothy Pearson730a0432015-10-16 13:51:51 -05002701 dword = Get_NB32_DCT(dev, dct, 0x208); /* DRAM Timing 2 */
2702 dword &= ~(0x07070707);
2703 dword |= (pDCTstat->Trfc[3] & 0x7) << 24; /* Trfc3 */
2704 dword |= (pDCTstat->Trfc[2] & 0x7) << 16; /* Trfc2 */
2705 dword |= (pDCTstat->Trfc[1] & 0x7) << 8; /* Trfc1 */
2706 dword |= (pDCTstat->Trfc[0] & 0x7); /* Trfc0 */
2707 Set_NB32_DCT(dev, dct, 0x208, dword); /* DRAM Timing 2 */
Zheng Baoeb75f652010-04-23 17:32:48 +00002708
Timothy Pearson730a0432015-10-16 13:51:51 -05002709 dword = Get_NB32_DCT(dev, dct, 0x20c); /* DRAM Timing 3 */
2710 dword &= ~(0x00000f00);
2711 dword |= ((pDCTstat->Twtr + 0x4) & 0xf) << 8; /* Twtr */
2712 dword &= ~(0x0000001f);
2713 dword |= (Tcwl & 0x1f); /* Tcwl */
2714 Set_NB32_DCT(dev, dct, 0x20c, dword); /* DRAM Timing 3 */
Zheng Baoeb75f652010-04-23 17:32:48 +00002715
Timothy Pearson730a0432015-10-16 13:51:51 -05002716 dword = Get_NB32_DCT(dev, dct, 0x22c); /* DRAM Timing 10 */
2717 dword &= ~(0x0000001f);
2718 dword |= ((pDCTstat->Twr + 0x4) & 0x1f); /* Twr */
2719 Set_NB32_DCT(dev, dct, 0x22c, dword); /* DRAM Timing 10 */
Zheng Baoeb75f652010-04-23 17:32:48 +00002720
Timothy Pearson730a0432015-10-16 13:51:51 -05002721 if (pDCTstat->Speed > mhz_to_memclk_config(mctGet_NVbits(NV_MIN_MEMCLK))) {
2722 /* Enable phy-assisted training mode */
2723 fam15EnableTrainingMode(pMCTstat, pDCTstat, dct, 1);
2724 }
Zheng Baoeb75f652010-04-23 17:32:48 +00002725
Timothy Pearson730a0432015-10-16 13:51:51 -05002726 /* Other setup (not training specific) */
2727 dword = Get_NB32_DCT(dev, dct, 0x90); /* DRAM Configuration Low */
2728 dword &= ~(0x1 << 23); /* ForceAutoPchg = 0 */
2729 dword &= ~(0x1 << 20); /* DynPageCloseEn = 0 */
2730 Set_NB32_DCT(dev, dct, 0x90, dword); /* DRAM Configuration Low */
Zheng Baoeb75f652010-04-23 17:32:48 +00002731
Timothy Pearson730a0432015-10-16 13:51:51 -05002732 Set_NB32_DCT(dev, dct, 0x228, 0x14141414); /* DRAM Timing 9 */
2733 } else {
2734 DramTimingLo = 0; /* Dram Timing Low init */
2735 val = pDCTstat->CASL - 4; /* pDCTstat.CASL to reg. definition */
Zheng Baoeb75f652010-04-23 17:32:48 +00002736 DramTimingLo |= val;
Zheng Baoeb75f652010-04-23 17:32:48 +00002737
Timothy Pearson730a0432015-10-16 13:51:51 -05002738 val = pDCTstat->Trcd - Bias_TrcdT;
2739 DramTimingLo |= val<<4;
2740
2741 val = pDCTstat->Trp - Bias_TrpT;
2742 val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
2743 DramTimingLo |= val<<7;
2744
2745 val = pDCTstat->Trtp - Bias_TrtpT;
2746 DramTimingLo |= val<<10;
2747
2748 val = pDCTstat->Tras - Bias_TrasT;
2749 DramTimingLo |= val<<12;
2750
2751 val = pDCTstat->Trc - Bias_TrcT;
2752 DramTimingLo |= val<<16;
2753
2754 val = pDCTstat->Trrd - Bias_TrrdT;
2755 DramTimingLo |= val<<22;
2756
2757 DramTimingHi = 0; /* Dram Timing High init */
2758 val = pDCTstat->Twtr - Bias_TwtrT;
2759 DramTimingHi |= val<<8;
2760
2761 val = 2; /* Tref = 7.8us */
2762 DramTimingHi |= val<<16;
2763
2764 val = 0;
2765 for (i=4;i>0;i--) {
2766 val <<= 3;
2767 val |= Trfc[i-1];
2768 }
2769 DramTimingHi |= val << 20;
2770
2771 dev = pDCTstat->dev_dct;
2772 /* Twr */
2773 val = pDCTstat->Twr;
2774 if (val == 10)
2775 val = 9;
2776 else if (val == 12)
2777 val = 10;
2778 val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
2779 val -= Bias_TwrT;
2780 val <<= 4;
2781 dword = Get_NB32_DCT(dev, dct, 0x84);
2782 dword &= ~0x70;
2783 dword |= val;
2784 Set_NB32_DCT(dev, dct, 0x84, dword);
2785
2786 /* Tfaw */
2787 val = pDCTstat->Tfaw;
2788 val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
2789 val -= Bias_TfawT;
2790 val >>= 1;
2791 val <<= 28;
2792 dword = Get_NB32_DCT(dev, dct, 0x94);
2793 dword &= ~0xf0000000;
2794 dword |= val;
2795 Set_NB32_DCT(dev, dct, 0x94, dword);
2796
2797 /* dev = pDCTstat->dev_dct; */
2798
2799 if (pDCTstat->Speed > mhz_to_memclk_config(mctGet_NVbits(NV_MIN_MEMCLK))) {
2800 val = Get_NB32_DCT(dev, dct, 0x88);
2801 val &= 0xFF000000;
2802 DramTimingLo |= val;
2803 }
2804 Set_NB32_DCT(dev, dct, 0x88, DramTimingLo); /*DCT Timing Low*/
2805
2806 if (pDCTstat->Speed > mhz_to_memclk_config(mctGet_NVbits(NV_MIN_MEMCLK))) {
2807 DramTimingHi |= 1 << DisAutoRefresh;
2808 }
2809 DramTimingHi |= 0x000018FF;
2810 Set_NB32_DCT(dev, dct, 0x8c, DramTimingHi); /*DCT Timing Hi*/
Zheng Baoeb75f652010-04-23 17:32:48 +00002811 }
Zheng Baoeb75f652010-04-23 17:32:48 +00002812
2813 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
Timothy Pearson730a0432015-10-16 13:51:51 -05002814
2815 printk(BIOS_DEBUG, "%s: Done\n", __func__);
Zheng Baoeb75f652010-04-23 17:32:48 +00002816}
2817
2818static u8 AutoCycTiming_D(struct MCTStatStruc *pMCTstat,
2819 struct DCTStatStruc *pDCTstat, u8 dct)
2820{
2821 /* Initialize DCT Timing registers as per DIMM SPD.
2822 * For primary timing (T, CL) use best case T value.
2823 * For secondary timing params., use most aggressive settings
2824 * of slowest DIMM.
2825 *
2826 * There are three components to determining "maximum frequency":
2827 * SPD component, Bus load component, and "Preset" max frequency
2828 * component.
2829 *
2830 * The SPD component is a function of the min cycle time specified
2831 * by each DIMM, and the interaction of cycle times from all DIMMs
2832 * in conjunction with CAS latency. The SPD component only applies
2833 * when user timing mode is 'Auto'.
2834 *
2835 * The Bus load component is a limiting factor determined by electrical
2836 * characteristics on the bus as a result of varying number of device
2837 * loads. The Bus load component is specific to each platform but may
2838 * also be a function of other factors. The bus load component only
2839 * applies when user timing mode is 'Auto'.
2840 *
2841 * The Preset component is subdivided into three items and is
2842 * the minimum of the set: Silicon revision, user limit
2843 * setting when user timing mode is 'Auto' and memclock mode
2844 * is 'Limit', OEM build specification of the maximum
2845 * frequency. The Preset component is only applies when user
2846	 * frequency. The Preset component only applies when user
2847 */
2848
Timothy Pearson730a0432015-10-16 13:51:51 -05002849 printk(BIOS_DEBUG, "%s: Start\n", __func__);
2850
Zheng Baoeb75f652010-04-23 17:32:48 +00002851 /* Get primary timing (CAS Latency and Cycle Time) */
2852 if (pDCTstat->Speed == 0) {
2853 mctGet_MaxLoadFreq(pDCTstat);
2854
2855 /* and Factor in presets (setup options, Si cap, etc.) */
2856 GetPresetmaxF_D(pMCTstat, pDCTstat);
2857
2858 /* Go get best T and CL as specified by DIMM mfgs. and OEM */
2859 SPDGetTCL_D(pMCTstat, pDCTstat, dct);
Timothy Pearson730a0432015-10-16 13:51:51 -05002860
Zheng Baoeb75f652010-04-23 17:32:48 +00002861 /* skip callback mctForce800to1067_D */
2862 pDCTstat->Speed = pDCTstat->DIMMAutoSpeed;
2863 pDCTstat->CASL = pDCTstat->DIMMCASL;
2864
2865 }
2866 mct_AfterGetCLT(pMCTstat, pDCTstat, dct);
2867
2868 SPD2ndTiming(pMCTstat, pDCTstat, dct);
2869
2870 printk(BIOS_DEBUG, "AutoCycTiming: Status %x\n", pDCTstat->Status);
2871 printk(BIOS_DEBUG, "AutoCycTiming: ErrStatus %x\n", pDCTstat->ErrStatus);
2872 printk(BIOS_DEBUG, "AutoCycTiming: ErrCode %x\n", pDCTstat->ErrCode);
2873 printk(BIOS_DEBUG, "AutoCycTiming: Done\n\n");
2874
2875 mctHookAfterAutoCycTmg();
2876
2877 return pDCTstat->ErrCode;
2878}
2879
2880static void GetPresetmaxF_D(struct MCTStatStruc *pMCTstat,
2881 struct DCTStatStruc *pDCTstat)
2882{
2883 /* Get max frequency from OEM platform definition, from any user
2884 * override (limiting) of max frequency, and from any Si Revision
2885 * Specific information. Return the least of these three in
2886 * DCTStatStruc.PresetmaxFreq.
2887 */
Zheng Baof7a999a2010-09-05 05:52:33 +00002888 /* TODO: Set the proper max frequency in wrappers/mcti_d.c. */
Zheng Baoeb75f652010-04-23 17:32:48 +00002889 u16 proposedFreq;
2890 u16 word;
2891
2892 /* Get CPU Si Revision defined limit (NPT) */
Timothy Pearson730a0432015-10-16 13:51:51 -05002893 if (is_fam15h())
2894 proposedFreq = 933;
2895 else
2896		proposedFreq = 800;	 /* Rev F0 programmable max memclock is 800MHz */
Zheng Baoeb75f652010-04-23 17:32:48 +00002897
2898 /*Get User defined limit if "limit" mode */
2899 if ( mctGet_NVbits(NV_MCTUSRTMGMODE) == 1) {
2900 word = Get_Fk_D(mctGet_NVbits(NV_MemCkVal) + 1);
2901 if (word < proposedFreq)
2902 proposedFreq = word;
2903
2904 /* Get Platform defined limit */
2905 word = mctGet_NVbits(NV_MAX_MEMCLK);
2906 if (word < proposedFreq)
2907 proposedFreq = word;
2908
2909 word = pDCTstat->PresetmaxFreq;
2910 if (word > proposedFreq)
2911 word = proposedFreq;
2912
2913 pDCTstat->PresetmaxFreq = word;
2914 }
2915 /* Check F3xE8[DdrMaxRate] for maximum DRAM data rate support */
2916}
2917
2918static void SPDGetTCL_D(struct MCTStatStruc *pMCTstat,
2919 struct DCTStatStruc *pDCTstat, u8 dct)
2920{
2921 /* Find the best T and CL primary timing parameter pair, per Mfg.,
2922 * for the given set of DIMMs, and store into DCTStatStruc
2923 * (.DIMMAutoSpeed and .DIMMCASL). See "Global relationship between
2924 * index values and item values" for definition of CAS latency
2925 * index (j) and Frequency index (k).
2926 */
2927 u8 i, CASLatLow, CASLatHigh;
2928 u16 tAAmin16x;
2929 u8 MTB16x;
2930 u16 tCKmin16x;
2931 u16 tCKproposed16x;
2932 u8 CLactual, CLdesired, CLT_Fail;
Timothy Pearson730a0432015-10-16 13:51:51 -05002933 uint16_t min_frequency_tck16x;
Zheng Baoeb75f652010-04-23 17:32:48 +00002934
Stefan Reinauer328a6942011-10-13 17:04:02 -07002935 u8 smbaddr, byte = 0, bytex = 0;
Zheng Baoeb75f652010-04-23 17:32:48 +00002936
2937 CASLatLow = 0xFF;
2938 CASLatHigh = 0xFF;
2939 tAAmin16x = 0;
2940 tCKmin16x = 0;
2941 CLT_Fail = 0;
2942
Timothy Pearson730a0432015-10-16 13:51:51 -05002943 printk(BIOS_DEBUG, "%s: Start\n", __func__);
2944
2945 if (is_fam15h()) {
2946 uint16_t minimum_frequency_mhz = mctGet_NVbits(NV_MIN_MEMCLK);
2947 if (minimum_frequency_mhz == 0)
2948 minimum_frequency_mhz = 333;
2949 min_frequency_tck16x = 16000 / minimum_frequency_mhz;
2950 } else {
2951 min_frequency_tck16x = 40;
2952 }
2953
Zheng Baoeb75f652010-04-23 17:32:48 +00002954 for (i = 0; i < MAX_DIMMS_SUPPORTED; i++) {
2955 if (pDCTstat->DIMMValid & (1 << i)) {
2956 smbaddr = Get_DIMMAddress_D(pDCTstat, (dct + i));
2957 /* Step 1: Determine the common set of supported CAS Latency
2958 * values for all modules on the memory channel using the CAS
2959 * Latencies Supported in SPD bytes 14 and 15.
2960 */
2961 byte = mctRead_SPD(smbaddr, SPD_CASLow);
2962 CASLatLow &= byte;
2963 byte = mctRead_SPD(smbaddr, SPD_CASHigh);
2964 CASLatHigh &= byte;
2965 /* Step 2: Determine tAAmin(all) which is the largest tAAmin
2966 value for all modules on the memory channel (SPD byte 16). */
2967 byte = mctRead_SPD(smbaddr, SPD_MTBDivisor);
2968
2969 MTB16x = ((mctRead_SPD(smbaddr, SPD_MTBDividend) & 0xFF)<<4);
2970 MTB16x /= byte; /* transfer to MTB*16 */
2971
2972 byte = mctRead_SPD(smbaddr, SPD_tAAmin);
2973 if (tAAmin16x < byte * MTB16x)
2974 tAAmin16x = byte * MTB16x;
2975 /* Step 3: Determine tCKmin(all) which is the largest tCKmin
2976 value for all modules on the memory channel (SPD byte 12). */
2977 byte = mctRead_SPD(smbaddr, SPD_tCKmin);
2978
2979 if (tCKmin16x < byte * MTB16x)
2980 tCKmin16x = byte * MTB16x;
2981 }
2982 }
Timothy Pearson730a0432015-10-16 13:51:51 -05002983 /* calculate tCKproposed16x (proposed clock period in ns * 16) */
Zheng Baoeb75f652010-04-23 17:32:48 +00002984 tCKproposed16x = 16000 / pDCTstat->PresetmaxFreq;
2985 if (tCKmin16x > tCKproposed16x)
2986 tCKproposed16x = tCKmin16x;
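	/* Worked example with typical JEDEC values: a DDR3-1333 CL9 module reports
	 * tCKmin = 1.5ns (tCKmin16x = 24) and tAAmin = 13.5ns (tAAmin16x = 216).
	 * With PresetmaxFreq = 800, tCKproposed16x = 16000 / 800 = 20, which is
	 * faster than the module supports, so it is raised to 24 (667MHz).
	 */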
2987
Zheng Baoeb75f652010-04-23 17:32:48 +00002988 /* TODO: get user manual tCK16x(Freq.) and overwrite current tCKproposed16x if manual. */
Timothy Pearson730a0432015-10-16 13:51:51 -05002989 if (is_fam15h()) {
2990 if (tCKproposed16x == 17)
2991 pDCTstat->TargetFreq = 0x16;
2992 else if (tCKproposed16x <= 20) {
2993 pDCTstat->TargetFreq = 0x12;
2994 tCKproposed16x = 20;
2995 } else if (tCKproposed16x <= 24) {
2996 pDCTstat->TargetFreq = 0xe;
2997 tCKproposed16x = 24;
2998 } else if (tCKproposed16x <= 30) {
2999 pDCTstat->TargetFreq = 0xa;
3000 tCKproposed16x = 30;
3001 } else if (tCKproposed16x <= 40) {
3002 pDCTstat->TargetFreq = 0x6;
3003 tCKproposed16x = 40;
3004 } else {
3005 pDCTstat->TargetFreq = 0x4;
3006 tCKproposed16x = 48;
3007 }
Timothy Pearsonb8a355d2015-09-05 17:55:58 -05003008 } else {
Timothy Pearson730a0432015-10-16 13:51:51 -05003009 if (tCKproposed16x == 20)
3010 pDCTstat->TargetFreq = 7;
3011 else if (tCKproposed16x <= 24) {
3012 pDCTstat->TargetFreq = 6;
3013 tCKproposed16x = 24;
3014 } else if (tCKproposed16x <= 30) {
3015 pDCTstat->TargetFreq = 5;
3016 tCKproposed16x = 30;
3017 } else {
3018 pDCTstat->TargetFreq = 4;
3019 tCKproposed16x = 40;
3020 }
Zheng Baoeb75f652010-04-23 17:32:48 +00003021 }
3022 /* Running through this loop twice:
3023 - First time find tCL at target frequency
Timothy Pearsonb8a355d2015-09-05 17:55:58 -05003024	   - Second time find tCL at the minimum MEMCLK (NV_MIN_MEMCLK, 400MHz on pre-Fam15h) */
Zheng Baoeb75f652010-04-23 17:32:48 +00003025
3026 for (;;) {
3027 CLT_Fail = 0;
3028 /* Step 4: For a proposed tCK value (tCKproposed) between tCKmin(all) and tCKmax,
3029 determine the desired CAS Latency. If tCKproposed is not a standard JEDEC
3030 value (2.5, 1.875, 1.5, or 1.25 ns) then tCKproposed must be adjusted to the
3031 next lower standard tCK value for calculating CLdesired.
3032 CLdesired = ceiling ( tAAmin(all) / tCKproposed )
3033 where tAAmin is defined in Byte 16. The ceiling function requires that the
3034 quotient be rounded up always. */
3035 CLdesired = tAAmin16x / tCKproposed16x;
3036 if (tAAmin16x % tCKproposed16x)
3037 CLdesired ++;
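		/* e.g. with tAAmin16x = 216 (13.5ns) and tCKproposed16x = 24 this
		 * gives CLdesired = 9 exactly; at tCKproposed16x = 20 it gives
		 * 216 / 20 = 10 remainder 16, which the increment above rounds up
		 * to CLdesired = 11.
		 */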
3038		/* Step 5: Choose an actual CAS Latency (CLactual) that is greater than or equal
3039 to CLdesired and is supported by all modules on the memory channel as
3040 determined in step 1. If no such value exists, choose a higher tCKproposed
3041 value and repeat steps 4 and 5 until a solution is found. */
3042 for (i = 0, CLactual = 4; i < 15; i++, CLactual++) {
3043 if ((CASLatHigh << 8 | CASLatLow) & (1 << i)) {
3044 if (CLdesired <= CLactual)
3045 break;
3046 }
3047 }
3048 if (i == 15)
3049 CLT_Fail = 1;
3050 /* Step 6: Once the calculation of CLactual is completed, the BIOS must also
3051 verify that this CAS Latency value does not exceed tAAmax, which is 20 ns
3052 for all DDR3 speed grades, by multiplying CLactual times tCKproposed. If
3053 not, choose a lower CL value and repeat steps 5 and 6 until a solution is found. */
3054 if (CLactual * tCKproposed16x > 320)
3055 CLT_Fail = 1;
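		/* 320 is tAAmax (20ns) expressed in the same 1/16ns units
		 * (20 * 16 = 320); e.g. CLactual = 11 at tCKproposed16x = 20 gives
		 * 220, which passes.
		 */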
3056 /* get CL and T */
3057 if (!CLT_Fail) {
Timothy Pearsonb8a355d2015-09-05 17:55:58 -05003058 bytex = CLactual;
Timothy Pearson730a0432015-10-16 13:51:51 -05003059 if (is_fam15h()) {
3060 if (tCKproposed16x == 17)
3061 byte = 0x16;
3062 else if (tCKproposed16x == 20)
3063 byte = 0x12;
3064 else if (tCKproposed16x == 24)
3065 byte = 0xe;
3066 else if (tCKproposed16x == 30)
3067 byte = 0xa;
3068 else if (tCKproposed16x == 40)
3069 byte = 0x6;
3070 else
3071 byte = 0x4;
3072 } else {
3073 if (tCKproposed16x == 20)
3074 byte = 7;
3075 else if (tCKproposed16x == 24)
3076 byte = 6;
3077 else if (tCKproposed16x == 30)
3078 byte = 5;
3079 else
3080 byte = 4;
3081 }
Zheng Baoeb75f652010-04-23 17:32:48 +00003082 } else {
3083 /* mctHookManualCLOverride */
3084 /* TODO: */
3085 }
3086
Timothy Pearson730a0432015-10-16 13:51:51 -05003087 if (tCKproposed16x != min_frequency_tck16x) {
Zheng Baoeb75f652010-04-23 17:32:48 +00003088 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
3089 pDCTstat->DIMMAutoSpeed = byte;
3090 pDCTstat->DIMMCASL = bytex;
3091 break;
3092 } else {
3093 pDCTstat->TargetCASL = bytex;
Timothy Pearson730a0432015-10-16 13:51:51 -05003094 tCKproposed16x = min_frequency_tck16x;
Zheng Baoeb75f652010-04-23 17:32:48 +00003095 }
3096 } else {
3097 pDCTstat->DIMMAutoSpeed = byte;
3098 pDCTstat->DIMMCASL = bytex;
3099 break;
3100 }
3101 }
3102
3103 printk(BIOS_DEBUG, "SPDGetTCL_D: DIMMCASL %x\n", pDCTstat->DIMMCASL);
3104 printk(BIOS_DEBUG, "SPDGetTCL_D: DIMMAutoSpeed %x\n", pDCTstat->DIMMAutoSpeed);
3105
3106 printk(BIOS_DEBUG, "SPDGetTCL_D: Status %x\n", pDCTstat->Status);
3107 printk(BIOS_DEBUG, "SPDGetTCL_D: ErrStatus %x\n", pDCTstat->ErrStatus);
3108 printk(BIOS_DEBUG, "SPDGetTCL_D: ErrCode %x\n", pDCTstat->ErrCode);
3109 printk(BIOS_DEBUG, "SPDGetTCL_D: Done\n\n");
3110}
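
/* Illustrative sketch (not part of the build): the CL/tCK selection loop in
 * SPDGetTCL_D above, reduced to one standalone helper.  All timings are in
 * the same 1/16 ns fixed-point units as tCKproposed16x/tAAmin16x; the helper
 * name and its parameters are hypothetical.
 */
#if 0
static int select_cas_latency(uint16_t taa_min_16x, uint16_t tck_16x,
			      uint16_t cas_support_bitmap)
{
	uint8_t cl_desired, cl_actual;
	uint8_t i;

	/* Step 4: CLdesired = ceiling(tAAmin / tCKproposed) */
	cl_desired = taa_min_16x / tck_16x;
	if (taa_min_16x % tck_16x)
		cl_desired++;

	/* Step 5: lowest supported CL >= CLdesired; bit 0 of the SPD CAS
	 * support bitmap corresponds to CL = 4. */
	for (i = 0, cl_actual = 4; i < 15; i++, cl_actual++) {
		if (cas_support_bitmap & (1 << i)) {
			if (cl_desired <= cl_actual)
				break;
		}
	}
	if (i == 15)
		return -1;	/* no usable CL at this tCK */

	/* Step 6: tAAmax is 20 ns (320 in 1/16 ns units) for all DDR3 grades */
	if (cl_actual * tck_16x > 320)
		return -1;

	return cl_actual;
}
#endif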
3111
3112static u8 PlatformSpec_D(struct MCTStatStruc *pMCTstat,
3113 struct DCTStatStruc *pDCTstat, u8 dct)
3114{
Timothy Pearson730a0432015-10-16 13:51:51 -05003115 if (!is_fam15h()) {
3116 mctGet_PS_Cfg_D(pMCTstat, pDCTstat, dct);
Zheng Baoeb75f652010-04-23 17:32:48 +00003117
Timothy Pearson730a0432015-10-16 13:51:51 -05003118 if (pDCTstat->GangedMode == 1) {
3119 mctGet_PS_Cfg_D(pMCTstat, pDCTstat, 1);
3120 mct_BeforePlatformSpec(pMCTstat, pDCTstat, 1);
3121 }
Zheng Baoeb75f652010-04-23 17:32:48 +00003122
Timothy Pearson730a0432015-10-16 13:51:51 -05003123 set_2t_configuration(pMCTstat, pDCTstat, dct);
3124
3125 mct_BeforePlatformSpec(pMCTstat, pDCTstat, dct);
3126 mct_PlatformSpec(pMCTstat, pDCTstat, dct);
3127 if (pDCTstat->DIMMAutoSpeed == mhz_to_memclk_config(mctGet_NVbits(NV_MIN_MEMCLK)))
3128 InitPhyCompensation(pMCTstat, pDCTstat, dct);
Zheng Baoeb75f652010-04-23 17:32:48 +00003129 }
Zheng Baoeb75f652010-04-23 17:32:48 +00003130 mctHookAfterPSCfg();
3131
3132 return pDCTstat->ErrCode;
3133}
3134
3135static u8 AutoConfig_D(struct MCTStatStruc *pMCTstat,
3136 struct DCTStatStruc *pDCTstat, u8 dct)
3137{
3138 u32 DramControl, DramTimingLo, Status;
3139 u32 DramConfigLo, DramConfigHi, DramConfigMisc, DramConfigMisc2;
3140 u32 val;
Zheng Baoeb75f652010-04-23 17:32:48 +00003141 u32 dev;
3142 u16 word;
3143 u32 dword;
3144 u8 byte;
Timothy Pearson730a0432015-10-16 13:51:51 -05003145 uint32_t offset;
Zheng Baoeb75f652010-04-23 17:32:48 +00003146
3147 DramConfigLo = 0;
3148 DramConfigHi = 0;
3149 DramConfigMisc = 0;
3150 DramConfigMisc2 = 0;
3151
Zheng Baoc3af12f2010-10-08 05:08:47 +00003152 /* set bank addressing and Masks, plus CS pops */
Zheng Baoeb75f652010-04-23 17:32:48 +00003153 SPDSetBanks_D(pMCTstat, pDCTstat, dct);
3154 if (pDCTstat->ErrCode == SC_StopError)
3155 goto AutoConfig_exit;
3156
3157 /* map chip-selects into local address space */
3158 StitchMemory_D(pMCTstat, pDCTstat, dct);
3159 InterleaveBanks_D(pMCTstat, pDCTstat, dct);
3160
3161 /* temp image of status (for convenience). RO usage! */
3162 Status = pDCTstat->Status;
3163
3164 dev = pDCTstat->dev_dct;
Zheng Baoeb75f652010-04-23 17:32:48 +00003165
3166 /* Build Dram Control Register Value */
Timothy Pearson730a0432015-10-16 13:51:51 -05003167	DramConfigMisc2 = Get_NB32_DCT(dev, dct, 0xA8); /* DRAM Config Misc 2 */
3168 DramControl = Get_NB32_DCT(dev, dct, 0x78); /* Dram Control*/
Zheng Baoeb75f652010-04-23 17:32:48 +00003169
3170 /* FIXME: Skip mct_checkForDxSupport */
3171 /* REV_CALL mct_DoRdPtrInit if not Dx */
3172 if (pDCTstat->LogicalCPUID & AMD_DR_Bx)
3173 val = 5;
3174 else
3175 val = 6;
3176 DramControl &= ~0xFF;
Zheng Baoc3af12f2010-10-08 05:08:47 +00003177	DramControl |= val;	/* RdPtrInit = 5 for Bx, 6 for Cx and later CPUs */
Zheng Baoeb75f652010-04-23 17:32:48 +00003178
3179 if (mctGet_NVbits(NV_CLKHZAltVidC3))
3180 DramControl |= 1<<16; /* check */
3181
3182 DramControl |= 0x00002A00;
3183
3184 /* FIXME: Skip for Ax versions */
3185 /* callback not required - if (!mctParityControl_D()) */
3186 if (Status & (1 << SB_128bitmode))
3187 DramConfigLo |= 1 << Width128; /* 128-bit mode (normal) */
3188
3189 word = dct;
3190 dword = X4Dimm;
3191 while (word < 8) {
3192 if (pDCTstat->Dimmx4Present & (1 << word))
3193 DramConfigLo |= 1 << dword; /* X4Dimm[3:0] */
3194 word++;
3195 word++;
3196 dword++;
3197 }
3198
3199 if (!(Status & (1 << SB_Registered)))
Zheng Baoc3af12f2010-10-08 05:08:47 +00003200 DramConfigLo |= 1 << UnBuffDimm; /* Unbuffered DIMMs */
Zheng Baoeb75f652010-04-23 17:32:48 +00003201
3202 if (mctGet_NVbits(NV_ECC_CAP))
3203 if (Status & (1 << SB_ECCDIMMs))
3204 if ( mctGet_NVbits(NV_ECC))
3205 DramConfigLo |= 1 << DimmEcEn;
3206
3207 DramConfigLo = mct_DisDllShutdownSR(pMCTstat, pDCTstat, DramConfigLo, dct);
3208
3209 /* Build Dram Config Hi Register Value */
Timothy Pearson730a0432015-10-16 13:51:51 -05003210 if (is_fam15h())
3211 offset = 0x0;
3212 else
3213 offset = 0x1;
Zheng Baoeb75f652010-04-23 17:32:48 +00003214 dword = pDCTstat->Speed;
Timothy Pearson730a0432015-10-16 13:51:51 -05003215 DramConfigHi |= dword - offset; /* get MemClk encoding */
Zheng Baoeb75f652010-04-23 17:32:48 +00003216 DramConfigHi |= 1 << MemClkFreqVal;
3217
3218 if (Status & (1 << SB_Registered))
3219 if ((pDCTstat->Dimmx4Present != 0) && (pDCTstat->Dimmx8Present != 0))
3220 /* set only if x8 Registered DIMMs in System*/
3221 DramConfigHi |= 1 << RDqsEn;
3222
3223 if (mctGet_NVbits(NV_CKE_CTL))
3224 /*Chip Select control of CKE*/
3225 DramConfigHi |= 1 << 16;
3226
3227 /* Control Bank Swizzle */
3228 if (0) /* call back not needed mctBankSwizzleControl_D()) */
3229 DramConfigHi &= ~(1 << BankSwizzleMode);
3230 else
3231 DramConfigHi |= 1 << BankSwizzleMode; /* recommended setting (default) */
3232
3233 /* Check for Quadrank DIMM presence */
3234 if ( pDCTstat->DimmQRPresent != 0) {
3235 byte = mctGet_NVbits(NV_4RANKType);
3236 if (byte == 2)
3237 DramConfigHi |= 1 << 17; /* S4 (4-Rank SO-DIMMs) */
3238 else if (byte == 1)
3239 DramConfigHi |= 1 << 18; /* R4 (4-Rank Registered DIMMs) */
3240 }
3241
3242 if (0) /* call back not needed mctOverrideDcqBypMax_D ) */
3243 val = mctGet_NVbits(NV_BYPMAX);
3244 else
3245 val = 0x0f; /* recommended setting (default) */
3246 DramConfigHi |= val << 24;
3247
Timothy Pearson730a0432015-10-16 13:51:51 -05003248 if (pDCTstat->LogicalCPUID & (AMD_DR_Dx | AMD_DR_Cx | AMD_DR_Bx | AMD_FAM15_ALL))
Zheng Baoeb75f652010-04-23 17:32:48 +00003249 DramConfigHi |= 1 << DcqArbBypassEn;
3250
3251 /* Build MemClkDis Value from Dram Timing Lo and
3252 Dram Config Misc Registers
3253 1. We will assume that MemClkDis field has been preset prior to this
3254 point.
3255 2. We will only set MemClkDis bits if a DIMM is NOT present AND if:
3256 NV_AllMemClks <>0 AND SB_DiagClks ==0 */
3257
3258 /* Dram Timing Low (owns Clock Enable bits) */
Timothy Pearson730a0432015-10-16 13:51:51 -05003259 DramTimingLo = Get_NB32_DCT(dev, dct, 0x88);
Zheng Baoeb75f652010-04-23 17:32:48 +00003260 if (mctGet_NVbits(NV_AllMemClks) == 0) {
3261 /* Special Jedec SPD diagnostic bit - "enable all clocks" */
3262 if (!(pDCTstat->Status & (1<<SB_DiagClks))) {
3263 const u8 *p;
3264 const u32 *q;
3265 p = Tab_ManualCLKDis;
3266 q = (u32 *)p;
3267
3268 byte = mctGet_NVbits(NV_PACK_TYPE);
3269 if (byte == PT_L1)
3270 p = Tab_L1CLKDis;
3271 else if (byte == PT_M2 || byte == PT_AS)
3272 p = Tab_AM3CLKDis;
Timothy Pearsonb8a355d2015-09-05 17:55:58 -05003273 else if (byte == PT_C3)
3274 p = Tab_C32CLKDis;
3275 else if (byte == PT_GR)
3276 p = Tab_G34CLKDis;
Zheng Baoeb75f652010-04-23 17:32:48 +00003277 else
3278 p = Tab_S1CLKDis;
3279
3280 dword = 0;
3281 byte = 0xFF;
3282 while(dword < MAX_CS_SUPPORTED) {
3283 if (pDCTstat->CSPresent & (1<<dword)){
3284 /* re-enable clocks for the enabled CS */
3285 val = p[dword];
3286 byte &= ~val;
3287 }
3288 dword++ ;
3289 }
Timothy Pearson730a0432015-10-16 13:51:51 -05003290 DramTimingLo &= ~(0xff << 24);
Zheng Baoeb75f652010-04-23 17:32:48 +00003291 DramTimingLo |= byte << 24;
3292 }
3293 }
3294
Timothy Pearson730a0432015-10-16 13:51:51 -05003295 printk(BIOS_DEBUG, "AutoConfig_D: DramControl: %08x\n", DramControl);
3296 printk(BIOS_DEBUG, "AutoConfig_D: DramTimingLo: %08x\n", DramTimingLo);
3297 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigMisc: %08x\n", DramConfigMisc);
3298 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigMisc2: %08x\n", DramConfigMisc2);
3299 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigLo: %08x\n", DramConfigLo);
3300 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigHi: %08x\n", DramConfigHi);
Zheng Baoeb75f652010-04-23 17:32:48 +00003301
3302 /* Write Values to the registers */
Timothy Pearson730a0432015-10-16 13:51:51 -05003303 Set_NB32_DCT(dev, dct, 0x78, DramControl);
3304 Set_NB32_DCT(dev, dct, 0x88, DramTimingLo);
3305 Set_NB32_DCT(dev, dct, 0xa0, DramConfigMisc);
Zheng Baoeb75f652010-04-23 17:32:48 +00003306 DramConfigMisc2 = mct_SetDramConfigMisc2(pDCTstat, dct, DramConfigMisc2);
Timothy Pearson730a0432015-10-16 13:51:51 -05003307 Set_NB32_DCT(dev, dct, 0xa8, DramConfigMisc2);
3308 Set_NB32_DCT(dev, dct, 0x90, DramConfigLo);
Zheng Baoeb75f652010-04-23 17:32:48 +00003309 ProgDramMRSReg_D(pMCTstat, pDCTstat, dct);
Timothy Pearson730a0432015-10-16 13:51:51 -05003310
3311 if (is_fam15h())
3312 InitDDRPhy(pMCTstat, pDCTstat, dct);
3313
3314 /* Write the DRAM Configuration High register, including memory frequency change */
3315 dword = Get_NB32_DCT(dev, dct, 0x94);
Zheng Baoeb75f652010-04-23 17:32:48 +00003316 DramConfigHi |= dword;
Timothy Pearson730a0432015-10-16 13:51:51 -05003317 mct_SetDramConfigHi_D(pMCTstat, pDCTstat, dct, DramConfigHi);
Zheng Bao69436e12011-01-06 02:18:12 +00003318 mct_EarlyArbEn_D(pMCTstat, pDCTstat, dct);
Zheng Baoeb75f652010-04-23 17:32:48 +00003319 mctHookAfterAutoCfg();
3320
3321 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
3322
3323 printk(BIOS_DEBUG, "AutoConfig: Status %x\n", pDCTstat->Status);
3324 printk(BIOS_DEBUG, "AutoConfig: ErrStatus %x\n", pDCTstat->ErrStatus);
3325 printk(BIOS_DEBUG, "AutoConfig: ErrCode %x\n", pDCTstat->ErrCode);
3326 printk(BIOS_DEBUG, "AutoConfig: Done\n\n");
Timothy Pearson730a0432015-10-16 13:51:51 -05003327
Zheng Baoeb75f652010-04-23 17:32:48 +00003328AutoConfig_exit:
3329 return pDCTstat->ErrCode;
3330}
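
/* Illustrative sketch (not part of the build): how the MemClkDis byte written
 * into DramTimingLo[31:24] above is derived.  clk_dis_tab stands in for the
 * package-specific Tab_*CLKDis arrays; the helper name is hypothetical.
 */
#if 0
static uint8_t build_memclkdis_mask(uint16_t cs_present, const uint8_t *clk_dis_tab)
{
	uint8_t mask = 0xFF;	/* start with every MEMCLK pad disabled */
	uint8_t cs;

	for (cs = 0; cs < MAX_CS_SUPPORTED; cs++) {
		if (cs_present & (1 << cs))
			mask &= ~clk_dis_tab[cs];	/* keep clocks used by this CS */
	}

	return mask;	/* caller ORs this into DramTimingLo[31:24] */
}
#endif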
3331
3332static void SPDSetBanks_D(struct MCTStatStruc *pMCTstat,
3333 struct DCTStatStruc *pDCTstat, u8 dct)
3334{
3335 /* Set bank addressing, program Mask values and build a chip-select
3336 * population map. This routine programs PCI 0:24N:2x80 config register
3337 * and PCI 0:24N:2x60,64,68,6C config registers (CS Mask 0-3).
3338 */
3339 u8 ChipSel, Rows, Cols, Ranks, Banks;
3340 u32 BankAddrReg, csMask;
3341
3342 u32 val;
3343 u32 reg;
3344 u32 dev;
Zheng Baoeb75f652010-04-23 17:32:48 +00003345 u8 byte;
3346 u16 word;
3347 u32 dword;
3348 u16 smbaddr;
3349
3350 dev = pDCTstat->dev_dct;
Zheng Baoeb75f652010-04-23 17:32:48 +00003351
3352 BankAddrReg = 0;
3353 for (ChipSel = 0; ChipSel < MAX_CS_SUPPORTED; ChipSel+=2) {
3354 byte = ChipSel;
3355 if ((pDCTstat->Status & (1 << SB_64MuxedMode)) && ChipSel >=4)
3356 byte -= 3;
3357
3358 if (pDCTstat->DIMMValid & (1<<byte)) {
3359 smbaddr = Get_DIMMAddress_D(pDCTstat, (ChipSel + dct));
3360
3361 byte = mctRead_SPD(smbaddr, SPD_Addressing);
3362 Rows = (byte >> 3) & 0x7; /* Rows:0b=12-bit,... */
3363 Cols = byte & 0x7; /* Cols:0b=9-bit,... */
3364
3365 byte = mctRead_SPD(smbaddr, SPD_Density);
3366 Banks = (byte >> 4) & 7; /* Banks:0b=3-bit,... */
3367
3368 byte = mctRead_SPD(smbaddr, SPD_Organization);
3369 Ranks = ((byte >> 3) & 7) + 1;
3370
3371 /* Configure Bank encoding
3372 * Use a 6-bit key into a lookup table.
3373 * Key (index) = RRRBCC, where CC is the number of Columns minus 9,
3374 * RRR is the number of Rows minus 12, and B is the number of banks
3375 * minus 3.
3376 */
3377 byte = Cols;
3378 if (Banks == 1)
3379 byte |= 4;
3380
3381 byte |= Rows << 3; /* RRRBCC internal encode */
3382
3383 for (dword=0; dword < 13; dword++) {
3384 if (byte == Tab_BankAddr[dword])
3385 break;
3386 }
3387
3388 if (dword > 12)
3389 continue;
3390
3391 /* bit no. of CS field in address mapping reg.*/
3392 dword <<= (ChipSel<<1);
3393 BankAddrReg |= dword;
3394
3395 /* Mask value=(2pow(rows+cols+banks+3)-1)>>8,
3396 or 2pow(rows+cols+banks-5)-1*/
3397 csMask = 0;
3398
3399			byte = Rows + Cols;	/* encoded rows + encoded cols */
3400			byte += 21;	/* add the 12 row and 9 column base bits */
3401			byte -= 2;	/* plus 3 bank bits, minus 5 (per the mask formula above) */
3402
3403 if (pDCTstat->Status & (1 << SB_128bitmode))
3404 byte++; /* double mask size if in 128-bit mode*/
3405
3406 csMask |= 1 << byte;
3407 csMask--;
3408
3409 /*set ChipSelect population indicator even bits*/
3410 pDCTstat->CSPresent |= (1<<ChipSel);
3411 if (Ranks >= 2)
3412 /*set ChipSelect population indicator odd bits*/
3413 pDCTstat->CSPresent |= 1 << (ChipSel + 1);
3414
Timothy Pearson730a0432015-10-16 13:51:51 -05003415 reg = 0x60+(ChipSel<<1); /*Dram CS Mask Register */
Zheng Baoeb75f652010-04-23 17:32:48 +00003416 val = csMask;
3417 val &= 0x1FF83FE0; /* Mask out reserved bits.*/
Timothy Pearson730a0432015-10-16 13:51:51 -05003418 Set_NB32_DCT(dev, dct, reg, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00003419 } else {
3420 if (pDCTstat->DIMMSPDCSE & (1<<ChipSel))
3421 pDCTstat->CSTestFail |= (1<<ChipSel);
3422 } /* if DIMMValid*/
3423 } /* while ChipSel*/
3424
3425 SetCSTriState(pMCTstat, pDCTstat, dct);
3426 SetCKETriState(pMCTstat, pDCTstat, dct);
3427 SetODTTriState(pMCTstat, pDCTstat, dct);
3428
3429 if (pDCTstat->Status & (1 << SB_128bitmode)) {
3430 SetCSTriState(pMCTstat, pDCTstat, 1); /* force dct1) */
3431 SetCKETriState(pMCTstat, pDCTstat, 1); /* force dct1) */
3432 SetODTTriState(pMCTstat, pDCTstat, 1); /* force dct1) */
3433 }
3434
3435 word = pDCTstat->CSPresent;
3436 mctGetCS_ExcludeMap(); /* mask out specified chip-selects */
3437 word ^= pDCTstat->CSPresent;
3438 pDCTstat->CSTestFail |= word; /* enable ODT to disabled DIMMs */
3439 if (!pDCTstat->CSPresent)
3440 pDCTstat->ErrCode = SC_StopError;
3441
Timothy Pearson730a0432015-10-16 13:51:51 -05003442 reg = 0x80; /* Bank Addressing Register */
3443 Set_NB32_DCT(dev, dct, reg, BankAddrReg);
Zheng Baoeb75f652010-04-23 17:32:48 +00003444
3445 pDCTstat->CSPresent_DCT[dct] = pDCTstat->CSPresent;
3446 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
3447
3448 printk(BIOS_DEBUG, "SPDSetBanks: CSPresent %x\n", pDCTstat->CSPresent_DCT[dct]);
3449 printk(BIOS_DEBUG, "SPDSetBanks: Status %x\n", pDCTstat->Status);
3450 printk(BIOS_DEBUG, "SPDSetBanks: ErrStatus %x\n", pDCTstat->ErrStatus);
3451 printk(BIOS_DEBUG, "SPDSetBanks: ErrCode %x\n", pDCTstat->ErrCode);
3452 printk(BIOS_DEBUG, "SPDSetBanks: Done\n\n");
3453}
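
/* Illustrative sketch (not part of the build): the chip-select mask value
 * programmed into the DRAM CS Mask registers above, written out with the SPD
 * field meanings made explicit.  rows_enc/cols_enc are the raw SPD encodings
 * (rows - 12 and columns - 9); 3 bank address bits are assumed, as in the
 * RRRBCC key.  The helper name is hypothetical.
 */
#if 0
static uint32_t compute_cs_mask(uint8_t rows_enc, uint8_t cols_enc, uint8_t width128)
{
	/* Mask = (2^(rows+cols+banks+3) - 1) >> 8 = 2^(rows+cols+banks-5) - 1,
	 * with rows = rows_enc + 12, cols = cols_enc + 9, banks = 3. */
	uint8_t shift = rows_enc + cols_enc + 21 - 2;
	uint32_t mask;

	if (width128)
		shift++;	/* double the mask size in 128-bit (ganged) mode */

	mask = (1UL << shift) - 1;

	return mask & 0x1FF83FE0;	/* drop bits reserved in the CS Mask register */
}
#endif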
3454
3455static void SPDCalcWidth_D(struct MCTStatStruc *pMCTstat,
3456 struct DCTStatStruc *pDCTstat)
3457{
3458 /* Per SPDs, check the symmetry of DIMM pairs (DIMM on Channel A
3459 * matching with DIMM on Channel B), the overall DIMM population,
3460 * and determine the width mode: 64-bit, 64-bit muxed, 128-bit.
3461 */
3462 u8 i;
3463 u8 smbaddr, smbaddr1;
3464 u8 byte, byte1;
3465
3466 /* Check Symmetry of Channel A and Channel B DIMMs
3467 (must be matched for 128-bit mode).*/
3468 for (i=0; i < MAX_DIMMS_SUPPORTED; i += 2) {
3469 if ((pDCTstat->DIMMValid & (1 << i)) && (pDCTstat->DIMMValid & (1<<(i+1)))) {
3470 smbaddr = Get_DIMMAddress_D(pDCTstat, i);
3471 smbaddr1 = Get_DIMMAddress_D(pDCTstat, i+1);
3472
3473 byte = mctRead_SPD(smbaddr, SPD_Addressing) & 0x7;
3474 byte1 = mctRead_SPD(smbaddr1, SPD_Addressing) & 0x7;
3475 if (byte != byte1) {
3476 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
3477 break;
3478 }
3479
3480 byte = mctRead_SPD(smbaddr, SPD_Density) & 0x0f;
3481 byte1 = mctRead_SPD(smbaddr1, SPD_Density) & 0x0f;
3482 if (byte != byte1) {
3483 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
3484 break;
3485 }
3486
3487 byte = mctRead_SPD(smbaddr, SPD_Organization) & 0x7;
3488 byte1 = mctRead_SPD(smbaddr1, SPD_Organization) & 0x7;
3489 if (byte != byte1) {
3490 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
3491 break;
3492 }
3493
3494 byte = (mctRead_SPD(smbaddr, SPD_Organization) >> 3) & 0x7;
3495 byte1 = (mctRead_SPD(smbaddr1, SPD_Organization) >> 3) & 0x7;
3496 if (byte != byte1) {
3497 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
3498 break;
3499 }
3500
3501 byte = mctRead_SPD(smbaddr, SPD_DMBANKS) & 7; /* #ranks-1 */
3502 byte1 = mctRead_SPD(smbaddr1, SPD_DMBANKS) & 7; /* #ranks-1 */
3503 if (byte != byte1) {
3504 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
3505 break;
3506 }
3507
3508 }
3509 }
3510
3511}
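
/* Illustrative sketch (not part of the build): the symmetry test used above.
 * For 128-bit capability each channel A DIMM and its channel B partner must
 * report identical addressing, density, organization and rank fields; the
 * helper name, and comparing one masked field at a time, are assumptions.
 */
#if 0
static int spd_fields_match(u8 smb_a, u8 smb_b, u16 spd_offset, u8 mask)
{
	return (mctRead_SPD(smb_a, spd_offset) & mask) ==
	       (mctRead_SPD(smb_b, spd_offset) & mask);
}
#endif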
3512
3513static void StitchMemory_D(struct MCTStatStruc *pMCTstat,
3514 struct DCTStatStruc *pDCTstat, u8 dct)
3515{
3516 /* Requires that Mask values for each bank be programmed first and that
3517 * the chip-select population indicator is correctly set.
3518 */
3519 u8 b = 0;
3520 u32 nxtcsBase, curcsBase;
3521 u8 p, q;
3522 u32 Sizeq, BiggestBank;
3523 u8 _DSpareEn;
3524
3525 u16 word;
3526 u32 dev;
3527 u32 reg;
Zheng Baoeb75f652010-04-23 17:32:48 +00003528 u32 val;
3529
3530 dev = pDCTstat->dev_dct;
Zheng Baoeb75f652010-04-23 17:32:48 +00003531
3532 _DSpareEn = 0;
3533
3534 /* CS Sparing 1=enabled, 0=disabled */
3535 if (mctGet_NVbits(NV_CS_SpareCTL) & 1) {
3536 if (MCT_DIMM_SPARE_NO_WARM) {
3537			/* Do not warm-reset DIMM spare */
3538 if (pMCTstat->GStatus & 1 << GSB_EnDIMMSpareNW) {
3539 word = pDCTstat->CSPresent;
3540 val = bsf(word);
3541 word &= ~(1<<val);
3542 if (word)
3543 /* Make sure at least two chip-selects are available */
3544 _DSpareEn = 1;
3545 else
3546 pDCTstat->ErrStatus |= 1 << SB_SpareDis;
3547 }
3548 } else {
3549 if (!mctGet_NVbits(NV_DQSTrainCTL)) { /*DQS Training 1=enabled, 0=disabled */
3550 word = pDCTstat->CSPresent;
3551 val = bsf(word);
3552 word &= ~(1 << val);
3553 if (word)
3554 /* Make sure at least two chip-selects are available */
3555 _DSpareEn = 1;
3556 else
3557 pDCTstat->ErrStatus |= 1 << SB_SpareDis;
3558 }
3559 }
3560 }
3561
3562 nxtcsBase = 0; /* Next available cs base ADDR[39:8] */
3563 for (p=0; p < MAX_DIMMS_SUPPORTED; p++) {
3564 BiggestBank = 0;
3565 for (q = 0; q < MAX_CS_SUPPORTED; q++) { /* from DIMMS to CS */
3566 if (pDCTstat->CSPresent & (1 << q)) { /* bank present? */
Timothy Pearson730a0432015-10-16 13:51:51 -05003567 reg = 0x40 + (q << 2); /* Base[q] reg.*/
3568 val = Get_NB32_DCT(dev, dct, reg);
Zheng Baoeb75f652010-04-23 17:32:48 +00003569 if (!(val & 3)) { /* (CSEnable|Spare==1)bank is enabled already? */
Timothy Pearson730a0432015-10-16 13:51:51 -05003570 reg = 0x60 + (q << 1); /*Mask[q] reg.*/
3571 val = Get_NB32_DCT(dev, dct, reg);
Zheng Baoeb75f652010-04-23 17:32:48 +00003572 val >>= 19;
3573 val++;
3574 val <<= 19;
3575 Sizeq = val; /* never used */
3576 if (val > BiggestBank) {
3577 /*Bingo! possibly Map this chip-select next! */
3578 BiggestBank = val;
3579 b = q;
3580 }
3581 }
3582 } /*if bank present */
3583 } /* while q */
3584 if (BiggestBank !=0) {
3585 curcsBase = nxtcsBase; /* curcsBase=nxtcsBase*/
3586 /* DRAM CS Base b Address Register offset */
Timothy Pearson730a0432015-10-16 13:51:51 -05003587 reg = 0x40 + (b << 2);
Zheng Baoeb75f652010-04-23 17:32:48 +00003588 if (_DSpareEn) {
3589 BiggestBank = 0;
3590 val = 1 << Spare; /* Spare Enable*/
3591 } else {
3592 val = curcsBase;
3593 val |= 1 << CSEnable; /* Bank Enable */
3594 }
3595 if (((reg - 0x40) >> 2) & 1) {
3596 if (!(pDCTstat->Status & (1 << SB_Registered))) {
3597 u16 dimValid;
3598 dimValid = pDCTstat->DIMMValid;
3599 if (dct & 1)
3600 dimValid <<= 1;
3601 if ((dimValid & pDCTstat->MirrPresU_NumRegR) != 0) {
3602 val |= 1 << onDimmMirror;
3603 }
3604 }
3605 }
Timothy Pearson730a0432015-10-16 13:51:51 -05003606 Set_NB32_DCT(dev, dct, reg, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00003607 if (_DSpareEn)
3608 _DSpareEn = 0;
3609 else
3610 /* let nxtcsBase+=Size[b] */
3611 nxtcsBase += BiggestBank;
3612 }
3613
3614 /* bank present but disabled?*/
3615 if ( pDCTstat->CSTestFail & (1 << p)) {
3616 /* DRAM CS Base b Address Register offset */
Timothy Pearson730a0432015-10-16 13:51:51 -05003617 reg = (p << 2) + 0x40;
Zheng Baoeb75f652010-04-23 17:32:48 +00003618 val = 1 << TestFail;
Timothy Pearson730a0432015-10-16 13:51:51 -05003619 Set_NB32_DCT(dev, dct, reg, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00003620 }
3621 }
3622
3623 if (nxtcsBase) {
3624 pDCTstat->DCTSysLimit = nxtcsBase - 1;
3625 mct_AfterStitchMemory(pMCTstat, pDCTstat, dct);
3626 }
3627
3628 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
3629
3630 printk(BIOS_DEBUG, "StitchMemory: Status %x\n", pDCTstat->Status);
3631 printk(BIOS_DEBUG, "StitchMemory: ErrStatus %x\n", pDCTstat->ErrStatus);
3632 printk(BIOS_DEBUG, "StitchMemory: ErrCode %x\n", pDCTstat->ErrCode);
3633 printk(BIOS_DEBUG, "StitchMemory: Done\n\n");
3634}
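
/* Illustrative sketch (not part of the build): the greedy stitching performed
 * above -- on every pass, map the largest still-unmapped chip-select at the
 * next free base address.  Sizes and bases are in the ADDR[39:8] units used
 * by the DRAM CS Base/Mask registers; the struct and helper are hypothetical.
 */
#if 0
struct cs_bank {
	uint32_t size;		/* ((mask >> 19) + 1) << 19, as computed above */
	uint8_t present;
	uint8_t mapped;
	uint32_t base;		/* output: base address when mapped */
};

static uint32_t stitch_banks(struct cs_bank *bank, uint8_t count)
{
	uint32_t next_base = 0;
	int8_t biggest;
	uint8_t pass, i;

	for (pass = 0; pass < count; pass++) {
		biggest = -1;
		for (i = 0; i < count; i++) {
			if (bank[i].present && !bank[i].mapped &&
			    (biggest < 0 || bank[i].size > bank[biggest].size))
				biggest = i;
		}
		if (biggest < 0)
			break;
		bank[biggest].base = next_base;
		bank[biggest].mapped = 1;
		next_base += bank[biggest].size;
	}

	return next_base;	/* DCTSysLimit = next_base - 1 when nonzero */
}
#endif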
3635
3636static u16 Get_Fk_D(u8 k)
3637{
3638 return Table_F_k[k]; /* FIXME: k or k<<1 ? */
3639}
3640
3641static u8 DIMMPresence_D(struct MCTStatStruc *pMCTstat,
3642 struct DCTStatStruc *pDCTstat)
3643{
3644 /* Check DIMMs present, verify checksum, flag SDRAM type,
3645 * build population indicator bitmaps, and preload bus loading
3646 * of DIMMs into DCTStatStruc.
3647 * MAAload=number of devices on the "A" bus.
3648 * MABload=number of devices on the "B" bus.
3649 * MAAdimms=number of DIMMs on the "A" bus slots.
3650 * MABdimms=number of DIMMs on the "B" bus slots.
3651 * DATAAload=number of ranks on the "A" bus slots.
3652 * DATABload=number of ranks on the "B" bus slots.
3653 */
Timothy Pearson620fa5f2015-03-27 22:50:09 -05003654 u16 i, j, k;
Zheng Baoeb75f652010-04-23 17:32:48 +00003655 u8 smbaddr;
3656 u8 SPDCtrl;
Timothy Pearson730a0432015-10-16 13:51:51 -05003657 u16 RegDIMMPresent, LRDIMMPresent, MaxDimms;
Zheng Baoeb75f652010-04-23 17:32:48 +00003658 u8 devwidth;
3659 u16 DimmSlots;
3660 u8 byte = 0, bytex;
3661
3662 /* preload data structure with addrs */
3663 mctGet_DIMMAddr(pDCTstat, pDCTstat->Node_ID);
3664
3665 DimmSlots = MaxDimms = mctGet_NVbits(NV_MAX_DIMMS);
3666
3667 SPDCtrl = mctGet_NVbits(NV_SPDCHK_RESTRT);
3668
3669 RegDIMMPresent = 0;
Timothy Pearson730a0432015-10-16 13:51:51 -05003670 LRDIMMPresent = 0;
Zheng Baoeb75f652010-04-23 17:32:48 +00003671 pDCTstat->DimmQRPresent = 0;
3672
Kerry She108d30b2010-08-30 07:24:13 +00003673 for (i = 0; i < MAX_DIMMS_SUPPORTED; i++) {
Zheng Baoeb75f652010-04-23 17:32:48 +00003674 if (i >= MaxDimms)
3675 break;
3676
3677 if ((pDCTstat->DimmQRPresent & (1 << i)) || (i < DimmSlots)) {
3678 int status;
3679 smbaddr = Get_DIMMAddress_D(pDCTstat, i);
3680 status = mctRead_SPD(smbaddr, SPD_ByteUse);
3681 if (status >= 0) { /* SPD access is ok */
3682 pDCTstat->DIMMPresent |= 1 << i;
3683 if (crcCheck(smbaddr)) { /* CRC is OK */
3684 byte = mctRead_SPD(smbaddr, SPD_TYPE);
3685 if (byte == JED_DDR3SDRAM) {
3686 /*Dimm is 'Present'*/
3687 pDCTstat->DIMMValid |= 1 << i;
3688 }
3689 } else {
3690 pDCTstat->DIMMSPDCSE = 1 << i;
3691 if (SPDCtrl == 0) {
3692 pDCTstat->ErrStatus |= 1 << SB_DIMMChkSum;
3693 pDCTstat->ErrCode = SC_StopError;
3694 } else {
3695 /*if NV_SPDCHK_RESTRT is set to 1, ignore faulty SPD checksum*/
3696 pDCTstat->ErrStatus |= 1<<SB_DIMMChkSum;
3697 byte = mctRead_SPD(smbaddr, SPD_TYPE);
3698 if (byte == JED_DDR3SDRAM)
3699 pDCTstat->DIMMValid |= 1 << i;
3700 }
3701 }
Timothy Pearson620fa5f2015-03-27 22:50:09 -05003702 /* Get module information for SMBIOS */
3703 if (pDCTstat->DIMMValid & (1 << i)) {
3704 pDCTstat->DimmManufacturerID[i] = 0;
3705 for (k = 0; k < 8; k++)
3706 pDCTstat->DimmManufacturerID[i] |= ((uint64_t)mctRead_SPD(smbaddr, SPD_MANID_START + k)) << (k * 8);
3707 for (k = 0; k < SPD_PARTN_LENGTH; k++)
3708 pDCTstat->DimmPartNumber[i][k] = mctRead_SPD(smbaddr, SPD_PARTN_START + k);
Timothy Pearson730a0432015-10-16 13:51:51 -05003709 pDCTstat->DimmPartNumber[i][SPD_PARTN_LENGTH] = 0;
Timothy Pearson620fa5f2015-03-27 22:50:09 -05003710 pDCTstat->DimmRevisionNumber[i] = 0;
3711 for (k = 0; k < 2; k++)
3712 pDCTstat->DimmRevisionNumber[i] |= ((uint16_t)mctRead_SPD(smbaddr, SPD_REVNO_START + k)) << (k * 8);
3713 pDCTstat->DimmSerialNumber[i] = 0;
3714 for (k = 0; k < 4; k++)
3715 pDCTstat->DimmSerialNumber[i] |= ((uint32_t)mctRead_SPD(smbaddr, SPD_SERIAL_START + k)) << (k * 8);
3716 pDCTstat->DimmRows[i] = (mctRead_SPD(smbaddr, SPD_Addressing) & 0x38) >> 3;
3717 pDCTstat->DimmCols[i] = mctRead_SPD(smbaddr, SPD_Addressing) & 0x7;
3718 pDCTstat->DimmRanks[i] = ((mctRead_SPD(smbaddr, SPD_Organization) & 0x38) >> 3) + 1;
3719 pDCTstat->DimmBanks[i] = 1ULL << (((mctRead_SPD(smbaddr, SPD_Density) & 0x70) >> 4) + 3);
3720 pDCTstat->DimmWidth[i] = 1ULL << ((mctRead_SPD(smbaddr, SPD_BusWidth) & 0x7) + 3);
3721 }
Timothy Pearson2a839352015-09-05 18:56:05 -05003722 /* Check supported voltage(s) */
3723 pDCTstat->DimmSupportedVoltages[i] = mctRead_SPD(smbaddr, SPD_Voltage) & 0x7;
3724 pDCTstat->DimmSupportedVoltages[i] ^= 0x1; /* Invert LSB to convert from SPD format to internal bitmap format */
Zheng Baoeb75f652010-04-23 17:32:48 +00003725 /* Check module type */
3726 byte = mctRead_SPD(smbaddr, SPD_DIMMTYPE) & 0x7;
Timothy Pearson620fa5f2015-03-27 22:50:09 -05003727 if (byte == JED_RDIMM || byte == JED_MiniRDIMM) {
Zheng Baoeb75f652010-04-23 17:32:48 +00003728 RegDIMMPresent |= 1 << i;
Timothy Pearson620fa5f2015-03-27 22:50:09 -05003729 pDCTstat->DimmRegistered[i] = 1;
Timothy Pearsonb8a355d2015-09-05 17:55:58 -05003730 } else {
Timothy Pearson620fa5f2015-03-27 22:50:09 -05003731 pDCTstat->DimmRegistered[i] = 0;
3732 }
Timothy Pearson730a0432015-10-16 13:51:51 -05003733 if (byte == JED_LRDIMM) {
3734 LRDIMMPresent |= 1 << i;
3735 pDCTstat->DimmLoadReduced[i] = 1;
3736 } else {
3737 pDCTstat->DimmLoadReduced[i] = 0;
3738 }
Zheng Baoeb75f652010-04-23 17:32:48 +00003739 /* Check ECC capable */
3740 byte = mctRead_SPD(smbaddr, SPD_BusWidth);
3741 if (byte & JED_ECC) {
3742 /* DIMM is ECC capable */
3743 pDCTstat->DimmECCPresent |= 1 << i;
3744 }
3745 /* Check if x4 device */
3746 devwidth = mctRead_SPD(smbaddr, SPD_Organization) & 0x7; /* 0:x4,1:x8,2:x16 */
3747 if (devwidth == 0) {
3748 /* DIMM is made with x4 or x16 drams */
3749 pDCTstat->Dimmx4Present |= 1 << i;
3750 } else if (devwidth == 1) {
3751 pDCTstat->Dimmx8Present |= 1 << i;
3752 } else if (devwidth == 2) {
3753 pDCTstat->Dimmx16Present |= 1 << i;
3754 }
3755
3756 byte = (mctRead_SPD(smbaddr, SPD_Organization) >> 3);
3757 byte &= 7;
3758 if (byte == 3) { /* 4ranks */
3759 /* if any DIMMs are QR, we have to make two passes through DIMMs*/
3760 if ( pDCTstat->DimmQRPresent == 0) {
3761 MaxDimms <<= 1;
3762 }
3763 if (i < DimmSlots) {
3764 pDCTstat->DimmQRPresent |= (1 << i) | (1 << (i+4));
3765 } else {
3766 pDCTstat->MAdimms[i & 1] --;
3767 }
3768 byte = 1; /* upper two ranks of QR DIMM will be counted on another DIMM number iteration*/
3769 } else if (byte == 1) { /* 2ranks */
3770 pDCTstat->DimmDRPresent |= 1 << i;
3771 }
3772 bytex = devwidth;
3773 if (devwidth == 0)
3774 bytex = 16;
3775 else if (devwidth == 1)
3776 bytex = 8;
3777 else if (devwidth == 2)
3778 bytex = 4;
3779
3780				byte++;	/* encoded rank field + 1 = rank count */
3781 if (byte == 2)
3782 bytex <<= 1; /*double Addr bus load value for dual rank DIMMs*/
3783
3784 j = i & (1<<0);
3785 pDCTstat->DATAload[j] += byte; /*number of ranks on DATA bus*/
3786 pDCTstat->MAload[j] += bytex; /*number of devices on CMD/ADDR bus*/
3787 pDCTstat->MAdimms[j]++; /*number of DIMMs on A bus */
3788
3789 /* check address mirror support for unbuffered dimm */
3790 /* check number of registers on a dimm for registered dimm */
3791 byte = mctRead_SPD(smbaddr, SPD_AddressMirror);
3792 if (RegDIMMPresent & (1 << i)) {
3793 if ((byte & 3) > 1)
3794 pDCTstat->MirrPresU_NumRegR |= 1 << i;
3795 } else {
3796 if ((byte & 1) == 1)
3797 pDCTstat->MirrPresU_NumRegR |= 1 << i;
3798 }
3799				/* Get byte62: Reference Raw Card information. We don't need it now. */
Zheng Bao9fae99f2010-08-31 06:10:54 +00003800 /* byte = mctRead_SPD(smbaddr, SPD_RefRawCard); */
3801 /* Get Byte65/66 for register manufacture ID code */
3802 if ((0x97 == mctRead_SPD(smbaddr, SPD_RegManufactureID_H)) &&
3803 (0x80 == mctRead_SPD(smbaddr, SPD_RegManufactureID_L))) {
3804 if (0x16 == mctRead_SPD(smbaddr, SPD_RegManRevID))
3805 pDCTstat->RegMan2Present |= 1 << i;
3806 else
3807 pDCTstat->RegMan1Present |= 1 << i;
3808 }
Zheng Baoeb75f652010-04-23 17:32:48 +00003809				/* Get Control word values for RC3. We don't need it. */
3810 byte = mctRead_SPD(smbaddr, 70);
3811 pDCTstat->CtrlWrd3 |= (byte >> 4) << (i << 2); /* C3 = SPD byte 70 [7:4] */
3812 /* Get Control word values for RC4, and RC5 */
3813 byte = mctRead_SPD(smbaddr, 71);
3814				pDCTstat->CtrlWrd4 |= (byte & 0x0F) << (i << 2);	/* RC4 = SPD byte 71 [3:0] */
3815 pDCTstat->CtrlWrd5 |= (byte >> 4) << (i << 2); /* RC5 = SPD byte 71 [7:4] */
3816 }
3817 }
3818 }
3819 printk(BIOS_DEBUG, "\t DIMMPresence: DIMMValid=%x\n", pDCTstat->DIMMValid);
3820 printk(BIOS_DEBUG, "\t DIMMPresence: DIMMPresent=%x\n", pDCTstat->DIMMPresent);
3821 printk(BIOS_DEBUG, "\t DIMMPresence: RegDIMMPresent=%x\n", RegDIMMPresent);
Timothy Pearson730a0432015-10-16 13:51:51 -05003822 printk(BIOS_DEBUG, "\t DIMMPresence: LRDIMMPresent=%x\n", LRDIMMPresent);
Zheng Baoeb75f652010-04-23 17:32:48 +00003823 printk(BIOS_DEBUG, "\t DIMMPresence: DimmECCPresent=%x\n", pDCTstat->DimmECCPresent);
3824 printk(BIOS_DEBUG, "\t DIMMPresence: DimmPARPresent=%x\n", pDCTstat->DimmPARPresent);
3825 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx4Present=%x\n", pDCTstat->Dimmx4Present);
3826 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx8Present=%x\n", pDCTstat->Dimmx8Present);
3827 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx16Present=%x\n", pDCTstat->Dimmx16Present);
3828 printk(BIOS_DEBUG, "\t DIMMPresence: DimmPlPresent=%x\n", pDCTstat->DimmPlPresent);
3829 printk(BIOS_DEBUG, "\t DIMMPresence: DimmDRPresent=%x\n", pDCTstat->DimmDRPresent);
3830 printk(BIOS_DEBUG, "\t DIMMPresence: DimmQRPresent=%x\n", pDCTstat->DimmQRPresent);
3831 printk(BIOS_DEBUG, "\t DIMMPresence: DATAload[0]=%x\n", pDCTstat->DATAload[0]);
3832 printk(BIOS_DEBUG, "\t DIMMPresence: MAload[0]=%x\n", pDCTstat->MAload[0]);
3833 printk(BIOS_DEBUG, "\t DIMMPresence: MAdimms[0]=%x\n", pDCTstat->MAdimms[0]);
3834 printk(BIOS_DEBUG, "\t DIMMPresence: DATAload[1]=%x\n", pDCTstat->DATAload[1]);
3835 printk(BIOS_DEBUG, "\t DIMMPresence: MAload[1]=%x\n", pDCTstat->MAload[1]);
3836 printk(BIOS_DEBUG, "\t DIMMPresence: MAdimms[1]=%x\n", pDCTstat->MAdimms[1]);
3837
3838 if (pDCTstat->DIMMValid != 0) { /* If any DIMMs are present...*/
3839 if (RegDIMMPresent != 0) {
3840 if ((RegDIMMPresent ^ pDCTstat->DIMMValid) !=0) {
3841 /* module type DIMM mismatch (reg'ed, unbuffered) */
3842 pDCTstat->ErrStatus |= 1<<SB_DimmMismatchM;
3843 pDCTstat->ErrCode = SC_StopError;
3844 } else{
3845 /* all DIMMs are registered */
3846 pDCTstat->Status |= 1<<SB_Registered;
3847 }
3848 }
Timothy Pearson730a0432015-10-16 13:51:51 -05003849 if (LRDIMMPresent != 0) {
3850 if ((LRDIMMPresent ^ pDCTstat->DIMMValid) !=0) {
3851				/* module type DIMM mismatch (load-reduced vs. other) */
3852 pDCTstat->ErrStatus |= 1<<SB_DimmMismatchM;
3853 pDCTstat->ErrCode = SC_StopError;
3854 } else{
3855				/* all DIMMs are load-reduced */
3856 pDCTstat->Status |= 1<<SB_LoadReduced;
3857 }
3858 }
Zheng Baoeb75f652010-04-23 17:32:48 +00003859 if (pDCTstat->DimmECCPresent != 0) {
3860 if ((pDCTstat->DimmECCPresent ^ pDCTstat->DIMMValid )== 0) {
3861 /* all DIMMs are ECC capable */
3862 pDCTstat->Status |= 1<<SB_ECCDIMMs;
3863 }
3864 }
3865 if (pDCTstat->DimmPARPresent != 0) {
3866 if ((pDCTstat->DimmPARPresent ^ pDCTstat->DIMMValid) == 0) {
3867 /*all DIMMs are Parity capable */
3868 pDCTstat->Status |= 1<<SB_PARDIMMs;
3869 }
3870 }
3871 } else {
3872 /* no DIMMs present or no DIMMs that qualified. */
3873 pDCTstat->ErrStatus |= 1<<SB_NoDimms;
3874 pDCTstat->ErrCode = SC_StopError;
3875 }
3876
3877 printk(BIOS_DEBUG, "\t DIMMPresence: Status %x\n", pDCTstat->Status);
3878 printk(BIOS_DEBUG, "\t DIMMPresence: ErrStatus %x\n", pDCTstat->ErrStatus);
3879 printk(BIOS_DEBUG, "\t DIMMPresence: ErrCode %x\n", pDCTstat->ErrCode);
3880 printk(BIOS_DEBUG, "\t DIMMPresence: Done\n\n");
3881
3882 mctHookAfterDIMMpre();
3883
3884 return pDCTstat->ErrCode;
3885}
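
/* Illustrative sketch (not part of the build): the DDR3 SPD geometry decode
 * scattered through DIMMPresence_D above, collected into one helper.  Field
 * positions follow the JEDEC DDR3 SPD layout referenced by the SPD_*
 * constants in this file; the struct and helper name are hypothetical.
 */
#if 0
struct ddr3_geometry {
	uint8_t row_bits;	/* 12 + encoded value */
	uint8_t col_bits;	/* 9 + encoded value */
	uint8_t ranks;
	uint8_t banks;
	uint8_t device_width;	/* DRAM device width in bits (x4/x8/x16) */
	uint8_t bus_width;	/* module primary bus width in bits */
};

static void decode_ddr3_geometry(uint8_t spd_addressing, uint8_t spd_density,
				 uint8_t spd_organization, uint8_t spd_bus_width,
				 struct ddr3_geometry *g)
{
	g->row_bits = ((spd_addressing >> 3) & 0x7) + 12;
	g->col_bits = (spd_addressing & 0x7) + 9;
	g->ranks = ((spd_organization >> 3) & 0x7) + 1;
	g->banks = 1 << (((spd_density >> 4) & 0x7) + 3);
	g->device_width = 1 << ((spd_organization & 0x7) + 2);
	g->bus_width = 1 << ((spd_bus_width & 0x7) + 3);
}
#endif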
3886
3887static u8 Get_DIMMAddress_D(struct DCTStatStruc *pDCTstat, u8 i)
3888{
3889 u8 *p;
3890
3891 p = pDCTstat->DIMMAddr;
3892 /* mct_BeforeGetDIMMAddress(); */
3893 return p[i];
3894}
3895
Timothy Pearson730a0432015-10-16 13:51:51 -05003896static void mct_preInitDCT(struct MCTStatStruc *pMCTstat,
3897 struct DCTStatStruc *pDCTstat)
3898{
3899 u8 err_code;
3900
3901 /* Preconfigure DCT0 */
3902 DCTPreInit_D(pMCTstat, pDCTstat, 0);
3903
3904 /* Configure DCT1 if unganged and enabled*/
3905 if (!pDCTstat->GangedMode) {
3906 if (pDCTstat->DIMMValidDCT[1] > 0) {
3907 err_code = pDCTstat->ErrCode; /* save DCT0 errors */
3908 pDCTstat->ErrCode = 0;
3909 DCTPreInit_D(pMCTstat, pDCTstat, 1);
3910 if (pDCTstat->ErrCode == 2) /* DCT1 is not Running */
3911 pDCTstat->ErrCode = err_code; /* Using DCT0 Error code to update pDCTstat.ErrCode */
3912 }
3913 }
3914}
3915
Zheng Baoeb75f652010-04-23 17:32:48 +00003916static void mct_initDCT(struct MCTStatStruc *pMCTstat,
3917 struct DCTStatStruc *pDCTstat)
3918{
3919 u32 val;
3920 u8 err_code;
3921
3922 /* Config. DCT0 for Ganged or unganged mode */
3923 DCTInit_D(pMCTstat, pDCTstat, 0);
3924 if (pDCTstat->ErrCode == SC_FatalErr) {
3925		/* Fatal error: do nothing (formerly goto exitDCTInit) */
3926 } else {
Timothy Pearson730a0432015-10-16 13:51:51 -05003927 /* Configure DCT1 if unganged and enabled */
Zheng Baoeb75f652010-04-23 17:32:48 +00003928 if (!pDCTstat->GangedMode) {
Kerry She108d30b2010-08-30 07:24:13 +00003929 if (pDCTstat->DIMMValidDCT[1] > 0) {
Zheng Baoeb75f652010-04-23 17:32:48 +00003930 err_code = pDCTstat->ErrCode; /* save DCT0 errors */
3931 pDCTstat->ErrCode = 0;
3932 DCTInit_D(pMCTstat, pDCTstat, 1);
3933 if (pDCTstat->ErrCode == 2) /* DCT1 is not Running */
3934 pDCTstat->ErrCode = err_code; /* Using DCT0 Error code to update pDCTstat.ErrCode */
3935 } else {
3936 val = 1 << DisDramInterface;
Timothy Pearson730a0432015-10-16 13:51:51 -05003937 Set_NB32_DCT(pDCTstat->dev_dct, 1, 0x94, val);
3938
3939 /* To maximize power savings when DisDramInterface=1b,
3940 * all of the MemClkDis bits should also be set.
3941 */
3942 Set_NB32_DCT(pDCTstat->dev_dct, 1, 0x88, 0xff000000);
Zheng Baoeb75f652010-04-23 17:32:48 +00003943 }
3944 }
3945 }
Zheng Baoeb75f652010-04-23 17:32:48 +00003946}
3947
3948static void mct_DramInit(struct MCTStatStruc *pMCTstat,
3949 struct DCTStatStruc *pDCTstat, u8 dct)
3950{
Timothy Pearson730a0432015-10-16 13:51:51 -05003951 mct_BeforeDramInit_Prod_D(pMCTstat, pDCTstat, dct);
Zheng Baoeb75f652010-04-23 17:32:48 +00003952 mct_DramInit_Sw_D(pMCTstat, pDCTstat, dct);
3953 /* mct_DramInit_Hw_D(pMCTstat, pDCTstat, dct); */
3954}
3955
3956static u8 mct_setMode(struct MCTStatStruc *pMCTstat,
3957 struct DCTStatStruc *pDCTstat)
3958{
3959 u8 byte;
3960 u8 bytex;
3961 u32 val;
3962 u32 reg;
3963
3964 byte = bytex = pDCTstat->DIMMValid;
3965 bytex &= 0x55; /* CHA DIMM pop */
3966 pDCTstat->DIMMValidDCT[0] = bytex;
3967
3968	byte &= 0xAA;		/* CHB DIMM pop */
3969 byte >>= 1;
3970 pDCTstat->DIMMValidDCT[1] = byte;
3971
3972 if (byte != bytex) {
3973 pDCTstat->ErrStatus &= ~(1 << SB_DimmMismatchO);
3974 } else {
3975 byte = mctGet_NVbits(NV_Unganged);
3976 if (byte)
3977 pDCTstat->ErrStatus |= (1 << SB_DimmMismatchO); /* Set temp. to avoid setting of ganged mode */
3978
Timothy Pearson730a0432015-10-16 13:51:51 -05003979 if ((!(pDCTstat->ErrStatus & (1 << SB_DimmMismatchO))) && (pDCTstat->LogicalCPUID & AMD_FAM10_ALL)) {
3980 /* Ganged channel mode not supported on Family 15h or higher */
Zheng Baoeb75f652010-04-23 17:32:48 +00003981 pDCTstat->GangedMode = 1;
3982 /* valid 128-bit mode population. */
3983 pDCTstat->Status |= 1 << SB_128bitmode;
3984 reg = 0x110;
3985 val = Get_NB32(pDCTstat->dev_dct, reg);
3986 val |= 1 << DctGangEn;
3987 Set_NB32(pDCTstat->dev_dct, reg, val);
3988 }
3989 if (byte) /* NV_Unganged */
3990			pDCTstat->ErrStatus &= ~(1 << SB_DimmMismatchO); /* Clear so that there is no DIMM mismatch error */
3991 }
3992 return pDCTstat->ErrCode;
3993}
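
/* Illustrative sketch (not part of the build): the channel population split
 * used by mct_setMode above.  Even bits of DIMMValid describe channel A
 * sockets, odd bits channel B; ganged (128-bit) mode is only considered when
 * both channels are populated identically.  The helper name is hypothetical.
 */
#if 0
static int channels_populated_identically(u8 dimm_valid)
{
	u8 cha = dimm_valid & 0x55;		/* channel A population */
	u8 chb = (dimm_valid & 0xAA) >> 1;	/* channel B population */

	return cha == chb;
}
#endif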
3994
3995u32 Get_NB32(u32 dev, u32 reg)
3996{
3997 return pci_read_config32(dev, reg);
3998}
3999
4000void Set_NB32(u32 dev, u32 reg, u32 val)
4001{
4002 pci_write_config32(dev, reg, val);
4003}
4004
4005
4006u32 Get_NB32_index(u32 dev, u32 index_reg, u32 index)
4007{
4008 u32 dword;
4009
4010 Set_NB32(dev, index_reg, index);
4011 dword = Get_NB32(dev, index_reg+0x4);
4012
4013 return dword;
4014}
4015
4016void Set_NB32_index(u32 dev, u32 index_reg, u32 index, u32 data)
4017{
4018 Set_NB32(dev, index_reg, index);
4019 Set_NB32(dev, index_reg + 0x4, data);
4020}
4021
4022u32 Get_NB32_index_wait(u32 dev, u32 index_reg, u32 index)
4023{
Zheng Baoeb75f652010-04-23 17:32:48 +00004024 u32 dword;
4025
Zheng Baoeb75f652010-04-23 17:32:48 +00004026 index &= ~(1 << DctAccessWrite);
4027 Set_NB32(dev, index_reg, index);
4028 do {
4029 dword = Get_NB32(dev, index_reg);
4030 } while (!(dword & (1 << DctAccessDone)));
4031 dword = Get_NB32(dev, index_reg + 0x4);
4032
4033 return dword;
4034}
4035
4036void Set_NB32_index_wait(u32 dev, u32 index_reg, u32 index, u32 data)
4037{
4038 u32 dword;
4039
Zheng Baoeb75f652010-04-23 17:32:48 +00004040 Set_NB32(dev, index_reg + 0x4, data);
4041 index |= (1 << DctAccessWrite);
4042 Set_NB32(dev, index_reg, index);
4043 do {
4044 dword = Get_NB32(dev, index_reg);
4045 } while (!(dword & (1 << DctAccessDone)));
4046
4047}
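
/* Illustrative sketch (not part of the build): how the indexed accessors
 * above are typically composed for a read-modify-write of a register behind
 * the F2x98/F2x9C index/data pair.  The wrapper name and the set_bits
 * parameter are hypothetical; 0x98 matches the index_reg value used
 * elsewhere in this file, and the data window at index_reg + 4 is handled
 * inside the helpers.
 */
#if 0
static void dct_indexed_rmw_example(u32 dev, u32 index, u32 set_bits)
{
	u32 val;

	val = Get_NB32_index_wait(dev, 0x98, index);
	val |= set_bits;
	Set_NB32_index_wait(dev, 0x98, index, val);
}
#endif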
4048
Zheng Bao69436e12011-01-06 02:18:12 +00004049static u8 mct_BeforePlatformSpec(struct MCTStatStruc *pMCTstat,
4050 struct DCTStatStruc *pDCTstat, u8 dct)
4051{
4052 /* mct_checkForCxDxSupport_D */
4053 if (pDCTstat->LogicalCPUID & AMD_DR_GT_Bx) {
Timothy Pearson730a0432015-10-16 13:51:51 -05004054 /* Family 10h Errata 322: Address and Command Fine Delay Values May Be Incorrect */
Zheng Bao69436e12011-01-06 02:18:12 +00004055 /* 1. Write 00000000h to F2x[1,0]9C_xD08E000 */
Timothy Pearson730a0432015-10-16 13:51:51 -05004056 Set_NB32_index_wait_DCT(pDCTstat->dev_dct, dct, 0x98, 0x0D08E000, 0);
Zheng Bao69436e12011-01-06 02:18:12 +00004057 /* 2. If DRAM Configuration Register[MemClkFreq] (F2x[1,0]94[2:0]) is
4058 greater than or equal to 011b (DDR-800 and higher),
4059 then write 00000080h to F2x[1,0]9C_xD02E001,
4060 else write 00000090h to F2x[1,0]9C_xD02E001. */
Timothy Pearson730a0432015-10-16 13:51:51 -05004061 if (pDCTstat->Speed >= mhz_to_memclk_config(mctGet_NVbits(NV_MIN_MEMCLK)))
4062 Set_NB32_index_wait_DCT(pDCTstat->dev_dct, dct, 0x98, 0x0D02E001, 0x80);
Zheng Bao69436e12011-01-06 02:18:12 +00004063 else
Timothy Pearson730a0432015-10-16 13:51:51 -05004064 Set_NB32_index_wait_DCT(pDCTstat->dev_dct, dct, 0x98, 0x0D02E001, 0x90);
Zheng Bao69436e12011-01-06 02:18:12 +00004065 }
4066 return pDCTstat->ErrCode;
4067}
4068
Zheng Baoeb75f652010-04-23 17:32:48 +00004069static u8 mct_PlatformSpec(struct MCTStatStruc *pMCTstat,
4070 struct DCTStatStruc *pDCTstat, u8 dct)
4071{
4072 /* Get platform specific config/timing values from the interface layer
4073 * and program them into DCT.
4074 */
4075
4076 u32 dev = pDCTstat->dev_dct;
4077 u32 index_reg;
4078 u8 i, i_start, i_end;
4079
4080 if (pDCTstat->GangedMode) {
4081 SyncSetting(pDCTstat);
4082 /* mct_SetupSync_D */
4083 i_start = 0;
4084 i_end = 2;
4085 } else {
4086 i_start = dct;
4087 i_end = dct + 1;
4088 }
4089 for (i=i_start; i<i_end; i++) {
Timothy Pearson730a0432015-10-16 13:51:51 -05004090 index_reg = 0x98;
4091		Set_NB32_index_wait_DCT(dev, i, index_reg, 0x00, pDCTstat->CH_ODC_CTL[i]);	/* Output Driver Compensation Control */
4092		Set_NB32_index_wait_DCT(dev, i, index_reg, 0x04, pDCTstat->CH_ADDR_TMG[i]);	/* Address/Command Timing Control */
Zheng Baoeb75f652010-04-23 17:32:48 +00004093 }
4094
4095 return pDCTstat->ErrCode;
Zheng Baoeb75f652010-04-23 17:32:48 +00004096}
4097
4098static void mct_SyncDCTsReady(struct DCTStatStruc *pDCTstat)
4099{
4100 u32 dev;
4101 u32 val;
4102
4103 if (pDCTstat->NodePresent) {
4104 dev = pDCTstat->dev_dct;
4105
4106 if ((pDCTstat->DIMMValidDCT[0] ) || (pDCTstat->DIMMValidDCT[1])) { /* This Node has dram */
4107 do {
4108 val = Get_NB32(dev, 0x110);
4109 } while (!(val & (1 << DramEnabled)));
4110 }
4111 } /* Node is present */
4112}
4113
4114static void mct_AfterGetCLT(struct MCTStatStruc *pMCTstat,
4115 struct DCTStatStruc *pDCTstat, u8 dct)
4116{
4117 if (!pDCTstat->GangedMode) {
4118 if (dct == 0 ) {
4119 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[dct];
4120 if (pDCTstat->DIMMValidDCT[dct] == 0)
4121 pDCTstat->ErrCode = SC_StopError;
4122 } else {
4123 pDCTstat->CSPresent = 0;
4124 pDCTstat->CSTestFail = 0;
4125 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[dct];
4126 if (pDCTstat->DIMMValidDCT[dct] == 0)
4127 pDCTstat->ErrCode = SC_StopError;
4128 }
4129 }
4130}
4131
4132static u8 mct_SPDCalcWidth(struct MCTStatStruc *pMCTstat,
4133 struct DCTStatStruc *pDCTstat, u8 dct)
4134{
4135 u8 ret;
4136 u32 val;
4137
4138 if ( dct == 0) {
4139 SPDCalcWidth_D(pMCTstat, pDCTstat);
4140 ret = mct_setMode(pMCTstat, pDCTstat);
4141 } else {
4142 ret = pDCTstat->ErrCode;
4143 }
4144
4145 if (pDCTstat->DIMMValidDCT[0] == 0) {
Timothy Pearson730a0432015-10-16 13:51:51 -05004146 val = Get_NB32_DCT(pDCTstat->dev_dct, 0, 0x94);
Zheng Baoeb75f652010-04-23 17:32:48 +00004147 val |= 1 << DisDramInterface;
Timothy Pearson730a0432015-10-16 13:51:51 -05004148 Set_NB32_DCT(pDCTstat->dev_dct, 0, 0x94, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00004149 }
4150 if (pDCTstat->DIMMValidDCT[1] == 0) {
Timothy Pearson730a0432015-10-16 13:51:51 -05004151 val = Get_NB32_DCT(pDCTstat->dev_dct, 1, 0x94);
Zheng Baoeb75f652010-04-23 17:32:48 +00004152 val |= 1 << DisDramInterface;
Timothy Pearson730a0432015-10-16 13:51:51 -05004153 Set_NB32_DCT(pDCTstat->dev_dct, 1, 0x94, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00004154 }
4155
4156 printk(BIOS_DEBUG, "SPDCalcWidth: Status %x\n", pDCTstat->Status);
4157 printk(BIOS_DEBUG, "SPDCalcWidth: ErrStatus %x\n", pDCTstat->ErrStatus);
4158 printk(BIOS_DEBUG, "SPDCalcWidth: ErrCode %x\n", pDCTstat->ErrCode);
4159 printk(BIOS_DEBUG, "SPDCalcWidth: Done\n");
4160 /* Disable dram interface before DRAM init */
4161
4162 return ret;
4163}
4164
4165static void mct_AfterStitchMemory(struct MCTStatStruc *pMCTstat,
4166 struct DCTStatStruc *pDCTstat, u8 dct)
4167{
4168 u32 val;
4169 u32 dword;
4170 u32 dev;
4171 u32 reg;
4172 u8 _MemHoleRemap;
4173 u32 DramHoleBase;
4174
4175 _MemHoleRemap = mctGet_NVbits(NV_MemHole);
4176 DramHoleBase = mctGet_NVbits(NV_BottomIO);
4177 DramHoleBase <<= 8;
4178	/* Shift DramHoleBase from [31:24] to [31:16] so the hole has
4179	 * a granularity of 128MB, and use it as the 'effective'
4180	 * bottom of I/O (DramHoleBase).
4181	 */
4182 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
4183
4184 /* In unganged mode, we must add DCT0 and DCT1 to DCTSysLimit */
4185 if (!pDCTstat->GangedMode) {
4186 dev = pDCTstat->dev_dct;
4187 pDCTstat->NodeSysLimit += pDCTstat->DCTSysLimit;
4188 /* if DCT0 and DCT1 both exist, set DctSelBaseAddr[47:27] to the top of DCT0 */
4189 if (dct == 0) {
4190 if (pDCTstat->DIMMValidDCT[1] > 0) {
4191 dword = pDCTstat->DCTSysLimit + 1;
4192 dword += pDCTstat->NodeSysBase;
4193 dword >>= 8; /* scale [39:8] to [47:27],and to F2x110[31:11] */
4194 if ((dword >= DramHoleBase) && _MemHoleRemap) {
4195 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
4196 val = pMCTstat->HoleBase;
4197 val >>= 16;
4198 val = (((~val) & 0xFF) + 1);
4199 val <<= 8;
4200 dword += val;
4201 }
4202 reg = 0x110;
4203 val = Get_NB32(dev, reg);
4204 val &= 0x7F;
4205 val |= dword;
4206 val |= 3; /* Set F2x110[DctSelHiRngEn], F2x110[DctSelHi] */
4207 Set_NB32(dev, reg, val);
4208
4209 reg = 0x114;
4210 val = dword;
4211 Set_NB32(dev, reg, val);
4212 }
4213 } else {
4214 /* Program the DctSelBaseAddr value to 0
4215 if DCT 0 is disabled */
4216 if (pDCTstat->DIMMValidDCT[0] == 0) {
4217 dword = pDCTstat->NodeSysBase;
4218 dword >>= 8;
4219 if ((dword >= DramHoleBase) && _MemHoleRemap) {
4220 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
4221 val = pMCTstat->HoleBase;
4222 val >>= 8;
4223 val &= ~(0xFFFF);
4224 val |= (((~val) & 0xFFFF) + 1);
4225 dword += val;
4226 }
4227 reg = 0x114;
4228 val = dword;
4229 Set_NB32(dev, reg, val);
4230
4231 reg = 0x110;
4232 val |= 3; /* Set F2x110[DctSelHiRngEn], F2x110[DctSelHi] */
4233 Set_NB32(dev, reg, val);
4234 }
4235 }
4236 } else {
4237 pDCTstat->NodeSysLimit += pDCTstat->DCTSysLimit;
4238 }
4239 printk(BIOS_DEBUG, "AfterStitch pDCTstat->NodeSysBase = %x\n", pDCTstat->NodeSysBase);
4240 printk(BIOS_DEBUG, "mct_AfterStitchMemory: pDCTstat->NodeSysLimit = %x\n", pDCTstat->NodeSysLimit);
4241}
4242
4243static u8 mct_DIMMPresence(struct MCTStatStruc *pMCTstat,
4244 struct DCTStatStruc *pDCTstat, u8 dct)
4245{
4246 u8 ret;
4247
Kerry She108d30b2010-08-30 07:24:13 +00004248 if (dct == 0)
Zheng Baoeb75f652010-04-23 17:32:48 +00004249 ret = DIMMPresence_D(pMCTstat, pDCTstat);
4250 else
4251 ret = pDCTstat->ErrCode;
4252
4253 return ret;
4254}
4255
4256/* mct_BeforeGetDIMMAddress inline in C */
4257
4258static void mct_OtherTiming(struct MCTStatStruc *pMCTstat,
4259 struct DCTStatStruc *pDCTstatA)
4260{
4261 u8 Node;
4262
4263 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
4264 struct DCTStatStruc *pDCTstat;
4265 pDCTstat = pDCTstatA + Node;
4266 if (pDCTstat->NodePresent) {
4267 if (pDCTstat->DIMMValidDCT[0]) {
4268 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[0];
4269 Set_OtherTiming(pMCTstat, pDCTstat, 0);
4270 }
4271 if (pDCTstat->DIMMValidDCT[1] && !pDCTstat->GangedMode ) {
4272 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[1];
4273 Set_OtherTiming(pMCTstat, pDCTstat, 1);
4274 }
4275 } /* Node is present*/
4276 } /* while Node */
4277}
4278
4279static void Set_OtherTiming(struct MCTStatStruc *pMCTstat,
4280 struct DCTStatStruc *pDCTstat, u8 dct)
4281{
4282 u32 reg;
Zheng Baoeb75f652010-04-23 17:32:48 +00004283 u32 val;
4284 u32 dword;
4285 u32 dev = pDCTstat->dev_dct;
4286
Timothy Pearson730a0432015-10-16 13:51:51 -05004287 Get_DqsRcvEnGross_Diff(pDCTstat, dev, dct, 0x98);
4288 Get_WrDatGross_Diff(pDCTstat, dct, dev, 0x98);
Zheng Baoeb75f652010-04-23 17:32:48 +00004289 Get_Trdrd(pMCTstat, pDCTstat, dct);
4290 Get_Twrwr(pMCTstat, pDCTstat, dct);
4291 Get_Twrrd(pMCTstat, pDCTstat, dct);
4292 Get_TrwtTO(pMCTstat, pDCTstat, dct);
4293 Get_TrwtWB(pMCTstat, pDCTstat);
4294
Timothy Pearson730a0432015-10-16 13:51:51 -05004295 reg = 0x8C; /* Dram Timing Hi */
4296 val = Get_NB32_DCT(dev, dct, reg);
Zheng Baoeb75f652010-04-23 17:32:48 +00004297 val &= 0xffff0300;
4298 dword = pDCTstat->TrwtTO;
4299 val |= dword << 4;
4300 dword = pDCTstat->Twrrd & 3;
4301 val |= dword << 10;
4302 dword = pDCTstat->Twrwr & 3;
4303 val |= dword << 12;
4304 dword = pDCTstat->Trdrd & 3;
4305 val |= dword << 14;
4306 dword = pDCTstat->TrwtWB;
4307 val |= dword;
Timothy Pearson730a0432015-10-16 13:51:51 -05004308 Set_NB32_DCT(dev, dct, reg, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00004309
Timothy Pearson730a0432015-10-16 13:51:51 -05004310 reg = 0x78;
4311 val = Get_NB32_DCT(dev, dct, reg);
Zheng Baoeb75f652010-04-23 17:32:48 +00004312 val &= 0xFFFFC0FF;
4313 dword = pDCTstat->Twrrd >> 2;
4314 val |= dword << 8;
4315 dword = pDCTstat->Twrwr >> 2;
4316 val |= dword << 10;
4317 dword = pDCTstat->Trdrd >> 2;
4318 val |= dword << 12;
Timothy Pearson730a0432015-10-16 13:51:51 -05004319 Set_NB32_DCT(dev, dct, reg, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00004320}
4321
4322static void Get_Trdrd(struct MCTStatStruc *pMCTstat,
4323 struct DCTStatStruc *pDCTstat, u8 dct)
4324{
4325 int8_t Trdrd;
4326
4327 Trdrd = ((int8_t)(pDCTstat->DqsRcvEnGrossMax - pDCTstat->DqsRcvEnGrossMin) >> 1) + 1;
4328 if (Trdrd > 8)
4329 Trdrd = 8;
4330 pDCTstat->Trdrd = Trdrd;
4331}
4332
4333static void Get_Twrwr(struct MCTStatStruc *pMCTstat,
4334 struct DCTStatStruc *pDCTstat, u8 dct)
4335{
4336 int8_t Twrwr = 0;
4337
4338 Twrwr = ((int8_t)(pDCTstat->WrDatGrossMax - pDCTstat->WrDatGrossMin) >> 1) + 2;
4339
4340 if (Twrwr < 2)
4341 Twrwr = 2;
4342 else if (Twrwr > 9)
4343 Twrwr = 9;
4344
4345 pDCTstat->Twrwr = Twrwr;
4346}
4347
4348static void Get_Twrrd(struct MCTStatStruc *pMCTstat,
4349 struct DCTStatStruc *pDCTstat, u8 dct)
4350{
4351 u8 LDplus1;
4352 int8_t Twrrd;
4353
4354 LDplus1 = Get_Latency_Diff(pMCTstat, pDCTstat, dct);
4355
4356 Twrrd = ((int8_t)(pDCTstat->WrDatGrossMax - pDCTstat->DqsRcvEnGrossMin) >> 1) + 4 - LDplus1;
4357
4358 if (Twrrd < 2)
4359 Twrrd = 2;
4360 else if (Twrrd > 10)
4361 Twrrd = 10;
4362 pDCTstat->Twrrd = Twrrd;
4363}
4364
4365static void Get_TrwtTO(struct MCTStatStruc *pMCTstat,
4366 struct DCTStatStruc *pDCTstat, u8 dct)
4367{
4368 u8 LDplus1;
4369 int8_t TrwtTO;
4370
4371 LDplus1 = Get_Latency_Diff(pMCTstat, pDCTstat, dct);
4372
4373 TrwtTO = ((int8_t)(pDCTstat->DqsRcvEnGrossMax - pDCTstat->WrDatGrossMin) >> 1) + LDplus1;
4374
4375 pDCTstat->TrwtTO = TrwtTO;
4376}
4377
4378static void Get_TrwtWB(struct MCTStatStruc *pMCTstat,
4379 struct DCTStatStruc *pDCTstat)
4380{
4381 /* TrwtWB ensures read-to-write data-bus turnaround.
4382 This value should be one more than the programmed TrwtTO.*/
4383 pDCTstat->TrwtWB = pDCTstat->TrwtTO;
4384}
4385
4386static u8 Get_Latency_Diff(struct MCTStatStruc *pMCTstat,
4387 struct DCTStatStruc *pDCTstat, u8 dct)
4388{
Zheng Baoeb75f652010-04-23 17:32:48 +00004389 u32 dev = pDCTstat->dev_dct;
4390 u32 val1, val2;
4391
Timothy Pearson730a0432015-10-16 13:51:51 -05004392 val1 = Get_NB32_DCT(dev, dct, 0x88) & 0xF;
4393 val2 = (Get_NB32_DCT(dev, dct, 0x84) >> 20) & 7;
Zheng Baoeb75f652010-04-23 17:32:48 +00004394
4395 return val1 - val2;
4396}
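
/* Illustrative sketch (not part of the build): the bus-turnaround formulas
 * implemented by Get_Trdrd/Get_Twrwr/Get_Twrrd/Get_TrwtTO above, gathered in
 * one place.  Inputs are the gross-delay extremes from the DqsRcvEn and
 * write-data delay registers plus the latency difference from
 * Get_Latency_Diff; the clamping applied by the individual functions
 * (Trdrd <= 8, 2 <= Twrwr <= 9, 2 <= Twrrd <= 10) is omitted here.
 * The helper name is hypothetical.
 */
#if 0
static void calc_turnaround_times(int8_t rcv_max, int8_t rcv_min,
				  int8_t wr_max, int8_t wr_min,
				  int8_t lat_diff,
				  int8_t *trdrd, int8_t *twrwr,
				  int8_t *twrrd, int8_t *trwt_to)
{
	*trdrd = ((rcv_max - rcv_min) >> 1) + 1;
	*twrwr = ((wr_max - wr_min) >> 1) + 2;
	*twrrd = ((wr_max - rcv_min) >> 1) + 4 - lat_diff;
	*trwt_to = ((rcv_max - wr_min) >> 1) + lat_diff;
}
#endif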
4397
4398static void Get_DqsRcvEnGross_Diff(struct DCTStatStruc *pDCTstat,
Timothy Pearson730a0432015-10-16 13:51:51 -05004399 u32 dev, uint8_t dct, u32 index_reg)
Zheng Baoeb75f652010-04-23 17:32:48 +00004400{
4401 u8 Smallest, Largest;
4402 u32 val;
4403 u8 byte, bytex;
4404
4405 /* The largest DqsRcvEnGrossDelay of any DIMM minus the
4406 DqsRcvEnGrossDelay of any other DIMM is equal to the Critical
4407 Gross Delay Difference (CGDD) */
4408 /* DqsRcvEn byte 1,0 */
Timothy Pearson730a0432015-10-16 13:51:51 -05004409 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, dct, index_reg, 0x10);
Zheng Baoeb75f652010-04-23 17:32:48 +00004410 Largest = val & 0xFF;
4411 Smallest = (val >> 8) & 0xFF;
4412
4413 /* DqsRcvEn byte 3,2 */
Timothy Pearson730a0432015-10-16 13:51:51 -05004414 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, dct, index_reg, 0x11);
Zheng Baoeb75f652010-04-23 17:32:48 +00004415 byte = val & 0xFF;
4416 bytex = (val >> 8) & 0xFF;
4417 if (bytex < Smallest)
4418 Smallest = bytex;
4419 if (byte > Largest)
4420 Largest = byte;
4421
4422 /* DqsRcvEn byte 5,4 */
Timothy Pearson730a0432015-10-16 13:51:51 -05004423 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, dct, index_reg, 0x20);
Zheng Baoeb75f652010-04-23 17:32:48 +00004424 byte = val & 0xFF;
4425 bytex = (val >> 8) & 0xFF;
4426 if (bytex < Smallest)
4427 Smallest = bytex;
4428 if (byte > Largest)
4429 Largest = byte;
4430
4431 /* DqsRcvEn byte 7,6 */
Timothy Pearson730a0432015-10-16 13:51:51 -05004432 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, dct, index_reg, 0x21);
Zheng Baoeb75f652010-04-23 17:32:48 +00004433 byte = val & 0xFF;
4434 bytex = (val >> 8) & 0xFF;
4435 if (bytex < Smallest)
4436 Smallest = bytex;
4437 if (byte > Largest)
4438 Largest = byte;
4439
4440	if (pDCTstat->DimmECCPresent > 0) {
4441 /*DqsRcvEn Ecc */
Timothy Pearson730a0432015-10-16 13:51:51 -05004442 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, dct, index_reg, 0x12);
Zheng Baoeb75f652010-04-23 17:32:48 +00004443 byte = val & 0xFF;
4444 bytex = (val >> 8) & 0xFF;
4445 if (bytex < Smallest)
4446 Smallest = bytex;
4447 if (byte > Largest)
4448 Largest = byte;
4449 }
4450
4451 pDCTstat->DqsRcvEnGrossMax = Largest;
4452 pDCTstat->DqsRcvEnGrossMin = Smallest;
4453}
4454
4455static void Get_WrDatGross_Diff(struct DCTStatStruc *pDCTstat,
4456 u8 dct, u32 dev, u32 index_reg)
4457{
Stefan Reinauer328a6942011-10-13 17:04:02 -07004458 u8 Smallest = 0, Largest = 0;
Zheng Baoeb75f652010-04-23 17:32:48 +00004459 u32 val;
4460 u8 byte, bytex;
4461
4462 /* The largest WrDatGrossDlyByte of any DIMM minus the
4463 WrDatGrossDlyByte of any other DIMM is equal to CGDD */
4464 if (pDCTstat->DIMMValid & (1 << 0)) {
4465 val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x01); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM0 */
4466 Largest = val & 0xFF;
4467 Smallest = (val >> 8) & 0xFF;
4468 }
4469 if (pDCTstat->DIMMValid & (1 << 2)) {
4470 val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x101); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM1 */
4471 byte = val & 0xFF;
4472 bytex = (val >> 8) & 0xFF;
4473 if (bytex < Smallest)
4474 Smallest = bytex;
4475 if (byte > Largest)
4476 Largest = byte;
4477 }
4478
4479 /* If Cx, 2 more dimm need to be checked to find out the largest and smallest */
4480 if (pDCTstat->LogicalCPUID & AMD_DR_Cx) {
4481 if (pDCTstat->DIMMValid & (1 << 4)) {
4482 val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x201); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM2 */
4483 byte = val & 0xFF;
4484 bytex = (val >> 8) & 0xFF;
4485 if (bytex < Smallest)
4486 Smallest = bytex;
4487 if (byte > Largest)
4488 Largest = byte;
4489 }
4490 if (pDCTstat->DIMMValid & (1 << 6)) {
4491			val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x301);	/* WrDatGrossDlyByte byte 0,1,2,3 for DIMM3 */
4492 byte = val & 0xFF;
4493 bytex = (val >> 8) & 0xFF;
4494 if (bytex < Smallest)
4495 Smallest = bytex;
4496 if (byte > Largest)
4497 Largest = byte;
4498 }
4499 }
4500
4501 pDCTstat->WrDatGrossMax = Largest;
4502 pDCTstat->WrDatGrossMin = Smallest;
4503}
4504
4505static u16 Get_DqsRcvEnGross_MaxMin(struct DCTStatStruc *pDCTstat,
Timothy Pearson730a0432015-10-16 13:51:51 -05004506 u32 dev, uint8_t dct, u32 index_reg,
Zheng Baoeb75f652010-04-23 17:32:48 +00004507 u32 index)
4508{
4509 u8 Smallest, Largest;
4510 u8 i;
4511 u8 byte;
4512 u32 val;
4513 u16 word;
4514 u8 ecc_reg = 0;
4515
4516 Smallest = 7;
4517 Largest = 0;
4518
4519 if (index == 0x12)
4520 ecc_reg = 1;
4521
4522 for (i=0; i < 8; i+=2) {
4523 if ( pDCTstat->DIMMValid & (1 << i)) {
Timothy Pearson730a0432015-10-16 13:51:51 -05004524 val = Get_NB32_index_wait_DCT(dev, dct, index_reg, index);
Zheng Baoeb75f652010-04-23 17:32:48 +00004525 val &= 0x00E000E0;
4526 byte = (val >> 5) & 0xFF;
4527 if (byte < Smallest)
4528 Smallest = byte;
4529 if (byte > Largest)
4530 Largest = byte;
4531 if (!(ecc_reg)) {
4532 byte = (val >> (16 + 5)) & 0xFF;
4533 if (byte < Smallest)
4534 Smallest = byte;
4535 if (byte > Largest)
4536 Largest = byte;
4537 }
4538 }
Zheng Bao7b1a3c32010-09-28 04:43:16 +00004539 index += 3;
Zheng Baoeb75f652010-04-23 17:32:48 +00004540 } /* while ++i */
4541
4542 word = Smallest;
4543 word <<= 8;
4544 word |= Largest;
4545
4546 return word;
4547}
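
/* Illustrative sketch (not part of the build): the field extraction performed
 * by Get_DqsRcvEnGross_MaxMin above.  Each DqsRcvEn register covers two byte
 * lanes; the gross (MEMCLK-granularity) part of each delay sits in bits [7:5]
 * and [23:21] of the register value.  The helper name is hypothetical.
 */
#if 0
static void dqs_rcven_gross_pair(u32 reg_val, u8 *lane0_gross, u8 *lane1_gross)
{
	*lane0_gross = (reg_val >> 5) & 0x7;	/* lower byte lane, bits [7:5] */
	*lane1_gross = (reg_val >> 21) & 0x7;	/* upper byte lane, bits [23:21] */
}
#endif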
4548
4549static u16 Get_WrDatGross_MaxMin(struct DCTStatStruc *pDCTstat,
4550 u8 dct, u32 dev, u32 index_reg,
4551 u32 index)
4552{
4553 u8 Smallest, Largest;
4554 u8 i, j;
4555 u32 val;
4556 u8 byte;
4557 u16 word;
4558
4559 Smallest = 3;
4560 Largest = 0;
4561 for (i=0; i < 2; i++) {
Timothy Pearson730a0432015-10-16 13:51:51 -05004562 val = Get_NB32_index_wait_DCT(dev, dct, index_reg, index);
Zheng Baoeb75f652010-04-23 17:32:48 +00004563 val &= 0x60606060;
4564 val >>= 5;
4565 for (j=0; j < 4; j++) {
4566 byte = val & 0xFF;
4567 if (byte < Smallest)
4568 Smallest = byte;
4569 if (byte > Largest)
4570 Largest = byte;
4571 val >>= 8;
4572 } /* while ++j */
4573 index++;
4574 } /*while ++i*/
4575
4576 if (pDCTstat->DimmECCPresent > 0) {
4577 index++;
Timothy Pearson730a0432015-10-16 13:51:51 -05004578 val = Get_NB32_index_wait_DCT(dev, dct, index_reg, index);
Zheng Baoeb75f652010-04-23 17:32:48 +00004579 val &= 0x00000060;
4580 val >>= 5;
4581 byte = val & 0xFF;
4582 if (byte < Smallest)
4583 Smallest = byte;
4584 if (byte > Largest)
4585 Largest = byte;
4586 }
4587
4588 word = Smallest;
4589 word <<= 8;
4590 word |= Largest;
4591
4592 return word;
4593}
4594
Zheng Bao69436e12011-01-06 02:18:12 +00004595static void mct_PhyController_Config(struct MCTStatStruc *pMCTstat,
4596 struct DCTStatStruc *pDCTstat, u8 dct)
Zheng Baoeb75f652010-04-23 17:32:48 +00004597{
Timothy Pearson730a0432015-10-16 13:51:51 -05004598 uint8_t index;
4599 uint32_t dword;
4600 u32 index_reg = 0x98;
Zheng Bao69436e12011-01-06 02:18:12 +00004601 u32 dev = pDCTstat->dev_dct;
Zheng Bao69436e12011-01-06 02:18:12 +00004602
Timothy Pearson730a0432015-10-16 13:51:51 -05004603 if (pDCTstat->LogicalCPUID & (AMD_DR_DAC2_OR_C3 | AMD_RB_C3 | AMD_FAM15_ALL)) {
Zheng Bao69436e12011-01-06 02:18:12 +00004604 if (pDCTstat->Dimmx4Present == 0) {
Timothy Pearson730a0432015-10-16 13:51:51 -05004605 /* Set bit7 RxDqsUDllPowerDown to register F2x[1, 0]98_x0D0F0F13 for
4606 * additional power saving when x4 DIMMs are not present.
4607 */
4608 for (index = 0; index < 0x9; index++) {
4609 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f0013 | (index << 8));
4610 dword |= (0x1 << 7); /* RxDqsUDllPowerDown = 1 */
4611 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f0013 | (index << 8), dword);
4612 }
Zheng Bao69436e12011-01-06 02:18:12 +00004613 }
4614 }
4615
Timothy Pearson730a0432015-10-16 13:51:51 -05004616 if (pDCTstat->LogicalCPUID & (AMD_DR_DAC2_OR_C3 | AMD_FAM15_ALL)) {
Zheng Bao69436e12011-01-06 02:18:12 +00004617 if (pDCTstat->DimmECCPresent == 0) {
4618 /* Set bit4 PwrDn to register F2x[1, 0]98_x0D0F0830 for power saving */
Timothy Pearson730a0432015-10-16 13:51:51 -05004619 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f0830);
4620 dword |= 1 << 4; /* BIOS should set this bit if ECC DIMMs are not present */
4621 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f0830, dword);
Zheng Bao69436e12011-01-06 02:18:12 +00004622 }
4623 }
4624
4625}
4626
4627static void mct_FinalMCT_D(struct MCTStatStruc *pMCTstat,
4628 struct DCTStatStruc *pDCTstatA)
4629{
4630 u8 Node;
4631 struct DCTStatStruc *pDCTstat;
4632 u32 val;
4633
4634 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
4635 pDCTstat = pDCTstatA + Node;
4636
4637 if (pDCTstat->NodePresent) {
4638 mct_PhyController_Config(pMCTstat, pDCTstat, 0);
4639 mct_PhyController_Config(pMCTstat, pDCTstat, 1);
4640 }
4641 if (!(pDCTstat->LogicalCPUID & AMD_DR_Dx)) { /* mct_checkForDxSupport */
4642 mct_ExtMCTConfig_Cx(pDCTstat);
4643 mct_ExtMCTConfig_Bx(pDCTstat);
4644 } else { /* For Dx CPU */
4645			val = 0x0CE00F00 | 1 << 29 /* FlushWrOnStpGnt */;
4646 if (!(pDCTstat->GangedMode))
Timothy Pearsonb8a355d2015-09-05 17:55:58 -05004647 val |= 0x20; /* MctWrLimit = 8 for Unganged mode */
Zheng Bao69436e12011-01-06 02:18:12 +00004648 else
Timothy Pearsonb8a355d2015-09-05 17:55:58 -05004649 val |= 0x40; /* MctWrLimit = 16 for ganged mode */
Zheng Bao69436e12011-01-06 02:18:12 +00004650 Set_NB32(pDCTstat->dev_dct, 0x11C, val);
4651
4652 val = Get_NB32(pDCTstat->dev_dct, 0x1B0);
4653 val &= 0xFFFFF8C0;
4654 val |= 0x101; /* BKDG recommended settings */
4655 val |= 0x0FC00000; /* Agesa V5 */
4656 if (!(pDCTstat->GangedMode))
4657 val |= 1 << 12;
4658 else
4659 val &= ~(1 << 12);
4660
4661 val &= 0x0FFFFFFF;
Timothy Pearson730a0432015-10-16 13:51:51 -05004662 if (!is_fam15h()) {
4663 switch (pDCTstat->Speed) {
4664 case 4:
4665 val |= 0x50000000; /* 5 for DDR800 */
4666 break;
4667 case 5:
4668 val |= 0x60000000; /* 6 for DDR1066 */
4669 break;
4670 case 6:
4671					val |= 0x80000000;	/* 8 for DDR1333 */
4672 break;
4673 default:
4674 val |= 0x90000000; /* 9 for DDR1600 */
4675 break;
4676 }
Zheng Bao69436e12011-01-06 02:18:12 +00004677 }
4678 Set_NB32(pDCTstat->dev_dct, 0x1B0, val);
Timothy Pearson730a0432015-10-16 13:51:51 -05004679
4680 if (is_fam15h()) {
4681 uint8_t wm1;
4682 uint8_t wm2;
4683
4684 switch (pDCTstat->Speed) {
4685 case 0x4:
4686 wm1 = 0x3;
4687 wm2 = 0x4;
4688 break;
4689 case 0x6:
4690 wm1 = 0x3;
4691 wm2 = 0x5;
4692 break;
4693 case 0xa:
4694 wm1 = 0x4;
4695 wm2 = 0x6;
4696 break;
4697 case 0xe:
4698 wm1 = 0x5;
4699 wm2 = 0x8;
4700 break;
4701 case 0x12:
4702 wm1 = 0x6;
4703 wm2 = 0x9;
4704 break;
4705 default:
4706 wm1 = 0x7;
4707 wm2 = 0xa;
4708 break;
4709 }
4710
4711 val = Get_NB32(pDCTstat->dev_dct, 0x1B4);
4712 val &= ~(0x3ff);
4713 val |= ((wm2 & 0x1f) << 5);
4714 val |= (wm1 & 0x1f);
4715 Set_NB32(pDCTstat->dev_dct, 0x1B4, val);
4716 }
Zheng Bao69436e12011-01-06 02:18:12 +00004717 }
4718 }
4719
Arne Georg Gleditsche150e9a2010-09-09 10:35:52 +00004720 /* ClrClToNB_D postponed until we're done executing from ROM */
Zheng Baoeb75f652010-04-23 17:32:48 +00004721 mct_ClrWbEnhWsbDis_D(pMCTstat, pDCTstat);
Zheng Bao69436e12011-01-06 02:18:12 +00004722
4723 /* set F3x8C[DisFastTprWr] on all DR, if L3Size=0 */
4724 if (pDCTstat->LogicalCPUID & AMD_DR_ALL) {
4725 if (!(cpuid_edx(0x80000006) & 0xFFFC0000)) {
4726 val = Get_NB32(pDCTstat->dev_nbmisc, 0x8C);
4727 val |= 1 << 24;
4728 Set_NB32(pDCTstat->dev_nbmisc, 0x8C, val);
4729 }
4730 }
Zheng Baoeb75f652010-04-23 17:32:48 +00004731}
4732
Timothy Pearson730a0432015-10-16 13:51:51 -05004733void mct_ForceNBPState0_En_Fam15(struct MCTStatStruc *pMCTstat,
4734 struct DCTStatStruc *pDCTstat)
4735{
4736 /* Force the NB P-state to P0 */
4737 uint32_t dword;
4738 uint32_t dword2;
4739
4740 dword = Get_NB32(pDCTstat->dev_nbctl, 0x174);
4741 if (!(dword & 0x1)) {
4742 dword = Get_NB32(pDCTstat->dev_nbctl, 0x170);
4743 pDCTstat->SwNbPstateLoDis = (dword >> 14) & 0x1;
4744 pDCTstat->NbPstateDisOnP0 = (dword >> 13) & 0x1;
4745 pDCTstat->NbPstateThreshold = (dword >> 9) & 0x7;
4746 pDCTstat->NbPstateHi = (dword >> 6) & 0x3;
4747 dword &= ~(0x1 << 14); /* SwNbPstateLoDis = 0 */
4748 dword &= ~(0x1 << 13); /* NbPstateDisOnP0 = 0 */
4749 dword &= ~(0x7 << 9); /* NbPstateThreshold = 0 */
4750 dword &= ~(0x3 << 3); /* NbPstateLo = NbPstateMaxVal */
4751 dword |= ((dword & 0x3) << 3);
4752 Set_NB32(pDCTstat->dev_nbctl, 0x170, dword);
4753
4754 /* Wait until CurNbPState == NbPstateLo */
4755 do {
4756 dword2 = Get_NB32(pDCTstat->dev_nbctl, 0x174);
4757 } while (((dword2 << 19) & 0x7) != (dword & 0x3));
4758
4759 dword = Get_NB32(pDCTstat->dev_nbctl, 0x170);
4760 dword &= ~(0x3 << 6); /* NbPstateHi = 0 */
4761 dword |= (0x3 << 14); /* SwNbPstateLoDis = 1 */
4762 Set_NB32(pDCTstat->dev_nbctl, 0x170, dword);
4763
4764 /* Wait until CurNbPState == 0 */
4765 do {
4766 dword2 = Get_NB32(pDCTstat->dev_nbctl, 0x174);
4767 } while (((dword2 << 19) & 0x7) != 0);
4768 }
4769}
4770
4771void mct_ForceNBPState0_Dis_Fam15(struct MCTStatStruc *pMCTstat,
4772 struct DCTStatStruc *pDCTstat)
4773{
4774	/* Restore normal NB P-state functionality */
4775 uint32_t dword;
4776
4777 dword = Get_NB32(pDCTstat->dev_nbctl, 0x174);
4778 if (!(dword & 0x1)) {
4779 dword = Get_NB32(pDCTstat->dev_nbctl, 0x170);
4780 dword &= ~(0x1 << 14); /* SwNbPstateLoDis*/
4781 dword |= ((pDCTstat->SwNbPstateLoDis & 0x1) << 14);
4782 dword &= ~(0x1 << 13); /* NbPstateDisOnP0 */
4783 dword |= ((pDCTstat->NbPstateDisOnP0 & 0x1) << 13);
4784 dword &= ~(0x7 << 9); /* NbPstateThreshold */
4785 dword |= ((pDCTstat->NbPstateThreshold & 0x7) << 9);
4786 dword &= ~(0x3 << 6); /* NbPstateHi */
4787		dword |= ((pDCTstat->NbPstateHi & 0x3) << 6);
4788 Set_NB32(pDCTstat->dev_nbctl, 0x170, dword);
4789 }
4790}
4791
Zheng Baoeb75f652010-04-23 17:32:48 +00004792static void mct_InitialMCT_D(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat)
4793{
Timothy Pearson730a0432015-10-16 13:51:51 -05004794 if (is_fam15h()) {
4795 msr_t p0_state_msr;
4796 uint8_t cpu_fid;
4797 uint8_t cpu_did;
4798 uint32_t cpu_divisor;
4799 uint8_t boost_states;
4800
4801 /* Retrieve the number of boost states */
4802 boost_states = (Get_NB32(pDCTstat->dev_link, 0x15c) >> 2) & 0x7;
4803
4804 /* Retrieve and store the TSC frequency (P0 COF) */
4805 p0_state_msr = rdmsr(0xc0010064 + boost_states);
4806 cpu_fid = p0_state_msr.lo & 0x3f;
4807 cpu_did = (p0_state_msr.lo >> 6) & 0x7;
4808 cpu_divisor = (0x1 << cpu_did);
4809 pMCTstat->TSCFreq = (100 * (cpu_fid + 0x10)) / cpu_divisor;
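		/* Worked example of the P0 COF computation above (illustrative
		 * values, not read from hardware): CpuFid = 0x10 and CpuDid = 0
		 * give 100 MHz * (0x10 + 0x10) / 2^0 = 3200 MHz, which is
		 * stored in pMCTstat->TSCFreq in MHz.
		 */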
4810
4811 mct_ForceNBPState0_En_Fam15(pMCTstat, pDCTstat);
4812 } else {
4813 /* K10 BKDG v3.62 section 2.8.9.2 */
4814 printk(BIOS_DEBUG, "mct_InitialMCT_D: clear_legacy_Mode\n");
4815 clear_legacy_Mode(pMCTstat, pDCTstat);
4816
4817 /* Northbridge configuration */
4818 mct_SetClToNB_D(pMCTstat, pDCTstat);
4819 mct_SetWbEnhWsbDis_D(pMCTstat, pDCTstat);
4820 }
Zheng Baoeb75f652010-04-23 17:32:48 +00004821}
4822
4823static u32 mct_NodePresent_D(void)
4824{
4825 u32 val;
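	/* These constants appear to encode the PCI device/vendor ID used to
	 * probe for a populated node: vendor 0x1022 (AMD) in the low word and
	 * device 0x1600 (Family 15h NB function 0) or 0x1200 (Family 10h NB
	 * function 0) in the high word.
	 */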
Timothy Pearson730a0432015-10-16 13:51:51 -05004826 if (is_fam15h())
4827 val = 0x16001022;
4828 else
4829 val = 0x12001022;
Zheng Baoeb75f652010-04-23 17:32:48 +00004830 return val;
4831}
4832
4833static void mct_init(struct MCTStatStruc *pMCTstat,
4834 struct DCTStatStruc *pDCTstat)
4835{
4836 u32 lo, hi;
4837 u32 addr;
4838
4839 pDCTstat->GangedMode = 0;
4840 pDCTstat->DRPresent = 1;
4841
4842	/* Enable extended PCI configuration space access */
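	/* MSR C001_001Fh is NB_CFG; bit 46 (EnableCf8ExtCfg in the AMD BKDG)
	 * enables extended PCI configuration space accesses through the
	 * CF8h/CFCh I/O mechanism.
	 */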
4843 addr = 0xC001001F;
4844 _RDMSR(addr, &lo, &hi);
4845 if (hi & (1 << (46-32))) {
4846 pDCTstat->Status |= 1 << SB_ExtConfig;
4847 } else {
4848 hi |= 1 << (46-32);
4849 _WRMSR(addr, lo, hi);
4850 }
4851}
4852
4853static void clear_legacy_Mode(struct MCTStatStruc *pMCTstat,
4854 struct DCTStatStruc *pDCTstat)
4855{
4856 u32 reg;
4857 u32 val;
4858 u32 dev = pDCTstat->dev_dct;
4859
4860 /* Clear Legacy BIOS Mode bit */
4861 reg = 0x94;
Timothy Pearson730a0432015-10-16 13:51:51 -05004862 val = Get_NB32_DCT(dev, 0, reg);
Zheng Baoeb75f652010-04-23 17:32:48 +00004863 val &= ~(1<<LegacyBiosMode);
Timothy Pearson730a0432015-10-16 13:51:51 -05004864 Set_NB32_DCT(dev, 0, reg, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00004865
Timothy Pearson730a0432015-10-16 13:51:51 -05004866 val = Get_NB32_DCT(dev, 1, reg);
Zheng Baoeb75f652010-04-23 17:32:48 +00004867 val &= ~(1<<LegacyBiosMode);
Timothy Pearson730a0432015-10-16 13:51:51 -05004868 Set_NB32_DCT(dev, 1, reg, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00004869}
4870
4871static void mct_HTMemMapExt(struct MCTStatStruc *pMCTstat,
4872 struct DCTStatStruc *pDCTstatA)
4873{
4874 u8 Node;
4875 u32 Drambase, Dramlimit;
4876 u32 val;
4877 u32 reg;
4878 u32 dev;
4879 u32 devx;
4880 u32 dword;
4881 struct DCTStatStruc *pDCTstat;
4882
4883 pDCTstat = pDCTstatA + 0;
4884 dev = pDCTstat->dev_map;
4885
4886 /* Copy dram map from F1x40/44,F1x48/4c,
4887 to F1x120/124(Node0),F1x120/124(Node1),...*/
4888 for (Node=0; Node < MAX_NODES_SUPPORTED; Node++) {
4889 pDCTstat = pDCTstatA + Node;
4890 devx = pDCTstat->dev_map;
4891
4892 /* get base/limit from Node0 */
4893 reg = 0x40 + (Node << 3); /* Node0/Dram Base 0 */
4894 val = Get_NB32(dev, reg);
4895		Drambase = val >> (16 + 3);
4896
4897 reg = 0x44 + (Node << 3); /* Node0/Dram Base 0 */
4898 val = Get_NB32(dev, reg);
4899 Dramlimit = val >> (16 + 3);
4900
4901 /* set base/limit to F1x120/124 per Node */
4902 if (pDCTstat->NodePresent) {
4903 reg = 0x120; /* F1x120,DramBase[47:27] */
4904 val = Get_NB32(devx, reg);
4905 val &= 0xFFE00000;
4906 val |= Drambase;
4907 Set_NB32(devx, reg, val);
4908
4909 reg = 0x124;
4910 val = Get_NB32(devx, reg);
4911 val &= 0xFFE00000;
4912 val |= Dramlimit;
4913 Set_NB32(devx, reg, val);
4914
4915			if (pMCTstat->GStatus & (1 << GSB_HWHole)) {
4916 reg = 0xF0;
4917 val = Get_NB32(devx, reg);
4918 val |= (1 << DramMemHoistValid);
4919 val &= ~(0xFF << 24);
4920 dword = (pMCTstat->HoleBase >> (24 - 8)) & 0xFF;
4921 dword <<= 24;
4922 val |= dword;
4923 Set_NB32(devx, reg, val);
4924 }
4925
4926 }
4927 }
4928}
4929
4930static void SetCSTriState(struct MCTStatStruc *pMCTstat,
4931 struct DCTStatStruc *pDCTstat, u8 dct)
4932{
4933 u32 val;
4934 u32 dev = pDCTstat->dev_dct;
Timothy Pearson730a0432015-10-16 13:51:51 -05004935 u32 index_reg = 0x98;
Zheng Baoeb75f652010-04-23 17:32:48 +00004936 u32 index;
4937 u16 word;
4938
4939 /* Tri-state unused chipselects when motherboard
4940 termination is available */
4941
4942 /* FIXME: skip for Ax */
4943
4944 word = pDCTstat->CSPresent;
4945 if (pDCTstat->Status & (1 << SB_Registered)) {
4946 word |= (word & 0x55) << 1;
4947 }
4948 word = (~word) & 0xFF;
4949 index = 0x0c;
Timothy Pearson730a0432015-10-16 13:51:51 -05004950 val = Get_NB32_index_wait_DCT(dev, dct, index_reg, index);
Zheng Baoeb75f652010-04-23 17:32:48 +00004951 val |= word;
Timothy Pearson730a0432015-10-16 13:51:51 -05004952 Set_NB32_index_wait_DCT(dev, dct, index_reg, index, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00004953}
4954
4955static void SetCKETriState(struct MCTStatStruc *pMCTstat,
4956 struct DCTStatStruc *pDCTstat, u8 dct)
4957{
4958 u32 val;
4959 u32 dev;
Timothy Pearson730a0432015-10-16 13:51:51 -05004960 u32 index_reg = 0x98;
Zheng Baoeb75f652010-04-23 17:32:48 +00004961 u32 index;
4962 u16 word;
4963
4964 /* Tri-state unused CKEs when motherboard termination is available */
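	/* Illustration derived from the masks used below rather than from the
	 * BKDG: the 0x55 mask gathers the even chip selects and 0xAA the odd
	 * ones. With CSPresent = 0x01 (a single rank on CS0) the 0xAA test is
	 * zero, so bit 13 of the tri-state control word is set, presumably
	 * tri-stating the CKE line serving the odd chip selects.
	 */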
4965
4966 /* FIXME: skip for Ax */
4967
4968 dev = pDCTstat->dev_dct;
4969 word = pDCTstat->CSPresent;
4970
4971 index = 0x0c;
Timothy Pearson730a0432015-10-16 13:51:51 -05004972 val = Get_NB32_index_wait_DCT(dev, dct, index_reg, index);
Zheng Baoeb75f652010-04-23 17:32:48 +00004973 if ((word & 0x55) == 0)
4974 val |= 1 << 12;
4975
4976 if ((word & 0xAA) == 0)
4977 val |= 1 << 13;
4978
Timothy Pearson730a0432015-10-16 13:51:51 -05004979 Set_NB32_index_wait_DCT(dev, dct, index_reg, index, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00004980}
4981
4982static void SetODTTriState(struct MCTStatStruc *pMCTstat,
4983 struct DCTStatStruc *pDCTstat, u8 dct)
4984{
4985 u32 val;
4986 u32 dev;
Timothy Pearson730a0432015-10-16 13:51:51 -05004987 u32 index_reg = 0x98;
Zheng Baoeb75f652010-04-23 17:32:48 +00004988 u8 cs;
4989 u32 index;
4990 u8 odt;
4991 u8 max_dimms;
4992
4993 /* FIXME: skip for Ax */
4994
4995 dev = pDCTstat->dev_dct;
4996
4997 /* Tri-state unused ODTs when motherboard termination is available */
4998 max_dimms = (u8) mctGet_NVbits(NV_MAX_DIMMS);
4999 odt = 0x0F; /* ODT tri-state setting */
5000
5001	if (pDCTstat->Status & (1 << SB_Registered)) {
5002 for (cs = 0; cs < 8; cs += 2) {
5003 if (pDCTstat->CSPresent & (1 << cs)) {
5004 odt &= ~(1 << (cs / 2));
5005 if (mctGet_NVbits(NV_4RANKType) != 0) { /* quad-rank capable platform */
5006 if (pDCTstat->CSPresent & (1 << (cs + 1)))
5007 odt &= ~(4 << (cs / 2));
5008 }
5009 }
5010 }
5011 } else { /* AM3 package */
5012 val = ~(pDCTstat->CSPresent);
5013		odt = val & 9;	/* keep bits 0 and 3; swap bits 1 and 2 below */
5014 if (val & (1 << 1))
5015 odt |= 1 << 2;
5016 if (val & (1 << 2))
5017 odt |= 1 << 1;
5018 }
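	/* Worked example of the AM3 swap above (pure arithmetic, no hardware
	 * assumptions): CSPresent = 0x03 gives val = ~0x03, so
	 * odt = val & 9 = 0x8; bit 1 of val is clear and bit 2 is set,
	 * leaving odt = 0xA, i.e. ODT lines 1 and 3 are tri-stated while
	 * lines 0 and 2 stay active.
	 */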
5019
5020 index = 0x0C;
Timothy Pearson730a0432015-10-16 13:51:51 -05005021 val = Get_NB32_index_wait_DCT(dev, dct, index_reg, index);
Zheng Baoeb75f652010-04-23 17:32:48 +00005022 val |= ((odt & 0xFF) << 8); /* set bits 11:8 ODTTriState[3:0] */
Timothy Pearson730a0432015-10-16 13:51:51 -05005023 Set_NB32_index_wait_DCT(dev, dct, index_reg, index, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00005024
5025}
5026
Timothy Pearson730a0432015-10-16 13:51:51 -05005027/* Family 15h */
5028static void InitDDRPhy(struct MCTStatStruc *pMCTstat,
5029 struct DCTStatStruc *pDCTstat, u8 dct)
5030{
5031 uint8_t index;
5032 uint32_t dword;
5033 uint8_t ddr_voltage_index;
5034 uint8_t amd_voltage_level_index = 0;
5035 uint32_t index_reg = 0x98;
5036 uint32_t dev = pDCTstat->dev_dct;
5037
5038 printk(BIOS_DEBUG, "%s: Start\n", __func__);
5039
5040 /* Find current DDR supply voltage for this DCT */
5041 ddr_voltage_index = dct_ddr_voltage_index(pDCTstat, dct);
5042
5043 /* Fam15h BKDG v3.14 section 2.10.5.3
5044 * The remainder of the Phy Initialization algorithm picks up in phyAssistedMemFnceTraining
5045 */
5046 for (dct = 0; dct < 2; dct++) {
5047 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0000000b, 0x80000000);
5048 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fe013, 0x00000118);
5049
5050 /* Program desired VDDIO level */
5051 if (ddr_voltage_index & 0x4) {
5052 /* 1.25V */
5053 amd_voltage_level_index = 0x2;
5054 } else if (ddr_voltage_index & 0x2) {
5055 /* 1.35V */
5056 amd_voltage_level_index = 0x1;
5057 } else if (ddr_voltage_index & 0x1) {
5058 /* 1.50V */
5059 amd_voltage_level_index = 0x0;
5060 }
5061
5062 /* D18F2x9C_x0D0F_0[F,8:0]1F_dct[1:0][RxVioLvl] */
5063 for (index = 0; index < 0x9; index++) {
5064 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f001f | (index << 8));
5065 dword &= ~(0x3 << 3);
5066 dword |= (amd_voltage_level_index << 3);
5067 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f001f | (index << 8), dword);
5068 }
5069
5070 /* D18F2x9C_x0D0F_[C,8,2][2:0]1F_dct[1:0][RxVioLvl] */
5071 for (index = 0; index < 0x3; index++) {
5072 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f201f | (index << 8));
5073 dword &= ~(0x3 << 3);
5074 dword |= (amd_voltage_level_index << 3);
5075 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f201f | (index << 8), dword);
5076 }
5077 for (index = 0; index < 0x2; index++) {
5078 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f801f | (index << 8));
5079 dword &= ~(0x3 << 3);
5080 dword |= (amd_voltage_level_index << 3);
5081 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f801f | (index << 8), dword);
5082 }
5083 for (index = 0; index < 0x1; index++) {
5084 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fc01f | (index << 8));
5085 dword &= ~(0x3 << 3);
5086 dword |= (amd_voltage_level_index << 3);
5087 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fc01f | (index << 8), dword);
5088 }
5089
5090 /* D18F2x9C_x0D0F_4009_dct[1:0][CmpVioLvl, ComparatorAdjust] */
5091 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f4009);
5092 dword &= ~(0x0000c00c);
5093 dword |= (amd_voltage_level_index << 14);
5094 dword |= (amd_voltage_level_index << 2);
5095 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f4009, dword);
5096 }
5097
5098 printk(BIOS_DEBUG, "%s: Done\n", __func__);
5099}
5100
Zheng Baoeb75f652010-04-23 17:32:48 +00005101static void InitPhyCompensation(struct MCTStatStruc *pMCTstat,
5102 struct DCTStatStruc *pDCTstat, u8 dct)
5103{
5104 u8 i;
Timothy Pearson730a0432015-10-16 13:51:51 -05005105 u32 index_reg = 0x98;
Zheng Baoeb75f652010-04-23 17:32:48 +00005106 u32 dev = pDCTstat->dev_dct;
Zheng Baoeb75f652010-04-23 17:32:48 +00005107 u32 valx = 0;
Timothy Pearson730a0432015-10-16 13:51:51 -05005108 uint8_t index;
5109 uint32_t dword;
Zheng Baoeb75f652010-04-23 17:32:48 +00005110 const u8 *p;
5111
Timothy Pearson730a0432015-10-16 13:51:51 -05005112 printk(BIOS_DEBUG, "%s: Start\n", __func__);
Zheng Baoeb75f652010-04-23 17:32:48 +00005113
Timothy Pearson730a0432015-10-16 13:51:51 -05005114 if (is_fam15h()) {
5115 /* Algorithm detailed in the Fam15h BKDG Rev. 3.14 section 2.10.5.3.4 */
5116 uint32_t tx_pre;
5117 uint32_t drive_strength;
5118
5119 /* Program D18F2x9C_x0D0F_E003_dct[1:0][DisAutoComp, DisablePredriverCal] */
5120 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fe003);
5121 dword |= (0x3 << 13);
5122 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fe003, dword);
5123
5124 /* Determine TxPreP/TxPreN for data lanes (Stage 1) */
5125 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x00000000);
5126 drive_strength = (dword >> 20) & 0x7; /* DqsDrvStren */
5127 tx_pre = fam15h_phy_predriver_calibration_code(pDCTstat, dct, drive_strength);
5128
5129 /* Program TxPreP/TxPreN for data lanes (Stage 1) */
5130 for (index = 0; index < 0x9; index++) {
5131 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f0006 | (index << 8));
5132 dword &= ~(0xfff);
5133 dword |= tx_pre;
5134 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f0006 | (index << 8), dword);
Zheng Baoeb75f652010-04-23 17:32:48 +00005135 }
Timothy Pearson730a0432015-10-16 13:51:51 -05005136
5137 /* Determine TxPreP/TxPreN for data lanes (Stage 2) */
5138 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x00000000);
5139 drive_strength = (dword >> 16) & 0x7; /* DataDrvStren */
5140 tx_pre = fam15h_phy_predriver_calibration_code(pDCTstat, dct, drive_strength);
5141
5142 /* Program TxPreP/TxPreN for data lanes (Stage 2) */
5143 for (index = 0; index < 0x9; index++) {
5144 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f000a | (index << 8));
5145 dword &= ~(0xfff);
5146 dword |= tx_pre;
5147 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f000a | (index << 8), dword);
5148 }
5149 for (index = 0; index < 0x9; index++) {
5150 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f0002 | (index << 8));
5151 dword &= ~(0xfff);
5152 dword |= (0x8000 | tx_pre);
5153 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f0002 | (index << 8), dword);
5154 }
5155
5156 /* Determine TxPreP/TxPreN for command/address lines (Stage 1) */
5157 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x00000000);
5158 drive_strength = (dword >> 4) & 0x7; /* CsOdtDrvStren */
5159 tx_pre = fam15h_phy_predriver_cmd_addr_calibration_code(pDCTstat, dct, drive_strength);
5160
5161 /* Program TxPreP/TxPreN for command/address lines (Stage 1) */
5162 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f8006);
5163 dword &= ~(0xfff);
5164 dword |= tx_pre;
5165 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f8006, dword);
5166 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f800a);
5167 dword &= ~(0xfff);
5168 dword |= tx_pre;
5169 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f800a, dword);
5170 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f8002);
5171 dword &= ~(0xfff);
5172 dword |= (0x8000 | tx_pre);
5173 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f8002, dword);
5174
5175 /* Determine TxPreP/TxPreN for command/address lines (Stage 2) */
5176 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x00000000);
5177 drive_strength = (dword >> 8) & 0x7; /* AddrCmdDrvStren */
5178 tx_pre = fam15h_phy_predriver_cmd_addr_calibration_code(pDCTstat, dct, drive_strength);
5179
5180 /* Program TxPreP/TxPreN for command/address lines (Stage 2) */
5181 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f8106);
5182 dword &= ~(0xfff);
5183 dword |= tx_pre;
5184 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f8106, dword);
5185 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f810a);
5186 dword &= ~(0xfff);
5187 dword |= tx_pre;
5188 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f810a, dword);
5189 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fc006);
5190 dword &= ~(0xfff);
5191 dword |= tx_pre;
5192 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fc006, dword);
5193 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fc00a);
5194 dword &= ~(0xfff);
5195 dword |= tx_pre;
5196 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fc00a, dword);
5197 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fc00e);
5198 dword &= ~(0xfff);
5199 dword |= tx_pre;
5200 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fc00e, dword);
5201 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fc012);
5202 dword &= ~(0xfff);
5203 dword |= tx_pre;
5204 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fc012, dword);
5205 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f8102);
5206 dword &= ~(0xfff);
5207 dword |= (0x8000 | tx_pre);
5208 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f8102, dword);
5209
5210 /* Determine TxPreP/TxPreN for command/address lines (Stage 3) */
5211 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x00000000);
5212 drive_strength = (dword >> 0) & 0x7; /* CkeDrvStren */
5213 tx_pre = fam15h_phy_predriver_cmd_addr_calibration_code(pDCTstat, dct, drive_strength);
5214
5215 /* Program TxPreP/TxPreN for command/address lines (Stage 3) */
5216 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fc002);
5217 dword &= ~(0xfff);
5218 dword |= (0x8000 | tx_pre);
5219 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0fc002, dword);
5220
5221 /* Determine TxPreP/TxPreN for clock lines */
5222 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x00000000);
5223 drive_strength = (dword >> 12) & 0x7; /* ClkDrvStren */
5224 tx_pre = fam15h_phy_predriver_clk_calibration_code(pDCTstat, dct, drive_strength);
5225
5226 /* Program TxPreP/TxPreN for clock lines */
5227 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f2002);
5228 dword &= ~(0xfff);
5229 dword |= (0x8000 | tx_pre);
5230 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f2002, dword);
5231 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f2102);
5232 dword &= ~(0xfff);
5233 dword |= (0x8000 | tx_pre);
5234 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f2102, dword);
5235 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f2202);
5236 dword &= ~(0xfff);
5237 dword |= (0x8000 | tx_pre);
5238 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0d0f2202, dword);
5239 } else {
		u32 drvstrength;	/* raw drive strength settings read below */

5240		drvstrength = Get_NB32_index_wait_DCT(dev, dct, index_reg, 0x00);
5241		dword = 0;	/* accumulates the new COMP value */
5242 for (i=0; i < 6; i++) {
5243 switch (i) {
5244 case 0:
5245 case 4:
5246 p = Table_Comp_Rise_Slew_15x;
5247				valx = p[(drvstrength >> 16) & 3];
5248 break;
5249 case 1:
5250 case 5:
5251 p = Table_Comp_Fall_Slew_15x;
5252				valx = p[(drvstrength >> 16) & 3];
5253 break;
5254 case 2:
5255 p = Table_Comp_Rise_Slew_20x;
5256				valx = p[(drvstrength >> 8) & 3];
5257 break;
5258 case 3:
5259 p = Table_Comp_Fall_Slew_20x;
5260				valx = p[(drvstrength >> 8) & 3];
5261 break;
5262 }
5263 dword |= valx << (5 * i);
5264 }
5265
5266 Set_NB32_index_wait_DCT(dev, dct, index_reg, 0x0a, dword);
Zheng Baoeb75f652010-04-23 17:32:48 +00005267 }
5268
Timothy Pearson730a0432015-10-16 13:51:51 -05005269 printk(BIOS_DEBUG, "%s: Done\n", __func__);
Zheng Baoeb75f652010-04-23 17:32:48 +00005270}
5271
5272static void mct_EarlyArbEn_D(struct MCTStatStruc *pMCTstat,
Zheng Bao69436e12011-01-06 02:18:12 +00005273 struct DCTStatStruc *pDCTstat, u8 dct)
Zheng Baoeb75f652010-04-23 17:32:48 +00005274{
Timothy Pearson730a0432015-10-16 13:51:51 -05005275 if (!is_fam15h()) {
5276 u32 reg;
5277 u32 val;
5278 u32 dev = pDCTstat->dev_dct;
Zheng Baoeb75f652010-04-23 17:32:48 +00005279
Timothy Pearson730a0432015-10-16 13:51:51 -05005280 /* GhEnhancement #18429 modified by askar: For low NB CLK :
5281 * Memclk ratio, the DCT may need to arbitrate early to avoid
5282 * unnecessary bubbles.
5283 * bit 19 of F2x[1,0]78 Dram Control Register, set this bit only when
5284		 * NB CLK : Memclk ratio is between 3:1 (inclusive) and 4.5:1 (inclusive)
5285 */
5286 reg = 0x78;
5287 val = Get_NB32_DCT(dev, dct, reg);
Zheng Baoeb75f652010-04-23 17:32:48 +00005288
Timothy Pearson730a0432015-10-16 13:51:51 -05005289 if (pDCTstat->LogicalCPUID & (AMD_DR_Cx | AMD_DR_Dx))
5290 val |= (1 << EarlyArbEn);
5291 else if (CheckNBCOFEarlyArbEn(pMCTstat, pDCTstat))
5292 val |= (1 << EarlyArbEn);
Zheng Baoeb75f652010-04-23 17:32:48 +00005293
Timothy Pearson730a0432015-10-16 13:51:51 -05005294 Set_NB32_DCT(dev, dct, reg, val);
5295 }
Zheng Baoeb75f652010-04-23 17:32:48 +00005296}
5297
5298static u8 CheckNBCOFEarlyArbEn(struct MCTStatStruc *pMCTstat,
5299 struct DCTStatStruc *pDCTstat)
5300{
5301 u32 reg;
5302 u32 val;
5303 u32 tmp;
5304 u32 rem;
5305 u32 dev = pDCTstat->dev_dct;
5306 u32 hi, lo;
5307 u8 NbDid = 0;
5308
5309	/* Check whether the NB COF to MemClk ratio is in the range where
	 * early DCT arbitration should be enabled (3:1 to 4.5:1 inclusive)
	 */
5311
5312 /* 3*(Fn2xD4[NBFid]+4)/(2^NbDid)/(3+Fn2x94[MemClkFreq]) */
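	/* Illustrative arithmetic for the check below (example values, not
	 * read from hardware): with Fn2x94[MemClkFreq] = 3 and NbDid = 0 the
	 * denominator tmp becomes 3 + 3 = 6; with Fn2xD4[NBFid] = 0x0D the
	 * numerator becomes 3 * (0x0D + 3) = 48. The quotient 48 / 6 = 8 is
	 * greater than 4, so the function returns 0.
	 */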
5313 _RDMSR(0xC0010071, &lo, &hi);
5314 if (lo & (1 << 22))
5315 NbDid |= 1;
5316
5317 reg = 0x94;
Timothy Pearson730a0432015-10-16 13:51:51 -05005318 val = Get_NB32_DCT(dev, 0, reg);
Zheng Baoeb75f652010-04-23 17:32:48 +00005319 if (!(val & (1 << MemClkFreqVal)))
Timothy Pearson730a0432015-10-16 13:51:51 -05005320 val = Get_NB32_DCT(dev, 1, reg); /* get the DCT1 value */
Zheng Baoeb75f652010-04-23 17:32:48 +00005321
5322 val &= 0x07;
5323 val += 3;
5324 if (NbDid)
5325 val <<= 1;
5326 tmp = val;
5327
5328 dev = pDCTstat->dev_nbmisc;
5329 reg = 0xD4;
5330 val = Get_NB32(dev, reg);
5331 val &= 0x1F;
5332 val += 3;
5333 val *= 3;
5334	rem = val % tmp;
5335	val = val / tmp;
5336 tmp >>= 1;
5337
5338 /* Yes this could be nicer but this was how the asm was.... */
5339 if (val < 3) { /* NClk:MemClk < 3:1 */
5340 return 0;
5341 } else if (val > 4) { /* NClk:MemClk >= 5:1 */
5342 return 0;
5343 } else if ((val == 4) && (rem > tmp)) { /* NClk:MemClk > 4.5:1 */
5344 return 0;
5345 } else {
5346 return 1; /* 3:1 <= NClk:MemClk <= 4.5:1*/
5347 }
5348}
5349
5350static void mct_ResetDataStruct_D(struct MCTStatStruc *pMCTstat,
5351 struct DCTStatStruc *pDCTstatA)
5352{
5353 u8 Node;
5354 u32 i;
5355 struct DCTStatStruc *pDCTstat;
5356 u32 start, stop;
5357 u8 *p;
5358 u16 host_serv1, host_serv2;
5359
5360 /* Initialize Data structures by clearing all entries to 0 */
5361 p = (u8 *) pMCTstat;
5362 for (i = 0; i < sizeof(struct MCTStatStruc); i++) {
5363 p[i] = 0;
5364 }
5365
5366 for (Node = 0; Node < 8; Node++) {
5367 pDCTstat = pDCTstatA + Node;
5368 host_serv1 = pDCTstat->HostBiosSrvc1;
5369 host_serv2 = pDCTstat->HostBiosSrvc2;
5370
5371 p = (u8 *) pDCTstat;
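		/* The structure is cleared in two ranges so that the fields
		 * between CH_MaxRdLat[2] and CH_D_BC_RCVRDLY[2][4] (apparently
		 * persistent per-node data that must survive this reset) are
		 * left intact, in addition to the HostBiosSrvc words restored
		 * explicitly below.
		 */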
5372 start = 0;
5373 stop = ((u32) &((struct DCTStatStruc *)0)->CH_MaxRdLat[2]);
5374 for (i = start; i < stop ; i++) {
5375 p[i] = 0;
5376 }
5377
5378 start = ((u32) &((struct DCTStatStruc *)0)->CH_D_BC_RCVRDLY[2][4]);
5379 stop = sizeof(struct DCTStatStruc);
5380 for (i = start; i < stop; i++) {
5381 p[i] = 0;
5382 }
5383 pDCTstat->HostBiosSrvc1 = host_serv1;
5384 pDCTstat->HostBiosSrvc2 = host_serv2;
5385 }
5386}
5387
5388static void mct_BeforeDramInit_Prod_D(struct MCTStatStruc *pMCTstat,
Timothy Pearson730a0432015-10-16 13:51:51 -05005389 struct DCTStatStruc *pDCTstat, u8 dct)
5390{
5391 mct_ProgramODT_D(pMCTstat, pDCTstat, dct);
5392}
5393
5394static void mct_ProgramODT_D(struct MCTStatStruc *pMCTstat,
5395 struct DCTStatStruc *pDCTstat, u8 dct)
Zheng Baoeb75f652010-04-23 17:32:48 +00005396{
5397 u8 i;
Timothy Pearson730a0432015-10-16 13:51:51 -05005398 u32 dword;
Zheng Baoeb75f652010-04-23 17:32:48 +00005399 u32 dev = pDCTstat->dev_dct;
5400
Timothy Pearson730a0432015-10-16 13:51:51 -05005401 /* FIXME
5402 * Mainboards need to be able to specify the maximum number of DIMMs installable per channel
5403 * For now assume a maximum of 2 DIMMs per channel can be installed
5404 */
5405 uint8_t MaxDimmsInstallable = 2;
5406
5407 if (is_fam15h()) {
5408 /* Obtain number of DIMMs on channel */
5409 uint8_t dimm_count = pDCTstat->MAdimms[dct];
5410 uint8_t rank_count_dimm0;
5411 uint8_t rank_count_dimm1;
5412 uint32_t odt_pattern_0;
5413 uint32_t odt_pattern_1;
5414 uint32_t odt_pattern_2;
5415 uint32_t odt_pattern_3;
5416 uint8_t write_odt_duration;
5417 uint8_t read_odt_duration;
5418 uint8_t write_odt_delay;
5419 uint8_t read_odt_delay;
5420
5421 /* Select appropriate ODT pattern for installed DIMMs
5422 * Refer to the Fam15h BKDG Rev. 3.14, page 149 onwards
5423 */
5424 if (pDCTstat->C_DCTPtr[dct]->Status[DCT_STATUS_REGISTERED]) {
5425 if (MaxDimmsInstallable == 2) {
5426 if (dimm_count == 1) {
5427 /* 1 DIMM detected */
5428 rank_count_dimm1 = pDCTstat->C_DCTPtr[dct]->DimmRanks[1];
5429 if (rank_count_dimm1 == 1) {
5430 odt_pattern_0 = 0x00000000;
5431 odt_pattern_1 = 0x00000000;
5432 odt_pattern_2 = 0x00000000;
5433 odt_pattern_3 = 0x00020000;
5434 } else if (rank_count_dimm1 == 2) {
5435 odt_pattern_0 = 0x00000000;
5436 odt_pattern_1 = 0x00000000;
5437 odt_pattern_2 = 0x00000000;
5438 odt_pattern_3 = 0x08020000;
5439 } else if (rank_count_dimm1 == 4) {
5440 odt_pattern_0 = 0x00000000;
5441 odt_pattern_1 = 0x00000000;
5442 odt_pattern_2 = 0x020a0000;
5443 odt_pattern_3 = 0x080a0000;
5444 } else {
5445 /* Fallback */
5446 odt_pattern_0 = 0x00000000;
5447 odt_pattern_1 = 0x00000000;
5448 odt_pattern_2 = 0x00000000;
5449 odt_pattern_3 = 0x08020000;
5450 }
5451 } else {
5452 /* 2 DIMMs detected */
5453 rank_count_dimm0 = pDCTstat->C_DCTPtr[dct]->DimmRanks[0];
5454 rank_count_dimm1 = pDCTstat->C_DCTPtr[dct]->DimmRanks[1];
5455 if ((rank_count_dimm0 < 4) && (rank_count_dimm1 < 4)) {
5456 odt_pattern_0 = 0x00000000;
5457 odt_pattern_1 = 0x01010202;
5458 odt_pattern_2 = 0x00000000;
5459 odt_pattern_3 = 0x09030603;
5460 } else if ((rank_count_dimm0 < 4) && (rank_count_dimm1 == 4)) {
5461 odt_pattern_0 = 0x01010000;
5462 odt_pattern_1 = 0x01010a0a;
5463 odt_pattern_2 = 0x01090000;
5464 odt_pattern_3 = 0x01030e0b;
5465 } else if ((rank_count_dimm0 == 4) && (rank_count_dimm1 < 4)) {
5466 odt_pattern_0 = 0x00000202;
5467 odt_pattern_1 = 0x05050202;
5468 odt_pattern_2 = 0x00000206;
5469 odt_pattern_3 = 0x0d070203;
5470 } else if ((rank_count_dimm0 == 4) && (rank_count_dimm1 == 4)) {
5471 odt_pattern_0 = 0x05050a0a;
5472 odt_pattern_1 = 0x05050a0a;
5473 odt_pattern_2 = 0x050d0a0e;
5474 odt_pattern_3 = 0x05070a0b;
5475 } else {
5476 /* Fallback */
5477 odt_pattern_0 = 0x00000000;
5478 odt_pattern_1 = 0x00000000;
5479 odt_pattern_2 = 0x00000000;
5480 odt_pattern_3 = 0x00000000;
5481 }
5482 }
5483 } else {
5484 /* FIXME
5485 * 3 DIMMs per channel UNIMPLEMENTED
5486 */
5487 odt_pattern_0 = 0x00000000;
5488 odt_pattern_1 = 0x00000000;
5489 odt_pattern_2 = 0x00000000;
5490 odt_pattern_3 = 0x00000000;
5491 }
5492 } else if (pDCTstat->C_DCTPtr[dct]->Status[DCT_STATUS_LOAD_REDUCED]) {
5493 /* TODO
5494 * Load reduced dimms UNIMPLEMENTED
5495 */
5496 odt_pattern_0 = 0x00000000;
5497 odt_pattern_1 = 0x00000000;
5498 odt_pattern_2 = 0x00000000;
5499 odt_pattern_3 = 0x00000000;
5500 } else {
5501 if (MaxDimmsInstallable == 2) {
5502 if (dimm_count == 1) {
5503 /* 1 DIMM detected */
5504 rank_count_dimm1 = pDCTstat->C_DCTPtr[dct]->DimmRanks[1];
5505 if (rank_count_dimm1 == 1) {
5506 odt_pattern_0 = 0x00000000;
5507 odt_pattern_1 = 0x00000000;
5508 odt_pattern_2 = 0x00000000;
5509 odt_pattern_3 = 0x00020000;
5510 } else if (rank_count_dimm1 == 2) {
5511 odt_pattern_0 = 0x00000000;
5512 odt_pattern_1 = 0x00000000;
5513 odt_pattern_2 = 0x00000000;
5514 odt_pattern_3 = 0x08020000;
5515 } else {
5516 /* Fallback */
5517 odt_pattern_0 = 0x00000000;
5518 odt_pattern_1 = 0x00000000;
5519 odt_pattern_2 = 0x00000000;
5520 odt_pattern_3 = 0x08020000;
5521 }
5522 } else {
5523 /* 2 DIMMs detected */
5524 odt_pattern_0 = 0x00000000;
5525 odt_pattern_1 = 0x01010202;
5526 odt_pattern_2 = 0x00000000;
5527 odt_pattern_3 = 0x09030603;
5528 }
5529 } else {
5530 /* FIXME
5531 * 3 DIMMs per channel UNIMPLEMENTED
5532 */
5533 odt_pattern_0 = 0x00000000;
5534 odt_pattern_1 = 0x00000000;
5535 odt_pattern_2 = 0x00000000;
5536 odt_pattern_3 = 0x00000000;
5537 }
5538 }
5539
5540 if (pDCTstat->C_DCTPtr[dct]->Status[DCT_STATUS_LOAD_REDUCED]) {
5541 /* TODO
5542 * Load reduced dimms UNIMPLEMENTED
5543 */
5544 write_odt_duration = 0x0;
5545 read_odt_duration = 0x0;
5546 write_odt_delay = 0x0;
5547 read_odt_delay = 0x0;
5548 } else {
5549 uint8_t tcl;
5550 uint8_t tcwl;
5551 tcl = Get_NB32_DCT(dev, dct, 0x200) & 0x1f;
5552 tcwl = Get_NB32_DCT(dev, dct, 0x20c) & 0x1f;
5553
5554 write_odt_duration = 0x6;
5555 read_odt_duration = 0x6;
5556 write_odt_delay = 0x0;
5557 if (tcl > tcwl)
5558 read_odt_delay = tcl - tcwl;
5559 else
5560 read_odt_delay = 0x0;
5561 }
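		/* Example of the delay calculation above (the Tcl/Tcwl fields
		 * are used as raw register values; no assumption is made about
		 * their encoding): Tcl = 0xb and Tcwl = 0x8 delay read ODT
		 * assertion by 3 memory clocks, while Tcwl >= Tcl results in no
		 * extra delay.
		 */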
5562
5563 /* Program ODT pattern */
5564 Set_NB32_DCT(dev, dct, 0x230, odt_pattern_1);
5565 Set_NB32_DCT(dev, dct, 0x234, odt_pattern_0);
5566 Set_NB32_DCT(dev, dct, 0x238, odt_pattern_3);
5567 Set_NB32_DCT(dev, dct, 0x23c, odt_pattern_2);
5568 dword = Get_NB32_DCT(dev, dct, 0x240);
5569 dword &= ~(0x7 << 12); /* WrOdtOnDuration = write_odt_duration */
5570 dword |= (write_odt_duration & 0x7) << 12;
5571 dword &= ~(0x7 << 8); /* WrOdtTrnOnDly = write_odt_delay */
5572 dword |= (write_odt_delay & 0x7) << 8;
5573 dword &= ~(0xf << 4); /* RdOdtOnDuration = read_odt_duration */
5574 dword |= (read_odt_duration & 0xf) << 4;
5575 dword &= ~(0xf); /* RdOdtTrnOnDly = read_odt_delay */
5576 dword |= (read_odt_delay & 0xf);
5577 Set_NB32_DCT(dev, dct, 0x240, dword);
5578 } else if (pDCTstat->LogicalCPUID & AMD_DR_Dx) {
Edward O'Callaghanba363d32014-05-23 05:58:27 +10005579 if (pDCTstat->Speed == 3)
Zheng Baoeb75f652010-04-23 17:32:48 +00005580 dword = 0x00000800;
5581 else
5582 dword = 0x00000000;
5583 for (i=0; i < 2; i++) {
Timothy Pearson730a0432015-10-16 13:51:51 -05005584 Set_NB32_DCT(dev, i, 0x98, 0x0D000030);
5585 Set_NB32_DCT(dev, i, 0x9C, dword);
5586 Set_NB32_DCT(dev, i, 0x98, 0x4D040F30);
Timothy Pearsonb8a355d2015-09-05 17:55:58 -05005587
5588 /* Obtain number of DIMMs on channel */
5589 uint8_t dimm_count = pDCTstat->MAdimms[i];
5590 uint8_t rank_count_dimm0;
5591 uint8_t rank_count_dimm1;
5592 uint32_t odt_pattern_0;
5593 uint32_t odt_pattern_1;
5594 uint32_t odt_pattern_2;
5595 uint32_t odt_pattern_3;
5596
5597 /* Select appropriate ODT pattern for installed DIMMs
Timothy Pearson730a0432015-10-16 13:51:51 -05005598 * Refer to the Fam10h BKDG Rev. 3.62, page 120 onwards
Timothy Pearsonb8a355d2015-09-05 17:55:58 -05005599 */
5600 if (pDCTstat->C_DCTPtr[i]->Status[DCT_STATUS_REGISTERED]) {
5601 if (MaxDimmsInstallable == 2) {
5602 if (dimm_count == 1) {
5603 /* 1 DIMM detected */
5604 rank_count_dimm1 = pDCTstat->C_DCTPtr[i]->DimmRanks[1];
5605 if (rank_count_dimm1 == 1) {
5606 odt_pattern_0 = 0x00000000;
5607 odt_pattern_1 = 0x00000000;
5608 odt_pattern_2 = 0x00000000;
5609 odt_pattern_3 = 0x00020000;
5610 } else if (rank_count_dimm1 == 2) {
5611 odt_pattern_0 = 0x00000000;
5612 odt_pattern_1 = 0x00000000;
5613 odt_pattern_2 = 0x00000000;
5614 odt_pattern_3 = 0x02080000;
5615 } else if (rank_count_dimm1 == 4) {
5616 odt_pattern_0 = 0x00000000;
5617 odt_pattern_1 = 0x00000000;
5618 odt_pattern_2 = 0x020a0000;
5619 odt_pattern_3 = 0x080a0000;
5620 } else {
5621 /* Fallback */
5622 odt_pattern_0 = 0x00000000;
5623 odt_pattern_1 = 0x00000000;
5624 odt_pattern_2 = 0x00000000;
5625 odt_pattern_3 = 0x00000000;
5626 }
5627 } else {
5628 /* 2 DIMMs detected */
5629 rank_count_dimm0 = pDCTstat->C_DCTPtr[i]->DimmRanks[0];
5630 rank_count_dimm1 = pDCTstat->C_DCTPtr[i]->DimmRanks[1];
5631 if ((rank_count_dimm0 < 4) && (rank_count_dimm1 < 4)) {
5632 odt_pattern_0 = 0x00000000;
5633 odt_pattern_1 = 0x01010202;
5634 odt_pattern_2 = 0x00000000;
5635 odt_pattern_3 = 0x09030603;
5636 } else if ((rank_count_dimm0 < 4) && (rank_count_dimm1 == 4)) {
5637 odt_pattern_0 = 0x01010000;
5638 odt_pattern_1 = 0x01010a0a;
5639 odt_pattern_2 = 0x01090000;
5640 odt_pattern_3 = 0x01030e0b;
5641 } else if ((rank_count_dimm0 == 4) && (rank_count_dimm1 < 4)) {
5642 odt_pattern_0 = 0x00000202;
5643 odt_pattern_1 = 0x05050202;
5644 odt_pattern_2 = 0x00000206;
5645 odt_pattern_3 = 0x0d070203;
5646 } else if ((rank_count_dimm0 == 4) && (rank_count_dimm1 == 4)) {
5647 odt_pattern_0 = 0x05050a0a;
5648 odt_pattern_1 = 0x05050a0a;
5649 odt_pattern_2 = 0x050d0a0e;
5650 odt_pattern_3 = 0x05070a0b;
5651 } else {
5652 /* Fallback */
5653 odt_pattern_0 = 0x00000000;
5654 odt_pattern_1 = 0x00000000;
5655 odt_pattern_2 = 0x00000000;
5656 odt_pattern_3 = 0x00000000;
5657 }
5658 }
5659 } else {
5660 /* FIXME
5661 * 3 DIMMs per channel UNIMPLEMENTED
5662 */
5663 odt_pattern_0 = 0x00000000;
5664 odt_pattern_1 = 0x00000000;
5665 odt_pattern_2 = 0x00000000;
5666 odt_pattern_3 = 0x00000000;
5667 }
5668 } else {
5669 if (MaxDimmsInstallable == 2) {
5670 if (dimm_count == 1) {
5671 /* 1 DIMM detected */
5672 rank_count_dimm1 = pDCTstat->C_DCTPtr[i]->DimmRanks[1];
5673 if (rank_count_dimm1 == 1) {
5674 odt_pattern_0 = 0x00000000;
5675 odt_pattern_1 = 0x00000000;
5676 odt_pattern_2 = 0x00000000;
5677 odt_pattern_3 = 0x00020000;
5678 } else if (rank_count_dimm1 == 2) {
5679 odt_pattern_0 = 0x00000000;
5680 odt_pattern_1 = 0x00000000;
5681 odt_pattern_2 = 0x00000000;
5682 odt_pattern_3 = 0x02080000;
5683 } else {
5684 /* Fallback */
5685 odt_pattern_0 = 0x00000000;
5686 odt_pattern_1 = 0x00000000;
5687 odt_pattern_2 = 0x00000000;
5688 odt_pattern_3 = 0x00000000;
5689 }
5690 } else {
5691 /* 2 DIMMs detected */
5692 odt_pattern_0 = 0x00000000;
5693 odt_pattern_1 = 0x01010202;
5694 odt_pattern_2 = 0x00000000;
5695 odt_pattern_3 = 0x09030603;
5696 }
5697 } else {
5698 /* FIXME
5699 * 3 DIMMs per channel UNIMPLEMENTED
5700 */
5701 odt_pattern_0 = 0x00000000;
5702 odt_pattern_1 = 0x00000000;
5703 odt_pattern_2 = 0x00000000;
5704 odt_pattern_3 = 0x00000000;
5705 }
5706 }
5707
5708 /* Program ODT pattern */
Timothy Pearson730a0432015-10-16 13:51:51 -05005709 Set_NB32_index_wait_DCT(dev, i, 0xf0, 0x180, odt_pattern_1);
5710 Set_NB32_index_wait_DCT(dev, i, 0xf0, 0x181, odt_pattern_0);
5711 Set_NB32_index_wait_DCT(dev, i, 0xf0, 0x182, odt_pattern_3);
5712 Set_NB32_index_wait_DCT(dev, i, 0xf0, 0x183, odt_pattern_2);
Zheng Baoeb75f652010-04-23 17:32:48 +00005713 }
5714 }
5715}
5716
Zheng Bao69436e12011-01-06 02:18:12 +00005717static void mct_EnDllShutdownSR(struct MCTStatStruc *pMCTstat,
5718 struct DCTStatStruc *pDCTstat, u8 dct)
5719{
Zheng Bao69436e12011-01-06 02:18:12 +00005720 u32 dev = pDCTstat->dev_dct, val;
5721
5722	/* Write 0000_001Ch to register F2x[1, 0]98_x0D0FE006 and 0000_013Dh to F2x[1, 0]98_x0D0FE007 */
5723 if (pDCTstat->LogicalCPUID & (AMD_DR_DAC2_OR_C3)) {
Timothy Pearson730a0432015-10-16 13:51:51 -05005724 Set_NB32_DCT(dev, dct, 0x9C, 0x1C);
5725 Set_NB32_DCT(dev, dct, 0x98, 0x4D0FE006);
5726 Set_NB32_DCT(dev, dct, 0x9C, 0x13D);
5727 Set_NB32_DCT(dev, dct, 0x98, 0x4D0FE007);
Zheng Bao69436e12011-01-06 02:18:12 +00005728
Timothy Pearson730a0432015-10-16 13:51:51 -05005729 val = Get_NB32_DCT(dev, dct, 0x90);
Zheng Bao69436e12011-01-06 02:18:12 +00005730 val &= ~(1 << 27/* DisDllShutdownSR */);
Timothy Pearson730a0432015-10-16 13:51:51 -05005731 Set_NB32_DCT(dev, dct, 0x90, val);
Zheng Bao69436e12011-01-06 02:18:12 +00005732 }
5733}
5734
Zheng Baoeb75f652010-04-23 17:32:48 +00005735static u32 mct_DisDllShutdownSR(struct MCTStatStruc *pMCTstat,
5736 struct DCTStatStruc *pDCTstat, u32 DramConfigLo, u8 dct)
5737{
Zheng Baoeb75f652010-04-23 17:32:48 +00005738 u32 dev = pDCTstat->dev_dct;
5739
5740 /* Write 0000_07D0h to register F2x[1, 0]98_x4D0FE006 */
Zheng Bao69436e12011-01-06 02:18:12 +00005741 if (pDCTstat->LogicalCPUID & (AMD_DR_DAC2_OR_C3)) {
Timothy Pearson730a0432015-10-16 13:51:51 -05005742 Set_NB32_DCT(dev, dct, 0x9C, 0x7D0);
5743 Set_NB32_DCT(dev, dct, 0x98, 0x4D0FE006);
5744 Set_NB32_DCT(dev, dct, 0x9C, 0x190);
5745 Set_NB32_DCT(dev, dct, 0x98, 0x4D0FE007);
Zheng Bao69436e12011-01-06 02:18:12 +00005746
5747 DramConfigLo |= /* DisDllShutdownSR */ 1 << 27;
Zheng Baoeb75f652010-04-23 17:32:48 +00005748 }
5749
Zheng Bao69436e12011-01-06 02:18:12 +00005750 return DramConfigLo;
Zheng Baoeb75f652010-04-23 17:32:48 +00005751}
5752
5753void mct_SetClToNB_D(struct MCTStatStruc *pMCTstat,
5754 struct DCTStatStruc *pDCTstat)
5755{
5756 u32 lo, hi;
5757 u32 msr;
5758
5759 /* FIXME: Maybe check the CPUID? - not for now. */
5760 /* pDCTstat->LogicalCPUID; */
5761
5762 msr = BU_CFG2;
5763 _RDMSR(msr, &lo, &hi);
5764 lo |= 1 << ClLinesToNbDis;
5765 _WRMSR(msr, lo, hi);
5766}
5767
5768void mct_ClrClToNB_D(struct MCTStatStruc *pMCTstat,
5769 struct DCTStatStruc *pDCTstat)
5770{
5771
5772 u32 lo, hi;
5773 u32 msr;
5774
5775 /* FIXME: Maybe check the CPUID? - not for now. */
5776 /* pDCTstat->LogicalCPUID; */
5777
5778 msr = BU_CFG2;
5779 _RDMSR(msr, &lo, &hi);
5780 if (!pDCTstat->ClToNB_flag)
5781 lo &= ~(1<<ClLinesToNbDis);
5782 _WRMSR(msr, lo, hi);
5783
5784}
5785
5786void mct_SetWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
5787 struct DCTStatStruc *pDCTstat)
5788{
5789 u32 lo, hi;
5790 u32 msr;
5791
5792 /* FIXME: Maybe check the CPUID? - not for now. */
5793 /* pDCTstat->LogicalCPUID; */
5794
5795 msr = BU_CFG;
5796 _RDMSR(msr, &lo, &hi);
5797 hi |= (1 << WbEnhWsbDis_D);
5798 _WRMSR(msr, lo, hi);
5799}
5800
5801void mct_ClrWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
5802 struct DCTStatStruc *pDCTstat)
5803{
5804 u32 lo, hi;
5805 u32 msr;
5806
5807 /* FIXME: Maybe check the CPUID? - not for now. */
5808 /* pDCTstat->LogicalCPUID; */
5809
5810 msr = BU_CFG;
5811 _RDMSR(msr, &lo, &hi);
5812 hi &= ~(1 << WbEnhWsbDis_D);
5813 _WRMSR(msr, lo, hi);
5814}
5815
Zheng Baoeb75f652010-04-23 17:32:48 +00005816void ProgDramMRSReg_D(struct MCTStatStruc *pMCTstat,
5817 struct DCTStatStruc *pDCTstat, u8 dct)
5818{
5819 u32 DramMRS, dword;
5820 u8 byte;
5821
5822 DramMRS = 0;
5823
5824 /* Set chip select CKE control mode */
5825 if (mctGet_NVbits(NV_CKE_CTL)) {
5826 if (pDCTstat->CSPresent == 3) {
5827 u16 word;
5828 word = pDCTstat->DIMMSPDCSE;
5829 if (dct == 0)
5830 word &= 0b01010100;
5831 else
5832 word &= 0b10101000;
5833 if (word == 0)
5834 DramMRS |= 1 << 23;
5835 }
5836 }
Timothy Pearson730a0432015-10-16 13:51:51 -05005837
5838 if (is_fam15h()) {
5839 DramMRS |= (0x1 << 23); /* PchgPDModeSel = 1 */
Zheng Baoeb75f652010-04-23 17:32:48 +00005840 } else {
Timothy Pearson730a0432015-10-16 13:51:51 -05005841 /*
5842 DRAM MRS Register
5843		 DrvImpCtrl: drive impedance control. 01b (34 ohm driver; Ron34 = Rzq/7)
5844 */
5845 DramMRS |= 1 << 2;
5846 /* Dram nominal termination: */
5847 byte = pDCTstat->MAdimms[dct];
5848 if (!(pDCTstat->Status & (1 << SB_Registered))) {
5849 DramMRS |= 1 << 7; /* 60 ohms */
5850 if (byte & 2) {
5851 if (pDCTstat->Speed < 6)
5852 DramMRS |= 1 << 8; /* 40 ohms */
5853 else
5854 DramMRS |= 1 << 9; /* 30 ohms */
5855 }
5856 }
5857 /* Dram dynamic termination: Disable(1DIMM), 120ohm(>=2DIMM) */
5858 if (!(pDCTstat->Status & (1 << SB_Registered))) {
5859 if (byte >= 2) {
5860 if (pDCTstat->Speed == 7)
5861 DramMRS |= 1 << 10;
5862 else
5863 DramMRS |= 1 << 11;
5864 }
5865 } else {
5866 DramMRS |= mct_DramTermDyn_RDimm(pMCTstat, pDCTstat, byte);
5867 }
5868
5869 /* Qoff=0, output buffers enabled */
5870 /* Tcwl */
5871 DramMRS |= (pDCTstat->Speed - 4) << 20;
5872 /* ASR=1, auto self refresh */
5873 /* SRT=0 */
5874 DramMRS |= 1 << 18;
Zheng Baoeb75f652010-04-23 17:32:48 +00005875 }
5876
5877 /* burst length control */
5878 if (pDCTstat->Status & (1 << SB_128bitmode))
5879 DramMRS |= 1 << 1;
Zheng Baoeb75f652010-04-23 17:32:48 +00005880
Timothy Pearson730a0432015-10-16 13:51:51 -05005881 dword = Get_NB32_DCT(pDCTstat->dev_dct, dct, 0x84);
5882 if (is_fam15h())
5883 dword &= ~0x00800003;
5884 else
5885 dword &= ~0x00fc2f8f;
Zheng Baoeb75f652010-04-23 17:32:48 +00005886 dword |= DramMRS;
Timothy Pearson730a0432015-10-16 13:51:51 -05005887 Set_NB32_DCT(pDCTstat->dev_dct, dct, 0x84, dword);
Zheng Baoeb75f652010-04-23 17:32:48 +00005888}
5889
Timothy Pearson730a0432015-10-16 13:51:51 -05005890void mct_SetDramConfigHi_D(struct MCTStatStruc *pMCTstat,
5891 struct DCTStatStruc *pDCTstat, u32 dct, u32 DramConfigHi)
Zheng Baoeb75f652010-04-23 17:32:48 +00005892{
5893 /* Bug#15114: Comp. update interrupted by Freq. change can cause
5894 * subsequent update to be invalid during any MemClk frequency change:
5895 * Solution: From the bug report:
5896 * 1. A software-initiated frequency change should be wrapped into the
5897 * following sequence :
5898 * - a) Disable Compensation (F2[1, 0]9C_x08[30] )
5899 * b) Reset the Begin Compensation bit (D3CMP->COMP_CONFIG[0]) in all the compensation engines
5900 * c) Do frequency change
5901 * d) Enable Compensation (F2[1, 0]9C_x08[30] )
5902 * 2. A software-initiated Disable Compensation should always be
5903 * followed by step b) of the above steps.
5904 * Silicon Status: Fixed In Rev B0
5905 *
5906 * Errata#177: DRAM Phy Automatic Compensation Updates May Be Invalid
5907 * Solution: BIOS should disable the phy automatic compensation prior
5908 * to initiating a memory clock frequency change as follows:
5909 * 1. Disable PhyAutoComp by writing 1'b1 to F2x[1, 0]9C_x08[30]
5910 * 2. Reset the Begin Compensation bits by writing 32'h0 to
5911 * F2x[1, 0]9C_x4D004F00
5912 * 3. Perform frequency change
5913 * 4. Enable PhyAutoComp by writing 1'b0 to F2x[1, 0]9C_08[30]
5914 * In addition, any time software disables the automatic phy
5915 * compensation it should reset the begin compensation bit per step 2.
5916 * Silicon Status: Fixed in DR-B0
5917 */
5918
5919 u32 dev = pDCTstat->dev_dct;
Timothy Pearson730a0432015-10-16 13:51:51 -05005920 u32 index_reg = 0x98;
Zheng Baoeb75f652010-04-23 17:32:48 +00005921 u32 index;
5922
Timothy Pearson730a0432015-10-16 13:51:51 -05005923 uint32_t dword;
Zheng Baoeb75f652010-04-23 17:32:48 +00005924
Timothy Pearson730a0432015-10-16 13:51:51 -05005925 if (is_fam15h()) {
5926 /* Initial setup for frequency change
5927 * 9C_x0000_0004 must be configured before MemClkFreqVal is set
5928 */
Zheng Baoeb75f652010-04-23 17:32:48 +00005929
Timothy Pearson730a0432015-10-16 13:51:51 -05005930 /* Program D18F2x9C_x0D0F_E006_dct[1:0][PllLockTime] = 0x190 */
5931 dword = Get_NB32_index_wait_DCT(pDCTstat->dev_dct, dct, index_reg, 0x0d0fe006);
5932 dword &= ~(0x0000ffff);
5933 dword |= 0x00000190;
5934 Set_NB32_index_wait_DCT(pDCTstat->dev_dct, dct, index_reg, 0x0d0fe006, dword);
Zheng Baoeb75f652010-04-23 17:32:48 +00005935
Timothy Pearson730a0432015-10-16 13:51:51 -05005936 dword = Get_NB32_DCT(dev, dct, 0x94);
5937 dword &= ~(1 << MemClkFreqVal);
5938 Set_NB32_DCT(dev, dct, 0x94, dword);
5939
5940 dword = DramConfigHi;
5941 dword &= ~(1 << MemClkFreqVal);
5942 Set_NB32_DCT(dev, dct, 0x94, dword);
5943
5944 mctGet_PS_Cfg_D(pMCTstat, pDCTstat, dct);
5945 set_2t_configuration(pMCTstat, pDCTstat, dct);
5946 mct_BeforePlatformSpec(pMCTstat, pDCTstat, dct);
5947 mct_PlatformSpec(pMCTstat, pDCTstat, dct);
5948 } else {
5949 index = 0x08;
5950 dword = Get_NB32_index_wait_DCT(dev, dct, index_reg, index);
5951 if (!(dword & (1 << DisAutoComp)))
5952 Set_NB32_index_wait_DCT(dev, dct, index_reg, index, dword | (1 << DisAutoComp));
5953
5954 mct_Wait(100);
5955 }
5956
5957 /* Program the DRAM Configuration High register */
5958 Set_NB32_DCT(dev, dct, 0x94, DramConfigHi);
5959
5960 if (is_fam15h()) {
5961 /* Wait until F2x[1, 0]94[FreqChgInProg]=0. */
5962 do {
5963 dword = Get_NB32_DCT(pDCTstat->dev_dct, dct, 0x94);
5964 } while (dword & (1 << FreqChgInProg));
5965
5966 /* Program D18F2x9C_x0D0F_E006_dct[1:0][PllLockTime] = 0xf */
5967 dword = Get_NB32_index_wait_DCT(pDCTstat->dev_dct, dct, index_reg, 0x0d0fe006);
5968 dword &= ~(0x0000ffff);
5969 dword |= 0x0000000f;
5970 Set_NB32_index_wait_DCT(pDCTstat->dev_dct, dct, index_reg, 0x0d0fe006, dword);
5971 }
Zheng Baoeb75f652010-04-23 17:32:48 +00005972}
5973
5974static void mct_BeforeDQSTrain_D(struct MCTStatStruc *pMCTstat,
5975 struct DCTStatStruc *pDCTstatA)
5976{
Timothy Pearson730a0432015-10-16 13:51:51 -05005977 if (!is_fam15h()) {
5978 u8 Node;
5979 struct DCTStatStruc *pDCTstat;
Zheng Baoeb75f652010-04-23 17:32:48 +00005980
Timothy Pearson730a0432015-10-16 13:51:51 -05005981 /* Errata 178
5982 *
5983 * Bug#15115: Uncertainty In The Sync Chain Leads To Setup Violations
5984 * In TX FIFO
5985 * Solution: BIOS should program DRAM Control Register[RdPtrInit] =
5986 * 5h, (F2x[1, 0]78[3:0] = 5h).
5987 * Silicon Status: Fixed In Rev B0
5988 *
5989 * Bug#15880: Determine validity of reset settings for DDR PHY timing.
5990 * Solution: At least, set WrDqs fine delay to be 0 for DDR3 training.
5991 */
5992 for (Node = 0; Node < 8; Node++) {
5993 pDCTstat = pDCTstatA + Node;
Zheng Baoeb75f652010-04-23 17:32:48 +00005994
Timothy Pearson730a0432015-10-16 13:51:51 -05005995 if (pDCTstat->NodePresent) {
5996 mct_BeforeDQSTrainSamp(pDCTstat); /* only Bx */
5997 mct_ResetDLL_D(pMCTstat, pDCTstat, 0);
5998 mct_ResetDLL_D(pMCTstat, pDCTstat, 1);
5999 }
Xavi Drudis Ferran7cdf1ec2010-09-27 21:08:40 +00006000 }
Zheng Baoeb75f652010-04-23 17:32:48 +00006001 }
6002}
6003
Timothy Pearsonb8a355d2015-09-05 17:55:58 -05006004/* Erratum 350 */
Zheng Baoeb75f652010-04-23 17:32:48 +00006005static void mct_ResetDLL_D(struct MCTStatStruc *pMCTstat,
6006 struct DCTStatStruc *pDCTstat, u8 dct)
6007{
6008 u8 Receiver;
6009 u32 dev = pDCTstat->dev_dct;
Zheng Baoeb75f652010-04-23 17:32:48 +00006010 u32 addr;
6011 u32 lo, hi;
6012 u8 wrap32dis = 0;
6013 u8 valid = 0;
6014
6015 /* Skip reset DLL for B3 */
6016 if (pDCTstat->LogicalCPUID & AMD_DR_B3) {
6017 return;
6018 }
6019
Timothy Pearson730a0432015-10-16 13:51:51 -05006020 /* Skip reset DLL for Family 15h */
6021 if (is_fam15h()) {
6022 return;
6023 }
6024
Zheng Baoeb75f652010-04-23 17:32:48 +00006025 addr = HWCR;
6026 _RDMSR(addr, &lo, &hi);
6027	if (lo & (1 << 17)) {	/* save the old value */
6028 wrap32dis = 1;
6029 }
6030 lo |= (1<<17); /* HWCR.wrap32dis */
6031 /* Setting wrap32dis allows 64-bit memory references in 32bit mode */
6032 _WRMSR(addr, lo, hi);
6033
6034 pDCTstat->Channel = dct;
6035 Receiver = mct_InitReceiver_D(pDCTstat, dct);
6036 /* there are four receiver pairs, loosely associated with chipselects.*/
6037 for (; Receiver < 8; Receiver += 2) {
6038 if (mct_RcvrRankEnabled_D(pMCTstat, pDCTstat, dct, Receiver)) {
6039 addr = mct_GetRcvrSysAddr_D(pMCTstat, pDCTstat, dct, Receiver, &valid);
6040 if (valid) {
6041 mct_Read1LTestPattern_D(pMCTstat, pDCTstat, addr); /* cache fills */
6042
6043 /* Write 0000_8000h to register F2x[1,0]9C_xD080F0C */
Timothy Pearson730a0432015-10-16 13:51:51 -05006044 Set_NB32_index_wait_DCT(dev, dct, 0x98, 0xD080F0C, 0x00008000);
Zheng Baoeb75f652010-04-23 17:32:48 +00006045 mct_Wait(80); /* wait >= 300ns */
6046
6047 /* Write 0000_0000h to register F2x[1,0]9C_xD080F0C */
Timothy Pearson730a0432015-10-16 13:51:51 -05006048 Set_NB32_index_wait_DCT(dev, dct, 0x98, 0xD080F0C, 0x00000000);
Zheng Baoeb75f652010-04-23 17:32:48 +00006049 mct_Wait(800); /* wait >= 2us */
6050 break;
6051 }
6052 }
6053 }
6054
6055	if (!wrap32dis) {
6056 addr = HWCR;
6057 _RDMSR(addr, &lo, &hi);
6058 lo &= ~(1<<17); /* restore HWCR.wrap32dis */
6059 _WRMSR(addr, lo, hi);
6060 }
6061}
6062
6063static void mct_EnableDatIntlv_D(struct MCTStatStruc *pMCTstat,
6064 struct DCTStatStruc *pDCTstat)
6065{
6066 u32 dev = pDCTstat->dev_dct;
6067 u32 val;
6068
6069 /* Enable F2x110[DctDatIntlv] */
6070 /* Call back not required mctHookBeforeDatIntlv_D() */
6071 /* FIXME Skip for Ax */
6072 if (!pDCTstat->GangedMode) {
6073 val = Get_NB32(dev, 0x110);
6074 val |= 1 << 5; /* DctDatIntlv */
6075 Set_NB32(dev, 0x110, val);
6076
6077 /* FIXME Skip for Cx */
6078 dev = pDCTstat->dev_nbmisc;
6079 val = Get_NB32(dev, 0x8C); /* NB Configuration Hi */
6080 val |= 1 << (36-32); /* DisDatMask */
6081 Set_NB32(dev, 0x8C, val);
6082 }
6083}
6084
6085static void SetDllSpeedUp_D(struct MCTStatStruc *pMCTstat,
6086 struct DCTStatStruc *pDCTstat, u8 dct)
6087{
Timothy Pearson730a0432015-10-16 13:51:51 -05006088 if (!is_fam15h()) {
6089 u32 val;
6090 u32 dev = pDCTstat->dev_dct;
Zheng Baoeb75f652010-04-23 17:32:48 +00006091
Timothy Pearson730a0432015-10-16 13:51:51 -05006092 if (pDCTstat->Speed >= mhz_to_memclk_config(800)) { /* DDR1600 and above */
6093 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D080F10 */
6094 Set_NB32_DCT(dev, dct, 0x98, 0x0D080F10);
6095 val = Get_NB32_DCT(dev, dct, 0x9C);
6096			val |= 1 << 13;
6097 Set_NB32_DCT(dev, dct, 0x9C, val);
6098 Set_NB32_DCT(dev, dct, 0x98, 0x4D080F10);
Zheng Baoeb75f652010-04-23 17:32:48 +00006099
Timothy Pearson730a0432015-10-16 13:51:51 -05006100 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D080F11 */
6101 Set_NB32_DCT(dev, dct, 0x98, 0x0D080F11);
6102 val = Get_NB32_DCT(dev, dct, 0x9C);
6103			val |= 1 << 13;
6104 Set_NB32_DCT(dev, dct, 0x9C, val);
6105 Set_NB32_DCT(dev, dct, 0x98, 0x4D080F11);
Zheng Baoeb75f652010-04-23 17:32:48 +00006106
Timothy Pearson730a0432015-10-16 13:51:51 -05006107 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D088F30 */
6108 Set_NB32_DCT(dev, dct, 0x98, 0x0D088F30);
6109 val = Get_NB32_DCT(dev, dct, 0x9C);
6110 val |= 1 < 13;
6111			val |= 1 << 13;
6112 Set_NB32_DCT(dev, dct, 0x98, 0x4D088F30);
Zheng Baoeb75f652010-04-23 17:32:48 +00006113
Timothy Pearson730a0432015-10-16 13:51:51 -05006114 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D08CF30 */
6115 Set_NB32_DCT(dev, dct, 0x98, 0x0D08CF30);
6116 val = Get_NB32_DCT(dev, dct, 0x9C);
6117 val |= 1 < 13;
6118			val |= 1 << 13;
6119 Set_NB32_DCT(dev, dct, 0x98, 0x4D08CF30);
6120 }
Zheng Baoeb75f652010-04-23 17:32:48 +00006121 }
6122}
6123
6124static void SyncSetting(struct DCTStatStruc *pDCTstat)
6125{
6126 /* set F2x78[ChSetupSync] when F2x[1, 0]9C_x04[AddrCmdSetup, CsOdtSetup,
6127 * CkeSetup] setups for one DCT are all 0s and at least one of the setups,
6128 * F2x[1, 0]9C_x04[AddrCmdSetup, CsOdtSetup, CkeSetup], of the other
6129 * controller is 1
6130 */
6131 u32 cha, chb;
6132 u32 dev = pDCTstat->dev_dct;
6133 u32 val;
6134
6135 cha = pDCTstat->CH_ADDR_TMG[0] & 0x0202020;
6136 chb = pDCTstat->CH_ADDR_TMG[1] & 0x0202020;
6137
6138 if ((cha != chb) && ((cha == 0) || (chb == 0))) {
6139 val = Get_NB32(dev, 0x78);
6140 val |= 1 << ChSetupSync;
6141 Set_NB32(dev, 0x78, val);
6142 }
6143}
6144
6145static void AfterDramInit_D(struct DCTStatStruc *pDCTstat, u8 dct) {
6146
6147 u32 val;
Zheng Baoeb75f652010-04-23 17:32:48 +00006148 u32 dev = pDCTstat->dev_dct;
6149
6150 if (pDCTstat->LogicalCPUID & (AMD_DR_B2 | AMD_DR_B3)) {
6151 mct_Wait(10000); /* Wait 50 us*/
6152 val = Get_NB32(dev, 0x110);
6153 if (!(val & (1 << DramEnabled))) {
6154 /* If 50 us expires while DramEnable =0 then do the following */
Timothy Pearson730a0432015-10-16 13:51:51 -05006155 val = Get_NB32_DCT(dev, dct, 0x90);
Zheng Baoeb75f652010-04-23 17:32:48 +00006156 val &= ~(1 << Width128); /* Program Width128 = 0 */
Timothy Pearson730a0432015-10-16 13:51:51 -05006157 Set_NB32_DCT(dev, dct, 0x90, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00006158
Timothy Pearson730a0432015-10-16 13:51:51 -05006159 val = Get_NB32_index_wait_DCT(dev, dct, 0x98, 0x05); /* Perform dummy CSR read to F2x09C_x05 */
Zheng Baoeb75f652010-04-23 17:32:48 +00006160
6161 if (pDCTstat->GangedMode) {
Timothy Pearson730a0432015-10-16 13:51:51 -05006162 val = Get_NB32_DCT(dev, dct, 0x90);
Zheng Baoeb75f652010-04-23 17:32:48 +00006163 val |= 1 << Width128; /* Program Width128 = 0 */
Timothy Pearson730a0432015-10-16 13:51:51 -05006164 Set_NB32_DCT(dev, dct, 0x90, val);
Zheng Baoeb75f652010-04-23 17:32:48 +00006165 }
6166 }
6167 }
6168}
6169
6170/* ==========================================================
6171 * 6-bit Bank Addressing Table
6172 * RR=rows-13 binary
6173 * B=Banks-2 binary
6174 * CCC=Columns-9 binary
6175 * ==========================================================
6176 * DCT CCCBRR Rows Banks Columns 64-bit CS Size
6177 * Encoding
6178 * 0000 000000 13 2 9 128MB
6179 * 0001 001000 13 2 10 256MB
6180 * 0010 001001 14 2 10 512MB
6181 * 0011 010000 13 2 11 512MB
6182 * 0100 001100 13 3 10 512MB
6183 * 0101 001101 14 3 10 1GB
6184 * 0110 010001 14 2 11 1GB
6185 * 0111 001110 15 3 10 2GB
6186 * 1000 010101 14 3 11 2GB
6187 * 1001 010110 15 3 11 4GB
6188 * 1010 001111 16 3 10 4GB
6189 * 1011 010111 16 3 11 8GB
6190 * 1011	010111	16	3	11	8GB
 */
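/* crcCheck() recomputes the DDR3 SPD checksum: a CRC-16 with polynomial
 * 0x1021 over SPD bytes 0-116 (when byte 0 bit 7 is set) or bytes 0-125
 * (when it is clear), compared against the value stored in SPD bytes 126
 * (low byte) and 127 (high byte) as defined by the JEDEC DDR3 SPD
 * specification.
 */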
6191u8 crcCheck(u8 smbaddr)
6192{
6193 u8 byte_use;
6194 u8 Index;
6195 u16 CRC;
6196 u8 byte, i;
6197
6198 byte_use = mctRead_SPD(smbaddr, SPD_ByteUse);
6199 if (byte_use & 0x80)
6200 byte_use = 117;
6201 else
6202 byte_use = 126;
6203
6204 CRC = 0;
6205 for (Index = 0; Index < byte_use; Index ++) {
6206 byte = mctRead_SPD(smbaddr, Index);
6207 CRC ^= byte << 8;
6208 for (i=0; i<8; i++) {
6209 if (CRC & 0x8000) {
6210 CRC <<= 1;
6211 CRC ^= 0x1021;
6212 } else
6213 CRC <<= 1;
6214 }
6215 }
6216 return CRC == (mctRead_SPD(smbaddr, SPD_byte_127) << 8 | mctRead_SPD(smbaddr, SPD_byte_126));
6217}