blob: 1faed5a2e96f64673df5ecfb4d13dbddd0f0c2df [file] [log] [blame]
Zheng Baoeb75f652010-04-23 17:32:48 +00001/*
2 * This file is part of the coreboot project.
3 *
4 * Copyright (C) 2010 Advanced Micro Devices, Inc.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; version 2 of the License.
9 *
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
14 *
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
18 */
19
20/* Description: Main memory controller system configuration for DDR 3 */
21
22/* KNOWN ISSUES - ERRATA
23 *
24 * Trtp is not calculated correctly when the controller is in 64-bit mode, it
25 * is 1 busclock off. No fix planned. The controller is not ordinarily in
26 * 64-bit mode.
27 *
28 * 32 Byte burst not supported. No fix planned. The controller is not
29 * ordinarily in 64-bit mode.
30 *
31 * Trc precision does not use extra Jedec defined fractional component.
 * Instead, Trc (coarse) is rounded up to the nearest 1 ns.
33 *
34 * Mini and Micro DIMM not supported. Only RDIMM, UDIMM, SO-DIMM defined types
35 * supported.
36 */
37
38static u8 ReconfigureDIMMspare_D(struct MCTStatStruc *pMCTstat,
39 struct DCTStatStruc *pDCTstatA);
40static void DQSTiming_D(struct MCTStatStruc *pMCTstat,
41 struct DCTStatStruc *pDCTstatA);
42static void LoadDQSSigTmgRegs_D(struct MCTStatStruc *pMCTstat,
43 struct DCTStatStruc *pDCTstatA);
44static void HTMemMapInit_D(struct MCTStatStruc *pMCTstat,
45 struct DCTStatStruc *pDCTstatA);
46static void MCTMemClr_D(struct MCTStatStruc *pMCTstat,
47 struct DCTStatStruc *pDCTstatA);
48static void DCTMemClr_Init_D(struct MCTStatStruc *pMCTstat,
49 struct DCTStatStruc *pDCTstat);
50static void DCTMemClr_Sync_D(struct MCTStatStruc *pMCTstat,
51 struct DCTStatStruc *pDCTstat);
52static void MCTMemClrSync_D(struct MCTStatStruc *pMCTstat,
53 struct DCTStatStruc *pDCTstatA);
54static u8 NodePresent_D(u8 Node);
55static void SyncDCTsReady_D(struct MCTStatStruc *pMCTstat,
56 struct DCTStatStruc *pDCTstatA);
57static void StartupDCT_D(struct MCTStatStruc *pMCTstat,
58 struct DCTStatStruc *pDCTstat, u8 dct);
59static void ClearDCT_D(struct MCTStatStruc *pMCTstat,
60 struct DCTStatStruc *pDCTstat, u8 dct);
61static u8 AutoCycTiming_D(struct MCTStatStruc *pMCTstat,
62 struct DCTStatStruc *pDCTstat, u8 dct);
63static void GetPresetmaxF_D(struct MCTStatStruc *pMCTstat,
64 struct DCTStatStruc *pDCTstat);
65static void SPDGetTCL_D(struct MCTStatStruc *pMCTstat,
66 struct DCTStatStruc *pDCTstat, u8 dct);
67static u8 AutoConfig_D(struct MCTStatStruc *pMCTstat,
68 struct DCTStatStruc *pDCTstat, u8 dct);
69static u8 PlatformSpec_D(struct MCTStatStruc *pMCTstat,
70 struct DCTStatStruc *pDCTstat, u8 dct);
71static void SPDSetBanks_D(struct MCTStatStruc *pMCTstat,
72 struct DCTStatStruc *pDCTstat, u8 dct);
73static void StitchMemory_D(struct MCTStatStruc *pMCTstat,
74 struct DCTStatStruc *pDCTstat, u8 dct);
75static u16 Get_Fk_D(u8 k);
76static u8 Get_DIMMAddress_D(struct DCTStatStruc *pDCTstat, u8 i);
77static void mct_initDCT(struct MCTStatStruc *pMCTstat,
78 struct DCTStatStruc *pDCTstat);
79static void mct_DramInit(struct MCTStatStruc *pMCTstat,
80 struct DCTStatStruc *pDCTstat, u8 dct);
81static u8 mct_PlatformSpec(struct MCTStatStruc *pMCTstat,
82 struct DCTStatStruc *pDCTstat, u8 dct);
Zheng Bao69436e12011-01-06 02:18:12 +000083static u8 mct_BeforePlatformSpec(struct MCTStatStruc *pMCTstat,
84 struct DCTStatStruc *pDCTstat, u8 dct);
Zheng Baoeb75f652010-04-23 17:32:48 +000085static void mct_SyncDCTsReady(struct DCTStatStruc *pDCTstat);
86static void Get_Trdrd(struct MCTStatStruc *pMCTstat,
87 struct DCTStatStruc *pDCTstat, u8 dct);
88static void mct_AfterGetCLT(struct MCTStatStruc *pMCTstat,
89 struct DCTStatStruc *pDCTstat, u8 dct);
90static u8 mct_SPDCalcWidth(struct MCTStatStruc *pMCTstat,\
91 struct DCTStatStruc *pDCTstat, u8 dct);
92static void mct_AfterStitchMemory(struct MCTStatStruc *pMCTstat,
93 struct DCTStatStruc *pDCTstat, u8 dct);
94static u8 mct_DIMMPresence(struct MCTStatStruc *pMCTstat,
95 struct DCTStatStruc *pDCTstat, u8 dct);
96static void Set_OtherTiming(struct MCTStatStruc *pMCTstat,
97 struct DCTStatStruc *pDCTstat, u8 dct);
98static void Get_Twrwr(struct MCTStatStruc *pMCTstat,
99 struct DCTStatStruc *pDCTstat, u8 dct);
100static void Get_Twrrd(struct MCTStatStruc *pMCTstat,
101 struct DCTStatStruc *pDCTstat, u8 dct);
102static void Get_TrwtTO(struct MCTStatStruc *pMCTstat,
103 struct DCTStatStruc *pDCTstat, u8 dct);
104static void Get_TrwtWB(struct MCTStatStruc *pMCTstat,
105 struct DCTStatStruc *pDCTstat);
106static void Get_DqsRcvEnGross_Diff(struct DCTStatStruc *pDCTstat,
107 u32 dev, u32 index_reg);
108static void Get_WrDatGross_Diff(struct DCTStatStruc *pDCTstat, u8 dct,
109 u32 dev, u32 index_reg);
110static u16 Get_DqsRcvEnGross_MaxMin(struct DCTStatStruc *pDCTstat,
111 u32 dev, u32 index_reg, u32 index);
112static void mct_FinalMCT_D(struct MCTStatStruc *pMCTstat,
113 struct DCTStatStruc *pDCTstat);
114static u16 Get_WrDatGross_MaxMin(struct DCTStatStruc *pDCTstat, u8 dct,
115 u32 dev, u32 index_reg, u32 index);
116static void mct_InitialMCT_D(struct MCTStatStruc *pMCTstat,
117 struct DCTStatStruc *pDCTstat);
118static void mct_init(struct MCTStatStruc *pMCTstat,
119 struct DCTStatStruc *pDCTstat);
120static void clear_legacy_Mode(struct MCTStatStruc *pMCTstat,
121 struct DCTStatStruc *pDCTstat);
122static void mct_HTMemMapExt(struct MCTStatStruc *pMCTstat,
123 struct DCTStatStruc *pDCTstatA);
124static void SetCSTriState(struct MCTStatStruc *pMCTstat,
125 struct DCTStatStruc *pDCTstat, u8 dct);
126static void SetCKETriState(struct MCTStatStruc *pMCTstat,
127 struct DCTStatStruc *pDCTstat, u8 dct);
128static void SetODTTriState(struct MCTStatStruc *pMCTstat,
129 struct DCTStatStruc *pDCTstat, u8 dct);
130static void InitPhyCompensation(struct MCTStatStruc *pMCTstat,
131 struct DCTStatStruc *pDCTstat, u8 dct);
132static u32 mct_NodePresent_D(void);
133static void mct_OtherTiming(struct MCTStatStruc *pMCTstat,
134 struct DCTStatStruc *pDCTstatA);
135static void mct_ResetDataStruct_D(struct MCTStatStruc *pMCTstat,
136 struct DCTStatStruc *pDCTstatA);
137static void mct_EarlyArbEn_D(struct MCTStatStruc *pMCTstat,
Zheng Bao69436e12011-01-06 02:18:12 +0000138 struct DCTStatStruc *pDCTstat, u8 dct);
Zheng Baoeb75f652010-04-23 17:32:48 +0000139static void mct_BeforeDramInit_Prod_D(struct MCTStatStruc *pMCTstat,
140 struct DCTStatStruc *pDCTstat);
141void mct_ClrClToNB_D(struct MCTStatStruc *pMCTstat,
142 struct DCTStatStruc *pDCTstat);
143static u8 CheckNBCOFEarlyArbEn(struct MCTStatStruc *pMCTstat,
144 struct DCTStatStruc *pDCTstat);
145void mct_ClrWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
146 struct DCTStatStruc *pDCTstat);
147static void mct_BeforeDQSTrain_D(struct MCTStatStruc *pMCTstat,
148 struct DCTStatStruc *pDCTstatA);
149static void AfterDramInit_D(struct DCTStatStruc *pDCTstat, u8 dct);
150static void mct_ResetDLL_D(struct MCTStatStruc *pMCTstat,
151 struct DCTStatStruc *pDCTstat, u8 dct);
152static void ProgDramMRSReg_D(struct MCTStatStruc *pMCTstat,
153 struct DCTStatStruc *pDCTstat, u8 dct);
154static void mct_DramInit_Sw_D(struct MCTStatStruc *pMCTstat,
155 struct DCTStatStruc *pDCTstat, u8 dct);
156static u32 mct_DisDllShutdownSR(struct MCTStatStruc *pMCTstat,
157 struct DCTStatStruc *pDCTstat, u32 DramConfigLo, u8 dct);
Zheng Bao69436e12011-01-06 02:18:12 +0000158static void mct_EnDllShutdownSR(struct MCTStatStruc *pMCTstat,
159 struct DCTStatStruc *pDCTstat, u8 dct);
Zheng Baoeb75f652010-04-23 17:32:48 +0000160
Zheng Bao69436e12011-01-06 02:18:12 +0000161static u32 mct_MR1Odt_RDimm(struct MCTStatStruc *pMCTstat,
162 struct DCTStatStruc *pDCTstat, u8 dct, u32 MrsChipSel);
Zheng Baoeb75f652010-04-23 17:32:48 +0000163static u32 mct_DramTermDyn_RDimm(struct MCTStatStruc *pMCTstat,
164 struct DCTStatStruc *pDCTstat, u8 dimm);
165static u32 mct_SetDramConfigMisc2(struct DCTStatStruc *pDCTstat, u8 dct, u32 misc2);
166static void mct_BeforeDQSTrainSamp(struct DCTStatStruc *pDCTstat);
167static void mct_WriteLevelization_HW(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstatA);
168static u8 Get_Latency_Diff(struct MCTStatStruc *pMCTstat,
169 struct DCTStatStruc *pDCTstat, u8 dct);
170static void SyncSetting(struct DCTStatStruc *pDCTstat);
171static u8 crcCheck(u8 smbaddr);
Zheng Bao69436e12011-01-06 02:18:12 +0000172static void mct_ExtMCTConfig_Bx(struct DCTStatStruc *pDCTstat);
173static void mct_ExtMCTConfig_Cx(struct DCTStatStruc *pDCTstat);
Zheng Baoeb75f652010-04-23 17:32:48 +0000174
/*See mctAutoInitMCT header for index relationships to CL and T*/
/* MEMCLK frequency in MHz per pDCTstat->Speed index k; index 0 is unused. */
static const u16 Table_F_k[] = {00,200,266,333,400,533 };
/* Encoded bank-address configuration values; 0x3F presumably marks an
 * unsupported/invalid entry -- TODO confirm against BKDG. */
static const u8 Tab_BankAddr[] = {0x3F,0x01,0x09,0x3F,0x3F,0x11,0x0A,0x19,0x12,0x1A,0x21,0x22,0x23};
/* DCT additional-index offsets, indexed by byte-lane pair (ByteLane >> 1),
 * used when restoring DQS receiver-enable / write-levelization data
 * (see LoadDQSSigTmgRegs_D). */
static const u8 Table_DQSRcvEn_Offset[] = {0x00,0x01,0x10,0x11,0x2};
179
180/****************************************************************************
181 Describe how platform maps MemClk pins to logical DIMMs. The MemClk pins
182 are identified based on BKDG definition of Fn2x88[MemClkDis] bitmap.
183 AGESA will base on this value to disable unused MemClk to save power.
184
 If the MEMCLK_MAPPING table contains all zeroes, AGESA will use the
 default MemClkDis setting based on package type.
187
188 Example:
189 BKDG definition of Fn2x88[MemClkDis] bitmap for AM3 package is like below:
190 Bit AM3/S1g3 pin name
191 0 M[B,A]_CLK_H/L[0]
192 1 M[B,A]_CLK_H/L[1]
193 2 M[B,A]_CLK_H/L[2]
194 3 M[B,A]_CLK_H/L[3]
195 4 M[B,A]_CLK_H/L[4]
196 5 M[B,A]_CLK_H/L[5]
197 6 M[B,A]_CLK_H/L[6]
198 7 M[B,A]_CLK_H/L[7]
199
200 And platform has the following routing:
201 CS0 M[B,A]_CLK_H/L[4]
202 CS1 M[B,A]_CLK_H/L[2]
203 CS2 M[B,A]_CLK_H/L[3]
204 CS3 M[B,A]_CLK_H/L[5]
205
206 Then:
207 ; CS0 CS1 CS2 CS3 CS4 CS5 CS6 CS7
208 MEMCLK_MAPPING EQU 00010000b, 00000100b, 00001000b, 00100000b, 00000000b, 00000000b, 00000000b, 00000000b
209*/
210
/* Note: If you are not sure about the pin mappings at the initial stage, you don't
 * have to disable MemClk. Set all entries in the tables to 0xFF. */
/* Per-chip-select MemClkDis bitmaps (Fn2x88 encoding, one byte per CS) for
 * the supported package types; 0xFF entries leave every MemClk enabled. */
static const u8 Tab_L1CLKDis[] = {0x20, 0x20, 0x10, 0x10, 0x08, 0x08, 0x04, 0x04};
static const u8 Tab_AM3CLKDis[] = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00};
static const u8 Tab_S1CLKDis[] = {0xA2, 0xA2, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
static const u8 Tab_ManualCLKDis[]= {0x10, 0x04, 0x08, 0x20, 0x00, 0x00, 0x00, 0x00};

/* Phy compensation slew-rate lookup tables (rise/fall edges, 2.0x and 1.5x
 * drive strength); 0xFF terminates each table.
 * NOTE(review): exact units/encoding come from the BKDG -- confirm there. */
static const u8 Table_Comp_Rise_Slew_20x[] = {7, 3, 2, 2, 0xFF};
static const u8 Table_Comp_Rise_Slew_15x[] = {7, 7, 3, 2, 0xFF};
static const u8 Table_Comp_Fall_Slew_20x[] = {7, 5, 3, 2, 0xFF};
static const u8 Table_Comp_Fall_Slew_15x[] = {7, 7, 5, 3, 0xFF};
222
/* Top-level DDR3 memory init entry point: probes every node, initializes
 * each node's DCTs, builds the HT memory map, sets CPU/UMA cacheability,
 * runs DQS training, and finishes with ECC setup and memory clear.
 * Restarts the whole sequence (restartinit) on the first pass when DIMM
 * sparing has just been enabled. Dies on any fatal error.
 */
static void mctAutoInitMCT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
{
	/*
	 * Memory may be mapped contiguously all the way up to 4GB (depending on setup
	 * options). It is the responsibility of PCI subsystem to create an uncacheable
	 * IO region below 4GB and to adjust TOP_MEM downward prior to any IO mapping or
	 * accesses. It is the same responsibility of the CPU sub-system prior to
	 * accessing LAPIC.
	 *
	 * Slot Number is an external convention, and is determined by OEM with accompanying
	 * silk screening. OEM may choose to use Slot number convention which is consistent
	 * with DIMM number conventions. All AMD engineering platforms do.
	 *
	 * Build Requirements:
	 * 1. MCT_SEG0_START and MCT_SEG0_END macros to begin and end the code segment,
	 * defined in mcti.inc.
	 *
	 * Run-Time Requirements:
	 * 1. Complete Hypertransport Bus Configuration
	 * 2. SMBus Controller Initialized
	 * 1. BSP in Big Real Mode
	 * 2. Stack at SS:SP, located somewhere between A000:0000 and F000:FFFF
	 * 3. Checksummed or Valid NVRAM bits
	 * 4. MCG_CTL=-1, MC4_CTL_EN=0 for all CPUs
	 * 5. MCi_STS from shutdown/warm reset recorded (if desired) prior to entry
	 * 6. All var MTRRs reset to zero
	 * 7. State of NB_CFG.DisDatMsk set properly on all CPUs
	 * 8. All CPUs at 2Ghz Speed (unless DQS training is not installed).
	 * 9. All cHT links at max Speed/Width (unless DQS training is not installed).
	 *
	 *
	 * Global relationship between index values and item values:
	 *
	 * pDCTstat.CASL pDCTstat.Speed
	 * j CL(j)       k F(k)
	 * --------------------------
	 * 0 2.0         - -
	 * 1 3.0         1 200 Mhz
	 * 2 4.0         2 266 Mhz
	 * 3 5.0         3 333 Mhz
	 * 4 6.0         4 400 Mhz
	 * 5 7.0         5 533 Mhz
	 * 6 8.0         6 667 Mhz
	 * 7 9.0         7 800 Mhz
	 */
	u8 Node, NodesWmem;	/* NodesWmem counts nodes that initialized with usable memory */
	u32 node_sys_base;	/* running system-address base for the next node */

restartinit:
	mctInitMemGPIOs_A_D();		/* Set any required GPIOs*/
	NodesWmem = 0;
	node_sys_base = 0;
	/* Per-node discovery and DCT initialization. */
	for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
		struct DCTStatStruc *pDCTstat;
		pDCTstat = pDCTstatA + Node;
		/* Cache the PCI device handles for this node's functions. */
		pDCTstat->Node_ID = Node;
		pDCTstat->dev_host = PA_HOST(Node);
		pDCTstat->dev_map = PA_MAP(Node);
		pDCTstat->dev_dct = PA_DCT(Node);
		pDCTstat->dev_nbmisc = PA_NBMISC(Node);
		pDCTstat->NodeSysBase = node_sys_base;

		mct_init(pMCTstat, pDCTstat);
		mctNodeIDDebugPort_D();
		pDCTstat->NodePresent = NodePresent_D(Node);
		if (pDCTstat->NodePresent) {		/* See if Node is there*/
			clear_legacy_Mode(pMCTstat, pDCTstat);
			pDCTstat->LogicalCPUID = mctGetLogicalCPUID_D(Node);

			mct_InitialMCT_D(pMCTstat, pDCTstat);

			mctSMBhub_Init(Node);		/* Switch SMBUS crossbar to proper node*/

			mct_initDCT(pMCTstat, pDCTstat);
			if (pDCTstat->ErrCode == SC_FatalErr) {
				goto fatalexit;		/* any fatal errors?*/
			} else if (pDCTstat->ErrCode < SC_StopError) {
				NodesWmem++;
			}
		}	/* if Node present */
		/* Advance the base for the next node past this node's limit,
		 * rounded per the (NodeSysLimit + 2) & ~0x0F expression. */
		node_sys_base = pDCTstat->NodeSysBase;
		node_sys_base += (pDCTstat->NodeSysLimit + 2) & ~0x0F;
	}
	if (NodesWmem == 0) {
		printk(BIOS_DEBUG, "No Nodes?!\n");
		goto fatalexit;
	}

	printk(BIOS_DEBUG, "mctAutoInitMCT_D: SyncDCTsReady_D\n");
	SyncDCTsReady_D(pMCTstat, pDCTstatA);	/* Make sure DCTs are ready for accesses.*/

	printk(BIOS_DEBUG, "mctAutoInitMCT_D: HTMemMapInit_D\n");
	HTMemMapInit_D(pMCTstat, pDCTstatA);	/* Map local memory into system address space.*/
	mctHookAfterHTMap();

	printk(BIOS_DEBUG, "mctAutoInitMCT_D: CPUMemTyping_D\n");
	CPUMemTyping_D(pMCTstat, pDCTstatA);	/* Map dram into WB/UC CPU cacheability */
	mctHookAfterCPU();			/* Setup external northbridge(s) */

	printk(BIOS_DEBUG, "mctAutoInitMCT_D: DQSTiming_D\n");
	DQSTiming_D(pMCTstat, pDCTstatA);	/* Get Receiver Enable and DQS signal timing*/

	printk(BIOS_DEBUG, "mctAutoInitMCT_D: UMAMemTyping_D\n");
	UMAMemTyping_D(pMCTstat, pDCTstatA);	/* Fix up for UMA sizing */

	printk(BIOS_DEBUG, "mctAutoInitMCT_D: :OtherTiming\n");
	mct_OtherTiming(pMCTstat, pDCTstatA);

	/* When DIMM sparing was just enabled on the first pass, redo the whole
	 * initialization so training runs against the spared configuration. */
	if (ReconfigureDIMMspare_D(pMCTstat, pDCTstatA)) { /* RESET# if 1st pass of DIMM spare enabled*/
		goto restartinit;
	}

	InterleaveNodes_D(pMCTstat, pDCTstatA);
	InterleaveChannels_D(pMCTstat, pDCTstatA);

	printk(BIOS_DEBUG, "mctAutoInitMCT_D: ECCInit_D\n");
	if (ECCInit_D(pMCTstat, pDCTstatA)) {	/* Setup ECC control and ECC check-bits*/
		/* ECC enabled: memory must be cleared so check-bits are valid. */
		printk(BIOS_DEBUG, "mctAutoInitMCT_D: MCTMemClr_D\n");
		MCTMemClr_D(pMCTstat,pDCTstatA);
	}

	mct_FinalMCT_D(pMCTstat, pDCTstatA);
	printk(BIOS_DEBUG, "All Done\n");
	return;

fatalexit:
	die("mct_d: fatalexit");
}
352
353static u8 ReconfigureDIMMspare_D(struct MCTStatStruc *pMCTstat,
354 struct DCTStatStruc *pDCTstatA)
355{
356 u8 ret;
357
358 if (mctGet_NVbits(NV_CS_SpareCTL)) {
359 if (MCT_DIMM_SPARE_NO_WARM) {
360 /* Do no warm-reset DIMM spare */
361 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
362 LoadDQSSigTmgRegs_D(pMCTstat, pDCTstatA);
363 ret = 0;
364 } else {
365 mct_ResetDataStruct_D(pMCTstat, pDCTstatA);
366 pMCTstat->GStatus |= 1 << GSB_EnDIMMSpareNW;
367 ret = 1;
368 }
369 } else {
370 /* Do warm-reset DIMM spare */
371 if (mctGet_NVbits(NV_DQSTrainCTL))
372 mctWarmReset_D();
373 ret = 0;
374 }
375 } else {
376 ret = 0;
377 }
378
379 return ret;
380}
381
/* Run the DQS/receiver-enable training sequence (or, when training is
 * disabled, restore previously saved timing values), then clear memory.
 * Skipped entirely while the no-warm-reset DIMM-spare restart is pending.
 */
static void DQSTiming_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
{
	u8 nv_DQSTrainCTL;

	if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
		return;
	}

	nv_DQSTrainCTL = mctGet_NVbits(NV_DQSTrainCTL);
	/* FIXME: BOZO- DQS training every time*/
	/* The NV setting read above is currently overridden: training always runs. */
	nv_DQSTrainCTL = 1;

	mct_BeforeDQSTrain_D(pMCTstat, pDCTstatA);
	phyAssistedMemFnceTraining(pMCTstat, pDCTstatA);

	if (nv_DQSTrainCTL) {
		mctHookBeforeAnyTraining(pMCTstat, pDCTstatA);
		/* TODO: should be in mctHookBeforeAnyTraining */
		/* NOTE(review): MSRs 0x26C-0x26F look like the fixed-range MTRRs
		 * covering E0000h-FFFFFh, set to type 4 (write-through) for
		 * training -- confirm against the BKDG/architecture manual. */
		_WRMSR(0x26C, 0x04040404, 0x04040404);
		_WRMSR(0x26D, 0x04040404, 0x04040404);
		_WRMSR(0x26E, 0x04040404, 0x04040404);
		_WRMSR(0x26F, 0x04040404, 0x04040404);
		mct_WriteLevelization_HW(pMCTstat, pDCTstatA);

		TrainReceiverEn_D(pMCTstat, pDCTstatA, FirstPass);

		mct_TrainDQSPos_D(pMCTstat, pDCTstatA);

		/* Second Pass never used for Barcelona! */
		/* TrainReceiverEn_D(pMCTstat, pDCTstatA, SecondPass); */

		mctSetEccDQSRcvrEn_D(pMCTstat, pDCTstatA);

		/* FIXME - currently uses calculated value TrainMaxReadLatency_D(pMCTstat, pDCTstatA); */
		mctHookAfterAnyTraining();
		mctSaveDQSSigTmg_D();

		MCTMemClr_D(pMCTstat, pDCTstatA);
	} else {
		/* Training disabled: restore timings saved from a prior boot. */
		mctGetDQSSigTmg_D(); /* get values into data structure */
		LoadDQSSigTmgRegs_D(pMCTstat, pDCTstatA); /* load values into registers.*/
		/* mctDoWarmResetMemClr_D(); */
		MCTMemClr_D(pMCTstat, pDCTstatA);
	}
}
428
/* Program previously-determined DQS signal timing into the DCT registers of
 * every node with mapped memory: receiver-enable delays, write-levelization
 * TxDqs values, read/write data timing per DIMM, ECC DQS values, and the
 * per-channel MaxRdLatency.
 */
static void LoadDQSSigTmgRegs_D(struct MCTStatStruc *pMCTstat,
				struct DCTStatStruc *pDCTstatA)
{
	u8 Node, Receiver, Channel, Dir, DIMM;
	u32 dev;
	u32 index_reg;	/* DCT additional-data index/port register (0x98 + 0x100*channel) */
	u32 reg;
	u32 index;
	u32 val;
	u8 ByteLane;
	u8 txdqs;

	for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
		struct DCTStatStruc *pDCTstat;
		pDCTstat = pDCTstatA + Node;

		/* Only nodes that actually map memory have timings to load. */
		if (pDCTstat->DCTSysLimit) {
			dev = pDCTstat->dev_dct;
			for (Channel = 0;Channel < 2; Channel++) {
				/* there are four receiver pairs,
				   loosely associated with chipselects.*/
				index_reg = 0x98 + Channel * 0x100;
				for (Receiver = 0; Receiver < 8; Receiver += 2) {
					/* Set Receiver Enable Values */
					mct_SetRcvrEnDly_D(pDCTstat,
						0, /* RcvrEnDly */
						1, /* FinalValue, From stack */
						Channel,
						Receiver,
						dev, index_reg,
						(Receiver >> 1) * 3 + 0x10, /* Addl_Index */
						2); /* Pass Second Pass ? */
					/* Restore Write levelization training data:
					 * each 32-bit index register holds two byte
					 * lanes (even lane in bits 7:0, odd lane in
					 * bits 23:16). */
					for (ByteLane = 0; ByteLane < 9; ByteLane ++) {
						txdqs = pDCTstat->CH_D_B_TxDqs[Channel][Receiver >> 1][ByteLane];
						index = Table_DQSRcvEn_Offset[ByteLane >> 1];
						index += (Receiver >> 1) * 3 + 0x10 + 0x20; /* Addl_Index */
						val = Get_NB32_index_wait(dev, 0x98 + 0x100*Channel, index);
						if (ByteLane & 1) { /* odd byte lane */
							val &= ~(0xFF << 16);
							val |= txdqs << 16;
						} else {
							val &= ~0xFF;
							val |= txdqs;
						}
						Set_NB32_index_wait(dev, 0x98 + 0x100*Channel, index, val);
					}
				}
			}
			/* Restore ECC byte-lane receiver-enable values. */
			for (Channel = 0; Channel<2; Channel++) {
				SetEccDQSRcvrEn_D(pDCTstat, Channel);
			}

			/* Restore per-DIMM read/write data timing. */
			for (Channel = 0; Channel < 2; Channel++) {
				u8 *p;
				index_reg = 0x98 + Channel * 0x100;

				/* NOTE:
				 * when 400, 533, 667, it will support dimm0/1/2/3,
				 * and set conf for dimm0, hw will copy to dimm1/2/3
				 * set for dimm1, hw will copy to dimm3
				 * Rev A/B only support DIMM0/1 when 800Mhz and above
				 * + 0x100 to next dimm
				 * Rev C support DIMM0/1/2/3 when 800Mhz and above
				 * + 0x100 to next dimm
				 */
				for (DIMM = 0; DIMM < 4; DIMM++) {
					if (DIMM == 0) {
						index = 0;	/* CHA Write Data Timing Low */
					} else {
						/* Per-DIMM registers only exist at
						 * Speed >= 4 (800MHz+, see NOTE above). */
						if (pDCTstat->Speed >= 4) {
							index = 0x100 * DIMM;
						} else {
							break;
						}
					}
					for (Dir = 0; Dir < 2; Dir++) {/* RD/WR */
						p = pDCTstat->CH_D_DIR_B_DQS[Channel][DIMM][Dir];
						val = stream_to_int(p); /* CHA Read Data Timing High */
						Set_NB32_index_wait(dev, index_reg, index+1, val);
						val = stream_to_int(p+4); /* CHA Write Data Timing High */
						Set_NB32_index_wait(dev, index_reg, index+2, val);
						val = *(p+8); /* CHA Write ECC Timing */
						Set_NB32_index_wait(dev, index_reg, index+3, val);
						index += 4;
					}
				}
			}

			/* Program MaxRdLatency (bits 31:22 of F2x78/178) and clear
			 * the DqsRcvEnTrain mode bit. */
			for (Channel = 0; Channel<2; Channel++) {
				reg = 0x78 + Channel * 0x100;
				val = Get_NB32(dev, reg);
				val &= ~(0x3ff<<22);
				val |= ((u32) pDCTstat->CH_MaxRdLat[Channel] << 22);
				val &= ~(1<<DqsRcvEnTrain);
				Set_NB32(dev, reg, val);	/* program MaxRdLatency to correspond with current delay*/
			}
		}
	}
}
529
/* Build the HyperTransport DRAM address map: assign each node's memory a
 * contiguous system-address window, apply the memory-hole strategy below
 * 4GB (HW hole remap or SW node hoist), program the DRAM Base/Limit pairs
 * on node 0, then replicate the map to all other present nodes.
 * Addresses throughout are right-justified by 8 bits (addr[39:8]).
 */
static void HTMemMapInit_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
{
	u8 Node;
	u32 NextBase, BottomIO;
	u8 _MemHoleRemap, DramHoleBase, DramHoleOffset;
	u32 HoleSize, DramSelBaseAddr;

	u32 val;
	u32 base;
	u32 limit;
	u32 dev, devx;	/* dev = node 0 map function; devx = current node's */
	struct DCTStatStruc *pDCTstat;

	_MemHoleRemap = mctGet_NVbits(NV_MemHole);

	/* Hole base: either from NV config or a previously recorded value
	 * (stored as addr[39:8], NV value as addr[31:24]). */
	if (pMCTstat->HoleBase == 0) {
		DramHoleBase = mctGet_NVbits(NV_BottomIO);
	} else {
		DramHoleBase = pMCTstat->HoleBase >> (24-8);
	}

	BottomIO = DramHoleBase << (24-8);

	NextBase = 0;
	pDCTstat = pDCTstatA + 0;
	dev = pDCTstat->dev_map;

	for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
		pDCTstat = pDCTstatA + Node;
		devx = pDCTstat->dev_map;
		DramSelBaseAddr = 0;
		pDCTstat = pDCTstatA + Node; /* ??? */
		if (!pDCTstat->GangedMode) {
			DramSelBaseAddr = pDCTstat->NodeSysLimit - pDCTstat->DCTSysLimit;
			/*In unganged mode, we must add DCT0 and DCT1 to DCTSysLimit */
			val = pDCTstat->NodeSysLimit;
			if ((val & 0xFF) == 0xFE) {
				DramSelBaseAddr++;
				val++;
			}
			pDCTstat->DCTSysLimit = val;
		}

		base = pDCTstat->DCTSysBase;
		limit = pDCTstat->DCTSysLimit;
		if (limit > base) {	/* node has memory */
			/* Relocate this node's window to start at NextBase. */
			base += NextBase;
			limit += NextBase;
			DramSelBaseAddr += NextBase;
			printk(BIOS_DEBUG, " Node: %02x base: %02x limit: %02x BottomIO: %02x\n", Node, base, limit, BottomIO);

			if (_MemHoleRemap) {
				if ((base < BottomIO) && (limit >= BottomIO)) {
					/* HW Dram Remap: this node's memory straddles
					 * the hole, so program F1xF0 to remap the
					 * covered DRAM above 4GB. */
					pDCTstat->Status |= 1 << SB_HWHole;
					pMCTstat->GStatus |= 1 << GSB_HWHole;
					pDCTstat->DCTSysBase = base;
					pDCTstat->DCTSysLimit = limit;
					pDCTstat->DCTHoleBase = BottomIO;
					pMCTstat->HoleBase = BottomIO;
					HoleSize = _4GB_RJ8 - BottomIO; /* HoleSize[39:8] */
					if ((DramSelBaseAddr > 0) && (DramSelBaseAddr < BottomIO))
						base = DramSelBaseAddr;
					val = ((base + HoleSize) >> (24-8)) & 0xFF;
					/* NOTE(review): DramHoleOffset is written here but
					 * never read afterwards -- dead store, kept as-is. */
					DramHoleOffset = val;
					val <<= 8; /* shl 16, rol 24 */
					val |= DramHoleBase << 24;
					val |= 1 << DramHoleValid;
					Set_NB32(devx, 0xF0, val); /* Dram Hole Address Reg */
					pDCTstat->DCTSysLimit += HoleSize;
					base = pDCTstat->DCTSysBase;
					limit = pDCTstat->DCTSysLimit;
				} else if (base == BottomIO) {
					/* SW Node Hoist: node starts exactly at the
					 * hole, so hoist the whole node above 4GB. */
					pMCTstat->GStatus |= 1<<GSB_SpIntRemapHole;
					pDCTstat->Status |= 1<<SB_SWNodeHole;
					pMCTstat->GStatus |= 1<<GSB_SoftHole;
					pMCTstat->HoleBase = base;
					limit -= base;
					base = _4GB_RJ8;
					limit += base;
					pDCTstat->DCTSysBase = base;
					pDCTstat->DCTSysLimit = limit;
				} else {
					/* No Remapping. Normal Contiguous mapping */
					pDCTstat->DCTSysBase = base;
					pDCTstat->DCTSysLimit = limit;
				}
			} else {
				/*No Remapping. Normal Contiguous mapping*/
				pDCTstat->DCTSysBase = base;
				pDCTstat->DCTSysLimit = limit;
			}
			base |= 3;		/* set WE,RE fields*/
			pMCTstat->SysLimit = limit;
		}
		Set_NB32(dev, 0x40 + (Node << 3), base); /* [Node] + Dram Base 0 */

		/* Dram Limit: top address bits plus destination node ID. */
		val = limit & 0xFFFF0000;
		val |= Node;
		Set_NB32(dev, 0x44 + (Node << 3), val);	/* set DstNode */

		printk(BIOS_DEBUG, " Node: %02x base: %02x limit: %02x \n", Node, base, limit);
		limit = pDCTstat->DCTSysLimit;
		if (limit) {
			NextBase = (limit & 0xFFFF0000) + 0x10000;
		}
	}

	/* Copy dram map from Node 0 to Node 1-7 */
	for (Node = 1; Node < MAX_NODES_SUPPORTED; Node++) {
		u32 reg;
		pDCTstat = pDCTstatA + Node;
		devx = pDCTstat->dev_map;

		if (pDCTstat->NodePresent) {
			reg = 0x40;		/*Dram Base 0*/
			do {
				val = Get_NB32(dev, reg);
				Set_NB32(devx, reg, val);
				reg += 4;
			} while ( reg < 0x80);
		} else {
			break;			/* stop at first absent Node */
		}
	}

	/*Copy dram map to F1x120/124*/
	mct_HTMemMapExt(pMCTstat, pDCTstatA);
}
661
662static void MCTMemClr_D(struct MCTStatStruc *pMCTstat,
663 struct DCTStatStruc *pDCTstatA)
664{
665
666 /* Initiates a memory clear operation for all node. The mem clr
Zheng Baoc3af12f2010-10-08 05:08:47 +0000667 * is done in parallel. After the memclr is complete, all processors
Zheng Baoeb75f652010-04-23 17:32:48 +0000668 * status are checked to ensure that memclr has completed.
669 */
670 u8 Node;
671 struct DCTStatStruc *pDCTstat;
672
673 if (!mctGet_NVbits(NV_DQSTrainCTL)){
674 /* FIXME: callback to wrapper: mctDoWarmResetMemClr_D */
675 } else { /* NV_DQSTrainCTL == 1 */
676 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
677 pDCTstat = pDCTstatA + Node;
678
679 if (pDCTstat->NodePresent) {
680 DCTMemClr_Init_D(pMCTstat, pDCTstat);
681 }
682 }
683 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
684 pDCTstat = pDCTstatA + Node;
685
686 if (pDCTstat->NodePresent) {
687 DCTMemClr_Sync_D(pMCTstat, pDCTstat);
688 }
689 }
690 }
691}
692
693static void DCTMemClr_Init_D(struct MCTStatStruc *pMCTstat,
694 struct DCTStatStruc *pDCTstat)
695{
696 u32 val;
697 u32 dev;
698 u32 reg;
699
700 /* Initiates a memory clear operation on one node */
701 if (pDCTstat->DCTSysLimit) {
702 dev = pDCTstat->dev_dct;
703 reg = 0x110;
704
705 do {
706 val = Get_NB32(dev, reg);
707 } while (val & (1 << MemClrBusy));
708
709 val |= (1 << MemClrInit);
710 Set_NB32(dev, reg, val);
711 }
712}
713
714static void MCTMemClrSync_D(struct MCTStatStruc *pMCTstat,
715 struct DCTStatStruc *pDCTstatA)
716{
717 /* Ensures that memory clear has completed on all node.*/
718 u8 Node;
719 struct DCTStatStruc *pDCTstat;
720
721 if (!mctGet_NVbits(NV_DQSTrainCTL)){
722 /* callback to wrapper: mctDoWarmResetMemClr_D */
723 } else { /* NV_DQSTrainCTL == 1 */
724 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
725 pDCTstat = pDCTstatA + Node;
726
727 if (pDCTstat->NodePresent) {
728 DCTMemClr_Sync_D(pMCTstat, pDCTstat);
729 }
730 }
731 }
732}
733
/* Wait for the memory clear on one node to finish, then program the
 * BKDG-recommended value into F2x11C (with FlushWrOnStpGnt for S3).
 */
static void DCTMemClr_Sync_D(struct MCTStatStruc *pMCTstat,
				struct DCTStatStruc *pDCTstat)
{
	u32 val;
	u32 dev = pDCTstat->dev_dct;
	u32 reg;

	/* Ensure that a memory clear operation has completed on one node */
	if (pDCTstat->DCTSysLimit){
		reg = 0x110;

		/* First wait for the busy bit to drop... */
		do {
			val = Get_NB32(dev, reg);
		} while (val & (1 << MemClrBusy));

		/* ...then for the completion status bit to assert. */
		do {
			val = Get_NB32(dev, reg);
		} while (!(val & (1 << Dr_MemClrStatus)));
	}

	/* NOTE(review): this F2x11C write executes even for nodes with no
	 * mapped DRAM (DCTSysLimit == 0) -- looks intentional, but confirm. */
	val = 0x0FE40FC0; /* BKDG recommended */
	val |= MCCH_FlushWrOnStpGnt; /* Set for S3 */
	Set_NB32(dev, 0x11C, val);
}
758
759static u8 NodePresent_D(u8 Node)
760{
761 /*
762 * Determine if a single Hammer Node exists within the network.
763 */
764 u32 dev;
765 u32 val;
766 u32 dword;
767 u8 ret = 0;
768
769 dev = PA_HOST(Node); /*test device/vendor id at host bridge */
770 val = Get_NB32(dev, 0);
771 dword = mct_NodePresent_D(); /* FIXME: BOZO -11001022h rev for F */
772 if (val == dword) { /* AMD Hammer Family CPU HT Configuration */
773 if (oemNodePresent_D(Node, &ret))
774 goto finish;
775 /* Node ID register */
776 val = Get_NB32(dev, 0x60);
777 val &= 0x07;
778 dword = Node;
779 if (val == dword) /* current nodeID = requested nodeID ? */
780 ret = 1;
781 }
782finish:
783 return ret;
784}
785
/* Initialize DRAM on a single DCT of one node: enable DDR3 mode, then run
 * the stage pipeline (DIMM presence -> SPD width -> cycle timing -> auto
 * config -> platform spec -> startup). Any stage returning >= SC_StopError
 * aborts the pipeline and the DRAM interface is disabled for power saving.
 */
static void DCTInit_D(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat, u8 dct)
{
	/*
	 * Initialize DRAM on single Athlon 64/Opteron Node.
	 */
	u8 stopDCTflag;
	u32 val;

	ClearDCT_D(pMCTstat, pDCTstat, dct);
	stopDCTflag = 1;		/*preload flag with 'disable' */
	/* enable DDR3 support */
	val = Get_NB32(pDCTstat->dev_dct, 0x94 + dct * 0x100);
	val |= 1 << Ddr3Mode;
	Set_NB32(pDCTstat->dev_dct, 0x94 + dct * 0x100, val);
	/* Each stage only runs when every previous stage succeeded. */
	if (mct_DIMMPresence(pMCTstat, pDCTstat, dct) < SC_StopError) {
		printk(BIOS_DEBUG, "\t\tDCTInit_D: mct_DIMMPresence Done\n");
		if (mct_SPDCalcWidth(pMCTstat, pDCTstat, dct) < SC_StopError) {
			printk(BIOS_DEBUG, "\t\tDCTInit_D: mct_SPDCalcWidth Done\n");
			if (AutoCycTiming_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
				printk(BIOS_DEBUG, "\t\tDCTInit_D: AutoCycTiming_D Done\n");
				if (AutoConfig_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
					printk(BIOS_DEBUG, "\t\tDCTInit_D: AutoConfig_D Done\n");
					if (PlatformSpec_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
						printk(BIOS_DEBUG, "\t\tDCTInit_D: PlatformSpec_D Done\n");
						stopDCTflag = 0;
						/* Skip actual startup while the no-warm-reset
						 * DIMM-spare restart is pending. */
						if (!(pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW))) {
							printk(BIOS_DEBUG, "\t\tDCTInit_D: StartupDCT_D\n");
							StartupDCT_D(pMCTstat, pDCTstat, dct);	/*yeaahhh! */
						}
					}
				}
			}
		}
	}

	if (stopDCTflag) {
		/* Some stage failed: disable this DRAM interface. */
		u32 reg_off = dct * 0x100;
		val = 1<<DisDramInterface;
		Set_NB32(pDCTstat->dev_dct, reg_off+0x94, val);
		/*To maximize power savings when DisDramInterface=1b,
		  all of the MemClkDis bits should also be set.*/
		val = 0xFF000000;
		Set_NB32(pDCTstat->dev_dct, reg_off+0x88, val);
	} else {
		mct_EnDllShutdownSR(pMCTstat, pDCTstat, dct);
	}
}
833
834static void SyncDCTsReady_D(struct MCTStatStruc *pMCTstat,
835 struct DCTStatStruc *pDCTstatA)
836{
837 /* Wait (and block further access to dram) for all DCTs to be ready,
838 * by polling all InitDram bits and waiting for possible memory clear
839 * operations to be complete. Read MemClkFreqVal bit to see if
840 * the DIMMs are present in this node.
841 */
842 u8 Node;
843 u32 val;
844
845 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
846 struct DCTStatStruc *pDCTstat;
847 pDCTstat = pDCTstatA + Node;
848 mct_SyncDCTsReady(pDCTstat);
849 }
850 /* v6.1.3 */
851 /* re-enable phy compensation engine when dram init is completed on all nodes. */
852 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
853 struct DCTStatStruc *pDCTstat;
854 pDCTstat = pDCTstatA + Node;
855 if (pDCTstat->NodePresent) {
856 if (pDCTstat->DIMMValidDCT[0] > 0 || pDCTstat->DIMMValidDCT[1] > 0) {
857 /* re-enable phy compensation engine when dram init on both DCTs is completed. */
858 val = Get_NB32_index_wait(pDCTstat->dev_dct, 0x98, 0x8);
859 val &= ~(1 << DisAutoComp);
860 Set_NB32_index_wait(pDCTstat->dev_dct, 0x98, 0x8, val);
861 }
862 }
863 }
864 /* wait 750us before any memory access can be made. */
865 mct_Wait(15000);
866}
867
868static void StartupDCT_D(struct MCTStatStruc *pMCTstat,
869 struct DCTStatStruc *pDCTstat, u8 dct)
870{
871 /* Read MemClkFreqVal bit to see if the DIMMs are present in this node.
872 * If the DIMMs are present then set the DRAM Enable bit for this node.
873 *
874 * Setting dram init starts up the DCT state machine, initializes the
875 * dram devices with MRS commands, and kicks off any
876 * HW memory clear process that the chip is capable of. The sooner
877 * that dram init is set for all nodes, the faster the memory system
878 * initialization can complete. Thus, the init loop is unrolled into
Zheng Baoc3af12f2010-10-08 05:08:47 +0000879 * two loops so as to start the processes for non BSP nodes sooner.
Zheng Baoeb75f652010-04-23 17:32:48 +0000880 * This procedure will not wait for the process to finish.
881 * Synchronization is handled elsewhere.
882 */
883 u32 val;
884 u32 dev;
885 u32 reg_off = dct * 0x100;
886
887 dev = pDCTstat->dev_dct;
888 val = Get_NB32(dev, 0x94 + reg_off);
889 if (val & (1<<MemClkFreqVal)) {
890 mctHookBeforeDramInit(); /* generalized Hook */
891 if (!(pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)))
892 mct_DramInit(pMCTstat, pDCTstat, dct);
893 AfterDramInit_D(pDCTstat, dct);
894 mctHookAfterDramInit(); /* generalized Hook*/
895 }
896}
897
898static void ClearDCT_D(struct MCTStatStruc *pMCTstat,
899 struct DCTStatStruc *pDCTstat, u8 dct)
900{
901 u32 reg_end;
902 u32 dev = pDCTstat->dev_dct;
903 u32 reg = 0x40 + 0x100 * dct;
904 u32 val = 0;
905
906 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
907 reg_end = 0x78 + 0x100 * dct;
908 } else {
909 reg_end = 0xA4 + 0x100 * dct;
910 }
911
912 while(reg < reg_end) {
Zheng Bao69436e12011-01-06 02:18:12 +0000913 if ((reg & 0xFF) == 0x90) {
914 if (pDCTstat->LogicalCPUID & AMD_DR_Dx) {
915 val = Get_NB32(dev, reg); /* get DRAMConfigLow */
916 val |= 0x08000000; /* preserve value of DisDllShutdownSR for only Rev.D */
917 }
918 }
Zheng Baoeb75f652010-04-23 17:32:48 +0000919 Set_NB32(dev, reg, val);
Zheng Bao69436e12011-01-06 02:18:12 +0000920 val = 0;
Zheng Baoeb75f652010-04-23 17:32:48 +0000921 reg += 4;
922 }
923
924 val = 0;
925 dev = pDCTstat->dev_map;
926 reg = 0xF0;
927 Set_NB32(dev, reg, val);
928}
929
static void SPD2ndTiming(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
{
	/* Compute the secondary (cycle) timing parameters for one DCT from
	 * the SPDs of all valid DIMMs, take the worst case (largest) of
	 * each, convert to busclocks for the selected memclk, clamp to the
	 * controller's Min_*/Max_* limits, and program the Dram Timing
	 * Low/High registers (0x88/0x8C) plus the Twr field in 0x84 and the
	 * Tfaw field in 0x94.
	 *
	 * Time values are carried scaled by 16 (least count 1/16 ns) so the
	 * SPD Medium TimeBase (MTB) fraction survives integer math.
	 */
	u8 i;
	u16 Twr, Trtp;
	u16 Trp, Trrd, Trcd, Tras, Trc;
	u8 Trfc[4];		/* per logical DIMM: SPD density nibble used as tRFC code */
	u16 Tfaw;
	u32 DramTimingLo, DramTimingHi;
	u8 tCK16x;		/* memclk period in ns*16 */
	u16 Twtr;
	u8 LDIMM;		/* logical DIMM index (DIMM pair, i >> 1) */
	u8 MTB16x;		/* SPD Medium TimeBase in ns*16 */
	u8 byte;
	u32 dword;
	u32 dev;
	u32 reg_off;
	u32 val;
	u16 smbaddr;

	/* Gather all DIMM mini-max values for cycle timing data */
	Trp = 0;
	Trrd = 0;
	Trcd = 0;
	Trtp = 0;
	Tras = 0;
	Trc = 0;
	Twr = 0;
	Twtr = 0;
	for (i=0; i < 4; i++)
		Trfc[i] = 0;
	Tfaw = 0;

	for ( i = 0; i< MAX_DIMMS_SUPPORTED; i++) {
		LDIMM = i >> 1;
		if (pDCTstat->DIMMValid & (1 << i)) {
			smbaddr = Get_DIMMAddress_D(pDCTstat, (dct + i));

			val = mctRead_SPD(smbaddr, SPD_MTBDivisor); /* MTB=Dividend/Divisor */
			MTB16x = ((mctRead_SPD(smbaddr, SPD_MTBDividend) & 0xFF)<<4);
			MTB16x /= val; /* transfer to MTB*16 */
			/* NOTE(review): MTB16x is u8, so an MTB dividend > 15
			 * would overflow the intermediate <<4 here.  Typical
			 * DDR3 SPDs encode 1/8 ns (dividend 1, divisor 8),
			 * which fits - confirm for non-standard SPDs. */

			/* Each timing below: read SPD minimum (in MTB units),
			 * scale to ns*16, and keep the largest across DIMMs. */
			byte = mctRead_SPD(smbaddr, SPD_tRPmin);
			val = byte * MTB16x;
			if (Trp < val)
				Trp = val;

			byte = mctRead_SPD(smbaddr, SPD_tRRDmin);
			val = byte * MTB16x;
			if (Trrd < val)
				Trrd = val;

			byte = mctRead_SPD(smbaddr, SPD_tRCDmin);
			val = byte * MTB16x;
			if (Trcd < val)
				Trcd = val;

			byte = mctRead_SPD(smbaddr, SPD_tRTPmin);
			val = byte * MTB16x;
			if (Trtp < val)
				Trtp = val;

			byte = mctRead_SPD(smbaddr, SPD_tWRmin);
			val = byte * MTB16x;
			if (Twr < val)
				Twr = val;

			byte = mctRead_SPD(smbaddr, SPD_tWTRmin);
			val = byte * MTB16x;
			if (Twtr < val)
				Twtr = val;

			/* tRC: upper nibble (bits 7:4) of SPD_Upper_tRAS_tRC
			 * supplies bits 11:8, SPD_tRCmin the low byte. */
			val = mctRead_SPD(smbaddr, SPD_Upper_tRAS_tRC) & 0xFF;
			val >>= 4;
			val <<= 8;
			val |= mctRead_SPD(smbaddr, SPD_tRCmin) & 0xFF;
			val *= MTB16x;
			if (Trc < val)
				Trc = val;

			/* tRFC is tracked per logical DIMM as the raw SPD
			 * density nibble (largest wins). */
			byte = mctRead_SPD(smbaddr, SPD_Density) & 0xF;
			if (Trfc[LDIMM] < byte)
				Trfc[LDIMM] = byte;

			/* tRAS: low nibble of SPD_Upper_tRAS_tRC supplies
			 * bits 11:8, SPD_tRASmin the low byte. */
			val = mctRead_SPD(smbaddr, SPD_Upper_tRAS_tRC) & 0xF;
			val <<= 8;
			val |= (mctRead_SPD(smbaddr, SPD_tRASmin) & 0xFF);
			val *= MTB16x;
			if (Tras < val)
				Tras = val;

			/* tFAW: 12-bit value from SPD_Upper_tFAW / SPD_tFAWmin. */
			val = mctRead_SPD(smbaddr, SPD_Upper_tFAW) & 0xF;
			val <<= 8;
			val |= mctRead_SPD(smbaddr, SPD_tFAWmin) & 0xFF;
			val *= MTB16x;
			if (Tfaw < val)
				Tfaw = val;
		} /* Dimm Present */
	}

	/* Convert DRAM CycleTiming values and store into DCT structure */
	/* Map DIMMAutoSpeed index to clock period * 16:
	 * 7 -> 20 (1.25 ns), 6 -> 24 (1.5 ns), 5 -> 30 (1.875 ns),
	 * otherwise 40 (2.5 ns).  (cf. tCKproposed16x = 16000/freq in
	 * SPDGetTCL_D.) */
	byte = pDCTstat->DIMMAutoSpeed;
	if (byte == 7)
		tCK16x = 20;
	else if (byte == 6)
		tCK16x = 24;
	else if (byte == 5)
		tCK16x = 30;
	else
		tCK16x = 40;

	/* Notes:
	 1. All secondary time values given in SPDs are in binary with units of ns.
	 2. Some time values are scaled by 16, in order to have least count of 0.25 ns
	 (more accuracy). JEDEC SPD spec. shows which ones are x1 and x4.
	 3. Internally to this SW, cycle time, tCK16x, is scaled by 16 to match time values
	 */

	/* For each parameter: busclocks = ceil(T / tCK16x), clamped to the
	 * controller's [Min_*, Max_*] range, stored in pDCTstat. */

	/* Tras */
	pDCTstat->DIMMTras = (u16)Tras;
	val = Tras / tCK16x;
	if (Tras % tCK16x) { /* round up number of busclocks */
		val++;
	}
	if (val < Min_TrasT)
		val = Min_TrasT;
	else if (val > Max_TrasT)
		val = Max_TrasT;
	pDCTstat->Tras = val;

	/* Trp */
	pDCTstat->DIMMTrp = Trp;
	val = Trp / tCK16x;
	if (Trp % tCK16x) { /* round up number of busclocks */
		val++;
	}
	if (val < Min_TrpT)
		val = Min_TrpT;
	else if (val > Max_TrpT)
		val = Max_TrpT;
	pDCTstat->Trp = val;

	/*Trrd*/
	pDCTstat->DIMMTrrd = Trrd;
	val = Trrd / tCK16x;
	if (Trrd % tCK16x) { /* round up number of busclocks */
		val++;
	}
	if (val < Min_TrrdT)
		val = Min_TrrdT;
	else if (val > Max_TrrdT)
		val = Max_TrrdT;
	pDCTstat->Trrd = val;

	/* Trcd */
	pDCTstat->DIMMTrcd = Trcd;
	val = Trcd / tCK16x;
	if (Trcd % tCK16x) { /* round up number of busclocks */
		val++;
	}
	if (val < Min_TrcdT)
		val = Min_TrcdT;
	else if (val > Max_TrcdT)
		val = Max_TrcdT;
	pDCTstat->Trcd = val;

	/* Trc */
	pDCTstat->DIMMTrc = Trc;
	val = Trc / tCK16x;
	if (Trc % tCK16x) { /* round up number of busclocks */
		val++;
	}
	if (val < Min_TrcT)
		val = Min_TrcT;
	else if (val > Max_TrcT)
		val = Max_TrcT;
	pDCTstat->Trc = val;

	/* Trtp */
	pDCTstat->DIMMTrtp = Trtp;
	val = Trtp / tCK16x;
	if (Trtp % tCK16x) {
		val ++;
	}
	if (val < Min_TrtpT)
		val = Min_TrtpT;
	else if (val > Max_TrtpT)
		val = Max_TrtpT;
	pDCTstat->Trtp = val;

	/* Twr */
	pDCTstat->DIMMTwr = Twr;
	val = Twr / tCK16x;
	if (Twr % tCK16x) { /* round up number of busclocks */
		val++;
	}
	if (val < Min_TwrT)
		val = Min_TwrT;
	else if (val > Max_TwrT)
		val = Max_TwrT;
	pDCTstat->Twr = val;

	/* Twtr */
	pDCTstat->DIMMTwtr = Twtr;
	val = Twtr / tCK16x;
	if (Twtr % tCK16x) { /* round up number of busclocks */
		val++;
	}
	if (val < Min_TwtrT)
		val = Min_TwtrT;
	else if (val > Max_TwtrT)
		val = Max_TwtrT;
	pDCTstat->Twtr = val;

	/* Trfc0-Trfc3 */
	for (i=0; i<4; i++)
		pDCTstat->Trfc[i] = Trfc[i];

	/* Tfaw */
	pDCTstat->DIMMTfaw = Tfaw;
	val = Tfaw / tCK16x;
	if (Tfaw % tCK16x) { /* round up number of busclocks */
		val++;
	}
	if (val < Min_TfawT)
		val = Min_TfawT;
	else if (val > Max_TfawT)
		val = Max_TfawT;
	pDCTstat->Tfaw = val;

	mctAdjustAutoCycTmg_D();

	/* Program DRAM Timing values */
	/* Dram Timing Low (0x88): pack biased field values.  Field bit
	 * positions per the register layout: CL at 3:0, Trcd<<4, Trp<<7,
	 * Trtp<<10, Tras<<12, Trc<<16, Trrd<<22 - see BKDG for the exact
	 * field widths. */
	DramTimingLo = 0; /* Dram Timing Low init */
	val = pDCTstat->CASL - 2; /* pDCTstat.CASL to reg. definition */
	DramTimingLo |= val;

	val = pDCTstat->Trcd - Bias_TrcdT;
	DramTimingLo |= val<<4;

	val = pDCTstat->Trp - Bias_TrpT;
	val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
	DramTimingLo |= val<<7;

	val = pDCTstat->Trtp - Bias_TrtpT;
	DramTimingLo |= val<<10;

	val = pDCTstat->Tras - Bias_TrasT;
	DramTimingLo |= val<<12;

	val = pDCTstat->Trc - Bias_TrcT;
	DramTimingLo |= val<<16;

	val = pDCTstat->Trrd - Bias_TrrdT;
	DramTimingLo |= val<<22;

	/* Dram Timing High (0x8C): Twtr<<8, constant 2 into bits starting
	 * at 16, and the four per-LDIMM Trfc codes packed 3 bits each from
	 * bit 20 (Trfc0 lowest) - field meanings per BKDG. */
	DramTimingHi = 0; /* Dram Timing High init */
	val = pDCTstat->Twtr - Bias_TwtrT;
	DramTimingHi |= val<<8;

	val = 2;
	DramTimingHi |= val<<16;

	val = 0;
	for (i=4;i>0;i--) {
		val <<= 3;
		val |= Trfc[i-1];
	}
	DramTimingHi |= val << 20;

	dev = pDCTstat->dev_dct;
	reg_off = 0x100 * dct;
	/* Twr */
	/* Register encoding collapses 10 -> code 9 and 12 -> code 10 before
	 * biasing; result goes to bits 6:4 of 0x84. */
	val = pDCTstat->Twr;
	if (val == 10)
		val = 9;
	else if (val == 12)
		val = 10;
	val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
	val -= Bias_TwrT;
	val <<= 4;
	dword = Get_NB32(dev, 0x84 + reg_off);
	dword &= ~0x70;
	dword |= val;
	Set_NB32(dev, 0x84 + reg_off, dword);

	/* Tfaw */
	/* Biased, halved, and placed in bits 31:28 of 0x94. */
	val = pDCTstat->Tfaw;
	val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
	val -= Bias_TfawT;
	val >>= 1;
	val <<= 28;
	dword = Get_NB32(dev, 0x94 + reg_off);
	dword &= ~0xf0000000;
	dword |= val;
	Set_NB32(dev, 0x94 + reg_off, dword);

	/* dev = pDCTstat->dev_dct; */
	/* reg_off = 0x100 * dct; */

	if (pDCTstat->Speed > 4) {
		/* Preserve bits 31:24 of 0x88 (the clock-enable byte written
		 * by AutoConfig_D) when running above 400 MHz. */
		val = Get_NB32(dev, 0x88 + reg_off);
		val &= 0xFF000000;
		DramTimingLo |= val;
	}
	Set_NB32(dev, 0x88 + reg_off, DramTimingLo); /*DCT Timing Low*/

	if (pDCTstat->Speed > 4) {
		DramTimingHi |= 1 << DisAutoRefresh;
	}
	DramTimingHi |= 0x000018FF;	/* default low-field values - see BKDG */
	Set_NB32(dev, 0x8c + reg_off, DramTimingHi); /*DCT Timing Hi*/

	/* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
}
1245
1246static u8 AutoCycTiming_D(struct MCTStatStruc *pMCTstat,
1247 struct DCTStatStruc *pDCTstat, u8 dct)
1248{
1249 /* Initialize DCT Timing registers as per DIMM SPD.
1250 * For primary timing (T, CL) use best case T value.
1251 * For secondary timing params., use most aggressive settings
1252 * of slowest DIMM.
1253 *
1254 * There are three components to determining "maximum frequency":
1255 * SPD component, Bus load component, and "Preset" max frequency
1256 * component.
1257 *
1258 * The SPD component is a function of the min cycle time specified
1259 * by each DIMM, and the interaction of cycle times from all DIMMs
1260 * in conjunction with CAS latency. The SPD component only applies
1261 * when user timing mode is 'Auto'.
1262 *
1263 * The Bus load component is a limiting factor determined by electrical
1264 * characteristics on the bus as a result of varying number of device
1265 * loads. The Bus load component is specific to each platform but may
1266 * also be a function of other factors. The bus load component only
1267 * applies when user timing mode is 'Auto'.
1268 *
1269 * The Preset component is subdivided into three items and is
1270 * the minimum of the set: Silicon revision, user limit
1271 * setting when user timing mode is 'Auto' and memclock mode
1272 * is 'Limit', OEM build specification of the maximum
1273 * frequency. The Preset component is only applies when user
1274 * timing mode is 'Auto'.
1275 */
1276
1277 /* Get primary timing (CAS Latency and Cycle Time) */
1278 if (pDCTstat->Speed == 0) {
1279 mctGet_MaxLoadFreq(pDCTstat);
1280
1281 /* and Factor in presets (setup options, Si cap, etc.) */
1282 GetPresetmaxF_D(pMCTstat, pDCTstat);
1283
1284 /* Go get best T and CL as specified by DIMM mfgs. and OEM */
1285 SPDGetTCL_D(pMCTstat, pDCTstat, dct);
1286 /* skip callback mctForce800to1067_D */
1287 pDCTstat->Speed = pDCTstat->DIMMAutoSpeed;
1288 pDCTstat->CASL = pDCTstat->DIMMCASL;
1289
1290 }
1291 mct_AfterGetCLT(pMCTstat, pDCTstat, dct);
1292
1293 SPD2ndTiming(pMCTstat, pDCTstat, dct);
1294
1295 printk(BIOS_DEBUG, "AutoCycTiming: Status %x\n", pDCTstat->Status);
1296 printk(BIOS_DEBUG, "AutoCycTiming: ErrStatus %x\n", pDCTstat->ErrStatus);
1297 printk(BIOS_DEBUG, "AutoCycTiming: ErrCode %x\n", pDCTstat->ErrCode);
1298 printk(BIOS_DEBUG, "AutoCycTiming: Done\n\n");
1299
1300 mctHookAfterAutoCycTmg();
1301
1302 return pDCTstat->ErrCode;
1303}
1304
1305static void GetPresetmaxF_D(struct MCTStatStruc *pMCTstat,
1306 struct DCTStatStruc *pDCTstat)
1307{
1308 /* Get max frequency from OEM platform definition, from any user
1309 * override (limiting) of max frequency, and from any Si Revision
1310 * Specific information. Return the least of these three in
1311 * DCTStatStruc.PresetmaxFreq.
1312 */
Zheng Baof7a999a2010-09-05 05:52:33 +00001313 /* TODO: Set the proper max frequency in wrappers/mcti_d.c. */
Zheng Baoeb75f652010-04-23 17:32:48 +00001314 u16 proposedFreq;
1315 u16 word;
1316
1317 /* Get CPU Si Revision defined limit (NPT) */
Marc Jones471f1032011-06-03 19:59:52 +00001318 proposedFreq = 800; /* Rev F0 programmable max memclock is */
Zheng Baoeb75f652010-04-23 17:32:48 +00001319
1320 /*Get User defined limit if "limit" mode */
1321 if ( mctGet_NVbits(NV_MCTUSRTMGMODE) == 1) {
1322 word = Get_Fk_D(mctGet_NVbits(NV_MemCkVal) + 1);
1323 if (word < proposedFreq)
1324 proposedFreq = word;
1325
1326 /* Get Platform defined limit */
1327 word = mctGet_NVbits(NV_MAX_MEMCLK);
1328 if (word < proposedFreq)
1329 proposedFreq = word;
1330
1331 word = pDCTstat->PresetmaxFreq;
1332 if (word > proposedFreq)
1333 word = proposedFreq;
1334
1335 pDCTstat->PresetmaxFreq = word;
1336 }
1337 /* Check F3xE8[DdrMaxRate] for maximum DRAM data rate support */
1338}
1339
static void SPDGetTCL_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
{
	/* Find the best T and CL primary timing parameter pair, per Mfg.,
	 * for the given set of DIMMs, and store into DCTStatStruc
	 * (.DIMMAutoSpeed and .DIMMCASL). See "Global relationship between
	 * index values and item values" for definition of CAS latency
	 * index (j) and Frequency index (k).
	 *
	 * Implements the JEDEC DDR3 SPD algorithm: intersect supported CAS
	 * latencies (SPD bytes 14/15), take the largest tAAmin (byte 16)
	 * and tCKmin (byte 12) over all DIMMs, then choose CL for the
	 * proposed tCK.  The outer for(;;) runs the selection twice:
	 * first at the target frequency, then at 400 MHz (tCK16x == 40).
	 * All times are scaled by 16 (ns*16), like SPD2ndTiming.
	 */
	u8 i, CASLatLow, CASLatHigh;
	u16 tAAmin16x;		/* largest tAAmin over all DIMMs, ns*16 */
	u8 MTB16x;		/* SPD Medium TimeBase, ns*16 */
	u16 tCKmin16x;		/* largest tCKmin over all DIMMs, ns*16 */
	u16 tCKproposed16x;	/* candidate clock period, ns*16 */
	u8 CLactual, CLdesired, CLT_Fail;

	u8 smbaddr, byte = 0, bytex = 0;

	CASLatLow = 0xFF;
	CASLatHigh = 0xFF;
	tAAmin16x = 0;
	tCKmin16x = 0;
	CLT_Fail = 0;

	for (i = 0; i < MAX_DIMMS_SUPPORTED; i++) {
		if (pDCTstat->DIMMValid & (1 << i)) {
			smbaddr = Get_DIMMAddress_D(pDCTstat, (dct + i));
			/* Step 1: Determine the common set of supported CAS Latency
			 * values for all modules on the memory channel using the CAS
			 * Latencies Supported in SPD bytes 14 and 15.
			 */
			byte = mctRead_SPD(smbaddr, SPD_CASLow);
			CASLatLow &= byte;
			byte = mctRead_SPD(smbaddr, SPD_CASHigh);
			CASLatHigh &= byte;
			/* Step 2: Determine tAAmin(all) which is the largest tAAmin
			   value for all modules on the memory channel (SPD byte 16). */
			byte = mctRead_SPD(smbaddr, SPD_MTBDivisor);

			MTB16x = ((mctRead_SPD(smbaddr, SPD_MTBDividend) & 0xFF)<<4);
			MTB16x /= byte; /* transfer to MTB*16 */

			byte = mctRead_SPD(smbaddr, SPD_tAAmin);
			if (tAAmin16x < byte * MTB16x)
				tAAmin16x = byte * MTB16x;
			/* Step 3: Determine tCKmin(all) which is the largest tCKmin
			   value for all modules on the memory channel (SPD byte 12). */
			byte = mctRead_SPD(smbaddr, SPD_tCKmin);

			if (tCKmin16x < byte * MTB16x)
				tCKmin16x = byte * MTB16x;
		}
	}
	/* calculate tCKproposed16x */
	/* 16000 / freq(MHz) gives the period in ns*16. */
	tCKproposed16x = 16000 / pDCTstat->PresetmaxFreq;
	if (tCKmin16x > tCKproposed16x)
		tCKproposed16x = tCKmin16x;

	/* mctHookTwo1333DimmOverride(); */
	/* For UDIMM, if there are two DDR3-1333 on the same channel,
	   downgrade DDR speed to 1066. */

	/* TODO: get user manual tCK16x(Freq.) and overwrite current tCKproposed16x if manual. */
	/* Snap the proposed period up to the next standard JEDEC tCK and
	 * record the matching frequency index:
	 * 20 -> 7 (1.25 ns), 24 -> 6 (1.5 ns), 30 -> 5 (1.875 ns),
	 * else 40 -> 4 (2.5 ns). */
	if (tCKproposed16x == 20)
		pDCTstat->TargetFreq = 7;
	else if (tCKproposed16x <= 24) {
		pDCTstat->TargetFreq = 6;
		tCKproposed16x = 24;
	}
	else if (tCKproposed16x <= 30) {
		pDCTstat->TargetFreq = 5;
		tCKproposed16x = 30;
	}
	else {
		pDCTstat->TargetFreq = 4;
		tCKproposed16x = 40;
	}
	/* Running through this loop twice:
	   - First time find tCL at target frequency
	   - Second tim find tCL at 400MHz */

	for (;;) {
		CLT_Fail = 0;
		/* Step 4: For a proposed tCK value (tCKproposed) between tCKmin(all) and tCKmax,
		   determine the desired CAS Latency. If tCKproposed is not a standard JEDEC
		   value (2.5, 1.875, 1.5, or 1.25 ns) then tCKproposed must be adjusted to the
		   next lower standard tCK value for calculating CLdesired.
		   CLdesired = ceiling ( tAAmin(all) / tCKproposed )
		   where tAAmin is defined in Byte 16. The ceiling function requires that the
		   quotient be rounded up always. */
		CLdesired = tAAmin16x / tCKproposed16x;
		if (tAAmin16x % tCKproposed16x)
			CLdesired ++;
		/* Step 5: Chose an actual CAS Latency (CLactual) that is greather than or equal
		   to CLdesired and is supported by all modules on the memory channel as
		   determined in step 1. If no such value exists, choose a higher tCKproposed
		   value and repeat steps 4 and 5 until a solution is found. */
		for (i = 0, CLactual = 4; i < 15; i++, CLactual++) {
			/* bit i of the 15-bit CAS mask corresponds to CL = i + 4 */
			if ((CASLatHigh << 8 | CASLatLow) & (1 << i)) {
				if (CLdesired <= CLactual)
					break;
			}
		}
		if (i == 15)
			CLT_Fail = 1;
		/* Step 6: Once the calculation of CLactual is completed, the BIOS must also
		   verify that this CAS Latency value does not exceed tAAmax, which is 20 ns
		   for all DDR3 speed grades, by multiplying CLactual times tCKproposed. If
		   not, choose a lower CL value and repeat steps 5 and 6 until a solution is found. */
		/* 320 = 20 ns in ns*16 units */
		if (CLactual * tCKproposed16x > 320)
			CLT_Fail = 1;
		/* get CL and T */
		if (!CLT_Fail) {
			bytex = CLactual - 2;	/* CL to register encoding */
			if (tCKproposed16x == 20)
				byte = 7;
			else if (tCKproposed16x == 24)
				byte = 6;
			else if (tCKproposed16x == 30)
				byte = 5;
			else
				byte = 4;
		} else {
			/* mctHookManualCLOverride */
			/* TODO: */
			/* NOTE(review): on failure, byte/bytex keep their
			 * previous (possibly initial 0) values and are still
			 * stored below - confirm intended. */
		}

		if (tCKproposed16x != 40) {
			if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
				/* spare-NW: keep the target-frequency result */
				pDCTstat->DIMMAutoSpeed = byte;
				pDCTstat->DIMMCASL = bytex;
				break;
			} else {
				/* remember target CL, then redo at 400 MHz */
				pDCTstat->TargetCASL = bytex;
				tCKproposed16x = 40;
			}
		} else {
			pDCTstat->DIMMAutoSpeed = byte;
			pDCTstat->DIMMCASL = bytex;
			break;
		}
	}

	printk(BIOS_DEBUG, "SPDGetTCL_D: DIMMCASL %x\n", pDCTstat->DIMMCASL);
	printk(BIOS_DEBUG, "SPDGetTCL_D: DIMMAutoSpeed %x\n", pDCTstat->DIMMAutoSpeed);

	printk(BIOS_DEBUG, "SPDGetTCL_D: Status %x\n", pDCTstat->Status);
	printk(BIOS_DEBUG, "SPDGetTCL_D: ErrStatus %x\n", pDCTstat->ErrStatus);
	printk(BIOS_DEBUG, "SPDGetTCL_D: ErrCode %x\n", pDCTstat->ErrCode);
	printk(BIOS_DEBUG, "SPDGetTCL_D: Done\n\n");
}
1491
1492static u8 PlatformSpec_D(struct MCTStatStruc *pMCTstat,
1493 struct DCTStatStruc *pDCTstat, u8 dct)
1494{
1495 u32 dev;
1496 u32 reg;
1497 u32 val;
1498
1499 mctGet_PS_Cfg_D(pMCTstat, pDCTstat, dct);
1500
1501 if (pDCTstat->GangedMode == 1) {
1502 mctGet_PS_Cfg_D(pMCTstat, pDCTstat, 1);
Zheng Bao69436e12011-01-06 02:18:12 +00001503 mct_BeforePlatformSpec(pMCTstat, pDCTstat, 1);
Zheng Baoeb75f652010-04-23 17:32:48 +00001504 }
1505
1506 if ( pDCTstat->_2Tmode == 2) {
1507 dev = pDCTstat->dev_dct;
1508 reg = 0x94 + 0x100 * dct; /* Dram Configuration Hi */
1509 val = Get_NB32(dev, reg);
1510 val |= 1 << 20; /* 2T CMD mode */
1511 Set_NB32(dev, reg, val);
1512 }
1513
Zheng Bao69436e12011-01-06 02:18:12 +00001514 mct_BeforePlatformSpec(pMCTstat, pDCTstat, dct);
Zheng Baoeb75f652010-04-23 17:32:48 +00001515 mct_PlatformSpec(pMCTstat, pDCTstat, dct);
1516 if (pDCTstat->DIMMAutoSpeed == 4)
1517 InitPhyCompensation(pMCTstat, pDCTstat, dct);
1518 mctHookAfterPSCfg();
1519
1520 return pDCTstat->ErrCode;
1521}
1522
static u8 AutoConfig_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
{
	/* Build and program the main DCT configuration registers for one
	 * DCT: Dram Control (0x78), Dram Timing Low clock enables (0x88),
	 * Dram Config Misc/Misc2 (0xA0/0xA8), Dram Config Low (0x90) and
	 * Dram Config High (0x94), after setting bank addressing and
	 * stitching chip-selects into the address space.
	 * Returns this node's ErrCode (SC_StopError aborts early).
	 */
	u32 DramControl, DramTimingLo, Status;
	u32 DramConfigLo, DramConfigHi, DramConfigMisc, DramConfigMisc2;
	u32 val;
	u32 reg_off;
	u32 dev;
	u16 word;
	u32 dword;
	u8 byte;

	DramConfigLo = 0;
	DramConfigHi = 0;
	DramConfigMisc = 0;
	DramConfigMisc2 = 0;

	/* set bank addressing and Masks, plus CS pops */
	SPDSetBanks_D(pMCTstat, pDCTstat, dct);
	if (pDCTstat->ErrCode == SC_StopError)
		goto AutoConfig_exit;

	/* map chip-selects into local address space */
	StitchMemory_D(pMCTstat, pDCTstat, dct);
	InterleaveBanks_D(pMCTstat, pDCTstat, dct);

	/* temp image of status (for convenience). RO usage! */
	Status = pDCTstat->Status;

	dev = pDCTstat->dev_dct;
	reg_off = 0x100 * dct;


	/* Build Dram Control Register Value */
	DramConfigMisc2 = Get_NB32 (dev, 0xA8 + reg_off); /* Dram Config Misc 2 */
	DramControl = Get_NB32 (dev, 0x78 + reg_off); /* Dram Control*/

	/* FIXME: Skip mct_checkForDxSupport */
	/* REV_CALL mct_DoRdPtrInit if not Dx */
	/* RdPtrInit (bits 7:0): 5 for Bx silicon, 6 otherwise */
	if (pDCTstat->LogicalCPUID & AMD_DR_Bx)
		val = 5;
	else
		val = 6;
	DramControl &= ~0xFF;
	DramControl |= val; /* RdPtrInit = 6 for Cx CPU */

	if (mctGet_NVbits(NV_CLKHZAltVidC3))
		DramControl |= 1<<16; /* check */

	DramControl |= 0x00002A00;

	/* FIXME: Skip for Ax versions */
	/* callback not required - if (!mctParityControl_D()) */
	if (Status & (1 << SB_128bitmode))
		DramConfigLo |= 1 << Width128; /* 128-bit mode (normal) */

	/* Set X4Dimm[3:0] - one bit per DIMM pair on this channel; word
	 * steps by 2 over the DIMM slots starting at this dct. */
	word = dct;
	dword = X4Dimm;
	while (word < 8) {
		if (pDCTstat->Dimmx4Present & (1 << word))
			DramConfigLo |= 1 << dword; /* X4Dimm[3:0] */
		word++;
		word++;
		dword++;
	}

	if (!(Status & (1 << SB_Registered)))
		DramConfigLo |= 1 << UnBuffDimm; /* Unbuffered DIMMs */

	/* ECC enable requires: hardware capability, ECC DIMMs present,
	 * and the ECC setup option turned on. */
	if (mctGet_NVbits(NV_ECC_CAP))
		if (Status & (1 << SB_ECCDIMMs))
			if ( mctGet_NVbits(NV_ECC))
				DramConfigLo |= 1 << DimmEcEn;

	DramConfigLo = mct_DisDllShutdownSR(pMCTstat, pDCTstat, DramConfigLo, dct);

	/* Build Dram Config Hi Register Value */
	dword = pDCTstat->Speed;
	DramConfigHi |= dword - 1; /* get MemClk encoding */
	DramConfigHi |= 1 << MemClkFreqVal;

	/* NOTE(review): comment says "x8 Registered DIMMs", but the
	 * condition requires BOTH x4 and x8 DIMMs to be present -
	 * confirm against the BKDG RDqsEn definition. */
	if (Status & (1 << SB_Registered))
		if ((pDCTstat->Dimmx4Present != 0) && (pDCTstat->Dimmx8Present != 0))
			/* set only if x8 Registered DIMMs in System*/
			DramConfigHi |= 1 << RDqsEn;

	if (mctGet_NVbits(NV_CKE_CTL))
		/*Chip Select control of CKE*/
		DramConfigHi |= 1 << 16;

	/* Control Bank Swizzle */
	if (0) /* call back not needed mctBankSwizzleControl_D()) */
		DramConfigHi &= ~(1 << BankSwizzleMode);
	else
		DramConfigHi |= 1 << BankSwizzleMode; /* recommended setting (default) */

	/* Check for Quadrank DIMM presence */
	if ( pDCTstat->DimmQRPresent != 0) {
		byte = mctGet_NVbits(NV_4RANKType);
		if (byte == 2)
			DramConfigHi |= 1 << 17; /* S4 (4-Rank SO-DIMMs) */
		else if (byte == 1)
			DramConfigHi |= 1 << 18; /* R4 (4-Rank Registered DIMMs) */
	}

	if (0) /* call back not needed mctOverrideDcqBypMax_D ) */
		val = mctGet_NVbits(NV_BYPMAX);
	else
		val = 0x0f; /* recommended setting (default) */
	DramConfigHi |= val << 24;	/* DcqBypassMax */

	if (pDCTstat->LogicalCPUID & (AMD_DR_Cx | AMD_DR_Bx))
		DramConfigHi |= 1 << DcqArbBypassEn;

	/* Build MemClkDis Value from Dram Timing Lo and
	   Dram Config Misc Registers
	   1. We will assume that MemClkDis field has been preset prior to this
	   point.
	   2. We will only set MemClkDis bits if a DIMM is NOT present AND if:
	   NV_AllMemClks <>0 AND SB_DiagClks ==0 */

	/* Dram Timing Low (owns Clock Enable bits) */
	DramTimingLo = Get_NB32(dev, 0x88 + reg_off);
	if (mctGet_NVbits(NV_AllMemClks) == 0) {
		/* Special Jedec SPD diagnostic bit - "enable all clocks" */
		if (!(pDCTstat->Status & (1<<SB_DiagClks))) {
			const u8 *p;
			const u32 *q;
			p = Tab_ManualCLKDis;
			q = (u32 *)p;

			/* Pick the package-specific clock-disable table. */
			byte = mctGet_NVbits(NV_PACK_TYPE);
			if (byte == PT_L1)
				p = Tab_L1CLKDis;
			else if (byte == PT_M2 || byte == PT_AS)
				p = Tab_AM3CLKDis;
			else
				p = Tab_S1CLKDis;

			/* Start with all clocks disabled (0xFF) and clear the
			 * disable bits for each populated chip-select. */
			dword = 0;
			byte = 0xFF;
			while(dword < MAX_CS_SUPPORTED) {
				if (pDCTstat->CSPresent & (1<<dword)){
					/* re-enable clocks for the enabled CS */
					val = p[dword];
					byte &= ~val;
				}
				dword++ ;
			}
			DramTimingLo |= byte << 24;	/* MemClkDis byte */
		}
	}

	printk(BIOS_DEBUG, "AutoConfig_D: DramControl: %x\n", DramControl);
	printk(BIOS_DEBUG, "AutoConfig_D: DramTimingLo: %x\n", DramTimingLo);
	printk(BIOS_DEBUG, "AutoConfig_D: DramConfigMisc: %x\n", DramConfigMisc);
	printk(BIOS_DEBUG, "AutoConfig_D: DramConfigMisc2: %x\n", DramConfigMisc2);
	printk(BIOS_DEBUG, "AutoConfig_D: DramConfigLo: %x\n", DramConfigLo);
	printk(BIOS_DEBUG, "AutoConfig_D: DramConfigHi: %x\n", DramConfigHi);

	/* Write Values to the registers */
	Set_NB32(dev, 0x78 + reg_off, DramControl);
	Set_NB32(dev, 0x88 + reg_off, DramTimingLo);
	Set_NB32(dev, 0xA0 + reg_off, DramConfigMisc);
	DramConfigMisc2 = mct_SetDramConfigMisc2(pDCTstat, dct, DramConfigMisc2);
	Set_NB32(dev, 0xA8 + reg_off, DramConfigMisc2);
	Set_NB32(dev, 0x90 + reg_off, DramConfigLo);
	ProgDramMRSReg_D(pMCTstat, pDCTstat, dct);
	/* merge current 0x94 contents into DramConfigHi before writing */
	dword = Get_NB32(dev, 0x94 + reg_off);
	DramConfigHi |= dword;
	mct_SetDramConfigHi_D(pDCTstat, dct, DramConfigHi);
	mct_EarlyArbEn_D(pMCTstat, pDCTstat, dct);
	mctHookAfterAutoCfg();

	/* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */

	printk(BIOS_DEBUG, "AutoConfig: Status %x\n", pDCTstat->Status);
	printk(BIOS_DEBUG, "AutoConfig: ErrStatus %x\n", pDCTstat->ErrStatus);
	printk(BIOS_DEBUG, "AutoConfig: ErrCode %x\n", pDCTstat->ErrCode);
	printk(BIOS_DEBUG, "AutoConfig: Done\n\n");
AutoConfig_exit:
	return pDCTstat->ErrCode;
}
1706
static void SPDSetBanks_D(struct MCTStatStruc *pMCTstat,
				struct DCTStatStruc *pDCTstat, u8 dct)
{
	/* Set bank addressing, program Mask values and build a chip-select
	 * population map. This routine programs PCI 0:24N:2x80 config register
	 * and PCI 0:24N:2x60,64,68,6C config registers (CS Mask 0-3).
	 *
	 * For each even chip-select with a valid DIMM, the SPD geometry
	 * (rows/cols/banks/ranks) is translated into a bank-address code
	 * (Tab_BankAddr lookup) and a CS address mask; CSPresent gets one
	 * bit per populated rank.  Tri-state controls and the final bank
	 * addressing register are programmed at the end.
	 */
	u8 ChipSel, Rows, Cols, Ranks, Banks;
	u32 BankAddrReg, csMask;

	u32 val;
	u32 reg;
	u32 dev;
	u32 reg_off;
	u8 byte;
	u16 word;
	u32 dword;
	u16 smbaddr;

	dev = pDCTstat->dev_dct;
	reg_off = 0x100 * dct;

	BankAddrReg = 0;
	for (ChipSel = 0; ChipSel < MAX_CS_SUPPORTED; ChipSel+=2) {
		byte = ChipSel;
		/* NOTE(review): in 64-bit muxed mode CS 4+ maps onto DIMM
		 * index ChipSel-3 - confirm against the muxed-mode DIMM
		 * numbering. */
		if ((pDCTstat->Status & (1 << SB_64MuxedMode)) && ChipSel >=4)
			byte -= 3;

		if (pDCTstat->DIMMValid & (1<<byte)) {
			smbaddr = Get_DIMMAddress_D(pDCTstat, (ChipSel + dct));

			byte = mctRead_SPD(smbaddr, SPD_Addressing);
			Rows = (byte >> 3) & 0x7; /* Rows:0b=12-bit,... */
			Cols = byte & 0x7; /* Cols:0b=9-bit,... */

			byte = mctRead_SPD(smbaddr, SPD_Density);
			Banks = (byte >> 4) & 7; /* Banks:0b=3-bit,... */

			byte = mctRead_SPD(smbaddr, SPD_Organization);
			Ranks = ((byte >> 3) & 7) + 1;

			/* Configure Bank encoding
			 * Use a 6-bit key into a lookup table.
			 * Key (index) = RRRBCC, where CC is the number of Columns minus 9,
			 * RRR is the number of Rows minus 12, and B is the number of banks
			 * minus 3.
			 */
			byte = Cols;
			if (Banks == 1)
				byte |= 4;

			byte |= Rows << 3; /* RRRBCC internal encode */

			for (dword=0; dword < 13; dword++) {
				if (byte == Tab_BankAddr[dword])
					break;
			}

			/* unsupported geometry: leave this CS unprogrammed */
			if (dword > 12)
				continue;

			/* bit no. of CS field in address mapping reg.*/
			dword <<= (ChipSel<<1);
			BankAddrReg |= dword;

			/* Mask value=(2pow(rows+cols+banks+3)-1)>>8,
			   or 2pow(rows+cols+banks-5)-1*/
			csMask = 0;

			byte = Rows + Cols; /* cl=rows+cols*/
			byte += 21; /* row:12+col:9 */
			byte -= 2; /* 3 banks - 5 */

			if (pDCTstat->Status & (1 << SB_128bitmode))
				byte++; /* double mask size if in 128-bit mode*/

			csMask |= 1 << byte;
			csMask--;

			/*set ChipSelect population indicator even bits*/
			pDCTstat->CSPresent |= (1<<ChipSel);
			if (Ranks >= 2)
				/*set ChipSelect population indicator odd bits*/
				pDCTstat->CSPresent |= 1 << (ChipSel + 1);

			reg = 0x60+(ChipSel<<1) + reg_off; /*Dram CS Mask Register */
			val = csMask;
			val &= 0x1FF83FE0; /* Mask out reserved bits.*/
			Set_NB32(dev, reg, val);
		} else {
			/* DIMM absent: record a tested-bad CS if SPD said so */
			if (pDCTstat->DIMMSPDCSE & (1<<ChipSel))
				pDCTstat->CSTestFail |= (1<<ChipSel);
		} /* if DIMMValid*/
	} /* while ChipSel*/

	SetCSTriState(pMCTstat, pDCTstat, dct);
	SetCKETriState(pMCTstat, pDCTstat, dct);
	SetODTTriState(pMCTstat, pDCTstat, dct);

	if (pDCTstat->Status & (1 << SB_128bitmode)) {
		SetCSTriState(pMCTstat, pDCTstat, 1); /* force dct1) */
		SetCKETriState(pMCTstat, pDCTstat, 1); /* force dct1) */
		SetODTTriState(pMCTstat, pDCTstat, 1); /* force dct1) */
	}

	word = pDCTstat->CSPresent;
	mctGetCS_ExcludeMap(); /* mask out specified chip-selects */
	word ^= pDCTstat->CSPresent;
	pDCTstat->CSTestFail |= word; /* enable ODT to disabled DIMMs */
	if (!pDCTstat->CSPresent)
		pDCTstat->ErrCode = SC_StopError;

	reg = 0x80 + reg_off; /* Bank Addressing Register */
	Set_NB32(dev, reg, BankAddrReg);

	pDCTstat->CSPresent_DCT[dct] = pDCTstat->CSPresent;
	/* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */

	printk(BIOS_DEBUG, "SPDSetBanks: CSPresent %x\n", pDCTstat->CSPresent_DCT[dct]);
	printk(BIOS_DEBUG, "SPDSetBanks: Status %x\n", pDCTstat->Status);
	printk(BIOS_DEBUG, "SPDSetBanks: ErrStatus %x\n", pDCTstat->ErrStatus);
	printk(BIOS_DEBUG, "SPDSetBanks: ErrCode %x\n", pDCTstat->ErrCode);
	printk(BIOS_DEBUG, "SPDSetBanks: Done\n\n");
}
1831
1832static void SPDCalcWidth_D(struct MCTStatStruc *pMCTstat,
1833 struct DCTStatStruc *pDCTstat)
1834{
1835 /* Per SPDs, check the symmetry of DIMM pairs (DIMM on Channel A
1836 * matching with DIMM on Channel B), the overall DIMM population,
1837 * and determine the width mode: 64-bit, 64-bit muxed, 128-bit.
1838 */
1839 u8 i;
1840 u8 smbaddr, smbaddr1;
1841 u8 byte, byte1;
1842
1843 /* Check Symmetry of Channel A and Channel B DIMMs
1844 (must be matched for 128-bit mode).*/
1845 for (i=0; i < MAX_DIMMS_SUPPORTED; i += 2) {
1846 if ((pDCTstat->DIMMValid & (1 << i)) && (pDCTstat->DIMMValid & (1<<(i+1)))) {
1847 smbaddr = Get_DIMMAddress_D(pDCTstat, i);
1848 smbaddr1 = Get_DIMMAddress_D(pDCTstat, i+1);
1849
1850 byte = mctRead_SPD(smbaddr, SPD_Addressing) & 0x7;
1851 byte1 = mctRead_SPD(smbaddr1, SPD_Addressing) & 0x7;
1852 if (byte != byte1) {
1853 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1854 break;
1855 }
1856
1857 byte = mctRead_SPD(smbaddr, SPD_Density) & 0x0f;
1858 byte1 = mctRead_SPD(smbaddr1, SPD_Density) & 0x0f;
1859 if (byte != byte1) {
1860 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1861 break;
1862 }
1863
1864 byte = mctRead_SPD(smbaddr, SPD_Organization) & 0x7;
1865 byte1 = mctRead_SPD(smbaddr1, SPD_Organization) & 0x7;
1866 if (byte != byte1) {
1867 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1868 break;
1869 }
1870
1871 byte = (mctRead_SPD(smbaddr, SPD_Organization) >> 3) & 0x7;
1872 byte1 = (mctRead_SPD(smbaddr1, SPD_Organization) >> 3) & 0x7;
1873 if (byte != byte1) {
1874 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1875 break;
1876 }
1877
1878 byte = mctRead_SPD(smbaddr, SPD_DMBANKS) & 7; /* #ranks-1 */
1879 byte1 = mctRead_SPD(smbaddr1, SPD_DMBANKS) & 7; /* #ranks-1 */
1880 if (byte != byte1) {
1881 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1882 break;
1883 }
1884
1885 }
1886 }
1887
1888}
1889
1890static void StitchMemory_D(struct MCTStatStruc *pMCTstat,
1891 struct DCTStatStruc *pDCTstat, u8 dct)
1892{
1893 /* Requires that Mask values for each bank be programmed first and that
1894 * the chip-select population indicator is correctly set.
1895 */
1896 u8 b = 0;
1897 u32 nxtcsBase, curcsBase;
1898 u8 p, q;
1899 u32 Sizeq, BiggestBank;
1900 u8 _DSpareEn;
1901
1902 u16 word;
1903 u32 dev;
1904 u32 reg;
1905 u32 reg_off;
1906 u32 val;
1907
1908 dev = pDCTstat->dev_dct;
1909 reg_off = 0x100 * dct;
1910
1911 _DSpareEn = 0;
1912
1913 /* CS Sparing 1=enabled, 0=disabled */
1914 if (mctGet_NVbits(NV_CS_SpareCTL) & 1) {
1915 if (MCT_DIMM_SPARE_NO_WARM) {
1916 /* Do no warm-reset DIMM spare */
1917 if (pMCTstat->GStatus & 1 << GSB_EnDIMMSpareNW) {
1918 word = pDCTstat->CSPresent;
1919 val = bsf(word);
1920 word &= ~(1<<val);
1921 if (word)
1922 /* Make sure at least two chip-selects are available */
1923 _DSpareEn = 1;
1924 else
1925 pDCTstat->ErrStatus |= 1 << SB_SpareDis;
1926 }
1927 } else {
1928 if (!mctGet_NVbits(NV_DQSTrainCTL)) { /*DQS Training 1=enabled, 0=disabled */
1929 word = pDCTstat->CSPresent;
1930 val = bsf(word);
1931 word &= ~(1 << val);
1932 if (word)
1933 /* Make sure at least two chip-selects are available */
1934 _DSpareEn = 1;
1935 else
1936 pDCTstat->ErrStatus |= 1 << SB_SpareDis;
1937 }
1938 }
1939 }
1940
1941 nxtcsBase = 0; /* Next available cs base ADDR[39:8] */
1942 for (p=0; p < MAX_DIMMS_SUPPORTED; p++) {
1943 BiggestBank = 0;
1944 for (q = 0; q < MAX_CS_SUPPORTED; q++) { /* from DIMMS to CS */
1945 if (pDCTstat->CSPresent & (1 << q)) { /* bank present? */
1946 reg = 0x40 + (q << 2) + reg_off; /* Base[q] reg.*/
1947 val = Get_NB32(dev, reg);
1948 if (!(val & 3)) { /* (CSEnable|Spare==1)bank is enabled already? */
1949 reg = 0x60 + (q << 1) + reg_off; /*Mask[q] reg.*/
1950 val = Get_NB32(dev, reg);
1951 val >>= 19;
1952 val++;
1953 val <<= 19;
1954 Sizeq = val; /* never used */
1955 if (val > BiggestBank) {
1956 /*Bingo! possibly Map this chip-select next! */
1957 BiggestBank = val;
1958 b = q;
1959 }
1960 }
1961 } /*if bank present */
1962 } /* while q */
1963 if (BiggestBank !=0) {
1964 curcsBase = nxtcsBase; /* curcsBase=nxtcsBase*/
1965 /* DRAM CS Base b Address Register offset */
1966 reg = 0x40 + (b << 2) + reg_off;
1967 if (_DSpareEn) {
1968 BiggestBank = 0;
1969 val = 1 << Spare; /* Spare Enable*/
1970 } else {
1971 val = curcsBase;
1972 val |= 1 << CSEnable; /* Bank Enable */
1973 }
1974 if (((reg - 0x40) >> 2) & 1) {
1975 if (!(pDCTstat->Status & (1 << SB_Registered))) {
1976 u16 dimValid;
1977 dimValid = pDCTstat->DIMMValid;
1978 if (dct & 1)
1979 dimValid <<= 1;
1980 if ((dimValid & pDCTstat->MirrPresU_NumRegR) != 0) {
1981 val |= 1 << onDimmMirror;
1982 }
1983 }
1984 }
1985 Set_NB32(dev, reg, val);
1986 if (_DSpareEn)
1987 _DSpareEn = 0;
1988 else
1989 /* let nxtcsBase+=Size[b] */
1990 nxtcsBase += BiggestBank;
1991 }
1992
1993 /* bank present but disabled?*/
1994 if ( pDCTstat->CSTestFail & (1 << p)) {
1995 /* DRAM CS Base b Address Register offset */
1996 reg = (p << 2) + 0x40 + reg_off;
1997 val = 1 << TestFail;
1998 Set_NB32(dev, reg, val);
1999 }
2000 }
2001
2002 if (nxtcsBase) {
2003 pDCTstat->DCTSysLimit = nxtcsBase - 1;
2004 mct_AfterStitchMemory(pMCTstat, pDCTstat, dct);
2005 }
2006
2007 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
2008
2009 printk(BIOS_DEBUG, "StitchMemory: Status %x\n", pDCTstat->Status);
2010 printk(BIOS_DEBUG, "StitchMemory: ErrStatus %x\n", pDCTstat->ErrStatus);
2011 printk(BIOS_DEBUG, "StitchMemory: ErrCode %x\n", pDCTstat->ErrCode);
2012 printk(BIOS_DEBUG, "StitchMemory: Done\n\n");
2013}
2014
/* Look up the F(k) constant for memory-speed index k from the
 * file-scope Table_F_k. */
static u16 Get_Fk_D(u8 k)
{
	return Table_F_k[k]; /* FIXME: k or k<<1 ? */
}
2019
static u8 DIMMPresence_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
{
	/* Check DIMMs present, verify checksum, flag SDRAM type,
	 * build population indicator bitmaps, and preload bus loading
	 * of DIMMs into DCTStatStruc.
	 * MAAload=number of devices on the "A" bus.
	 * MABload=number of devices on the "B" bus.
	 * MAAdimms=number of DIMMs on the "A" bus slots.
	 * MABdimms=number of DIMMs on the "B" bus slots.
	 * DATAAload=number of ranks on the "A" bus slots.
	 * DATABload=number of ranks on the "B" bus slots.
	 *
	 * Returns pDCTstat->ErrCode (SC_StopError when no usable DIMMs or
	 * a fatal SPD checksum failure was found).
	 */
	u16 i, j;
	u8 smbaddr;
	u8 SPDCtrl;		/* NV_SPDCHK_RESTRT: 0 = stop on bad SPD checksum */
	u16 RegDIMMPresent, MaxDimms;
	u8 devwidth;
	u16 DimmSlots;
	u8 byte = 0, bytex;

	/* preload data structure with addrs */
	mctGet_DIMMAddr(pDCTstat, pDCTstat->Node_ID);

	DimmSlots = MaxDimms = mctGet_NVbits(NV_MAX_DIMMS);

	SPDCtrl = mctGet_NVbits(NV_SPDCHK_RESTRT);

	RegDIMMPresent = 0;
	pDCTstat->DimmQRPresent = 0;

	/* A quad-rank DIMM doubles MaxDimms below so its upper two ranks
	 * are accounted on a second pass at index i+4. */
	for (i = 0; i < MAX_DIMMS_SUPPORTED; i++) {
		if (i >= MaxDimms)
			break;

		if ((pDCTstat->DimmQRPresent & (1 << i)) || (i < DimmSlots)) {
			int status;
			smbaddr = Get_DIMMAddress_D(pDCTstat, i);
			status = mctRead_SPD(smbaddr, SPD_ByteUse);
			if (status >= 0) { /* SPD access is ok */
				pDCTstat->DIMMPresent |= 1 << i;
				if (crcCheck(smbaddr)) { /* CRC is OK */
					byte = mctRead_SPD(smbaddr, SPD_TYPE);
					if (byte == JED_DDR3SDRAM) {
						/*Dimm is 'Present'*/
						pDCTstat->DIMMValid |= 1 << i;
					}
				} else {
					pDCTstat->DIMMSPDCSE = 1 << i;
					if (SPDCtrl == 0) {
						pDCTstat->ErrStatus |= 1 << SB_DIMMChkSum;
						pDCTstat->ErrCode = SC_StopError;
					} else {
						/*if NV_SPDCHK_RESTRT is set to 1, ignore faulty SPD checksum*/
						pDCTstat->ErrStatus |= 1<<SB_DIMMChkSum;
						byte = mctRead_SPD(smbaddr, SPD_TYPE);
						if (byte == JED_DDR3SDRAM)
							pDCTstat->DIMMValid |= 1 << i;
					}
				}
				/* Check module type */
				byte = mctRead_SPD(smbaddr, SPD_DIMMTYPE) & 0x7;
				if (byte == JED_RDIMM || byte == JED_MiniRDIMM)
					RegDIMMPresent |= 1 << i;
				/* Check ECC capable */
				byte = mctRead_SPD(smbaddr, SPD_BusWidth);
				if (byte & JED_ECC) {
					/* DIMM is ECC capable */
					pDCTstat->DimmECCPresent |= 1 << i;
				}
				/* Check if x4 device */
				devwidth = mctRead_SPD(smbaddr, SPD_Organization) & 0x7; /* 0:x4,1:x8,2:x16 */
				if (devwidth == 0) {
					/* DIMM is made with x4 or x16 drams */
					pDCTstat->Dimmx4Present |= 1 << i;
				} else if (devwidth == 1) {
					pDCTstat->Dimmx8Present |= 1 << i;
				} else if (devwidth == 2) {
					pDCTstat->Dimmx16Present |= 1 << i;
				}

				/* Rank count: SPD organization bits [5:3] */
				byte = (mctRead_SPD(smbaddr, SPD_Organization) >> 3);
				byte &= 7;
				if (byte == 3) { /* 4ranks */
					/* if any DIMMs are QR, we have to make two passes through DIMMs*/
					if ( pDCTstat->DimmQRPresent == 0) {
						MaxDimms <<= 1;
					}
					if (i < DimmSlots) {
						pDCTstat->DimmQRPresent |= (1 << i) | (1 << (i+4));
					} else {
						/* second pass of a QR DIMM: it was
						 * already counted as a DIMM once */
						pDCTstat->MAdimms[i & 1] --;
					}
					byte = 1; /* upper two ranks of QR DIMM will be counted on another DIMM number iteration*/
				} else if (byte == 1) { /* 2ranks */
					pDCTstat->DimmDRPresent |= 1 << i;
				}
				/* Address-bus load per device width */
				bytex = devwidth;
				if (devwidth == 0)
					bytex = 16;
				else if (devwidth == 1)
					bytex = 8;
				else if (devwidth == 2)
					bytex = 4;

				byte++; /* al+1=rank# */
				if (byte == 2)
					bytex <<= 1; /*double Addr bus load value for dual rank DIMMs*/

				/* j selects channel A (even DIMM) or B (odd DIMM) */
				j = i & (1<<0);
				pDCTstat->DATAload[j] += byte; /*number of ranks on DATA bus*/
				pDCTstat->MAload[j] += bytex; /*number of devices on CMD/ADDR bus*/
				pDCTstat->MAdimms[j]++; /*number of DIMMs on A bus */

				/* check address mirror support for unbuffered dimm */
				/* check number of registers on a dimm for registered dimm */
				byte = mctRead_SPD(smbaddr, SPD_AddressMirror);
				if (RegDIMMPresent & (1 << i)) {
					if ((byte & 3) > 1)
						pDCTstat->MirrPresU_NumRegR |= 1 << i;
				} else {
					if ((byte & 1) == 1)
						pDCTstat->MirrPresU_NumRegR |= 1 << i;
				}
				/* Get byte62: Reference Raw Card information. We dont need it now. */
				/* byte = mctRead_SPD(smbaddr, SPD_RefRawCard); */
				/* Get Byte65/66 for register manufacture ID code */
				if ((0x97 == mctRead_SPD(smbaddr, SPD_RegManufactureID_H)) &&
				    (0x80 == mctRead_SPD(smbaddr, SPD_RegManufactureID_L))) {
					if (0x16 == mctRead_SPD(smbaddr, SPD_RegManRevID))
						pDCTstat->RegMan2Present |= 1 << i;
					else
						pDCTstat->RegMan1Present |= 1 << i;
				}
				/* Get Control word values for RC3. We dont need it. */
				byte = mctRead_SPD(smbaddr, 70);
				pDCTstat->CtrlWrd3 |= (byte >> 4) << (i << 2); /* C3 = SPD byte 70 [7:4] */
				/* Get Control word values for RC4, and RC5 */
				byte = mctRead_SPD(smbaddr, 71);
				/* NOTE(review): comment claims RC4 = [3:0] but the mask
				 * is 0xFF (a no-op on u8), so the upper nibble is OR'd
				 * in as well — confirm whether (byte & 0x0F) was meant. */
				pDCTstat->CtrlWrd4 |= (byte & 0xFF) << (i << 2); /* RC4 = SPD byte 71 [3:0] */
				pDCTstat->CtrlWrd5 |= (byte >> 4) << (i << 2); /* RC5 = SPD byte 71 [7:4] */
			}
		}
	}
	printk(BIOS_DEBUG, "\t DIMMPresence: DIMMValid=%x\n", pDCTstat->DIMMValid);
	printk(BIOS_DEBUG, "\t DIMMPresence: DIMMPresent=%x\n", pDCTstat->DIMMPresent);
	printk(BIOS_DEBUG, "\t DIMMPresence: RegDIMMPresent=%x\n", RegDIMMPresent);
	printk(BIOS_DEBUG, "\t DIMMPresence: DimmECCPresent=%x\n", pDCTstat->DimmECCPresent);
	printk(BIOS_DEBUG, "\t DIMMPresence: DimmPARPresent=%x\n", pDCTstat->DimmPARPresent);
	printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx4Present=%x\n", pDCTstat->Dimmx4Present);
	printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx8Present=%x\n", pDCTstat->Dimmx8Present);
	printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx16Present=%x\n", pDCTstat->Dimmx16Present);
	printk(BIOS_DEBUG, "\t DIMMPresence: DimmPlPresent=%x\n", pDCTstat->DimmPlPresent);
	printk(BIOS_DEBUG, "\t DIMMPresence: DimmDRPresent=%x\n", pDCTstat->DimmDRPresent);
	printk(BIOS_DEBUG, "\t DIMMPresence: DimmQRPresent=%x\n", pDCTstat->DimmQRPresent);
	printk(BIOS_DEBUG, "\t DIMMPresence: DATAload[0]=%x\n", pDCTstat->DATAload[0]);
	printk(BIOS_DEBUG, "\t DIMMPresence: MAload[0]=%x\n", pDCTstat->MAload[0]);
	printk(BIOS_DEBUG, "\t DIMMPresence: MAdimms[0]=%x\n", pDCTstat->MAdimms[0]);
	printk(BIOS_DEBUG, "\t DIMMPresence: DATAload[1]=%x\n", pDCTstat->DATAload[1]);
	printk(BIOS_DEBUG, "\t DIMMPresence: MAload[1]=%x\n", pDCTstat->MAload[1]);
	printk(BIOS_DEBUG, "\t DIMMPresence: MAdimms[1]=%x\n", pDCTstat->MAdimms[1]);

	/* Derive whole-node status from the per-DIMM bitmaps */
	if (pDCTstat->DIMMValid != 0) { /* If any DIMMs are present...*/
		if (RegDIMMPresent != 0) {
			if ((RegDIMMPresent ^ pDCTstat->DIMMValid) !=0) {
				/* module type DIMM mismatch (reg'ed, unbuffered) */
				pDCTstat->ErrStatus |= 1<<SB_DimmMismatchM;
				pDCTstat->ErrCode = SC_StopError;
			} else{
				/* all DIMMs are registered */
				pDCTstat->Status |= 1<<SB_Registered;
			}
		}
		if (pDCTstat->DimmECCPresent != 0) {
			if ((pDCTstat->DimmECCPresent ^ pDCTstat->DIMMValid )== 0) {
				/* all DIMMs are ECC capable */
				pDCTstat->Status |= 1<<SB_ECCDIMMs;
			}
		}
		if (pDCTstat->DimmPARPresent != 0) {
			if ((pDCTstat->DimmPARPresent ^ pDCTstat->DIMMValid) == 0) {
				/*all DIMMs are Parity capable */
				pDCTstat->Status |= 1<<SB_PARDIMMs;
			}
		}
	} else {
		/* no DIMMs present or no DIMMs that qualified. */
		pDCTstat->ErrStatus |= 1<<SB_NoDimms;
		pDCTstat->ErrCode = SC_StopError;
	}

	printk(BIOS_DEBUG, "\t DIMMPresence: Status %x\n", pDCTstat->Status);
	printk(BIOS_DEBUG, "\t DIMMPresence: ErrStatus %x\n", pDCTstat->ErrStatus);
	printk(BIOS_DEBUG, "\t DIMMPresence: ErrCode %x\n", pDCTstat->ErrCode);
	printk(BIOS_DEBUG, "\t DIMMPresence: Done\n\n");

	mctHookAfterDIMMpre();

	return pDCTstat->ErrCode;
}
2220
2221static u8 Get_DIMMAddress_D(struct DCTStatStruc *pDCTstat, u8 i)
2222{
2223 u8 *p;
2224
2225 p = pDCTstat->DIMMAddr;
2226 /* mct_BeforeGetDIMMAddress(); */
2227 return p[i];
2228}
2229
2230static void mct_initDCT(struct MCTStatStruc *pMCTstat,
2231 struct DCTStatStruc *pDCTstat)
2232{
2233 u32 val;
2234 u8 err_code;
2235
2236 /* Config. DCT0 for Ganged or unganged mode */
2237 DCTInit_D(pMCTstat, pDCTstat, 0);
2238 if (pDCTstat->ErrCode == SC_FatalErr) {
2239 /* Do nothing goto exitDCTInit; any fatal errors? */
2240 } else {
2241 /* Configure DCT1 if unganged and enabled*/
2242 if (!pDCTstat->GangedMode) {
Kerry She108d30b2010-08-30 07:24:13 +00002243 if (pDCTstat->DIMMValidDCT[1] > 0) {
Zheng Baoeb75f652010-04-23 17:32:48 +00002244 err_code = pDCTstat->ErrCode; /* save DCT0 errors */
2245 pDCTstat->ErrCode = 0;
2246 DCTInit_D(pMCTstat, pDCTstat, 1);
2247 if (pDCTstat->ErrCode == 2) /* DCT1 is not Running */
2248 pDCTstat->ErrCode = err_code; /* Using DCT0 Error code to update pDCTstat.ErrCode */
2249 } else {
2250 val = 1 << DisDramInterface;
2251 Set_NB32(pDCTstat->dev_dct, 0x100 + 0x94, val);
2252 }
2253 }
2254 }
2255/* exitDCTInit: */
2256}
2257
/* Run the pre-init product workarounds, then the software DRAM init
 * sequence for DCT 'dct'.  Hardware-assisted init is compiled out. */
static void mct_DramInit(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
{
	mct_BeforeDramInit_Prod_D(pMCTstat, pDCTstat);
	mct_DramInit_Sw_D(pMCTstat, pDCTstat, dct);
	/* mct_DramInit_Hw_D(pMCTstat, pDCTstat, dct); */
}
2265
static u8 mct_setMode(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
{
	/* Split the DIMMValid bitmap into per-channel populations and,
	 * when channels match and NV_Unganged is not requested, enable
	 * 128-bit (ganged) mode via F2x110[DctGangEn].
	 * Returns pDCTstat->ErrCode unchanged.
	 */
	u8 byte;
	u8 bytex;
	u32 val;
	u32 reg;

	byte = bytex = pDCTstat->DIMMValid;
	bytex &= 0x55;		/* CHA DIMM pop (even DIMM indices) */
	pDCTstat->DIMMValidDCT[0] = bytex;

	byte &= 0xAA;		/* CHB DIMM popa (odd DIMM indices) */
	byte >>= 1;
	pDCTstat->DIMMValidDCT[1] = byte;

	if (byte != bytex) {
		/* NOTE(review): on asymmetric populations the mismatch bit is
		 * cleared here rather than set — presumably SPDCalcWidth_D has
		 * already recorded any pair mismatch and unganged operation is
		 * acceptable; confirm this is the intended semantics. */
		pDCTstat->ErrStatus &= ~(1 << SB_DimmMismatchO);
	} else {
		byte = mctGet_NVbits(NV_Unganged);
		if (byte)
			pDCTstat->ErrStatus |= (1 << SB_DimmMismatchO); /* Set temp. to avoid setting of ganged mode */

		if (!(pDCTstat->ErrStatus & (1 << SB_DimmMismatchO))) {
			pDCTstat->GangedMode = 1;
			/* valid 128-bit mode population. */
			pDCTstat->Status |= 1 << SB_128bitmode;
			reg = 0x110;
			val = Get_NB32(pDCTstat->dev_dct, reg);
			val |= 1 << DctGangEn;
			Set_NB32(pDCTstat->dev_dct, reg, val);
		}
		if (byte) /* NV_Unganged */
			pDCTstat->ErrStatus &= ~(1 << SB_DimmMismatchO); /* Clear so that there is no DIMM missmatch error */
	}
	return pDCTstat->ErrCode;
}
2303
2304u32 Get_NB32(u32 dev, u32 reg)
2305{
2306 return pci_read_config32(dev, reg);
2307}
2308
/* Write a 32-bit northbridge register through PCI configuration space. */
void Set_NB32(u32 dev, u32 reg, u32 val)
{
	pci_write_config32(dev, reg, val);
}
2313
2314
2315u32 Get_NB32_index(u32 dev, u32 index_reg, u32 index)
2316{
2317 u32 dword;
2318
2319 Set_NB32(dev, index_reg, index);
2320 dword = Get_NB32(dev, index_reg+0x4);
2321
2322 return dword;
2323}
2324
/* Indirect NB register write: post 'index' to the index port, then write
 * 'data' to the data port at index_reg + 4 (no completion handshake). */
void Set_NB32_index(u32 dev, u32 index_reg, u32 index, u32 data)
{
	Set_NB32(dev, index_reg, index);
	Set_NB32(dev, index_reg + 0x4, data);
}
2330
2331u32 Get_NB32_index_wait(u32 dev, u32 index_reg, u32 index)
2332{
2333
2334 u32 dword;
2335
2336
2337 index &= ~(1 << DctAccessWrite);
2338 Set_NB32(dev, index_reg, index);
2339 do {
2340 dword = Get_NB32(dev, index_reg);
2341 } while (!(dword & (1 << DctAccessDone)));
2342 dword = Get_NB32(dev, index_reg + 0x4);
2343
2344 return dword;
2345}
2346
2347void Set_NB32_index_wait(u32 dev, u32 index_reg, u32 index, u32 data)
2348{
2349 u32 dword;
2350
2351
2352 Set_NB32(dev, index_reg + 0x4, data);
2353 index |= (1 << DctAccessWrite);
2354 Set_NB32(dev, index_reg, index);
2355 do {
2356 dword = Get_NB32(dev, index_reg);
2357 } while (!(dword & (1 << DctAccessDone)));
2358
2359}
2360
/* Pre-platform-spec fixup for revisions newer than Bx
 * (mct_checkForCxDxSupport_D): programs two indirect DCT registers per
 * the revision guide before platform timing values are applied.
 * Returns pDCTstat->ErrCode unchanged. */
static u8 mct_BeforePlatformSpec(struct MCTStatStruc *pMCTstat,
					struct DCTStatStruc *pDCTstat, u8 dct)
{
	/* mct_checkForCxDxSupport_D */
	if (pDCTstat->LogicalCPUID & AMD_DR_GT_Bx) {
		/* 1. Write 00000000h to F2x[1,0]9C_xD08E000 */
		Set_NB32_index_wait(pDCTstat->dev_dct, 0x98 + dct * 0x100, 0x0D08E000, 0);
		/* 2. If DRAM Configuration Register[MemClkFreq] (F2x[1,0]94[2:0]) is
		   greater than or equal to 011b (DDR-800 and higher),
		   then write 00000080h to F2x[1,0]9C_xD02E001,
		   else write 00000090h to F2x[1,0]9C_xD02E001. */
		if (pDCTstat->Speed >= 4)
			Set_NB32_index_wait(pDCTstat->dev_dct, 0x98 + dct * 0x100, 0xD02E001, 0x80);
		else
			Set_NB32_index_wait(pDCTstat->dev_dct, 0x98 + dct * 0x100, 0xD02E001, 0x90);
	}
	return pDCTstat->ErrCode;
}
2379
static u8 mct_PlatformSpec(struct MCTStatStruc *pMCTstat,
				struct DCTStatStruc *pDCTstat, u8 dct)
{
	/* Get platform specific config/timing values from the interface layer
	 * and program them into DCT.  In ganged mode both channels are
	 * programmed; otherwise only the requested DCT.
	 * Returns pDCTstat->ErrCode unchanged.
	 */

	u32 dev = pDCTstat->dev_dct;
	u32 index_reg;
	u8 i, i_start, i_end;

	if (pDCTstat->GangedMode) {
		SyncSetting(pDCTstat);
		/* mct_SetupSync_D */
		i_start = 0;
		i_end = 2;
	} else {
		i_start = dct;
		i_end = dct + 1;
	}
	for (i=i_start; i<i_end; i++) {
		index_reg = 0x98 + (i * 0x100);
		Set_NB32_index_wait(dev, index_reg, 0x00, pDCTstat->CH_ODC_CTL[i]); /* Output Driver Compensation Control */
		Set_NB32_index_wait(dev, index_reg, 0x04, pDCTstat->CH_ADDR_TMG[i]); /* Address Timing Control (was mislabeled as ODC control) */
	}

	return pDCTstat->ErrCode;
}
2408
2409static void mct_SyncDCTsReady(struct DCTStatStruc *pDCTstat)
2410{
2411 u32 dev;
2412 u32 val;
2413
2414 if (pDCTstat->NodePresent) {
2415 dev = pDCTstat->dev_dct;
2416
2417 if ((pDCTstat->DIMMValidDCT[0] ) || (pDCTstat->DIMMValidDCT[1])) { /* This Node has dram */
2418 do {
2419 val = Get_NB32(dev, 0x110);
2420 } while (!(val & (1 << DramEnabled)));
2421 }
2422 } /* Node is present */
2423}
2424
2425static void mct_AfterGetCLT(struct MCTStatStruc *pMCTstat,
2426 struct DCTStatStruc *pDCTstat, u8 dct)
2427{
2428 if (!pDCTstat->GangedMode) {
2429 if (dct == 0 ) {
2430 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[dct];
2431 if (pDCTstat->DIMMValidDCT[dct] == 0)
2432 pDCTstat->ErrCode = SC_StopError;
2433 } else {
2434 pDCTstat->CSPresent = 0;
2435 pDCTstat->CSTestFail = 0;
2436 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[dct];
2437 if (pDCTstat->DIMMValidDCT[dct] == 0)
2438 pDCTstat->ErrCode = SC_StopError;
2439 }
2440 }
2441}
2442
2443static u8 mct_SPDCalcWidth(struct MCTStatStruc *pMCTstat,
2444 struct DCTStatStruc *pDCTstat, u8 dct)
2445{
2446 u8 ret;
2447 u32 val;
2448
2449 if ( dct == 0) {
2450 SPDCalcWidth_D(pMCTstat, pDCTstat);
2451 ret = mct_setMode(pMCTstat, pDCTstat);
2452 } else {
2453 ret = pDCTstat->ErrCode;
2454 }
2455
2456 if (pDCTstat->DIMMValidDCT[0] == 0) {
2457 val = Get_NB32(pDCTstat->dev_dct, 0x94);
2458 val |= 1 << DisDramInterface;
2459 Set_NB32(pDCTstat->dev_dct, 0x94, val);
2460 }
2461 if (pDCTstat->DIMMValidDCT[1] == 0) {
2462 val = Get_NB32(pDCTstat->dev_dct, 0x94 + 0x100);
2463 val |= 1 << DisDramInterface;
2464 Set_NB32(pDCTstat->dev_dct, 0x94 + 0x100, val);
2465 }
2466
2467 printk(BIOS_DEBUG, "SPDCalcWidth: Status %x\n", pDCTstat->Status);
2468 printk(BIOS_DEBUG, "SPDCalcWidth: ErrStatus %x\n", pDCTstat->ErrStatus);
2469 printk(BIOS_DEBUG, "SPDCalcWidth: ErrCode %x\n", pDCTstat->ErrCode);
2470 printk(BIOS_DEBUG, "SPDCalcWidth: Done\n");
2471 /* Disable dram interface before DRAM init */
2472
2473 return ret;
2474}
2475
static void mct_AfterStitchMemory(struct MCTStatStruc *pMCTstat,
				struct DCTStatStruc *pDCTstat, u8 dct)
{
	/* After a DCT's chip-selects have been stitched: accumulate the
	 * node's system limit and, in unganged mode, program the DCT
	 * select registers (F2x110/F2x114) so accesses are routed to the
	 * correct DCT, accounting for the software memory hole. */
	u32 val;
	u32 dword;
	u32 dev;
	u32 reg;
	u8 _MemHoleRemap;
	u32 DramHoleBase;

	_MemHoleRemap = mctGet_NVbits(NV_MemHole);
	DramHoleBase = mctGet_NVbits(NV_BottomIO);
	DramHoleBase <<= 8;
	/* Increase hole size so;[31:24]to[31:16]
	 * it has granularity of 128MB shl eax,8
	 * Set 'effective' bottom IOmov DramHoleBase,eax
	 */
	pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;

	/* In unganged mode, we must add DCT0 and DCT1 to DCTSysLimit */
	if (!pDCTstat->GangedMode) {
		dev = pDCTstat->dev_dct;
		pDCTstat->NodeSysLimit += pDCTstat->DCTSysLimit;
		/* if DCT0 and DCT1 both exist, set DctSelBaseAddr[47:27] to the top of DCT0 */
		if (dct == 0) {
			if (pDCTstat->DIMMValidDCT[1] > 0) {
				dword = pDCTstat->DCTSysLimit + 1;
				dword += pDCTstat->NodeSysBase;
				dword >>= 8; /* scale [39:8] to [47:27],and to F2x110[31:11] */
				if ((dword >= DramHoleBase) && _MemHoleRemap) {
					pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
					val = pMCTstat->HoleBase;
					val >>= 16;
					val = (((~val) & 0xFF) + 1);
					val <<= 8;
					/* skip the hole: add its size to the select base */
					dword += val;
				}
				reg = 0x110;
				val = Get_NB32(dev, reg);
				val &= 0x7F;	/* keep control bits [6:0] */
				val |= dword;
				val |= 3; /* Set F2x110[DctSelHiRngEn], F2x110[DctSelHi] */
				Set_NB32(dev, reg, val);

				reg = 0x114;
				val = dword;
				Set_NB32(dev, reg, val);
			}
		} else {
			/* Program the DctSelBaseAddr value to 0
			   if DCT 0 is disabled */
			if (pDCTstat->DIMMValidDCT[0] == 0) {
				dword = pDCTstat->NodeSysBase;
				dword >>= 8;
				if ((dword >= DramHoleBase) && _MemHoleRemap) {
					pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
					val = pMCTstat->HoleBase;
					val >>= 8;
					val &= ~(0xFFFF);
					val |= (((~val) & 0xFFFF) + 1);
					dword += val;
				}
				reg = 0x114;
				val = dword;
				Set_NB32(dev, reg, val);

				reg = 0x110;
				/* NOTE(review): F2x110 is not read back here; 'val'
				 * still holds the DctSelBaseOffset value written to
				 * 0x114, which is then OR'd with 3 and written to
				 * 0x110 — confirm a read-modify-write of 0x110 was
				 * not intended. */
				val |= 3; /* Set F2x110[DctSelHiRngEn], F2x110[DctSelHi] */
				Set_NB32(dev, reg, val);
			}
		}
	} else {
		pDCTstat->NodeSysLimit += pDCTstat->DCTSysLimit;
	}
	printk(BIOS_DEBUG, "AfterStitch pDCTstat->NodeSysBase = %x\n", pDCTstat->NodeSysBase);
	printk(BIOS_DEBUG, "mct_AfterStitchMemory: pDCTstat->NodeSysLimit = %x\n", pDCTstat->NodeSysLimit);
}
2553
2554static u8 mct_DIMMPresence(struct MCTStatStruc *pMCTstat,
2555 struct DCTStatStruc *pDCTstat, u8 dct)
2556{
2557 u8 ret;
2558
Kerry She108d30b2010-08-30 07:24:13 +00002559 if (dct == 0)
Zheng Baoeb75f652010-04-23 17:32:48 +00002560 ret = DIMMPresence_D(pMCTstat, pDCTstat);
2561 else
2562 ret = pDCTstat->ErrCode;
2563
2564 return ret;
2565}
2566
2567/* mct_BeforeGetDIMMAddress inline in C */
2568
2569static void mct_OtherTiming(struct MCTStatStruc *pMCTstat,
2570 struct DCTStatStruc *pDCTstatA)
2571{
2572 u8 Node;
2573
2574 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
2575 struct DCTStatStruc *pDCTstat;
2576 pDCTstat = pDCTstatA + Node;
2577 if (pDCTstat->NodePresent) {
2578 if (pDCTstat->DIMMValidDCT[0]) {
2579 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[0];
2580 Set_OtherTiming(pMCTstat, pDCTstat, 0);
2581 }
2582 if (pDCTstat->DIMMValidDCT[1] && !pDCTstat->GangedMode ) {
2583 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[1];
2584 Set_OtherTiming(pMCTstat, pDCTstat, 1);
2585 }
2586 } /* Node is present*/
2587 } /* while Node */
2588}
2589
static void Set_OtherTiming(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
{
	/* Compute the turnaround timings (Trdrd/Twrwr/Twrrd/TrwtTO/TrwtWB)
	 * from the trained gross delays and pack them into the DRAM Timing
	 * High register (F2x[1,0]8C) and F2x[1,0]78.  The low 2 bits of
	 * each 4-bit timing go into 0x8C, the upper bits into 0x78. */
	u32 reg;
	u32 reg_off = 0x100 * dct;
	u32 val;
	u32 dword;
	u32 dev = pDCTstat->dev_dct;

	/* Refresh the min/max gross delays, then derive each timing */
	Get_DqsRcvEnGross_Diff(pDCTstat, dev, 0x98 + reg_off);
	Get_WrDatGross_Diff(pDCTstat, dct, dev, 0x98 + reg_off);
	Get_Trdrd(pMCTstat, pDCTstat, dct);
	Get_Twrwr(pMCTstat, pDCTstat, dct);
	Get_Twrrd(pMCTstat, pDCTstat, dct);
	Get_TrwtTO(pMCTstat, pDCTstat, dct);
	Get_TrwtWB(pMCTstat, pDCTstat);

	reg = 0x8C + reg_off;		/* Dram Timing Hi */
	val = Get_NB32(dev, reg);
	val &= 0xffff0300;		/* clear the turnaround fields */
	dword = pDCTstat->TrwtTO;	/* bits [7:4] */
	val |= dword << 4;
	dword = pDCTstat->Twrrd & 3;	/* low 2 bits -> [11:10] */
	val |= dword << 10;
	dword = pDCTstat->Twrwr & 3;	/* low 2 bits -> [13:12] */
	val |= dword << 12;
	dword = pDCTstat->Trdrd & 3;	/* low 2 bits -> [15:14] */
	val |= dword << 14;
	dword = pDCTstat->TrwtWB;	/* bits [3:0] */
	val |= dword;
	Set_NB32(dev, reg, val);

	reg = 0x78 + reg_off;
	val = Get_NB32(dev, reg);
	val &= 0xFFFFC0FF;		/* clear the high-bit fields [13:8] */
	dword = pDCTstat->Twrrd >> 2;	/* upper bits -> [9:8] */
	val |= dword << 8;
	dword = pDCTstat->Twrwr >> 2;	/* upper bits -> [11:10] */
	val |= dword << 10;
	dword = pDCTstat->Trdrd >> 2;	/* upper bits -> [13:12] */
	val |= dword << 12;
	Set_NB32(dev, reg, val);
}
2633
2634static void Get_Trdrd(struct MCTStatStruc *pMCTstat,
2635 struct DCTStatStruc *pDCTstat, u8 dct)
2636{
2637 int8_t Trdrd;
2638
2639 Trdrd = ((int8_t)(pDCTstat->DqsRcvEnGrossMax - pDCTstat->DqsRcvEnGrossMin) >> 1) + 1;
2640 if (Trdrd > 8)
2641 Trdrd = 8;
2642 pDCTstat->Trdrd = Trdrd;
2643}
2644
2645static void Get_Twrwr(struct MCTStatStruc *pMCTstat,
2646 struct DCTStatStruc *pDCTstat, u8 dct)
2647{
2648 int8_t Twrwr = 0;
2649
2650 Twrwr = ((int8_t)(pDCTstat->WrDatGrossMax - pDCTstat->WrDatGrossMin) >> 1) + 2;
2651
2652 if (Twrwr < 2)
2653 Twrwr = 2;
2654 else if (Twrwr > 9)
2655 Twrwr = 9;
2656
2657 pDCTstat->Twrwr = Twrwr;
2658}
2659
2660static void Get_Twrrd(struct MCTStatStruc *pMCTstat,
2661 struct DCTStatStruc *pDCTstat, u8 dct)
2662{
2663 u8 LDplus1;
2664 int8_t Twrrd;
2665
2666 LDplus1 = Get_Latency_Diff(pMCTstat, pDCTstat, dct);
2667
2668 Twrrd = ((int8_t)(pDCTstat->WrDatGrossMax - pDCTstat->DqsRcvEnGrossMin) >> 1) + 4 - LDplus1;
2669
2670 if (Twrrd < 2)
2671 Twrrd = 2;
2672 else if (Twrrd > 10)
2673 Twrrd = 10;
2674 pDCTstat->Twrrd = Twrrd;
2675}
2676
2677static void Get_TrwtTO(struct MCTStatStruc *pMCTstat,
2678 struct DCTStatStruc *pDCTstat, u8 dct)
2679{
2680 u8 LDplus1;
2681 int8_t TrwtTO;
2682
2683 LDplus1 = Get_Latency_Diff(pMCTstat, pDCTstat, dct);
2684
2685 TrwtTO = ((int8_t)(pDCTstat->DqsRcvEnGrossMax - pDCTstat->WrDatGrossMin) >> 1) + LDplus1;
2686
2687 pDCTstat->TrwtTO = TrwtTO;
2688}
2689
static void Get_TrwtWB(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
{
	/* TrwtWB ensures read-to-write data-bus turnaround.
	   This value should be one more than the programmed TrwtTO.
	   NOTE(review): the code stores TrwtTO unchanged — whether the "+1"
	   is implied by the register encoding or genuinely missing needs to
	   be confirmed against the BKDG F2x[1,0]8C field definition. */
	pDCTstat->TrwtWB = pDCTstat->TrwtTO;
}
2697
static u8 Get_Latency_Diff(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
{
	/* Return the difference between F2x[1,0]88[3:0] and
	 * F2x[1,0]84[22:20] for this DCT.
	 * NOTE(review): the u8 return wraps if val2 > val1 — presumably the
	 * first field always dominates after training; confirm against the
	 * BKDG register definitions. */
	u32 reg_off = 0x100 * dct;
	u32 dev = pDCTstat->dev_dct;
	u32 val1, val2;

	val1 = Get_NB32(dev, reg_off + 0x88) & 0xF;
	val2 = (Get_NB32(dev, reg_off + 0x84) >> 20) & 7;

	return val1 - val2;
}
2710
2711static void Get_DqsRcvEnGross_Diff(struct DCTStatStruc *pDCTstat,
2712 u32 dev, u32 index_reg)
2713{
2714 u8 Smallest, Largest;
2715 u32 val;
2716 u8 byte, bytex;
2717
2718 /* The largest DqsRcvEnGrossDelay of any DIMM minus the
2719 DqsRcvEnGrossDelay of any other DIMM is equal to the Critical
2720 Gross Delay Difference (CGDD) */
2721 /* DqsRcvEn byte 1,0 */
2722 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x10);
2723 Largest = val & 0xFF;
2724 Smallest = (val >> 8) & 0xFF;
2725
2726 /* DqsRcvEn byte 3,2 */
2727 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x11);
2728 byte = val & 0xFF;
2729 bytex = (val >> 8) & 0xFF;
2730 if (bytex < Smallest)
2731 Smallest = bytex;
2732 if (byte > Largest)
2733 Largest = byte;
2734
2735 /* DqsRcvEn byte 5,4 */
2736 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x20);
2737 byte = val & 0xFF;
2738 bytex = (val >> 8) & 0xFF;
2739 if (bytex < Smallest)
2740 Smallest = bytex;
2741 if (byte > Largest)
2742 Largest = byte;
2743
2744 /* DqsRcvEn byte 7,6 */
2745 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x21);
2746 byte = val & 0xFF;
2747 bytex = (val >> 8) & 0xFF;
2748 if (bytex < Smallest)
2749 Smallest = bytex;
2750 if (byte > Largest)
2751 Largest = byte;
2752
2753 if (pDCTstat->DimmECCPresent> 0) {
2754 /*DqsRcvEn Ecc */
2755 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x12);
2756 byte = val & 0xFF;
2757 bytex = (val >> 8) & 0xFF;
2758 if (bytex < Smallest)
2759 Smallest = bytex;
2760 if (byte > Largest)
2761 Largest = byte;
2762 }
2763
2764 pDCTstat->DqsRcvEnGrossMax = Largest;
2765 pDCTstat->DqsRcvEnGrossMin = Smallest;
2766}
2767
static void Get_WrDatGross_Diff(struct DCTStatStruc *pDCTstat,
					u8 dct, u32 dev, u32 index_reg)
{
	/* Find the largest and smallest WrDatGrossDlyByte across all
	 * populated DIMMs; their difference is the CGDD for writes.
	 * Results go to pDCTstat->WrDatGrossMax/Min. */
	u8 Smallest = 0, Largest = 0;
	u32 val;
	u8 byte, bytex;

	/* The largest WrDatGrossDlyByte of any DIMM minus the
	   WrDatGrossDlyByte of any other DIMM is equal to CGDD */
	/* NOTE(review): when DIMM0 (bit 0) is absent, Smallest stays at its
	   initializer 0 and the later "bytex < Smallest" tests can never
	   update it — presumably acceptable because delays are relative;
	   confirm intended behavior for DIMM0-less populations. */
	if (pDCTstat->DIMMValid & (1 << 0)) {
		val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x01); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM0 */
		Largest = val & 0xFF;
		Smallest = (val >> 8) & 0xFF;
	}
	if (pDCTstat->DIMMValid & (1 << 2)) {
		val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x101); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM1 */
		byte = val & 0xFF;
		bytex = (val >> 8) & 0xFF;
		if (bytex < Smallest)
			Smallest = bytex;
		if (byte > Largest)
			Largest = byte;
	}

	/* If Cx, 2 more dimm need to be checked to find out the largest and smallest */
	if (pDCTstat->LogicalCPUID & AMD_DR_Cx) {
		if (pDCTstat->DIMMValid & (1 << 4)) {
			val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x201); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM2 */
			byte = val & 0xFF;
			bytex = (val >> 8) & 0xFF;
			if (bytex < Smallest)
				Smallest = bytex;
			if (byte > Largest)
				Largest = byte;
		}
		if (pDCTstat->DIMMValid & (1 << 6)) {
			val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x301); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM3 */
			byte = val & 0xFF;
			bytex = (val >> 8) & 0xFF;
			if (bytex < Smallest)
				Smallest = bytex;
			if (byte > Largest)
				Largest = byte;
		}
	}

	pDCTstat->WrDatGrossMax = Largest;
	pDCTstat->WrDatGrossMin = Smallest;
}
2817
static u16 Get_DqsRcvEnGross_MaxMin(struct DCTStatStruc *pDCTstat,
					u32 dev, u32 index_reg,
					u32 index)
{
	/* Scan the DqsRcvEn gross-delay fields for one byte-lane pair
	 * across all valid (even-numbered) DIMMs and return the extremes
	 * packed as (Smallest << 8) | Largest.  With no valid DIMMs this
	 * returns the initializers: Smallest=7, Largest=0.
	 * index 0x12 selects the ECC lane, which carries only one field. */
	u8 Smallest, Largest;
	u8 i;
	u8 byte;
	u32 val;
	u16 word;
	u8 ecc_reg = 0;

	Smallest = 7;
	Largest = 0;

	if (index == 0x12)
		ecc_reg = 1;

	/* index advances by 3 per DIMM so that each DIMM's register set
	 * is addressed in turn */
	for (i=0; i < 8; i+=2) {
		if ( pDCTstat->DIMMValid & (1 << i)) {
			val = Get_NB32_index_wait(dev, index_reg, index);
			val &= 0x00E000E0;	/* gross-delay bits of both lanes */
			byte = (val >> 5) & 0xFF;	/* low lane */
			if (byte < Smallest)
				Smallest = byte;
			if (byte > Largest)
				Largest = byte;
			if (!(ecc_reg)) {
				byte = (val >> (16 + 5)) & 0xFF;	/* high lane */
				if (byte < Smallest)
					Smallest = byte;
				if (byte > Largest)
					Largest = byte;
			}
		}
		index += 3;
	} /* while ++i */

	word = Smallest;
	word <<= 8;
	word |= Largest;

	return word;
}
2861
static u16 Get_WrDatGross_MaxMin(struct DCTStatStruc *pDCTstat,
					u8 dct, u32 dev, u32 index_reg,
					u32 index)
{
	/* Scan the four WrDatGrossDlyByte fields in each of two
	 * consecutive indirect registers (8 byte lanes of one DIMM), plus
	 * the ECC lane when present, and return the extremes packed as
	 * (Smallest << 8) | Largest.  Initializers: Smallest=3, Largest=0. */
	u8 Smallest, Largest;
	u8 i, j;
	u32 val;
	u8 byte;
	u16 word;

	Smallest = 3;
	Largest = 0;
	for (i=0; i < 2; i++) {
		val = Get_NB32_index_wait(dev, index_reg, index);
		val &= 0x60606060;	/* gross-delay bits of 4 lanes */
		val >>= 5;
		for (j=0; j < 4; j++) {
			byte = val & 0xFF;
			if (byte < Smallest)
				Smallest = byte;
			if (byte > Largest)
				Largest = byte;
			val >>= 8;	/* next lane */
		} /* while ++j */
		index++;
	} /*while ++i*/

	if (pDCTstat->DimmECCPresent > 0) {
		/* ECC lane lives one register further on */
		index++;
		val = Get_NB32_index_wait(dev, index_reg, index);
		val &= 0x00000060;
		val >>= 5;
		byte = val & 0xFF;
		if (byte < Smallest)
			Smallest = byte;
		if (byte > Largest)
			Largest = byte;
	}

	word = Smallest;
	word <<= 8;
	word |= Largest;

	return word;
}
2907
Zheng Bao69436e12011-01-06 02:18:12 +00002908static void mct_PhyController_Config(struct MCTStatStruc *pMCTstat,
2909 struct DCTStatStruc *pDCTstat, u8 dct)
Zheng Baoeb75f652010-04-23 17:32:48 +00002910{
Zheng Bao69436e12011-01-06 02:18:12 +00002911 u32 index_reg = 0x98 + 0x100 * dct;
2912 u32 dev = pDCTstat->dev_dct;
2913 u32 val;
2914
2915 if (pDCTstat->LogicalCPUID & (AMD_DR_DAC2_OR_C3 | AMD_RB_C3)) {
2916 if (pDCTstat->Dimmx4Present == 0) {
2917 /* Set bit7 RxDqsUDllPowerDown to register F2x[1, 0]98_x0D0F0F13 for power saving */
2918 val = Get_NB32_index_wait(dev, index_reg, 0x0D0F0F13); /* Agesa v3 v6 might be wrong here. */
2919 val |= 1 << 7; /* BIOS should set this bit when x4 DIMMs are not present */
2920 Set_NB32_index_wait(dev, index_reg, 0x0D0F0F13, val);
2921 }
2922 }
2923
2924 if (pDCTstat->LogicalCPUID & AMD_DR_DAC2_OR_C3) {
2925 if (pDCTstat->DimmECCPresent == 0) {
2926 /* Set bit4 PwrDn to register F2x[1, 0]98_x0D0F0830 for power saving */
2927 val = Get_NB32_index_wait(dev, index_reg, 0x0D0F0830);
2928 val |= 1 << 4; /* BIOS should set this bit if ECC DIMMs are not present */
2929 Set_NB32_index_wait(dev, index_reg, 0x0D0F0830, val);
2930 }
2931 }
2932
2933}
2934
/* Final MCT configuration pass: for every present node, apply the PHY
 * power-saving configuration on both DCTs, then program the extended MCT
 * configuration (F2x11C write limits, F2x1B0) according to silicon revision
 * and memory speed.
 */
static void mct_FinalMCT_D(struct MCTStatStruc *pMCTstat,
				struct DCTStatStruc *pDCTstatA)
{
	u8 Node;
	struct DCTStatStruc *pDCTstat;
	u32 val;

	for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
		pDCTstat = pDCTstatA + Node;

		if (pDCTstat->NodePresent) {
			mct_PhyController_Config(pMCTstat, pDCTstat, 0);
			mct_PhyController_Config(pMCTstat, pDCTstat, 1);
		}
		if (!(pDCTstat->LogicalCPUID & AMD_DR_Dx)) { /* mct_checkForDxSupport */
			mct_ExtMCTConfig_Cx(pDCTstat);
			mct_ExtMCTConfig_Bx(pDCTstat);
		} else { /* For Dx CPU */
			val = 0x0CE00F00 | 1 << 29/* FlushWrOnStpGnt */;
			if (!(pDCTstat->GangedMode))
				val |= 0x20; /* MctWrLimit = 8 for unganged mode */
			else
				val |= 0x40; /* MctWrLimit = 16 for ganged mode */
			Set_NB32(pDCTstat->dev_dct, 0x11C, val);

			val = Get_NB32(pDCTstat->dev_dct, 0x1B0);
			val &= 0xFFFFF8C0;
			val |= 0x101; /* BKDG recommended settings */
			val |= 0x0FC00000; /* Agesa V5 */
			if (!(pDCTstat->GangedMode))
				val |= 1 << 12;
			else
				val &= ~(1 << 12);

			/* Bits 31:28 scale with the memory clock (Speed code). */
			val &= 0x0FFFFFFF;
			switch (pDCTstat->Speed) {
			case 4:
				val |= 0x50000000; /* 5 for DDR800 */
				break;
			case 5:
				val |= 0x60000000; /* 6 for DDR1066 */
				break;
			case 6:
				val |= 0x80000000; /* 8 for DDR1333 - original comment said
						    * DDR800, contradicting case 4 above */
				break;
			default:
				val |= 0x90000000; /* 9 for DDR1600 */
				break;
			}
			Set_NB32(pDCTstat->dev_dct, 0x1B0, val);
		}
	}

	/* ClrClToNB_D postponed until we're done executing from ROM */
	/* NOTE(review): after the loop pDCTstat points at the LAST node, so the
	 * calls below operate on that node's data - confirm this is intended. */
	mct_ClrWbEnhWsbDis_D(pMCTstat, pDCTstat);

	/* set F3x8C[DisFastTprWr] on all DR, if L3Size=0 */
	if (pDCTstat->LogicalCPUID & AMD_DR_ALL) {
		if (!(cpuid_edx(0x80000006) & 0xFFFC0000)) {
			val = Get_NB32(pDCTstat->dev_nbmisc, 0x8C);
			val |= 1 << 24;
			Set_NB32(pDCTstat->dev_nbmisc, 0x8C, val);
		}
	}
}
3000
/* Early MCT setup for a node: set BU_CFG2[ClLinesToNbDis] and
 * BU_CFG[WbEnhWsbDis] before DRAM init.  The matching Clr* helpers undo
 * these settings later in POST (see the "postponed" note in mct_FinalMCT_D).
 * Order matters only in that both must run before DRAM init begins.
 */
static void mct_InitialMCT_D(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat)
{
	mct_SetClToNB_D(pMCTstat, pDCTstat);
	mct_SetWbEnhWsbDis_D(pMCTstat, pDCTstat);
}
3006
3007static u32 mct_NodePresent_D(void)
3008{
3009 u32 val;
3010 val = 0x12001022;
3011 return val;
3012}
3013
3014static void mct_init(struct MCTStatStruc *pMCTstat,
3015 struct DCTStatStruc *pDCTstat)
3016{
3017 u32 lo, hi;
3018 u32 addr;
3019
3020 pDCTstat->GangedMode = 0;
3021 pDCTstat->DRPresent = 1;
3022
3023 /* enable extend PCI configuration access */
3024 addr = 0xC001001F;
3025 _RDMSR(addr, &lo, &hi);
3026 if (hi & (1 << (46-32))) {
3027 pDCTstat->Status |= 1 << SB_ExtConfig;
3028 } else {
3029 hi |= 1 << (46-32);
3030 _WRMSR(addr, lo, hi);
3031 }
3032}
3033
3034static void clear_legacy_Mode(struct MCTStatStruc *pMCTstat,
3035 struct DCTStatStruc *pDCTstat)
3036{
3037 u32 reg;
3038 u32 val;
3039 u32 dev = pDCTstat->dev_dct;
3040
3041 /* Clear Legacy BIOS Mode bit */
3042 reg = 0x94;
3043 val = Get_NB32(dev, reg);
3044 val &= ~(1<<LegacyBiosMode);
3045 Set_NB32(dev, reg, val);
3046
3047 reg = 0x94 + 0x100;
3048 val = Get_NB32(dev, reg);
3049 val &= ~(1<<LegacyBiosMode);
3050 Set_NB32(dev, reg, val);
3051}
3052
/* Mirror each node's DRAM base/limit pair - read from Node0's address map
 * (F1x40/F1x44, F1x48/F1x4C, ...) - into that node's extended address map
 * registers F1x120/F1x124, and program the DRAM hole (memory hoisting)
 * register F1xF0 when a hardware memory hole is in use.
 */
static void mct_HTMemMapExt(struct MCTStatStruc *pMCTstat,
				struct DCTStatStruc *pDCTstatA)
{
	u8 Node;
	u32 Drambase, Dramlimit;
	u32 val;
	u32 reg;
	u32 dev;
	u32 devx;
	u32 dword;
	struct DCTStatStruc *pDCTstat;

	pDCTstat = pDCTstatA + 0;
	dev = pDCTstat->dev_map;	/* all base/limit pairs are read from Node0 */

	/* Copy dram map from F1x40/44,F1x48/4c,
	   to F1x120/124(Node0),F1x120/124(Node1),...*/
	for (Node=0; Node < MAX_NODES_SUPPORTED; Node++) {
		pDCTstat = pDCTstatA + Node;
		devx = pDCTstat->dev_map;

		/* get base/limit from Node0 */
		reg = 0x40 + (Node << 3);	/* Node0/Dram Base 0 */
		val = Get_NB32(dev, reg);
		Drambase = val >> ( 16 + 3);	/* rescale for the [47:27] field below */

		reg = 0x44 + (Node << 3);	/* Node0/Dram Limit 0 (comment fixed; was "Base") */
		val = Get_NB32(dev, reg);
		Dramlimit = val >> (16 + 3);

		/* set base/limit to F1x120/124 per Node */
		if (pDCTstat->NodePresent) {
			reg = 0x120;		/* F1x120,DramBase[47:27] */
			val = Get_NB32(devx, reg);
			val &= 0xFFE00000;
			val |= Drambase;
			Set_NB32(devx, reg, val);

			reg = 0x124;		/* F1x124,DramLimit[47:27] */
			val = Get_NB32(devx, reg);
			val &= 0xFFE00000;
			val |= Dramlimit;
			Set_NB32(devx, reg, val);

			if ( pMCTstat->GStatus & ( 1 << GSB_HWHole)) {
				/* Enable memory hoisting and program the hole
				 * base into bits 31:24 of F1xF0. */
				reg = 0xF0;
				val = Get_NB32(devx, reg);
				val |= (1 << DramMemHoistValid);
				val &= ~(0xFF << 24);
				dword = (pMCTstat->HoleBase >> (24 - 8)) & 0xFF;
				dword <<= 24;
				val |= dword;
				Set_NB32(devx, reg, val);
			}

		}
	}
}
3111
3112static void SetCSTriState(struct MCTStatStruc *pMCTstat,
3113 struct DCTStatStruc *pDCTstat, u8 dct)
3114{
3115 u32 val;
3116 u32 dev = pDCTstat->dev_dct;
3117 u32 index_reg = 0x98 + 0x100 * dct;
3118 u32 index;
3119 u16 word;
3120
3121 /* Tri-state unused chipselects when motherboard
3122 termination is available */
3123
3124 /* FIXME: skip for Ax */
3125
3126 word = pDCTstat->CSPresent;
3127 if (pDCTstat->Status & (1 << SB_Registered)) {
3128 word |= (word & 0x55) << 1;
3129 }
3130 word = (~word) & 0xFF;
3131 index = 0x0c;
3132 val = Get_NB32_index_wait(dev, index_reg, index);
3133 val |= word;
3134 Set_NB32_index_wait(dev, index_reg, index, val);
3135}
3136
3137static void SetCKETriState(struct MCTStatStruc *pMCTstat,
3138 struct DCTStatStruc *pDCTstat, u8 dct)
3139{
3140 u32 val;
3141 u32 dev;
3142 u32 index_reg = 0x98 + 0x100 * dct;
3143 u32 index;
3144 u16 word;
3145
3146 /* Tri-state unused CKEs when motherboard termination is available */
3147
3148 /* FIXME: skip for Ax */
3149
3150 dev = pDCTstat->dev_dct;
3151 word = pDCTstat->CSPresent;
3152
3153 index = 0x0c;
3154 val = Get_NB32_index_wait(dev, index_reg, index);
3155 if ((word & 0x55) == 0)
3156 val |= 1 << 12;
3157
3158 if ((word & 0xAA) == 0)
3159 val |= 1 << 13;
3160
3161 Set_NB32_index_wait(dev, index_reg, index, val);
3162}
3163
/* Tri-state unused ODT (on-die termination) pins when motherboard
 * termination is available.  Builds a 4-bit mask of the unused ODT lines
 * and writes it to ODTTriState[3:0] (bits 11:8) of PHY register 0x0C.
 */
static void SetODTTriState(struct MCTStatStruc *pMCTstat,
				struct DCTStatStruc *pDCTstat, u8 dct)
{
	u32 val;
	u32 dev;
	u32 index_reg = 0x98 + 0x100 * dct;
	u8 cs;
	u32 index;
	u8 odt;
	u8 max_dimms;	/* NOTE(review): read below but never used afterwards */

	/* FIXME: skip for Ax */

	dev = pDCTstat->dev_dct;

	/* Tri-state unused ODTs when motherboard termination is available */
	max_dimms = (u8) mctGet_NVbits(NV_MAX_DIMMS);
	odt = 0x0F; /* ODT tri-state setting */

	if (pDCTstat->Status & (1 <<SB_Registered)) {
		/* Registered: keep the ODT for each populated even CS pair;
		 * on quad-rank capable platforms the partner ODT (bit+2) is
		 * also kept when the odd CS of the pair is populated. */
		for (cs = 0; cs < 8; cs += 2) {
			if (pDCTstat->CSPresent & (1 << cs)) {
				odt &= ~(1 << (cs / 2));
				if (mctGet_NVbits(NV_4RANKType) != 0) { /* quad-rank capable platform */
					if (pDCTstat->CSPresent & (1 << (cs + 1)))
						odt &= ~(4 << (cs / 2));
				}
			}
		}
	} else { /* AM3 package */
		/* Unbuffered: take the inverted CS-present mask, keep bits
		 * 0 and 3 as-is, and exchange bits 1 and 2 (the ODT wiring
		 * does not follow the CS numbering on AM3). */
		val = ~(pDCTstat->CSPresent);
		odt = val & 9; /* swap bits 1 and 2 */
		if (val & (1 << 1))
			odt |= 1 << 2;
		if (val & (1 << 2))
			odt |= 1 << 1;
	}

	index = 0x0C;
	val = Get_NB32_index_wait(dev, index_reg, index);
	val |= ((odt & 0xFF) << 8); /* set bits 11:8 ODTTriState[3:0] */
	Set_NB32_index_wait(dev, index_reg, index, val);

}
3208
/* Program the DRAM PHY compensation (slew rate) register F2x[1, 0]9C_x0A
 * from the slew-rate lookup tables, using the field values read back from
 * the compensation status register F2x[1, 0]9C_x00.
 */
static void InitPhyCompensation(struct MCTStatStruc *pMCTstat,
				struct DCTStatStruc *pDCTstat, u8 dct)
{
	u8 i;
	u32 index_reg = 0x98 + 0x100 * dct;
	u32 dev = pDCTstat->dev_dct;
	u32 val;
	u32 valx = 0;
	u32 dword;
	const u8 *p;

	val = Get_NB32_index_wait(dev, index_reg, 0x00);
	dword = 0;
	/* Build six 5-bit fields: fields 0/4 and 1/5 come from the 1.5x
	 * rise/fall tables (indexed by bits 17:16 of the status value),
	 * fields 2 and 3 from the 2.0x tables (indexed by bits 9:8). */
	for (i=0; i < 6; i++) {
		switch (i) {
		case 0:
		case 4:
			p = Table_Comp_Rise_Slew_15x;
			valx = p[(val >> 16) & 3];
			break;
		case 1:
		case 5:
			p = Table_Comp_Fall_Slew_15x;
			valx = p[(val >> 16) & 3];
			break;
		case 2:
			p = Table_Comp_Rise_Slew_20x;
			valx = p[(val >> 8) & 3];
			break;
		case 3:
			p = Table_Comp_Fall_Slew_20x;
			valx = p[(val >> 8) & 3];
			break;

		}
		dword |= valx << (5 * i);
	}

	/* Override/Exception */
	if (!pDCTstat->GangedMode) {
		i = 0; /* use i for the dct setting required */
		if (pDCTstat->MAdimms[0] < 4)
			i = 1;
		/* Low memory speeds (Speed code 2 or 3) with a fully
		 * populated 4-DIMM channel: mask out some slew fields and
		 * force the write to DCT0's PHY. */
		if (((pDCTstat->Speed == 2) || (pDCTstat->Speed == 3)) && (pDCTstat->MAdimms[i] == 4)) {
			dword &= 0xF18FFF18;
			index_reg = 0x98; /* force dct = 0 */
		}
	}

	Set_NB32_index_wait(dev, index_reg, 0x0a, dword);
}
3260
3261static void mct_EarlyArbEn_D(struct MCTStatStruc *pMCTstat,
Zheng Bao69436e12011-01-06 02:18:12 +00003262 struct DCTStatStruc *pDCTstat, u8 dct)
Zheng Baoeb75f652010-04-23 17:32:48 +00003263{
3264 u32 reg;
3265 u32 val;
3266 u32 dev = pDCTstat->dev_dct;
3267
3268 /* GhEnhancement #18429 modified by askar: For low NB CLK :
3269 * Memclk ratio, the DCT may need to arbitrate early to avoid
3270 * unnecessary bubbles.
3271 * bit 19 of F2x[1,0]78 Dram Control Register, set this bit only when
3272 * NB CLK : Memclk ratio is between 3:1 (inclusive) to 4:5 (inclusive)
3273 */
Zheng Bao69436e12011-01-06 02:18:12 +00003274 reg = 0x78 + 0x100 * dct;
Zheng Baoeb75f652010-04-23 17:32:48 +00003275 val = Get_NB32(dev, reg);
3276
Zheng Bao69436e12011-01-06 02:18:12 +00003277 if (pDCTstat->LogicalCPUID & (AMD_DR_Cx | AMD_DR_Dx))
Zheng Baoeb75f652010-04-23 17:32:48 +00003278 val |= (1 << EarlyArbEn);
3279 else if (CheckNBCOFEarlyArbEn(pMCTstat, pDCTstat))
3280 val |= (1 << EarlyArbEn);
3281
3282 Set_NB32(dev, reg, val);
3283}
3284
3285static u8 CheckNBCOFEarlyArbEn(struct MCTStatStruc *pMCTstat,
3286 struct DCTStatStruc *pDCTstat)
3287{
3288 u32 reg;
3289 u32 val;
3290 u32 tmp;
3291 u32 rem;
3292 u32 dev = pDCTstat->dev_dct;
3293 u32 hi, lo;
3294 u8 NbDid = 0;
3295
3296 /* Check if NB COF >= 4*Memclk, if it is not, return a fatal error
3297 */
3298
3299 /* 3*(Fn2xD4[NBFid]+4)/(2^NbDid)/(3+Fn2x94[MemClkFreq]) */
3300 _RDMSR(0xC0010071, &lo, &hi);
3301 if (lo & (1 << 22))
3302 NbDid |= 1;
3303
3304 reg = 0x94;
3305 val = Get_NB32(dev, reg);
3306 if (!(val & (1 << MemClkFreqVal)))
3307 val = Get_NB32(dev, reg + 0x100); /* get the DCT1 value */
3308
3309 val &= 0x07;
3310 val += 3;
3311 if (NbDid)
3312 val <<= 1;
3313 tmp = val;
3314
3315 dev = pDCTstat->dev_nbmisc;
3316 reg = 0xD4;
3317 val = Get_NB32(dev, reg);
3318 val &= 0x1F;
3319 val += 3;
3320 val *= 3;
3321 val = val / tmp;
3322 rem = val % tmp;
3323 tmp >>= 1;
3324
3325 /* Yes this could be nicer but this was how the asm was.... */
3326 if (val < 3) { /* NClk:MemClk < 3:1 */
3327 return 0;
3328 } else if (val > 4) { /* NClk:MemClk >= 5:1 */
3329 return 0;
3330 } else if ((val == 4) && (rem > tmp)) { /* NClk:MemClk > 4.5:1 */
3331 return 0;
3332 } else {
3333 return 1; /* 3:1 <= NClk:MemClk <= 4.5:1*/
3334 }
3335}
3336
/* Reset pMCTstat and the per-node pDCTstat array to all-zeros, while
 * preserving each node's HostBiosSrvc1/2 words and the DCTStatStruc region
 * between CH_MaxRdLat[2] and CH_D_BC_RCVRDLY[2][4] (data that must survive
 * the reset).
 */
static void mct_ResetDataStruct_D(struct MCTStatStruc *pMCTstat,
					struct DCTStatStruc *pDCTstatA)
{
	u8 Node;
	u32 i;
	struct DCTStatStruc *pDCTstat;
	u32 start, stop;
	u8 *p;
	u16 host_serv1, host_serv2;

	/* Initialize Data structures by clearing all entries to 0 */
	p = (u8 *) pMCTstat;
	for (i = 0; i < sizeof(struct MCTStatStruc); i++) {
		p[i] = 0;
	}

	for (Node = 0; Node < 8; Node++) {
		pDCTstat = pDCTstatA + Node;
		/* Save the two fields that must survive the wipe. */
		host_serv1 = pDCTstat->HostBiosSrvc1;
		host_serv2 = pDCTstat->HostBiosSrvc2;

		p = (u8 *) pDCTstat;
		/* Clear from the start of the struct up to CH_MaxRdLat[2];
		 * the null-pointer casts are hand-rolled offsetof(). */
		start = 0;
		stop = ((u32) &((struct DCTStatStruc *)0)->CH_MaxRdLat[2]);
		for (i = start; i < stop ; i++) {
			p[i] = 0;
		}

		/* Clear from CH_D_BC_RCVRDLY[2][4] to the end of the struct;
		 * the bytes between the two windows are left untouched. */
		start = ((u32) &((struct DCTStatStruc *)0)->CH_D_BC_RCVRDLY[2][4]);
		stop = sizeof(struct DCTStatStruc);
		for (i = start; i < stop; i++) {
			p[i] = 0;
		}
		pDCTstat->HostBiosSrvc1 = host_serv1;
		pDCTstat->HostBiosSrvc2 = host_serv2;
	}
}
3374
3375static void mct_BeforeDramInit_Prod_D(struct MCTStatStruc *pMCTstat,
3376 struct DCTStatStruc *pDCTstat)
3377{
3378 u8 i;
3379 u32 reg_off, dword;
3380 u32 dev = pDCTstat->dev_dct;
3381
3382 if (pDCTstat->LogicalCPUID & AMD_DR_Dx) {
3383 if ((pDCTstat->Speed == 3))
3384 dword = 0x00000800;
3385 else
3386 dword = 0x00000000;
3387 for (i=0; i < 2; i++) {
3388 reg_off = 0x100 * i;
3389 Set_NB32(dev, 0x98 + reg_off, 0x0D000030);
3390 Set_NB32(dev, 0x9C + reg_off, dword);
3391 Set_NB32(dev, 0x98 + reg_off, 0x4D040F30);
3392 }
3393 }
3394}
3395
Zheng Bao69436e12011-01-06 02:18:12 +00003396static void mct_EnDllShutdownSR(struct MCTStatStruc *pMCTstat,
3397 struct DCTStatStruc *pDCTstat, u8 dct)
3398{
3399 u32 reg_off = 0x100 * dct;
3400 u32 dev = pDCTstat->dev_dct, val;
3401
3402 /* Write 0000_07D0h to register F2x[1, 0]98_x4D0FE006 */
3403 if (pDCTstat->LogicalCPUID & (AMD_DR_DAC2_OR_C3)) {
3404 Set_NB32(dev, 0x9C + reg_off, 0x1C);
3405 Set_NB32(dev, 0x98 + reg_off, 0x4D0FE006);
3406 Set_NB32(dev, 0x9C + reg_off, 0x13D);
3407 Set_NB32(dev, 0x98 + reg_off, 0x4D0FE007);
3408
3409 val = Get_NB32(dev, 0x90 + reg_off);
3410 val &= ~(1 << 27/* DisDllShutdownSR */);
3411 Set_NB32(dev, 0x90 + reg_off, val);
3412 }
3413}
3414
Zheng Baoeb75f652010-04-23 17:32:48 +00003415static u32 mct_DisDllShutdownSR(struct MCTStatStruc *pMCTstat,
3416 struct DCTStatStruc *pDCTstat, u32 DramConfigLo, u8 dct)
3417{
3418 u32 reg_off = 0x100 * dct;
3419 u32 dev = pDCTstat->dev_dct;
3420
3421 /* Write 0000_07D0h to register F2x[1, 0]98_x4D0FE006 */
Zheng Bao69436e12011-01-06 02:18:12 +00003422 if (pDCTstat->LogicalCPUID & (AMD_DR_DAC2_OR_C3)) {
3423 Set_NB32(dev, 0x9C + reg_off, 0x7D0);
Zheng Baoeb75f652010-04-23 17:32:48 +00003424 Set_NB32(dev, 0x98 + reg_off, 0x4D0FE006);
Zheng Bao69436e12011-01-06 02:18:12 +00003425 Set_NB32(dev, 0x9C + reg_off, 0x190);
Zheng Baoeb75f652010-04-23 17:32:48 +00003426 Set_NB32(dev, 0x98 + reg_off, 0x4D0FE007);
Zheng Bao69436e12011-01-06 02:18:12 +00003427
3428 DramConfigLo |= /* DisDllShutdownSR */ 1 << 27;
Zheng Baoeb75f652010-04-23 17:32:48 +00003429 }
3430
Zheng Bao69436e12011-01-06 02:18:12 +00003431 return DramConfigLo;
Zheng Baoeb75f652010-04-23 17:32:48 +00003432}
3433
3434void mct_SetClToNB_D(struct MCTStatStruc *pMCTstat,
3435 struct DCTStatStruc *pDCTstat)
3436{
3437 u32 lo, hi;
3438 u32 msr;
3439
3440 /* FIXME: Maybe check the CPUID? - not for now. */
3441 /* pDCTstat->LogicalCPUID; */
3442
3443 msr = BU_CFG2;
3444 _RDMSR(msr, &lo, &hi);
3445 lo |= 1 << ClLinesToNbDis;
3446 _WRMSR(msr, lo, hi);
3447}
3448
3449void mct_ClrClToNB_D(struct MCTStatStruc *pMCTstat,
3450 struct DCTStatStruc *pDCTstat)
3451{
3452
3453 u32 lo, hi;
3454 u32 msr;
3455
3456 /* FIXME: Maybe check the CPUID? - not for now. */
3457 /* pDCTstat->LogicalCPUID; */
3458
3459 msr = BU_CFG2;
3460 _RDMSR(msr, &lo, &hi);
3461 if (!pDCTstat->ClToNB_flag)
3462 lo &= ~(1<<ClLinesToNbDis);
3463 _WRMSR(msr, lo, hi);
3464
3465}
3466
3467void mct_SetWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
3468 struct DCTStatStruc *pDCTstat)
3469{
3470 u32 lo, hi;
3471 u32 msr;
3472
3473 /* FIXME: Maybe check the CPUID? - not for now. */
3474 /* pDCTstat->LogicalCPUID; */
3475
3476 msr = BU_CFG;
3477 _RDMSR(msr, &lo, &hi);
3478 hi |= (1 << WbEnhWsbDis_D);
3479 _WRMSR(msr, lo, hi);
3480}
3481
3482void mct_ClrWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
3483 struct DCTStatStruc *pDCTstat)
3484{
3485 u32 lo, hi;
3486 u32 msr;
3487
3488 /* FIXME: Maybe check the CPUID? - not for now. */
3489 /* pDCTstat->LogicalCPUID; */
3490
3491 msr = BU_CFG;
3492 _RDMSR(msr, &lo, &hi);
3493 hi &= ~(1 << WbEnhWsbDis_D);
3494 _WRMSR(msr, lo, hi);
3495}
3496
Zheng Baoeb75f652010-04-23 17:32:48 +00003497void ProgDramMRSReg_D(struct MCTStatStruc *pMCTstat,
3498 struct DCTStatStruc *pDCTstat, u8 dct)
3499{
3500 u32 DramMRS, dword;
3501 u8 byte;
3502
3503 DramMRS = 0;
3504
3505 /* Set chip select CKE control mode */
3506 if (mctGet_NVbits(NV_CKE_CTL)) {
3507 if (pDCTstat->CSPresent == 3) {
3508 u16 word;
3509 word = pDCTstat->DIMMSPDCSE;
3510 if (dct == 0)
3511 word &= 0b01010100;
3512 else
3513 word &= 0b10101000;
3514 if (word == 0)
3515 DramMRS |= 1 << 23;
3516 }
3517 }
3518 /*
3519 DRAM MRS Register
3520 DrvImpCtrl: drive impedance control.01b(34 ohm driver; Ron34 = Rzq/7)
3521 */
3522 DramMRS |= 1 << 2;
3523 /* Dram nominal termination: */
3524 byte = pDCTstat->MAdimms[dct];
3525 if (!(pDCTstat->Status & (1 << SB_Registered))) {
3526 DramMRS |= 1 << 7; /* 60 ohms */
3527 if (byte & 2) {
3528 if (pDCTstat->Speed < 6)
3529 DramMRS |= 1 << 8; /* 40 ohms */
3530 else
3531 DramMRS |= 1 << 9; /* 30 ohms */
3532 }
3533 }
3534 /* Dram dynamic termination: Disable(1DIMM), 120ohm(>=2DIMM) */
3535 if (!(pDCTstat->Status & (1 << SB_Registered))) {
3536 if (byte >= 2) {
3537 if (pDCTstat->Speed == 7)
3538 DramMRS |= 1 << 10;
3539 else
3540 DramMRS |= 1 << 11;
3541 }
3542 } else {
3543 DramMRS |= mct_DramTermDyn_RDimm(pMCTstat, pDCTstat, byte);
3544 }
3545
3546 /* burst length control */
3547 if (pDCTstat->Status & (1 << SB_128bitmode))
3548 DramMRS |= 1 << 1;
3549 /* Qoff=0, output buffers enabled */
3550 /* Tcwl */
3551 DramMRS |= (pDCTstat->Speed - 4) << 20;
3552 /* ASR=1, auto self refresh */
3553 /* SRT=0 */
3554 DramMRS |= 1 << 18;
3555
3556 dword = Get_NB32(pDCTstat->dev_dct, 0x100 * dct + 0x84);
3557 dword &= ~0x00FC2F8F;
3558 dword |= DramMRS;
3559 Set_NB32(pDCTstat->dev_dct, 0x100 * dct + 0x84, dword);
3560}
3561
void mct_SetDramConfigHi_D(struct DCTStatStruc *pDCTstat, u32 dct,
				u32 DramConfigHi)
{
	/* Bug#15114: Comp. update interrupted by Freq. change can cause
	 * subsequent update to be invalid during any MemClk frequency change:
	 * Solution: From the bug report:
	 * 1. A software-initiated frequency change should be wrapped into the
	 * following sequence :
	 * - a) Disable Compensation (F2[1, 0]9C_x08[30] )
	 * b) Reset the Begin Compensation bit (D3CMP->COMP_CONFIG[0]) in all the compensation engines
	 * c) Do frequency change
	 * d) Enable Compensation (F2[1, 0]9C_x08[30] )
	 * 2. A software-initiated Disable Compensation should always be
	 * followed by step b) of the above steps.
	 * Silicon Status: Fixed In Rev B0
	 *
	 * Errata#177: DRAM Phy Automatic Compensation Updates May Be Invalid
	 * Solution: BIOS should disable the phy automatic compensation prior
	 * to initiating a memory clock frequency change as follows:
	 * 1. Disable PhyAutoComp by writing 1'b1 to F2x[1, 0]9C_x08[30]
	 * 2. Reset the Begin Compensation bits by writing 32'h0 to
	 * F2x[1, 0]9C_x4D004F00
	 * 3. Perform frequency change
	 * 4. Enable PhyAutoComp by writing 1'b0 to F2x[1, 0]9C_08[30]
	 * In addition, any time software disables the automatic phy
	 * compensation it should reset the begin compensation bit per step 2.
	 * Silicon Status: Fixed in DR-B0
	 */

	u32 dev = pDCTstat->dev_dct;
	u32 index_reg = 0x98 + 0x100 * dct;
	u32 index;

	u32 val;

	/* Step 1: disable automatic compensation if not already disabled. */
	index = 0x08;
	val = Get_NB32_index_wait(dev, index_reg, index);
	if (!(val & (1 << DisAutoComp)))
		Set_NB32_index_wait(dev, index_reg, index, val | (1 << DisAutoComp));

	mct_Wait(100);

	/* Step 3: the frequency change itself (write DramConfigHi).
	 * NOTE(review): steps 2 (reset Begin Compensation bits) and 4
	 * (re-enable PhyAutoComp) of the errata sequence are not performed
	 * here - presumably handled elsewhere; confirm. */
	Set_NB32(dev, 0x94 + 0x100 * dct, DramConfigHi);
}
3606
3607static void mct_BeforeDQSTrain_D(struct MCTStatStruc *pMCTstat,
3608 struct DCTStatStruc *pDCTstatA)
3609{
3610 u8 Node;
3611 struct DCTStatStruc *pDCTstat;
3612
3613 /* Errata 178
3614 *
3615 * Bug#15115: Uncertainty In The Sync Chain Leads To Setup Violations
3616 * In TX FIFO
3617 * Solution: BIOS should program DRAM Control Register[RdPtrInit] =
3618 * 5h, (F2x[1, 0]78[3:0] = 5h).
3619 * Silicon Status: Fixed In Rev B0
3620 *
3621 * Bug#15880: Determine validity of reset settings for DDR PHY timing.
Zheng Baoc3af12f2010-10-08 05:08:47 +00003622 * Solution: At least, set WrDqs fine delay to be 0 for DDR3 training.
Zheng Baoeb75f652010-04-23 17:32:48 +00003623 */
3624 for (Node = 0; Node < 8; Node++) {
3625 pDCTstat = pDCTstatA + Node;
3626
Xavi Drudis Ferran7cdf1ec2010-09-27 21:08:40 +00003627 if (pDCTstat->NodePresent) {
Zheng Baoeb75f652010-04-23 17:32:48 +00003628 mct_BeforeDQSTrainSamp(pDCTstat); /* only Bx */
3629 mct_ResetDLL_D(pMCTstat, pDCTstat, 0);
3630 mct_ResetDLL_D(pMCTstat, pDCTstat, 1);
Xavi Drudis Ferran7cdf1ec2010-09-27 21:08:40 +00003631 }
Zheng Baoeb75f652010-04-23 17:32:48 +00003632 }
3633}
3634
/* Pulse the DRAM PHY DLL reset (F2x[1, 0]9C_x4D080F0C) for the given DCT.
 * A dummy read from the first valid receiver's system address primes the
 * controller first.  Temporarily sets HWCR.wrap32dis so the 64-bit test
 * read works in 32-bit mode, restoring it afterwards.  Skipped entirely on
 * B3 silicon.
 */
static void mct_ResetDLL_D(struct MCTStatStruc *pMCTstat,
				struct DCTStatStruc *pDCTstat, u8 dct)
{
	u8 Receiver;
	u32 dev = pDCTstat->dev_dct;
	u32 reg_off = 0x100 * dct;
	u32 addr;
	u32 lo, hi;
	u8 wrap32dis = 0;
	u8 valid = 0;

	/* Skip reset DLL for B3 */
	if (pDCTstat->LogicalCPUID & AMD_DR_B3) {
		return;
	}

	addr = HWCR;
	_RDMSR(addr, &lo, &hi);
	if(lo & (1<<17)) { /* save the old value */
		wrap32dis = 1;
	}
	lo |= (1<<17); /* HWCR.wrap32dis */
	/* Setting wrap32dis allows 64-bit memory references in 32bit mode */
	_WRMSR(addr, lo, hi);

	pDCTstat->Channel = dct;
	Receiver = mct_InitReceiver_D(pDCTstat, dct);
	/* there are four receiver pairs, loosely associated with chipselects.*/
	/* Only the first enabled receiver with a valid system address is
	 * used; the loop breaks after one reset pulse. */
	for (; Receiver < 8; Receiver += 2) {
		if (mct_RcvrRankEnabled_D(pMCTstat, pDCTstat, dct, Receiver)) {
			addr = mct_GetRcvrSysAddr_D(pMCTstat, pDCTstat, dct, Receiver, &valid);
			if (valid) {
				mct_Read1LTestPattern_D(pMCTstat, pDCTstat, addr);	/* cache fills */

				/* Write 0000_8000h to register F2x[1,0]9C_xD080F0C */
				Set_NB32_index_wait(dev, 0x98 + reg_off, 0x4D080F0C, 0x00008000);
				mct_Wait(80); /* wait >= 300ns */

				/* Write 0000_0000h to register F2x[1,0]9C_xD080F0C */
				Set_NB32_index_wait(dev, 0x98 + reg_off, 0x4D080F0C, 0x00000000);
				mct_Wait(800); /* wait >= 2us */
				break;
			}
		}
	}

	if(!wrap32dis) {
		addr = HWCR;
		_RDMSR(addr, &lo, &hi);
		lo &= ~(1<<17); /* restore HWCR.wrap32dis */
		_WRMSR(addr, lo, hi);
	}
}
3688
3689static void mct_EnableDatIntlv_D(struct MCTStatStruc *pMCTstat,
3690 struct DCTStatStruc *pDCTstat)
3691{
3692 u32 dev = pDCTstat->dev_dct;
3693 u32 val;
3694
3695 /* Enable F2x110[DctDatIntlv] */
3696 /* Call back not required mctHookBeforeDatIntlv_D() */
3697 /* FIXME Skip for Ax */
3698 if (!pDCTstat->GangedMode) {
3699 val = Get_NB32(dev, 0x110);
3700 val |= 1 << 5; /* DctDatIntlv */
3701 Set_NB32(dev, 0x110, val);
3702
3703 /* FIXME Skip for Cx */
3704 dev = pDCTstat->dev_nbmisc;
3705 val = Get_NB32(dev, 0x8C); /* NB Configuration Hi */
3706 val |= 1 << (36-32); /* DisDatMask */
3707 Set_NB32(dev, 0x8C, val);
3708 }
3709}
3710
3711static void SetDllSpeedUp_D(struct MCTStatStruc *pMCTstat,
3712 struct DCTStatStruc *pDCTstat, u8 dct)
3713{
3714 u32 val;
3715 u32 dev = pDCTstat->dev_dct;
3716 u32 reg_off = 0x100 * dct;
3717
3718 if (pDCTstat->Speed >= 7) { /* DDR1600 and above */
3719 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D080F10 */
3720 Set_NB32(dev, reg_off + 0x98, 0x0D080F10);
3721 val = Get_NB32(dev, reg_off + 0x9C);
3722 val |= 1 < 13;
3723 Set_NB32(dev, reg_off + 0x9C, val);
3724 Set_NB32(dev, reg_off + 0x98, 0x4D080F10);
3725
3726 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D080F11 */
3727 Set_NB32(dev, reg_off + 0x98, 0x0D080F11);
3728 val = Get_NB32(dev, reg_off + 0x9C);
3729 val |= 1 < 13;
3730 Set_NB32(dev, reg_off + 0x9C, val);
3731 Set_NB32(dev, reg_off + 0x98, 0x4D080F11);
3732
3733 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D088F30 */
3734 Set_NB32(dev, reg_off + 0x98, 0x0D088F30);
3735 val = Get_NB32(dev, reg_off + 0x9C);
3736 val |= 1 < 13;
3737 Set_NB32(dev, reg_off + 0x9C, val);
3738 Set_NB32(dev, reg_off + 0x98, 0x4D088F30);
3739
3740 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D08CF30 */
3741 Set_NB32(dev, reg_off + 0x98, 0x0D08CF30);
3742 val = Get_NB32(dev, reg_off + 0x9C);
3743 val |= 1 < 13;
3744 Set_NB32(dev, reg_off + 0x9C, val);
3745 Set_NB32(dev, reg_off + 0x98, 0x4D08CF30);
3746
3747 }
3748}
3749
3750static void SyncSetting(struct DCTStatStruc *pDCTstat)
3751{
3752 /* set F2x78[ChSetupSync] when F2x[1, 0]9C_x04[AddrCmdSetup, CsOdtSetup,
3753 * CkeSetup] setups for one DCT are all 0s and at least one of the setups,
3754 * F2x[1, 0]9C_x04[AddrCmdSetup, CsOdtSetup, CkeSetup], of the other
3755 * controller is 1
3756 */
3757 u32 cha, chb;
3758 u32 dev = pDCTstat->dev_dct;
3759 u32 val;
3760
3761 cha = pDCTstat->CH_ADDR_TMG[0] & 0x0202020;
3762 chb = pDCTstat->CH_ADDR_TMG[1] & 0x0202020;
3763
3764 if ((cha != chb) && ((cha == 0) || (chb == 0))) {
3765 val = Get_NB32(dev, 0x78);
3766 val |= 1 << ChSetupSync;
3767 Set_NB32(dev, 0x78, val);
3768 }
3769}
3770
/* Post-DRAM-init workaround for B2/B3 silicon: if DramEnabled has not come
 * up within ~50 us, clear Width128, force a dummy CSR read, and (in ganged
 * mode) set Width128 again.
 */
static void AfterDramInit_D(struct DCTStatStruc *pDCTstat, u8 dct) {

	u32 val;
	u32 reg_off = 0x100 * dct;
	u32 dev = pDCTstat->dev_dct;

	if (pDCTstat->LogicalCPUID & (AMD_DR_B2 | AMD_DR_B3)) {
		mct_Wait(10000); /* Wait 50 us*/
		val = Get_NB32(dev, 0x110);
		if (!(val & (1 << DramEnabled))) {
			/* If 50 us expires while DramEnable =0 then do the following */
			val = Get_NB32(dev, 0x90 + reg_off);
			val &= ~(1 << Width128); /* Program Width128 = 0 */
			Set_NB32(dev, 0x90 + reg_off, val);

			val = Get_NB32_index_wait(dev, 0x98 + reg_off, 0x05); /* Perform dummy CSR read to F2x09C_x05 */

			if (pDCTstat->GangedMode) {
				val = Get_NB32(dev, 0x90 + reg_off);
				val |= 1 << Width128; /* Program Width128 = 1 (original comment wrongly said 0) */
				Set_NB32(dev, 0x90 + reg_off, val);
			}
		}
	}
}
3796
3797/* ==========================================================
3798 * 6-bit Bank Addressing Table
3799 * RR=rows-13 binary
3800 * B=Banks-2 binary
3801 * CCC=Columns-9 binary
3802 * ==========================================================
3803 * DCT CCCBRR Rows Banks Columns 64-bit CS Size
3804 * Encoding
3805 * 0000 000000 13 2 9 128MB
3806 * 0001 001000 13 2 10 256MB
3807 * 0010 001001 14 2 10 512MB
3808 * 0011 010000 13 2 11 512MB
3809 * 0100 001100 13 3 10 512MB
3810 * 0101 001101 14 3 10 1GB
3811 * 0110 010001 14 2 11 1GB
3812 * 0111 001110 15 3 10 2GB
3813 * 1000 010101 14 3 11 2GB
3814 * 1001 010110 15 3 11 4GB
3815 * 1010 001111 16 3 10 4GB
3816 * 1011 010111 16 3 11 8GB
3817 */
3818u8 crcCheck(u8 smbaddr)
3819{
3820 u8 byte_use;
3821 u8 Index;
3822 u16 CRC;
3823 u8 byte, i;
3824
3825 byte_use = mctRead_SPD(smbaddr, SPD_ByteUse);
3826 if (byte_use & 0x80)
3827 byte_use = 117;
3828 else
3829 byte_use = 126;
3830
3831 CRC = 0;
3832 for (Index = 0; Index < byte_use; Index ++) {
3833 byte = mctRead_SPD(smbaddr, Index);
3834 CRC ^= byte << 8;
3835 for (i=0; i<8; i++) {
3836 if (CRC & 0x8000) {
3837 CRC <<= 1;
3838 CRC ^= 0x1021;
3839 } else
3840 CRC <<= 1;
3841 }
3842 }
3843 return CRC == (mctRead_SPD(smbaddr, SPD_byte_127) << 8 | mctRead_SPD(smbaddr, SPD_byte_126));
3844}