/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2010 Advanced Micro Devices, Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */

/* Description: Main memory controller system configuration for DDR 3 */

/* KNOWN ISSUES - ERRATA
 *
 * Trtp is not calculated correctly when the controller is in 64-bit mode, it
 * is 1 busclock off. No fix planned. The controller is not ordinarily in
 * 64-bit mode.
 *
 * 32 Byte burst not supported. No fix planned. The controller is not
 * ordinarily in 64-bit mode.
 *
 * Trc precision does not use the extra JEDEC-defined fractional component.
 * Instead, Trc (coarse) is rounded up to the nearest 1 ns.
 *
 * Mini and Micro DIMM not supported. Only RDIMM, UDIMM and SO-DIMM defined
 * types are supported.
 */

static u8 ReconfigureDIMMspare_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void DQSTiming_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void LoadDQSSigTmgRegs_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void HTMemMapInit_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void MCTMemClr_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void DCTMemClr_Init_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void DCTMemClr_Sync_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void MCTMemClrSync_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static u8 NodePresent_D(u8 Node);
static void SyncDCTsReady_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void StartupDCT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void ClearDCT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u8 AutoCycTiming_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void GetPresetmaxF_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void SPDGetTCL_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u8 AutoConfig_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u8 PlatformSpec_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void SPDSetBanks_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void StitchMemory_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u16 Get_Fk_D(u8 k);
static u8 Get_DIMMAddress_D(struct DCTStatStruc *pDCTstat, u8 i);
static void mct_initDCT(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void mct_DramInit(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u8 mct_PlatformSpec(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u8 mct_BeforePlatformSpec(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void mct_SyncDCTsReady(struct DCTStatStruc *pDCTstat);
static void Get_Trdrd(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void mct_AfterGetCLT(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u8 mct_SPDCalcWidth(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void mct_AfterStitchMemory(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u8 mct_DIMMPresence(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void Set_OtherTiming(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void Get_Twrwr(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void Get_Twrrd(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void Get_TrwtTO(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void Get_TrwtWB(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void Get_DqsRcvEnGross_Diff(struct DCTStatStruc *pDCTstat,
			u32 dev, u32 index_reg);
static void Get_WrDatGross_Diff(struct DCTStatStruc *pDCTstat, u8 dct,
			u32 dev, u32 index_reg);
static u16 Get_DqsRcvEnGross_MaxMin(struct DCTStatStruc *pDCTstat,
			u32 dev, u32 index_reg, u32 index);
static void mct_FinalMCT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static u16 Get_WrDatGross_MaxMin(struct DCTStatStruc *pDCTstat, u8 dct,
			u32 dev, u32 index_reg, u32 index);
static void mct_InitialMCT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void mct_init(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void clear_legacy_Mode(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void mct_HTMemMapExt(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void SetCSTriState(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void SetCKETriState(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void SetODTTriState(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void InitPhyCompensation(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u32 mct_NodePresent_D(void);
static void mct_OtherTiming(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void mct_ResetDataStruct_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void mct_EarlyArbEn_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void mct_BeforeDramInit_Prod_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
void mct_ClrClToNB_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static u8 CheckNBCOFEarlyArbEn(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
void mct_ClrWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat);
static void mct_BeforeDQSTrain_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA);
static void AfterDramInit_D(struct DCTStatStruc *pDCTstat, u8 dct);
static void mct_ResetDLL_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void ProgDramMRSReg_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void mct_DramInit_Sw_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static u32 mct_DisDllShutdownSR(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u32 DramConfigLo, u8 dct);
static void mct_EnDllShutdownSR(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);

static u32 mct_MR1Odt_RDimm(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct, u32 MrsChipSel);
static u32 mct_DramTermDyn_RDimm(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dimm);
static u32 mct_SetDramConfigMisc2(struct DCTStatStruc *pDCTstat, u8 dct, u32 misc2);
static void mct_BeforeDQSTrainSamp(struct DCTStatStruc *pDCTstat);
static void mct_WriteLevelization_HW(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstatA);
static u8 Get_Latency_Diff(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct);
static void SyncSetting(struct DCTStatStruc *pDCTstat);
static u8 crcCheck(u8 smbaddr);
static void mct_ExtMCTConfig_Bx(struct DCTStatStruc *pDCTstat);
static void mct_ExtMCTConfig_Cx(struct DCTStatStruc *pDCTstat);

/* See mctAutoInitMCT header for index relationships to CL and T */
static const u16 Table_F_k[] = {00, 200, 266, 333, 400, 533};
static const u8 Tab_BankAddr[] = {0x3F, 0x01, 0x09, 0x3F, 0x3F, 0x11, 0x0A, 0x19, 0x12, 0x1A, 0x21, 0x22, 0x23};
static const u8 Table_DQSRcvEn_Offset[] = {0x00, 0x01, 0x10, 0x11, 0x2};

/****************************************************************************
 Describe how the platform maps MemClk pins to logical DIMMs. The MemClk pins
 are identified based on the BKDG definition of the Fn2x88[MemClkDis] bitmap.
 AGESA uses this value to disable unused MemClk pins to save power.

 If MEMCLK_MAPPING contains all zeroes, AGESA will use the default MemClkDis
 setting based on the package type.

 Example:
 The BKDG definition of the Fn2x88[MemClkDis] bitmap for the AM3 package is
 as follows:
	Bit  AM3/S1g3 pin name
	0    M[B,A]_CLK_H/L[0]
	1    M[B,A]_CLK_H/L[1]
	2    M[B,A]_CLK_H/L[2]
	3    M[B,A]_CLK_H/L[3]
	4    M[B,A]_CLK_H/L[4]
	5    M[B,A]_CLK_H/L[5]
	6    M[B,A]_CLK_H/L[6]
	7    M[B,A]_CLK_H/L[7]

 And the platform has the following routing:
	CS0  M[B,A]_CLK_H/L[4]
	CS1  M[B,A]_CLK_H/L[2]
	CS2  M[B,A]_CLK_H/L[3]
	CS3  M[B,A]_CLK_H/L[5]

 Then:
	;              CS0        CS1        CS2        CS3        CS4        CS5        CS6        CS7
	MEMCLK_MAPPING EQU 00010000b, 00000100b, 00001000b, 00100000b, 00000000b, 00000000b, 00000000b, 00000000b
*/

/* Note: If you are not sure about the pin mappings at the initial stage, MemClk
 * does not have to be disabled. Set all entries in the tables to 0xFF. */
static const u8 Tab_L1CLKDis[] = {0x20, 0x20, 0x10, 0x10, 0x08, 0x08, 0x04, 0x04};
static const u8 Tab_AM3CLKDis[] = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00};
static const u8 Tab_S1CLKDis[] = {0xA2, 0xA2, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
static const u8 Tab_ManualCLKDis[] = {0x10, 0x04, 0x08, 0x20, 0x00, 0x00, 0x00, 0x00};
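/* For reference: the Tab_ManualCLKDis entries above encode exactly the CS0-CS3
 * routing from the MEMCLK_MAPPING example (CS0 -> bit 4 = 0x10, CS1 -> bit 2 =
 * 0x04, CS2 -> bit 3 = 0x08, CS3 -> bit 5 = 0x20); the remaining chip selects
 * leave their MemClk bits clear. */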

static const u8 Table_Comp_Rise_Slew_20x[] = {7, 3, 2, 2, 0xFF};
static const u8 Table_Comp_Rise_Slew_15x[] = {7, 7, 3, 2, 0xFF};
static const u8 Table_Comp_Fall_Slew_20x[] = {7, 5, 3, 2, 0xFF};
static const u8 Table_Comp_Fall_Slew_15x[] = {7, 7, 5, 3, 0xFF};

static void mctAutoInitMCT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
{
	/*
	 * Memory may be mapped contiguously all the way up to 4GB (depending on setup
	 * options). It is the responsibility of the PCI subsystem to create an
	 * uncacheable IO region below 4GB and to adjust TOP_MEM downward prior to any
	 * IO mapping or accesses. The CPU sub-system has the same responsibility
	 * prior to accessing the LAPIC.
	 *
	 * Slot Number is an external convention, and is determined by the OEM with
	 * accompanying silk screening. The OEM may choose to use a slot number
	 * convention which is consistent with DIMM number conventions.
	 * All AMD engineering platforms do.
	 *
	 * Build Requirements:
	 * 1. MCT_SEG0_START and MCT_SEG0_END macros to begin and end the code segment,
	 *    defined in mcti.inc.
	 *
	 * Run-Time Requirements:
	 * 1. Complete Hypertransport Bus Configuration
	 * 2. SMBus Controller Initialized
	 * 3. BSP in Big Real Mode
	 * 4. Stack at SS:SP, located somewhere between A000:0000 and F000:FFFF
	 * 5. Checksummed or Valid NVRAM bits
	 * 6. MCG_CTL=-1, MC4_CTL_EN=0 for all CPUs
	 * 7. MCi_STS from shutdown/warm reset recorded (if desired) prior to entry
	 * 8. All var MTRRs reset to zero
	 * 9. State of NB_CFG.DisDatMsk set properly on all CPUs
	 * 10. All CPUs at 2GHz speed (unless DQS training is not installed).
	 * 11. All cHT links at max speed/width (unless DQS training is not installed).
	 *
	 *
	 * Global relationship between index values and item values:
	 *
	 *	pDCTstat.CASL	pDCTstat.Speed
	 *	j CL(j)		k F(k)
	 *	--------------------------
	 *	0 2.0		-  -
	 *	1 3.0		1  200 MHz
	 *	2 4.0		2  266 MHz
	 *	3 5.0		3  333 MHz
	 *	4 6.0		4  400 MHz
	 *	5 7.0		5  533 MHz
	 *	6 8.0		6  667 MHz
	 *	7 9.0		7  800 MHz
	 */
	u8 Node, NodesWmem;
	u32 node_sys_base;

restartinit:
	mctInitMemGPIOs_A_D();		/* Set any required GPIOs */
	NodesWmem = 0;
	node_sys_base = 0;
	for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
		struct DCTStatStruc *pDCTstat;
		pDCTstat = pDCTstatA + Node;
		pDCTstat->Node_ID = Node;
		pDCTstat->dev_host = PA_HOST(Node);
		pDCTstat->dev_map = PA_MAP(Node);
		pDCTstat->dev_dct = PA_DCT(Node);
		pDCTstat->dev_nbmisc = PA_NBMISC(Node);
		pDCTstat->NodeSysBase = node_sys_base;

		mct_init(pMCTstat, pDCTstat);
		mctNodeIDDebugPort_D();
		pDCTstat->NodePresent = NodePresent_D(Node);
		if (pDCTstat->NodePresent) {		/* See if Node is there */
			clear_legacy_Mode(pMCTstat, pDCTstat);
			pDCTstat->LogicalCPUID = mctGetLogicalCPUID_D(Node);

			mct_InitialMCT_D(pMCTstat, pDCTstat);

			mctSMBhub_Init(Node);		/* Switch SMBUS crossbar to proper node */

			mct_initDCT(pMCTstat, pDCTstat);
			if (pDCTstat->ErrCode == SC_FatalErr) {
				goto fatalexit;		/* any fatal errors? */
			} else if (pDCTstat->ErrCode < SC_StopError) {
				NodesWmem++;
			}
		}	/* if Node present */
		node_sys_base = pDCTstat->NodeSysBase;
		node_sys_base += (pDCTstat->NodeSysLimit + 2) & ~0x0F;
	}
	if (NodesWmem == 0) {
		printk(BIOS_DEBUG, "No Nodes?!\n");
		goto fatalexit;
	}

	printk(BIOS_DEBUG, "mctAutoInitMCT_D: SyncDCTsReady_D\n");
	SyncDCTsReady_D(pMCTstat, pDCTstatA);	/* Make sure DCTs are ready for accesses. */

	printk(BIOS_DEBUG, "mctAutoInitMCT_D: HTMemMapInit_D\n");
	HTMemMapInit_D(pMCTstat, pDCTstatA);	/* Map local memory into system address space. */
	mctHookAfterHTMap();

	printk(BIOS_DEBUG, "mctAutoInitMCT_D: CPUMemTyping_D\n");
	CPUMemTyping_D(pMCTstat, pDCTstatA);	/* Map dram into WB/UC CPU cacheability */
	mctHookAfterCPU();			/* Setup external northbridge(s) */

	printk(BIOS_DEBUG, "mctAutoInitMCT_D: DQSTiming_D\n");
	DQSTiming_D(pMCTstat, pDCTstatA);	/* Get Receiver Enable and DQS signal timing */

	printk(BIOS_DEBUG, "mctAutoInitMCT_D: UMAMemTyping_D\n");
	UMAMemTyping_D(pMCTstat, pDCTstatA);	/* Fix up for UMA sizing */

	printk(BIOS_DEBUG, "mctAutoInitMCT_D: OtherTiming\n");
	mct_OtherTiming(pMCTstat, pDCTstatA);

	if (ReconfigureDIMMspare_D(pMCTstat, pDCTstatA)) { /* RESET# if 1st pass of DIMM spare enabled */
		goto restartinit;
	}

	InterleaveNodes_D(pMCTstat, pDCTstatA);
	InterleaveChannels_D(pMCTstat, pDCTstatA);

	printk(BIOS_DEBUG, "mctAutoInitMCT_D: ECCInit_D\n");
	if (ECCInit_D(pMCTstat, pDCTstatA)) {	/* Setup ECC control and ECC check-bits */
		printk(BIOS_DEBUG, "mctAutoInitMCT_D: MCTMemClr_D\n");
		MCTMemClr_D(pMCTstat, pDCTstatA);
	}

	mct_FinalMCT_D(pMCTstat, pDCTstatA);
	printk(BIOS_DEBUG, "All Done\n");
	return;

fatalexit:
	die("mct_d: fatalexit");
}

static u8 ReconfigureDIMMspare_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
{
	u8 ret;

	if (mctGet_NVbits(NV_CS_SpareCTL)) {
		if (MCT_DIMM_SPARE_NO_WARM) {
			/* Do DIMM spare without a warm reset */
			if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
				LoadDQSSigTmgRegs_D(pMCTstat, pDCTstatA);
				ret = 0;
			} else {
				mct_ResetDataStruct_D(pMCTstat, pDCTstatA);
				pMCTstat->GStatus |= 1 << GSB_EnDIMMSpareNW;
				ret = 1;
			}
		} else {
			/* Do DIMM spare with a warm reset */
			if (mctGet_NVbits(NV_DQSTrainCTL))
				mctWarmReset_D();
			ret = 0;
		}
	} else {
		ret = 0;
	}

	return ret;
}

static void DQSTiming_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
{
	u8 nv_DQSTrainCTL;

	if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
		return;
	}

	nv_DQSTrainCTL = mctGet_NVbits(NV_DQSTrainCTL);
	/* FIXME: BOZO - DQS training every time */
	nv_DQSTrainCTL = 1;

	mct_BeforeDQSTrain_D(pMCTstat, pDCTstatA);
	phyAssistedMemFnceTraining(pMCTstat, pDCTstatA);

	if (nv_DQSTrainCTL) {
		mctHookBeforeAnyTraining(pMCTstat, pDCTstatA);
		/* TODO: should be in mctHookBeforeAnyTraining */
		_WRMSR(0x26C, 0x04040404, 0x04040404);
		_WRMSR(0x26D, 0x04040404, 0x04040404);
		_WRMSR(0x26E, 0x04040404, 0x04040404);
		_WRMSR(0x26F, 0x04040404, 0x04040404);
		mct_WriteLevelization_HW(pMCTstat, pDCTstatA);

		TrainReceiverEn_D(pMCTstat, pDCTstatA, FirstPass);

		mct_TrainDQSPos_D(pMCTstat, pDCTstatA);

		/* Second Pass never used for Barcelona! */
		/* TrainReceiverEn_D(pMCTstat, pDCTstatA, SecondPass); */

		mctSetEccDQSRcvrEn_D(pMCTstat, pDCTstatA);

		/* FIXME - currently uses calculated value TrainMaxReadLatency_D(pMCTstat, pDCTstatA); */
		mctHookAfterAnyTraining();
		mctSaveDQSSigTmg_D();

		MCTMemClr_D(pMCTstat, pDCTstatA);
	} else {
		mctGetDQSSigTmg_D();	/* get values into data structure */
		LoadDQSSigTmgRegs_D(pMCTstat, pDCTstatA);	/* load values into registers. */
		/* mctDoWarmResetMemClr_D(); */
		MCTMemClr_D(pMCTstat, pDCTstatA);
	}
}

static void LoadDQSSigTmgRegs_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
{
	u8 Node, Receiver, Channel, Dir, DIMM;
	u32 dev;
	u32 index_reg;
	u32 reg;
	u32 index;
	u32 val;
	u8 ByteLane;
	u8 txdqs;

	for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
		struct DCTStatStruc *pDCTstat;
		pDCTstat = pDCTstatA + Node;

		if (pDCTstat->DCTSysLimit) {
			dev = pDCTstat->dev_dct;
			for (Channel = 0; Channel < 2; Channel++) {
				/* there are four receiver pairs,
				   loosely associated with chipselects. */
				index_reg = 0x98 + Channel * 0x100;
				for (Receiver = 0; Receiver < 8; Receiver += 2) {
					/* Set Receiver Enable Values */
					mct_SetRcvrEnDly_D(pDCTstat,
						0, /* RcvrEnDly */
						1, /* FinalValue, From stack */
						Channel,
						Receiver,
						dev, index_reg,
						(Receiver >> 1) * 3 + 0x10, /* Addl_Index */
						2); /* Pass Second Pass ? */
					/* Restore Write levelization training data */
					for (ByteLane = 0; ByteLane < 9; ByteLane++) {
						txdqs = pDCTstat->CH_D_B_TxDqs[Channel][Receiver >> 1][ByteLane];
						index = Table_DQSRcvEn_Offset[ByteLane >> 1];
						index += (Receiver >> 1) * 3 + 0x10 + 0x20; /* Addl_Index */
						val = Get_NB32_index_wait(dev, 0x98 + 0x100 * Channel, index);
						if (ByteLane & 1) { /* odd byte lane */
							val &= ~(0xFF << 16);
							val |= txdqs << 16;
						} else {
							val &= ~0xFF;
							val |= txdqs;
						}
						Set_NB32_index_wait(dev, 0x98 + 0x100 * Channel, index, val);
					}
				}
			}
			for (Channel = 0; Channel < 2; Channel++) {
				SetEccDQSRcvrEn_D(pDCTstat, Channel);
			}

			for (Channel = 0; Channel < 2; Channel++) {
				u8 *p;
				index_reg = 0x98 + Channel * 0x100;

				/* NOTE:
				 * At 400, 533 and 667 MHz the controller supports DIMM0/1/2/3:
				 * set the config for DIMM0 and HW copies it to DIMM1/2/3;
				 * set it for DIMM1 and HW copies it to DIMM3.
				 * Rev A/B only support DIMM0/1 at 800MHz and above;
				 * + 0x100 to the next DIMM.
				 * Rev C supports DIMM0/1/2/3 at 800MHz and above;
				 * + 0x100 to the next DIMM.
				 */
				for (DIMM = 0; DIMM < 4; DIMM++) {
					if (DIMM == 0) {
						index = 0;	/* CHA Write Data Timing Low */
					} else {
						if (pDCTstat->Speed >= 4) {
							index = 0x100 * DIMM;
						} else {
							break;
						}
					}
					for (Dir = 0; Dir < 2; Dir++) { /* RD/WR */
						p = pDCTstat->CH_D_DIR_B_DQS[Channel][DIMM][Dir];
						val = stream_to_int(p);	/* CHA Read Data Timing High */
						Set_NB32_index_wait(dev, index_reg, index + 1, val);
						val = stream_to_int(p + 4); /* CHA Write Data Timing High */
						Set_NB32_index_wait(dev, index_reg, index + 2, val);
						val = *(p + 8);	/* CHA Write ECC Timing */
						Set_NB32_index_wait(dev, index_reg, index + 3, val);
						index += 4;
					}
				}
			}

			for (Channel = 0; Channel < 2; Channel++) {
				reg = 0x78 + Channel * 0x100;
				val = Get_NB32(dev, reg);
				val &= ~(0x3ff << 22);
				val |= ((u32)pDCTstat->CH_MaxRdLat[Channel] << 22);
				val &= ~(1 << DqsRcvEnTrain);
				Set_NB32(dev, reg, val);	/* program MaxRdLatency to correspond with current delay */
			}
		}
	}
}

static void HTMemMapInit_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
{
	u8 Node;
	u32 NextBase, BottomIO;
	u8 _MemHoleRemap, DramHoleBase, DramHoleOffset;
	u32 HoleSize, DramSelBaseAddr;

	u32 val;
	u32 base;
	u32 limit;
	u32 dev, devx;
	struct DCTStatStruc *pDCTstat;

	_MemHoleRemap = mctGet_NVbits(NV_MemHole);

	if (pMCTstat->HoleBase == 0) {
		DramHoleBase = mctGet_NVbits(NV_BottomIO);
	} else {
		DramHoleBase = pMCTstat->HoleBase >> (24-8);
	}

	BottomIO = DramHoleBase << (24-8);
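
	/* Illustration (example values, not a requirement): addresses in this
	 * routine are right-justified by 8 bits, i.e. bits 39:8. With
	 * NV_BottomIO = 0xE0 the hole starts at 0xE000_0000 (3.5GB), so
	 * BottomIO = 0xE0 << 16 = 0xE00000. A node whose memory spans that
	 * point gets HoleSize = _4GB_RJ8 - 0xE00000 = 0x200000 (512MB) added
	 * to its limit by the HW remap case below. */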

	NextBase = 0;
	pDCTstat = pDCTstatA + 0;
	dev = pDCTstat->dev_map;

	for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
		pDCTstat = pDCTstatA + Node;
		devx = pDCTstat->dev_map;
		DramSelBaseAddr = 0;
		pDCTstat = pDCTstatA + Node; /* ??? */
		if (!pDCTstat->GangedMode) {
			DramSelBaseAddr = pDCTstat->NodeSysLimit - pDCTstat->DCTSysLimit;
			/* In unganged mode, we must add DCT0 and DCT1 to DCTSysLimit */
			val = pDCTstat->NodeSysLimit;
			if ((val & 0xFF) == 0xFE) {
				DramSelBaseAddr++;
				val++;
			}
			pDCTstat->DCTSysLimit = val;
		}

		base = pDCTstat->DCTSysBase;
		limit = pDCTstat->DCTSysLimit;
		if (limit > base) {
			base += NextBase;
			limit += NextBase;
			DramSelBaseAddr += NextBase;
			printk(BIOS_DEBUG, " Node: %02x base: %02x limit: %02x BottomIO: %02x\n", Node, base, limit, BottomIO);

			if (_MemHoleRemap) {
				if ((base < BottomIO) && (limit >= BottomIO)) {
					/* HW Dram Remap */
					pDCTstat->Status |= 1 << SB_HWHole;
					pMCTstat->GStatus |= 1 << GSB_HWHole;
					pDCTstat->DCTSysBase = base;
					pDCTstat->DCTSysLimit = limit;
					pDCTstat->DCTHoleBase = BottomIO;
					pMCTstat->HoleBase = BottomIO;
					HoleSize = _4GB_RJ8 - BottomIO; /* HoleSize[39:8] */
					if ((DramSelBaseAddr > 0) && (DramSelBaseAddr < BottomIO))
						base = DramSelBaseAddr;
					val = ((base + HoleSize) >> (24-8)) & 0xFF;
					DramHoleOffset = val;
					val <<= 8; /* shl 16, rol 24 */
					val |= DramHoleBase << 24;
					val |= 1 << DramHoleValid;
					Set_NB32(devx, 0xF0, val); /* Dram Hole Address Reg */
					pDCTstat->DCTSysLimit += HoleSize;
					base = pDCTstat->DCTSysBase;
					limit = pDCTstat->DCTSysLimit;
				} else if (base == BottomIO) {
					/* SW Node Hoist */
					pMCTstat->GStatus |= 1 << GSB_SpIntRemapHole;
					pDCTstat->Status |= 1 << SB_SWNodeHole;
					pMCTstat->GStatus |= 1 << GSB_SoftHole;
					pMCTstat->HoleBase = base;
					limit -= base;
					base = _4GB_RJ8;
					limit += base;
					pDCTstat->DCTSysBase = base;
					pDCTstat->DCTSysLimit = limit;
				} else {
					/* No Remapping. Normal Contiguous mapping */
					pDCTstat->DCTSysBase = base;
					pDCTstat->DCTSysLimit = limit;
				}
			} else {
				/* No Remapping. Normal Contiguous mapping */
				pDCTstat->DCTSysBase = base;
				pDCTstat->DCTSysLimit = limit;
			}
			base |= 3;	/* set WE,RE fields */
			pMCTstat->SysLimit = limit;
		}
		Set_NB32(dev, 0x40 + (Node << 3), base); /* [Node] + Dram Base 0 */

		val = limit & 0xFFFF0000;
		val |= Node;
		Set_NB32(dev, 0x44 + (Node << 3), val);	/* set DstNode */

		printk(BIOS_DEBUG, " Node: %02x base: %02x limit: %02x\n", Node, base, limit);
		limit = pDCTstat->DCTSysLimit;
		if (limit) {
			NextBase = (limit & 0xFFFF0000) + 0x10000;
		}
	}

	/* Copy dram map from Node 0 to Nodes 1-7 */
	for (Node = 1; Node < MAX_NODES_SUPPORTED; Node++) {
		u32 reg;
		pDCTstat = pDCTstatA + Node;
		devx = pDCTstat->dev_map;

		if (pDCTstat->NodePresent) {
			reg = 0x40;	/* Dram Base 0 */
			do {
				val = Get_NB32(dev, reg);
				Set_NB32(devx, reg, val);
				reg += 4;
			} while (reg < 0x80);
		} else {
			break;	/* stop at first absent Node */
		}
	}

	/* Copy dram map to F1x120/124 */
	mct_HTMemMapExt(pMCTstat, pDCTstatA);
}

static void MCTMemClr_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
{

	/* Initiates a memory clear operation for all nodes. The mem clr
	 * is done in parallel. After the memclr is complete, each processor's
	 * status is checked to ensure that memclr has completed.
	 */
	u8 Node;
	struct DCTStatStruc *pDCTstat;

	if (!mctGet_NVbits(NV_DQSTrainCTL)) {
		/* FIXME: callback to wrapper: mctDoWarmResetMemClr_D */
	} else {	/* NV_DQSTrainCTL == 1 */
		for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
			pDCTstat = pDCTstatA + Node;

			if (pDCTstat->NodePresent) {
				DCTMemClr_Init_D(pMCTstat, pDCTstat);
			}
		}
		for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
			pDCTstat = pDCTstatA + Node;

			if (pDCTstat->NodePresent) {
				DCTMemClr_Sync_D(pMCTstat, pDCTstat);
			}
		}
	}
}

static void DCTMemClr_Init_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
{
	u32 val;
	u32 dev;
	u32 reg;

	/* Initiates a memory clear operation on one node */
	if (pDCTstat->DCTSysLimit) {
		dev = pDCTstat->dev_dct;
		reg = 0x110;

		do {
			val = Get_NB32(dev, reg);
		} while (val & (1 << MemClrBusy));

		val |= (1 << MemClrInit);
		Set_NB32(dev, reg, val);
	}
}

static void MCTMemClrSync_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
{
	/* Ensures that memory clear has completed on all nodes. */
	u8 Node;
	struct DCTStatStruc *pDCTstat;

	if (!mctGet_NVbits(NV_DQSTrainCTL)) {
		/* callback to wrapper: mctDoWarmResetMemClr_D */
	} else {	/* NV_DQSTrainCTL == 1 */
		for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
			pDCTstat = pDCTstatA + Node;

			if (pDCTstat->NodePresent) {
				DCTMemClr_Sync_D(pMCTstat, pDCTstat);
			}
		}
	}
}

static void DCTMemClr_Sync_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
{
	u32 val;
	u32 dev = pDCTstat->dev_dct;
	u32 reg;

	/* Ensure that a memory clear operation has completed on one node */
	if (pDCTstat->DCTSysLimit) {
		reg = 0x110;

		do {
			val = Get_NB32(dev, reg);
		} while (val & (1 << MemClrBusy));

		do {
			val = Get_NB32(dev, reg);
		} while (!(val & (1 << Dr_MemClrStatus)));
	}

	val = 0x0FE40FC0;		/* BKDG recommended */
	val |= MCCH_FlushWrOnStpGnt;	/* Set for S3 */
	Set_NB32(dev, 0x11C, val);
}

static u8 NodePresent_D(u8 Node)
{
	/*
	 * Determine if a single Hammer Node exists within the network.
	 */
	u32 dev;
	u32 val;
	u32 dword;
	u8 ret = 0;

	dev = PA_HOST(Node);	/* test device/vendor id at host bridge */
	val = Get_NB32(dev, 0);
	dword = mct_NodePresent_D();	/* FIXME: BOZO - 11001022h rev for F */
	if (val == dword) {	/* AMD Hammer Family CPU HT Configuration */
		if (oemNodePresent_D(Node, &ret))
			goto finish;
		/* Node ID register */
		val = Get_NB32(dev, 0x60);
		val &= 0x07;
		dword = Node;
		if (val == dword)	/* current nodeID = requested nodeID ? */
			ret = 1;
	}
finish:
	return ret;
}

static void DCTInit_D(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat, u8 dct)
{
	/*
	 * Initialize DRAM on single Athlon 64/Opteron Node.
	 */
	u8 stopDCTflag;
	u32 val;

	ClearDCT_D(pMCTstat, pDCTstat, dct);
	stopDCTflag = 1;	/* preload flag with 'disable' */
	/* enable DDR3 support */
	val = Get_NB32(pDCTstat->dev_dct, 0x94 + dct * 0x100);
	val |= 1 << Ddr3Mode;
	Set_NB32(pDCTstat->dev_dct, 0x94 + dct * 0x100, val);
	if (mct_DIMMPresence(pMCTstat, pDCTstat, dct) < SC_StopError) {
		printk(BIOS_DEBUG, "\t\tDCTInit_D: mct_DIMMPresence Done\n");
		if (mct_SPDCalcWidth(pMCTstat, pDCTstat, dct) < SC_StopError) {
			printk(BIOS_DEBUG, "\t\tDCTInit_D: mct_SPDCalcWidth Done\n");
			if (AutoCycTiming_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
				printk(BIOS_DEBUG, "\t\tDCTInit_D: AutoCycTiming_D Done\n");
				if (AutoConfig_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
					printk(BIOS_DEBUG, "\t\tDCTInit_D: AutoConfig_D Done\n");
					if (PlatformSpec_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
						printk(BIOS_DEBUG, "\t\tDCTInit_D: PlatformSpec_D Done\n");
						stopDCTflag = 0;
						if (!(pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW))) {
							printk(BIOS_DEBUG, "\t\tDCTInit_D: StartupDCT_D\n");
							StartupDCT_D(pMCTstat, pDCTstat, dct); /* yeaahhh! */
						}
					}
				}
			}
		}
	}

	if (stopDCTflag) {
		u32 reg_off = dct * 0x100;
		val = 1 << DisDramInterface;
		Set_NB32(pDCTstat->dev_dct, reg_off + 0x94, val);
		/* To maximize power savings when DisDramInterface=1b,
		   all of the MemClkDis bits should also be set. */
		val = 0xFF000000;
		Set_NB32(pDCTstat->dev_dct, reg_off + 0x88, val);
	} else {
		mct_EnDllShutdownSR(pMCTstat, pDCTstat, dct);
	}
}

static void SyncDCTsReady_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
{
	/* Wait (and block further access to dram) for all DCTs to be ready,
	 * by polling all InitDram bits and waiting for possible memory clear
	 * operations to be complete. Read the MemClkFreqVal bit to see if
	 * the DIMMs are present in this node.
	 */
	u8 Node;
	u32 val;

	for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
		struct DCTStatStruc *pDCTstat;
		pDCTstat = pDCTstatA + Node;
		mct_SyncDCTsReady(pDCTstat);
	}
	/* v6.1.3 */
	/* re-enable phy compensation engine when dram init is completed on all nodes. */
	for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
		struct DCTStatStruc *pDCTstat;
		pDCTstat = pDCTstatA + Node;
		if (pDCTstat->NodePresent) {
			if (pDCTstat->DIMMValidDCT[0] > 0 || pDCTstat->DIMMValidDCT[1] > 0) {
				/* re-enable phy compensation engine when dram init on both DCTs is completed. */
				val = Get_NB32_index_wait(pDCTstat->dev_dct, 0x98, 0x8);
				val &= ~(1 << DisAutoComp);
				Set_NB32_index_wait(pDCTstat->dev_dct, 0x98, 0x8, val);
			}
		}
	}
	/* wait 750us before any memory access can be made. */
	mct_Wait(15000);
}

static void StartupDCT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
{
	/* Read the MemClkFreqVal bit to see if the DIMMs are present in this node.
	 * If the DIMMs are present then set the DRAM Enable bit for this node.
	 *
	 * Setting dram init starts up the DCT state machine, initializes the
	 * dram devices with MRS commands, and kicks off any
	 * HW memory clear process that the chip is capable of. The sooner
	 * that dram init is set for all nodes, the faster the memory system
	 * initialization can complete. Thus, the init loop is unrolled into
	 * two loops so as to start the processes for non-BSP nodes sooner.
	 * This procedure will not wait for the process to finish.
	 * Synchronization is handled elsewhere.
	 */
	u32 val;
	u32 dev;
	u32 reg_off = dct * 0x100;

	dev = pDCTstat->dev_dct;
	val = Get_NB32(dev, 0x94 + reg_off);
	if (val & (1 << MemClkFreqVal)) {
		mctHookBeforeDramInit();	/* generalized Hook */
		if (!(pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)))
			mct_DramInit(pMCTstat, pDCTstat, dct);
		AfterDramInit_D(pDCTstat, dct);
		mctHookAfterDramInit();		/* generalized Hook */
	}
}

static void ClearDCT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
{
	u32 reg_end;
	u32 dev = pDCTstat->dev_dct;
	u32 reg = 0x40 + 0x100 * dct;
	u32 val = 0;

	if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
		reg_end = 0x78 + 0x100 * dct;
	} else {
		reg_end = 0xA4 + 0x100 * dct;
	}

	while (reg < reg_end) {
		if ((reg & 0xFF) == 0x90) {
			if (pDCTstat->LogicalCPUID & AMD_DR_Dx) {
				val = Get_NB32(dev, reg); /* get DRAMConfigLow */
				val |= 0x08000000; /* preserve value of DisDllShutdownSR for only Rev.D */
			}
		}
		Set_NB32(dev, reg, val);
		val = 0;
		reg += 4;
	}

	val = 0;
	dev = pDCTstat->dev_map;
	reg = 0xF0;
	Set_NB32(dev, reg, val);
}

static void SPD2ndTiming(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
{
	u8 i;
	u16 Twr, Trtp;
	u16 Trp, Trrd, Trcd, Tras, Trc;
	u8 Trfc[4];
	u16 Tfaw;
	u32 DramTimingLo, DramTimingHi;
	u8 tCK16x;
	u16 Twtr;
	u8 LDIMM;
	u8 MTB16x;
	u8 byte;
	u32 dword;
	u32 dev;
	u32 reg_off;
	u32 val;
	u16 smbaddr;

	/* Gather all DIMM mini-max values for cycle timing data */
	Trp = 0;
	Trrd = 0;
	Trcd = 0;
	Trtp = 0;
	Tras = 0;
	Trc = 0;
	Twr = 0;
	Twtr = 0;
	for (i = 0; i < 4; i++)
		Trfc[i] = 0;
	Tfaw = 0;

	for (i = 0; i < MAX_DIMMS_SUPPORTED; i++) {
		LDIMM = i >> 1;
		if (pDCTstat->DIMMValid & (1 << i)) {
			smbaddr = Get_DIMMAddress_D(pDCTstat, (dct + i));

			val = mctRead_SPD(smbaddr, SPD_MTBDivisor); /* MTB = Dividend/Divisor */
			MTB16x = ((mctRead_SPD(smbaddr, SPD_MTBDividend) & 0xFF) << 4);
			MTB16x /= val; /* transfer to MTB*16 */

			byte = mctRead_SPD(smbaddr, SPD_tRPmin);
			val = byte * MTB16x;
			if (Trp < val)
				Trp = val;

			byte = mctRead_SPD(smbaddr, SPD_tRRDmin);
			val = byte * MTB16x;
			if (Trrd < val)
				Trrd = val;

			byte = mctRead_SPD(smbaddr, SPD_tRCDmin);
			val = byte * MTB16x;
			if (Trcd < val)
				Trcd = val;

			byte = mctRead_SPD(smbaddr, SPD_tRTPmin);
			val = byte * MTB16x;
			if (Trtp < val)
				Trtp = val;

			byte = mctRead_SPD(smbaddr, SPD_tWRmin);
			val = byte * MTB16x;
			if (Twr < val)
				Twr = val;

			byte = mctRead_SPD(smbaddr, SPD_tWTRmin);
			val = byte * MTB16x;
			if (Twtr < val)
				Twtr = val;

			val = mctRead_SPD(smbaddr, SPD_Upper_tRAS_tRC) & 0xFF;
			val >>= 4;
			val <<= 8;
			val |= mctRead_SPD(smbaddr, SPD_tRCmin) & 0xFF;
			val *= MTB16x;
			if (Trc < val)
				Trc = val;

			byte = mctRead_SPD(smbaddr, SPD_Density) & 0xF;
			if (Trfc[LDIMM] < byte)
				Trfc[LDIMM] = byte;

			val = mctRead_SPD(smbaddr, SPD_Upper_tRAS_tRC) & 0xF;
			val <<= 8;
			val |= (mctRead_SPD(smbaddr, SPD_tRASmin) & 0xFF);
			val *= MTB16x;
			if (Tras < val)
				Tras = val;

			val = mctRead_SPD(smbaddr, SPD_Upper_tFAW) & 0xF;
			val <<= 8;
			val |= mctRead_SPD(smbaddr, SPD_tFAWmin) & 0xFF;
			val *= MTB16x;
			if (Tfaw < val)
				Tfaw = val;
		}	/* Dimm Present */
	}

	/* Convert DRAM CycleTiming values and store into DCT structure */
	byte = pDCTstat->DIMMAutoSpeed;
	if (byte == 7)
		tCK16x = 20;
	else if (byte == 6)
		tCK16x = 24;
	else if (byte == 5)
		tCK16x = 30;
	else
		tCK16x = 40;

	/* Notes:
	 1. All secondary time values given in SPDs are in binary with units of ns.
	 2. Some time values are scaled by 16, in order to have a least count of
	    0.25 ns (more accuracy). The JEDEC SPD spec. shows which ones are x1 and x4.
	 3. Internally to this SW, the cycle time, tCK16x, is scaled by 16 to match
	    those time values.
	 */
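
	/* Worked example of the x16 scaling (illustrative numbers, not from any
	 * specific DIMM): an SPD with MTB dividend/divisor = 1/8 gives an MTB of
	 * 0.125 ns, so MTB16x = (1 << 4) / 8 = 2. A tRPmin byte of 120 then
	 * yields 120 * 2 = 240 units of 1/16 ns, i.e. 15 ns. At DIMMAutoSpeed 4
	 * (400MHz, tCK = 2.5 ns) tCK16x = 40, and the round-up below gives
	 * ceil(240/40) = 6 busclocks for Trp. */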

	/* Tras */
	pDCTstat->DIMMTras = (u16)Tras;
	val = Tras / tCK16x;
	if (Tras % tCK16x) {	/* round up number of busclocks */
		val++;
	}
	if (val < Min_TrasT)
		val = Min_TrasT;
	else if (val > Max_TrasT)
		val = Max_TrasT;
	pDCTstat->Tras = val;

	/* Trp */
	pDCTstat->DIMMTrp = Trp;
	val = Trp / tCK16x;
	if (Trp % tCK16x) {	/* round up number of busclocks */
		val++;
	}
	if (val < Min_TrpT)
		val = Min_TrpT;
	else if (val > Max_TrpT)
		val = Max_TrpT;
	pDCTstat->Trp = val;

	/* Trrd */
	pDCTstat->DIMMTrrd = Trrd;
	val = Trrd / tCK16x;
	if (Trrd % tCK16x) {	/* round up number of busclocks */
		val++;
	}
	if (val < Min_TrrdT)
		val = Min_TrrdT;
	else if (val > Max_TrrdT)
		val = Max_TrrdT;
	pDCTstat->Trrd = val;

	/* Trcd */
	pDCTstat->DIMMTrcd = Trcd;
	val = Trcd / tCK16x;
	if (Trcd % tCK16x) {	/* round up number of busclocks */
		val++;
	}
	if (val < Min_TrcdT)
		val = Min_TrcdT;
	else if (val > Max_TrcdT)
		val = Max_TrcdT;
	pDCTstat->Trcd = val;

	/* Trc */
	pDCTstat->DIMMTrc = Trc;
	val = Trc / tCK16x;
	if (Trc % tCK16x) {	/* round up number of busclocks */
		val++;
	}
	if (val < Min_TrcT)
		val = Min_TrcT;
	else if (val > Max_TrcT)
		val = Max_TrcT;
	pDCTstat->Trc = val;

	/* Trtp */
	pDCTstat->DIMMTrtp = Trtp;
	val = Trtp / tCK16x;
	if (Trtp % tCK16x) {	/* round up number of busclocks */
		val++;
	}
	if (val < Min_TrtpT)
		val = Min_TrtpT;
	else if (val > Max_TrtpT)
		val = Max_TrtpT;
	pDCTstat->Trtp = val;

	/* Twr */
	pDCTstat->DIMMTwr = Twr;
	val = Twr / tCK16x;
	if (Twr % tCK16x) {	/* round up number of busclocks */
		val++;
	}
	if (val < Min_TwrT)
		val = Min_TwrT;
	else if (val > Max_TwrT)
		val = Max_TwrT;
	pDCTstat->Twr = val;

	/* Twtr */
	pDCTstat->DIMMTwtr = Twtr;
	val = Twtr / tCK16x;
	if (Twtr % tCK16x) {	/* round up number of busclocks */
		val++;
	}
	if (val < Min_TwtrT)
		val = Min_TwtrT;
	else if (val > Max_TwtrT)
		val = Max_TwtrT;
	pDCTstat->Twtr = val;

	/* Trfc0-Trfc3 */
	for (i = 0; i < 4; i++)
		pDCTstat->Trfc[i] = Trfc[i];

	/* Tfaw */
	pDCTstat->DIMMTfaw = Tfaw;
	val = Tfaw / tCK16x;
	if (Tfaw % tCK16x) {	/* round up number of busclocks */
		val++;
	}
	if (val < Min_TfawT)
		val = Min_TfawT;
	else if (val > Max_TfawT)
		val = Max_TfawT;
	pDCTstat->Tfaw = val;

	mctAdjustAutoCycTmg_D();

	/* Program DRAM Timing values */
	DramTimingLo = 0;	/* Dram Timing Low init */
	val = pDCTstat->CASL - 2; /* pDCTstat.CASL to reg. definition */
	DramTimingLo |= val;

	val = pDCTstat->Trcd - Bias_TrcdT;
	DramTimingLo |= val << 4;

	val = pDCTstat->Trp - Bias_TrpT;
	val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
	DramTimingLo |= val << 7;

	val = pDCTstat->Trtp - Bias_TrtpT;
	DramTimingLo |= val << 10;

	val = pDCTstat->Tras - Bias_TrasT;
	DramTimingLo |= val << 12;

	val = pDCTstat->Trc - Bias_TrcT;
	DramTimingLo |= val << 16;

	val = pDCTstat->Trrd - Bias_TrrdT;
	DramTimingLo |= val << 22;

	DramTimingHi = 0;	/* Dram Timing High init */
	val = pDCTstat->Twtr - Bias_TwtrT;
	DramTimingHi |= val << 8;

	val = 2;
	DramTimingHi |= val << 16;

	val = 0;
	for (i = 4; i > 0; i--) {
		val <<= 3;
		val |= Trfc[i-1];
	}
	DramTimingHi |= val << 20;
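
	/* Descriptive note on the loop above: it packs the four 3-bit Trfc
	 * encodings into DramTimingHi. After the four shift/OR steps Trfc[0]
	 * lands in bits 22:20, Trfc[1] in 25:23, Trfc[2] in 28:26 and Trfc[3]
	 * in 31:29 (derived from the loop order, not from any external
	 * register listing). */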

	dev = pDCTstat->dev_dct;
	reg_off = 0x100 * dct;
	/* Twr */
	val = pDCTstat->Twr;
	if (val == 10)
		val = 9;
	else if (val == 12)
		val = 10;
	val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
	val -= Bias_TwrT;
	val <<= 4;
	dword = Get_NB32(dev, 0x84 + reg_off);
	dword &= ~0x70;
	dword |= val;
	Set_NB32(dev, 0x84 + reg_off, dword);

	/* Tfaw */
	val = pDCTstat->Tfaw;
	val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
	val -= Bias_TfawT;
	val >>= 1;
	val <<= 28;
	dword = Get_NB32(dev, 0x94 + reg_off);
	dword &= ~0xf0000000;
	dword |= val;
	Set_NB32(dev, 0x94 + reg_off, dword);

	/* dev = pDCTstat->dev_dct; */
	/* reg_off = 0x100 * dct; */

	if (pDCTstat->Speed > 4) {
		val = Get_NB32(dev, 0x88 + reg_off);
		val &= 0xFF000000;
		DramTimingLo |= val;
	}
	Set_NB32(dev, 0x88 + reg_off, DramTimingLo);	/* DCT Timing Low */

	if (pDCTstat->Speed > 4) {
		DramTimingHi |= 1 << DisAutoRefresh;
	}
	DramTimingHi |= 0x000018FF;
	Set_NB32(dev, 0x8c + reg_off, DramTimingHi);	/* DCT Timing Hi */

	/* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
}

static u8 AutoCycTiming_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
{
	/* Initialize DCT Timing registers as per DIMM SPD.
	 * For primary timing (T, CL) use the best case T value.
	 * For secondary timing parameters, use the most aggressive settings
	 * of the slowest DIMM.
	 *
	 * There are three components to determining "maximum frequency":
	 * SPD component, Bus load component, and "Preset" max frequency
	 * component.
	 *
	 * The SPD component is a function of the min cycle time specified
	 * by each DIMM, and the interaction of cycle times from all DIMMs
	 * in conjunction with CAS latency. The SPD component only applies
	 * when user timing mode is 'Auto'.
	 *
	 * The Bus load component is a limiting factor determined by electrical
	 * characteristics on the bus as a result of a varying number of device
	 * loads. The Bus load component is specific to each platform but may
	 * also be a function of other factors. The bus load component only
	 * applies when user timing mode is 'Auto'.
	 *
	 * The Preset component is subdivided into three items and is
	 * the minimum of the set: Silicon revision, user limit
	 * setting when user timing mode is 'Auto' and memclock mode
	 * is 'Limit', OEM build specification of the maximum
	 * frequency. The Preset component only applies when user
	 * timing mode is 'Auto'.
	 */

	/* Get primary timing (CAS Latency and Cycle Time) */
	if (pDCTstat->Speed == 0) {
		mctGet_MaxLoadFreq(pDCTstat);

		/* and Factor in presets (setup options, Si cap, etc.) */
		GetPresetmaxF_D(pMCTstat, pDCTstat);

		/* Go get best T and CL as specified by DIMM mfgs. and OEM */
		SPDGetTCL_D(pMCTstat, pDCTstat, dct);
		/* skip callback mctForce800to1067_D */
		pDCTstat->Speed = pDCTstat->DIMMAutoSpeed;
		pDCTstat->CASL = pDCTstat->DIMMCASL;

	}
	mct_AfterGetCLT(pMCTstat, pDCTstat, dct);

	SPD2ndTiming(pMCTstat, pDCTstat, dct);

	printk(BIOS_DEBUG, "AutoCycTiming: Status %x\n", pDCTstat->Status);
	printk(BIOS_DEBUG, "AutoCycTiming: ErrStatus %x\n", pDCTstat->ErrStatus);
	printk(BIOS_DEBUG, "AutoCycTiming: ErrCode %x\n", pDCTstat->ErrCode);
	printk(BIOS_DEBUG, "AutoCycTiming: Done\n\n");

	mctHookAfterAutoCycTmg();

	return pDCTstat->ErrCode;
}

static void GetPresetmaxF_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
{
	/* Get max frequency from the OEM platform definition, from any user
	 * override (limiting) of max frequency, and from any Si Revision
	 * Specific information. Return the least of these three in
	 * DCTStatStruc.PresetmaxFreq.
	 */
	/* TODO: Set the proper max frequency in wrappers/mcti_d.c. */
	u16 proposedFreq;
	u16 word;

	/* Get CPU Si Revision defined limit (NPT) */
	proposedFreq = 800;	/* Rev F0 programmable max memclock is 800 */

	/* Get User defined limit if "limit" mode */
	if (mctGet_NVbits(NV_MCTUSRTMGMODE) == 1) {
		word = Get_Fk_D(mctGet_NVbits(NV_MemCkVal) + 1);
		if (word < proposedFreq)
			proposedFreq = word;

		/* Get Platform defined limit */
		word = mctGet_NVbits(NV_MAX_MEMCLK);
		if (word < proposedFreq)
			proposedFreq = word;

		word = pDCTstat->PresetmaxFreq;
		if (word > proposedFreq)
			word = proposedFreq;

		pDCTstat->PresetmaxFreq = word;
	}
	/* Check F3xE8[DdrMaxRate] for maximum DRAM data rate support */
}

static void SPDGetTCL_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
{
	/* Find the best T and CL primary timing parameter pair, per Mfg.,
	 * for the given set of DIMMs, and store into DCTStatStruc
	 * (.DIMMAutoSpeed and .DIMMCASL). See "Global relationship between
	 * index values and item values" for the definition of CAS latency
	 * index (j) and Frequency index (k).
	 */
	u8 i, CASLatLow, CASLatHigh;
	u16 tAAmin16x;
	u8 MTB16x;
	u16 tCKmin16x;
	u16 tCKproposed16x;
	u8 CLactual, CLdesired, CLT_Fail;

	u8 smbaddr, byte = 0, bytex = 0;

	CASLatLow = 0xFF;
	CASLatHigh = 0xFF;
	tAAmin16x = 0;
	tCKmin16x = 0;
	CLT_Fail = 0;

	for (i = 0; i < MAX_DIMMS_SUPPORTED; i++) {
		if (pDCTstat->DIMMValid & (1 << i)) {
			smbaddr = Get_DIMMAddress_D(pDCTstat, (dct + i));
			/* Step 1: Determine the common set of supported CAS Latency
			 * values for all modules on the memory channel using the CAS
			 * Latencies Supported in SPD bytes 14 and 15.
			 */
			byte = mctRead_SPD(smbaddr, SPD_CASLow);
			CASLatLow &= byte;
			byte = mctRead_SPD(smbaddr, SPD_CASHigh);
			CASLatHigh &= byte;
			/* Step 2: Determine tAAmin(all) which is the largest tAAmin
			   value for all modules on the memory channel (SPD byte 16). */
			byte = mctRead_SPD(smbaddr, SPD_MTBDivisor);

			MTB16x = ((mctRead_SPD(smbaddr, SPD_MTBDividend) & 0xFF) << 4);
			MTB16x /= byte; /* transfer to MTB*16 */

			byte = mctRead_SPD(smbaddr, SPD_tAAmin);
			if (tAAmin16x < byte * MTB16x)
				tAAmin16x = byte * MTB16x;
			/* Step 3: Determine tCKmin(all) which is the largest tCKmin
			   value for all modules on the memory channel (SPD byte 12). */
			byte = mctRead_SPD(smbaddr, SPD_tCKmin);

			if (tCKmin16x < byte * MTB16x)
				tCKmin16x = byte * MTB16x;
		}
	}
	/* calculate tCKproposed16x */
	tCKproposed16x = 16000 / pDCTstat->PresetmaxFreq;
	if (tCKmin16x > tCKproposed16x)
		tCKproposed16x = tCKmin16x;

	/* mctHookTwo1333DimmOverride(); */
	/* For UDIMM, if there are two DDR3-1333 DIMMs on the same channel,
	   downgrade the DDR speed to 1066. */

	/* TODO: get user manual tCK16x(Freq.) and overwrite current tCKproposed16x if manual. */
	if (tCKproposed16x == 20) {
		pDCTstat->TargetFreq = 7;
	} else if (tCKproposed16x <= 24) {
		pDCTstat->TargetFreq = 6;
		tCKproposed16x = 24;
	} else if (tCKproposed16x <= 30) {
		pDCTstat->TargetFreq = 5;
		tCKproposed16x = 30;
	} else {
		pDCTstat->TargetFreq = 4;
		tCKproposed16x = 40;
	}
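
	/* Example of the mapping above (illustrative only): with PresetmaxFreq
	 * = 667 (MemClk index 6), tCKproposed16x = 16000/667 = 23, which falls
	 * in the "<= 24" bucket, so TargetFreq = 6 (DDR3-1333) and
	 * tCKproposed16x is rounded up to 24, i.e. tCK = 1.5 ns. */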
	/* Running through this loop twice:
	   - First time, find tCL at the target frequency
	   - Second time, find tCL at 400MHz */

	for (;;) {
		CLT_Fail = 0;
		/* Step 4: For a proposed tCK value (tCKproposed) between tCKmin(all) and tCKmax,
		   determine the desired CAS Latency. If tCKproposed is not a standard JEDEC
		   value (2.5, 1.875, 1.5, or 1.25 ns) then tCKproposed must be adjusted to the
		   next lower standard tCK value for calculating CLdesired.
		   CLdesired = ceiling ( tAAmin(all) / tCKproposed )
		   where tAAmin is defined in Byte 16. The ceiling function requires that the
		   quotient be rounded up always. */
		CLdesired = tAAmin16x / tCKproposed16x;
		if (tAAmin16x % tCKproposed16x)
			CLdesired++;
		/* Step 5: Choose an actual CAS Latency (CLactual) that is greater than or equal
		   to CLdesired and is supported by all modules on the memory channel as
		   determined in step 1. If no such value exists, choose a higher tCKproposed
		   value and repeat steps 4 and 5 until a solution is found. */
		for (i = 0, CLactual = 4; i < 15; i++, CLactual++) {
			if ((CASLatHigh << 8 | CASLatLow) & (1 << i)) {
				if (CLdesired <= CLactual)
					break;
			}
		}
		if (i == 15)
			CLT_Fail = 1;
		/* Step 6: Once the calculation of CLactual is completed, the BIOS must also
		   verify that this CAS Latency value does not exceed tAAmax, which is 20 ns
		   for all DDR3 speed grades, by multiplying CLactual times tCKproposed. If
		   not, choose a lower CL value and repeat steps 5 and 6 until a solution is found. */
		if (CLactual * tCKproposed16x > 320)
			CLT_Fail = 1;
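
		/* Illustration (example numbers): with tAAmin16x = 210
		 * (tAAmin = 13.125 ns) and tCKproposed16x = 24,
		 * CLdesired = ceil(210/24) = 9, i.e. CL9 at DDR3-1333; the
		 * tAAmax check passes since 9 * 24 = 216 <= 320 (20 ns). */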
		/* get CL and T */
		if (!CLT_Fail) {
			bytex = CLactual - 2;
			if (tCKproposed16x == 20)
				byte = 7;
			else if (tCKproposed16x == 24)
				byte = 6;
			else if (tCKproposed16x == 30)
				byte = 5;
			else
				byte = 4;
		} else {
			/* mctHookManualCLOverride */
			/* TODO: */
		}

		if (tCKproposed16x != 40) {
			if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
				pDCTstat->DIMMAutoSpeed = byte;
				pDCTstat->DIMMCASL = bytex;
				break;
			} else {
				pDCTstat->TargetCASL = bytex;
				tCKproposed16x = 40;
			}
		} else {
			pDCTstat->DIMMAutoSpeed = byte;
			pDCTstat->DIMMCASL = bytex;
			break;
		}
	}

	printk(BIOS_DEBUG, "SPDGetTCL_D: DIMMCASL %x\n", pDCTstat->DIMMCASL);
	printk(BIOS_DEBUG, "SPDGetTCL_D: DIMMAutoSpeed %x\n", pDCTstat->DIMMAutoSpeed);

	printk(BIOS_DEBUG, "SPDGetTCL_D: Status %x\n", pDCTstat->Status);
	printk(BIOS_DEBUG, "SPDGetTCL_D: ErrStatus %x\n", pDCTstat->ErrStatus);
	printk(BIOS_DEBUG, "SPDGetTCL_D: ErrCode %x\n", pDCTstat->ErrCode);
	printk(BIOS_DEBUG, "SPDGetTCL_D: Done\n\n");
}

static u8 PlatformSpec_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
{
	u32 dev;
	u32 reg;
	u32 val;

	mctGet_PS_Cfg_D(pMCTstat, pDCTstat, dct);

	if (pDCTstat->GangedMode == 1) {
		mctGet_PS_Cfg_D(pMCTstat, pDCTstat, 1);
		mct_BeforePlatformSpec(pMCTstat, pDCTstat, 1);
	}

	if (pDCTstat->_2Tmode == 2) {
		dev = pDCTstat->dev_dct;
		reg = 0x94 + 0x100 * dct; /* Dram Configuration Hi */
		val = Get_NB32(dev, reg);
		val |= 1 << 20;	/* 2T CMD mode */
		Set_NB32(dev, reg, val);
	}

	mct_BeforePlatformSpec(pMCTstat, pDCTstat, dct);
	mct_PlatformSpec(pMCTstat, pDCTstat, dct);
	if (pDCTstat->DIMMAutoSpeed == 4)
		InitPhyCompensation(pMCTstat, pDCTstat, dct);
	mctHookAfterPSCfg();

	return pDCTstat->ErrCode;
}

static u8 AutoConfig_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
{
	u32 DramControl, DramTimingLo, Status;
	u32 DramConfigLo, DramConfigHi, DramConfigMisc, DramConfigMisc2;
	u32 val;
	u32 reg_off;
	u32 dev;
	u16 word;
	u32 dword;
	u8 byte;

	DramConfigLo = 0;
	DramConfigHi = 0;
	DramConfigMisc = 0;
	DramConfigMisc2 = 0;

	/* set bank addressing and Masks, plus CS pops */
	SPDSetBanks_D(pMCTstat, pDCTstat, dct);
	if (pDCTstat->ErrCode == SC_StopError)
		goto AutoConfig_exit;

	/* map chip-selects into local address space */
	StitchMemory_D(pMCTstat, pDCTstat, dct);
	InterleaveBanks_D(pMCTstat, pDCTstat, dct);

	/* temp image of status (for convenience). RO usage! */
	Status = pDCTstat->Status;

	dev = pDCTstat->dev_dct;
	reg_off = 0x100 * dct;


	/* Build Dram Control Register Value */
	DramConfigMisc2 = Get_NB32(dev, 0xA8 + reg_off);	/* Dram Config Misc 2 */
	DramControl = Get_NB32(dev, 0x78 + reg_off);		/* Dram Control */

	/* FIXME: Skip mct_checkForDxSupport */
	/* REV_CALL mct_DoRdPtrInit if not Dx */
	if (pDCTstat->LogicalCPUID & AMD_DR_Bx)
		val = 5;
	else
		val = 6;
	DramControl &= ~0xFF;
	DramControl |= val;	/* RdPtrInit = 6 for Cx CPU */

	if (mctGet_NVbits(NV_CLKHZAltVidC3))
		DramControl |= 1 << 16; /* check */

	DramControl |= 0x00002A00;

	/* FIXME: Skip for Ax versions */
	/* callback not required - if (!mctParityControl_D()) */
	if (Status & (1 << SB_128bitmode))
		DramConfigLo |= 1 << Width128;	/* 128-bit mode (normal) */

	word = dct;
	dword = X4Dimm;
	while (word < 8) {
		if (pDCTstat->Dimmx4Present & (1 << word))
			DramConfigLo |= 1 << dword;	/* X4Dimm[3:0] */
		word++;
		word++;
		dword++;
	}

	if (!(Status & (1 << SB_Registered)))
		DramConfigLo |= 1 << UnBuffDimm;	/* Unbuffered DIMMs */

	if (mctGet_NVbits(NV_ECC_CAP))
		if (Status & (1 << SB_ECCDIMMs))
			if (mctGet_NVbits(NV_ECC))
				DramConfigLo |= 1 << DimmEcEn;

	DramConfigLo = mct_DisDllShutdownSR(pMCTstat, pDCTstat, DramConfigLo, dct);

	/* Build Dram Config Hi Register Value */
	dword = pDCTstat->Speed;
	DramConfigHi |= dword - 1;	/* get MemClk encoding */
	DramConfigHi |= 1 << MemClkFreqVal;

	if (Status & (1 << SB_Registered))
		if ((pDCTstat->Dimmx4Present != 0) && (pDCTstat->Dimmx8Present != 0))
			/* set only if x8 Registered DIMMs in System */
			DramConfigHi |= 1 << RDqsEn;

	if (mctGet_NVbits(NV_CKE_CTL))
		/* Chip Select control of CKE */
		DramConfigHi |= 1 << 16;

	/* Control Bank Swizzle */
	if (0) /* callback not needed mctBankSwizzleControl_D() */
		DramConfigHi &= ~(1 << BankSwizzleMode);
	else
		DramConfigHi |= 1 << BankSwizzleMode; /* recommended setting (default) */

	/* Check for Quadrank DIMM presence */
	if (pDCTstat->DimmQRPresent != 0) {
		byte = mctGet_NVbits(NV_4RANKType);
		if (byte == 2)
			DramConfigHi |= 1 << 17;	/* S4 (4-Rank SO-DIMMs) */
		else if (byte == 1)
			DramConfigHi |= 1 << 18;	/* R4 (4-Rank Registered DIMMs) */
	}

	if (0) /* callback not needed mctOverrideDcqBypMax_D */
		val = mctGet_NVbits(NV_BYPMAX);
	else
		val = 0x0f; /* recommended setting (default) */
	DramConfigHi |= val << 24;

	if (pDCTstat->LogicalCPUID & (AMD_DR_Cx | AMD_DR_Bx))
		DramConfigHi |= 1 << DcqArbBypassEn;

	/* Build MemClkDis Value from Dram Timing Lo and
	   Dram Config Misc Registers
	   1. We will assume that the MemClkDis field has been preset prior to
	      this point.
	   2. We will only set MemClkDis bits if a DIMM is NOT present AND if:
	      NV_AllMemClks <> 0 AND SB_DiagClks == 0 */

	/* Dram Timing Low (owns Clock Enable bits) */
	DramTimingLo = Get_NB32(dev, 0x88 + reg_off);
	if (mctGet_NVbits(NV_AllMemClks) == 0) {
		/* Special Jedec SPD diagnostic bit - "enable all clocks" */
		if (!(pDCTstat->Status & (1 << SB_DiagClks))) {
			const u8 *p;
			const u32 *q;
			p = Tab_ManualCLKDis;
			q = (u32 *)p;

			byte = mctGet_NVbits(NV_PACK_TYPE);
			if (byte == PT_L1)
				p = Tab_L1CLKDis;
			else if (byte == PT_M2 || byte == PT_AS)
				p = Tab_AM3CLKDis;
			else
				p = Tab_S1CLKDis;

			dword = 0;
			byte = 0xFF;
			while (dword < MAX_CS_SUPPORTED) {
				if (pDCTstat->CSPresent & (1 << dword)) {
					/* re-enable clocks for the enabled CS */
					val = p[dword];
					byte &= ~val;
				}
				dword++;
			}
			DramTimingLo |= byte << 24;
		}
	}

	printk(BIOS_DEBUG, "AutoConfig_D: DramControl: %x\n", DramControl);
	printk(BIOS_DEBUG, "AutoConfig_D: DramTimingLo: %x\n", DramTimingLo);
	printk(BIOS_DEBUG, "AutoConfig_D: DramConfigMisc: %x\n", DramConfigMisc);
	printk(BIOS_DEBUG, "AutoConfig_D: DramConfigMisc2: %x\n", DramConfigMisc2);
	printk(BIOS_DEBUG, "AutoConfig_D: DramConfigLo: %x\n", DramConfigLo);
	printk(BIOS_DEBUG, "AutoConfig_D: DramConfigHi: %x\n", DramConfigHi);

	/* Write Values to the registers */
	Set_NB32(dev, 0x78 + reg_off, DramControl);
	Set_NB32(dev, 0x88 + reg_off, DramTimingLo);
	Set_NB32(dev, 0xA0 + reg_off, DramConfigMisc);
	DramConfigMisc2 = mct_SetDramConfigMisc2(pDCTstat, dct, DramConfigMisc2);
	Set_NB32(dev, 0xA8 + reg_off, DramConfigMisc2);
	Set_NB32(dev, 0x90 + reg_off, DramConfigLo);
	ProgDramMRSReg_D(pMCTstat, pDCTstat, dct);
	dword = Get_NB32(dev, 0x94 + reg_off);
	DramConfigHi |= dword;
	mct_SetDramConfigHi_D(pDCTstat, dct, DramConfigHi);
	mct_EarlyArbEn_D(pMCTstat, pDCTstat, dct);
	mctHookAfterAutoCfg();

	/* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */

	printk(BIOS_DEBUG, "AutoConfig: Status %x\n", pDCTstat->Status);
	printk(BIOS_DEBUG, "AutoConfig: ErrStatus %x\n", pDCTstat->ErrStatus);
	printk(BIOS_DEBUG, "AutoConfig: ErrCode %x\n", pDCTstat->ErrCode);
	printk(BIOS_DEBUG, "AutoConfig: Done\n\n");
AutoConfig_exit:
	return pDCTstat->ErrCode;
}

1707static void SPDSetBanks_D(struct MCTStatStruc *pMCTstat,
1708 struct DCTStatStruc *pDCTstat, u8 dct)
1709{
1710 /* Set bank addressing, program Mask values and build a chip-select
1711 * population map. This routine programs PCI 0:24N:2x80 config register
1712 * and PCI 0:24N:2x60,64,68,6C config registers (CS Mask 0-3).
1713 */
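	/* A worked example of the F2x80 layout programmed below (values
	 * assumed for illustration): each even/odd chip-select pair owns a
	 * 4-bit field, so if the CS2/CS3 pair resolves to bank-address code
	 * 0x7, it lands in bits [7:4]:
	 *
	 *   dword = 0x7;
	 *   dword <<= (2 << 1);    // ChipSel = 2 -> shift by 4
	 *   BankAddrReg |= dword;  // F2x80[7:4] = 0x7
	 */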
1714 u8 ChipSel, Rows, Cols, Ranks, Banks;
1715 u32 BankAddrReg, csMask;
1716
1717 u32 val;
1718 u32 reg;
1719 u32 dev;
1720 u32 reg_off;
1721 u8 byte;
1722 u16 word;
1723 u32 dword;
1724 u16 smbaddr;
1725
1726 dev = pDCTstat->dev_dct;
1727 reg_off = 0x100 * dct;
1728
1729 BankAddrReg = 0;
1730 for (ChipSel = 0; ChipSel < MAX_CS_SUPPORTED; ChipSel+=2) {
1731 byte = ChipSel;
1732 if ((pDCTstat->Status & (1 << SB_64MuxedMode)) && ChipSel >=4)
1733 byte -= 3;
1734
1735 if (pDCTstat->DIMMValid & (1<<byte)) {
1736 smbaddr = Get_DIMMAddress_D(pDCTstat, (ChipSel + dct));
1737
1738 byte = mctRead_SPD(smbaddr, SPD_Addressing);
1739 Rows = (byte >> 3) & 0x7; /* Rows:0b=12-bit,... */
1740 Cols = byte & 0x7; /* Cols:0b=9-bit,... */
1741
1742 byte = mctRead_SPD(smbaddr, SPD_Density);
1743 Banks = (byte >> 4) & 7; /* Banks:0b=3-bit,... */
1744
1745 byte = mctRead_SPD(smbaddr, SPD_Organization);
1746 Ranks = ((byte >> 3) & 7) + 1;
1747
1748 /* Configure Bank encoding
1749 * Use a 6-bit key into a lookup table.
1750 * Key (index) = RRRBCC, where CC is the number of Columns minus 9,
1751 * RRR is the number of Rows minus 12, and B is the number of banks
1752 * minus 3.
1753 */
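			/* Worked example (DIMM geometry assumed for illustration):
			 * 14 row bits, 10 column bits, 8 banks gives Rows=2, Cols=1,
			 * B=0, so the key is
			 *   byte = 1 | (0 << 2) | (2 << 3) = 0x11  (RRRBCC = 010 0 01)
			 * which is then matched against Tab_BankAddr[].
			 */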
1754 byte = Cols;
1755 if (Banks == 1)
1756 byte |= 4;
1757
1758 byte |= Rows << 3; /* RRRBCC internal encode */
1759
1760 for (dword=0; dword < 13; dword++) {
1761 if (byte == Tab_BankAddr[dword])
1762 break;
1763 }
1764
1765 if (dword > 12)
1766 continue;
1767
1768	/* shift the bank-address code into this CS pair's 4-bit field */
1769	dword <<= (ChipSel<<1);
1770 BankAddrReg |= dword;
1771
1772	/* Mask value = (2^(rows+cols+banks+3) - 1) >> 8,
1773	 i.e. 2^(rows+cols+banks-5) - 1 */
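			/* Worked example (same assumed DIMM as above): 14 rows +
			 * 10 cols + 3 bank bits - 5 = 22, so csMask becomes
			 * (1 << 22) - 1 = 0x3FFFFF; in the code below that is
			 * byte = 2 + 1 + 21 - 2 = 22 (23 in 128-bit mode).
			 */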
1774 csMask = 0;
1775
1776	byte = Rows + Cols; /* encoded (rows-12) + (cols-9) */
1777	byte += 21; /* add the 12 minimum rows and 9 minimum cols */
1778	byte -= 2; /* + 3 bank bits - 5: exponent = rows+cols+banks-5 */
1779
1780 if (pDCTstat->Status & (1 << SB_128bitmode))
1781 byte++; /* double mask size if in 128-bit mode*/
1782
1783 csMask |= 1 << byte;
1784 csMask--;
1785
1786 /*set ChipSelect population indicator even bits*/
1787 pDCTstat->CSPresent |= (1<<ChipSel);
1788 if (Ranks >= 2)
1789 /*set ChipSelect population indicator odd bits*/
1790 pDCTstat->CSPresent |= 1 << (ChipSel + 1);
1791
1792 reg = 0x60+(ChipSel<<1) + reg_off; /*Dram CS Mask Register */
1793 val = csMask;
1794 val &= 0x1FF83FE0; /* Mask out reserved bits.*/
1795 Set_NB32(dev, reg, val);
1796 } else {
1797 if (pDCTstat->DIMMSPDCSE & (1<<ChipSel))
1798 pDCTstat->CSTestFail |= (1<<ChipSel);
1799 } /* if DIMMValid*/
1800 } /* while ChipSel*/
1801
1802 SetCSTriState(pMCTstat, pDCTstat, dct);
1803 SetCKETriState(pMCTstat, pDCTstat, dct);
1804 SetODTTriState(pMCTstat, pDCTstat, dct);
1805
1806 if (pDCTstat->Status & (1 << SB_128bitmode)) {
1807	SetCSTriState(pMCTstat, pDCTstat, 1); /* force dct1 */
1808	SetCKETriState(pMCTstat, pDCTstat, 1); /* force dct1 */
1809	SetODTTriState(pMCTstat, pDCTstat, 1); /* force dct1 */
1810 }
1811
1812 word = pDCTstat->CSPresent;
1813 mctGetCS_ExcludeMap(); /* mask out specified chip-selects */
1814 word ^= pDCTstat->CSPresent;
1815 pDCTstat->CSTestFail |= word; /* enable ODT to disabled DIMMs */
1816 if (!pDCTstat->CSPresent)
1817 pDCTstat->ErrCode = SC_StopError;
1818
1819 reg = 0x80 + reg_off; /* Bank Addressing Register */
1820 Set_NB32(dev, reg, BankAddrReg);
1821
1822 pDCTstat->CSPresent_DCT[dct] = pDCTstat->CSPresent;
1823 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
1824
1825 printk(BIOS_DEBUG, "SPDSetBanks: CSPresent %x\n", pDCTstat->CSPresent_DCT[dct]);
1826 printk(BIOS_DEBUG, "SPDSetBanks: Status %x\n", pDCTstat->Status);
1827 printk(BIOS_DEBUG, "SPDSetBanks: ErrStatus %x\n", pDCTstat->ErrStatus);
1828 printk(BIOS_DEBUG, "SPDSetBanks: ErrCode %x\n", pDCTstat->ErrCode);
1829 printk(BIOS_DEBUG, "SPDSetBanks: Done\n\n");
1830}
1831
1832static void SPDCalcWidth_D(struct MCTStatStruc *pMCTstat,
1833 struct DCTStatStruc *pDCTstat)
1834{
1835 /* Per SPDs, check the symmetry of DIMM pairs (DIMM on Channel A
1836 * matching with DIMM on Channel B), the overall DIMM population,
1837 * and determine the width mode: 64-bit, 64-bit muxed, 128-bit.
1838 */
1839 u8 i;
1840 u8 smbaddr, smbaddr1;
1841 u8 byte, byte1;
1842
1843 /* Check Symmetry of Channel A and Channel B DIMMs
1844 (must be matched for 128-bit mode).*/
1845 for (i=0; i < MAX_DIMMS_SUPPORTED; i += 2) {
1846 if ((pDCTstat->DIMMValid & (1 << i)) && (pDCTstat->DIMMValid & (1<<(i+1)))) {
1847 smbaddr = Get_DIMMAddress_D(pDCTstat, i);
1848 smbaddr1 = Get_DIMMAddress_D(pDCTstat, i+1);
1849
1850 byte = mctRead_SPD(smbaddr, SPD_Addressing) & 0x7;
1851 byte1 = mctRead_SPD(smbaddr1, SPD_Addressing) & 0x7;
1852 if (byte != byte1) {
1853 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1854 break;
1855 }
1856
1857 byte = mctRead_SPD(smbaddr, SPD_Density) & 0x0f;
1858 byte1 = mctRead_SPD(smbaddr1, SPD_Density) & 0x0f;
1859 if (byte != byte1) {
1860 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1861 break;
1862 }
1863
1864 byte = mctRead_SPD(smbaddr, SPD_Organization) & 0x7;
1865 byte1 = mctRead_SPD(smbaddr1, SPD_Organization) & 0x7;
1866 if (byte != byte1) {
1867 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1868 break;
1869 }
1870
1871 byte = (mctRead_SPD(smbaddr, SPD_Organization) >> 3) & 0x7;
1872 byte1 = (mctRead_SPD(smbaddr1, SPD_Organization) >> 3) & 0x7;
1873 if (byte != byte1) {
1874 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1875 break;
1876 }
1877
1878 byte = mctRead_SPD(smbaddr, SPD_DMBANKS) & 7; /* #ranks-1 */
1879 byte1 = mctRead_SPD(smbaddr1, SPD_DMBANKS) & 7; /* #ranks-1 */
1880 if (byte != byte1) {
1881 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1882 break;
1883 }
1884
1885 }
1886 }
1887
1888}
1889
1890static void StitchMemory_D(struct MCTStatStruc *pMCTstat,
1891 struct DCTStatStruc *pDCTstat, u8 dct)
1892{
1893 /* Requires that Mask values for each bank be programmed first and that
1894 * the chip-select population indicator is correctly set.
1895 */
1896 u8 b = 0;
1897 u32 nxtcsBase, curcsBase;
1898 u8 p, q;
1899	u32 BiggestBank;
1900 u8 _DSpareEn;
1901
1902 u16 word;
1903 u32 dev;
1904 u32 reg;
1905 u32 reg_off;
1906 u32 val;
1907
1908 dev = pDCTstat->dev_dct;
1909 reg_off = 0x100 * dct;
1910
1911 _DSpareEn = 0;
1912
1913 /* CS Sparing 1=enabled, 0=disabled */
1914 if (mctGet_NVbits(NV_CS_SpareCTL) & 1) {
1915 if (MCT_DIMM_SPARE_NO_WARM) {
1916	/* Do not warm-reset DIMM spare */
1917 if (pMCTstat->GStatus & 1 << GSB_EnDIMMSpareNW) {
1918 word = pDCTstat->CSPresent;
1919 val = bsf(word);
1920 word &= ~(1<<val);
1921 if (word)
1922 /* Make sure at least two chip-selects are available */
1923 _DSpareEn = 1;
1924 else
1925 pDCTstat->ErrStatus |= 1 << SB_SpareDis;
1926 }
1927 } else {
1928 if (!mctGet_NVbits(NV_DQSTrainCTL)) { /*DQS Training 1=enabled, 0=disabled */
1929 word = pDCTstat->CSPresent;
1930 val = bsf(word);
1931 word &= ~(1 << val);
1932 if (word)
1933 /* Make sure at least two chip-selects are available */
1934 _DSpareEn = 1;
1935 else
1936 pDCTstat->ErrStatus |= 1 << SB_SpareDis;
1937 }
1938 }
1939 }
1940
1941 nxtcsBase = 0; /* Next available cs base ADDR[39:8] */
1942 for (p=0; p < MAX_DIMMS_SUPPORTED; p++) {
1943 BiggestBank = 0;
1944 for (q = 0; q < MAX_CS_SUPPORTED; q++) { /* from DIMMS to CS */
1945 if (pDCTstat->CSPresent & (1 << q)) { /* bank present? */
1946 reg = 0x40 + (q << 2) + reg_off; /* Base[q] reg.*/
1947 val = Get_NB32(dev, reg);
1948	if (!(val & 3)) { /* only consider banks not yet enabled or spared (CSEnable|Spare clear) */
1949 reg = 0x60 + (q << 1) + reg_off; /*Mask[q] reg.*/
1950 val = Get_NB32(dev, reg);
1951 val >>= 19;
1952 val++;
1953 val <<= 19;
1955 if (val > BiggestBank) {
1956 /*Bingo! possibly Map this chip-select next! */
1957 BiggestBank = val;
1958 b = q;
1959 }
1960 }
1961 } /*if bank present */
1962 } /* while q */
1963	if (BiggestBank != 0) {
1964 curcsBase = nxtcsBase; /* curcsBase=nxtcsBase*/
1965 /* DRAM CS Base b Address Register offset */
1966 reg = 0x40 + (b << 2) + reg_off;
1967 if (_DSpareEn) {
1968 BiggestBank = 0;
1969 val = 1 << Spare; /* Spare Enable*/
1970 } else {
1971 val = curcsBase;
1972 val |= 1 << CSEnable; /* Bank Enable */
1973 }
1974 if (((reg - 0x40) >> 2) & 1) {
1975 if (!(pDCTstat->Status & (1 << SB_Registered))) {
1976 u16 dimValid;
1977 dimValid = pDCTstat->DIMMValid;
1978 if (dct & 1)
1979 dimValid <<= 1;
1980 if ((dimValid & pDCTstat->MirrPresU_NumRegR) != 0) {
1981 val |= 1 << onDimmMirror;
1982 }
1983 }
1984 }
1985 Set_NB32(dev, reg, val);
1986 if (_DSpareEn)
1987 _DSpareEn = 0;
1988 else
1989 /* let nxtcsBase+=Size[b] */
1990 nxtcsBase += BiggestBank;
1991 }
1992
1993 /* bank present but disabled?*/
1994	if (pDCTstat->CSTestFail & (1 << p)) {
1995 /* DRAM CS Base b Address Register offset */
1996 reg = (p << 2) + 0x40 + reg_off;
1997 val = 1 << TestFail;
1998 Set_NB32(dev, reg, val);
1999 }
2000 }
2001
2002 if (nxtcsBase) {
2003 pDCTstat->DCTSysLimit = nxtcsBase - 1;
2004 mct_AfterStitchMemory(pMCTstat, pDCTstat, dct);
2005 }
2006
2007 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
2008
2009 printk(BIOS_DEBUG, "StitchMemory: Status %x\n", pDCTstat->Status);
2010 printk(BIOS_DEBUG, "StitchMemory: ErrStatus %x\n", pDCTstat->ErrStatus);
2011 printk(BIOS_DEBUG, "StitchMemory: ErrCode %x\n", pDCTstat->ErrCode);
2012 printk(BIOS_DEBUG, "StitchMemory: Done\n\n");
2013}
2014
2015static u16 Get_Fk_D(u8 k)
2016{
2017 return Table_F_k[k]; /* FIXME: k or k<<1 ? */
2018}
2019
2020static u8 DIMMPresence_D(struct MCTStatStruc *pMCTstat,
2021 struct DCTStatStruc *pDCTstat)
2022{
2023 /* Check DIMMs present, verify checksum, flag SDRAM type,
2024 * build population indicator bitmaps, and preload bus loading
2025 * of DIMMs into DCTStatStruc.
2026 * MAAload=number of devices on the "A" bus.
2027 * MABload=number of devices on the "B" bus.
2028 * MAAdimms=number of DIMMs on the "A" bus slots.
2029 * MABdimms=number of DIMMs on the "B" bus slots.
2030 * DATAAload=number of ranks on the "A" bus slots.
2031 * DATABload=number of ranks on the "B" bus slots.
2032 */
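	/* Example of the loading bookkeeping (population assumed for
	 * illustration): a single dual-rank x8 DIMM in an "A"-bus slot
	 * counts as MAAdimms=1, DATAAload=2 (two ranks) and MAAload=16
	 * (x8 -> address-bus load of 8, doubled for the second rank),
	 * per the accumulation loop below.
	 */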
2033 u16 i, j;
2034 u8 smbaddr;
2035 u8 SPDCtrl;
2036 u16 RegDIMMPresent, MaxDimms;
2037 u8 devwidth;
2038 u16 DimmSlots;
2039 u8 byte = 0, bytex;
2040
2041 /* preload data structure with addrs */
2042 mctGet_DIMMAddr(pDCTstat, pDCTstat->Node_ID);
2043
2044 DimmSlots = MaxDimms = mctGet_NVbits(NV_MAX_DIMMS);
2045
2046 SPDCtrl = mctGet_NVbits(NV_SPDCHK_RESTRT);
2047
2048 RegDIMMPresent = 0;
2049 pDCTstat->DimmQRPresent = 0;
2050
Kerry She108d30b2010-08-30 07:24:13 +00002051 for (i = 0; i < MAX_DIMMS_SUPPORTED; i++) {
Zheng Baoeb75f652010-04-23 17:32:48 +00002052 if (i >= MaxDimms)
2053 break;
2054
2055 if ((pDCTstat->DimmQRPresent & (1 << i)) || (i < DimmSlots)) {
2056 int status;
2057 smbaddr = Get_DIMMAddress_D(pDCTstat, i);
2058 status = mctRead_SPD(smbaddr, SPD_ByteUse);
2059 if (status >= 0) { /* SPD access is ok */
2060 pDCTstat->DIMMPresent |= 1 << i;
2061 if (crcCheck(smbaddr)) { /* CRC is OK */
2062 byte = mctRead_SPD(smbaddr, SPD_TYPE);
2063 if (byte == JED_DDR3SDRAM) {
2064 /*Dimm is 'Present'*/
2065 pDCTstat->DIMMValid |= 1 << i;
2066 }
2067 } else {
2068 pDCTstat->DIMMSPDCSE = 1 << i;
2069 if (SPDCtrl == 0) {
2070 pDCTstat->ErrStatus |= 1 << SB_DIMMChkSum;
2071 pDCTstat->ErrCode = SC_StopError;
2072 } else {
2073 /*if NV_SPDCHK_RESTRT is set to 1, ignore faulty SPD checksum*/
2074 pDCTstat->ErrStatus |= 1<<SB_DIMMChkSum;
2075 byte = mctRead_SPD(smbaddr, SPD_TYPE);
2076 if (byte == JED_DDR3SDRAM)
2077 pDCTstat->DIMMValid |= 1 << i;
2078 }
2079 }
2080 /* Check module type */
2081 byte = mctRead_SPD(smbaddr, SPD_DIMMTYPE) & 0x7;
2082 if (byte == JED_RDIMM || byte == JED_MiniRDIMM)
2083 RegDIMMPresent |= 1 << i;
2084 /* Check ECC capable */
2085 byte = mctRead_SPD(smbaddr, SPD_BusWidth);
2086 if (byte & JED_ECC) {
2087 /* DIMM is ECC capable */
2088 pDCTstat->DimmECCPresent |= 1 << i;
2089 }
2090 /* Check if x4 device */
2091 devwidth = mctRead_SPD(smbaddr, SPD_Organization) & 0x7; /* 0:x4,1:x8,2:x16 */
2092 if (devwidth == 0) {
2093	/* DIMM is made with x4 drams */
2094 pDCTstat->Dimmx4Present |= 1 << i;
2095 } else if (devwidth == 1) {
2096 pDCTstat->Dimmx8Present |= 1 << i;
2097 } else if (devwidth == 2) {
2098 pDCTstat->Dimmx16Present |= 1 << i;
2099 }
2100
2101 byte = (mctRead_SPD(smbaddr, SPD_Organization) >> 3);
2102 byte &= 7;
2103	if (byte == 3) { /* 4 ranks */
2104	/* if any DIMMs are QR, we have to make two passes through DIMMs */
2105	if (pDCTstat->DimmQRPresent == 0) {
2106	MaxDimms <<= 1;
2107	}
2108	if (i < DimmSlots) {
2109	pDCTstat->DimmQRPresent |= (1 << i) | (1 << (i+4));
2110	} else {
2111	pDCTstat->MAdimms[i & 1]--;
2112	}
2113	byte = 1; /* upper two ranks of a QR DIMM are counted on another DIMM number iteration */
2114 } else if (byte == 1) { /* 2ranks */
2115 pDCTstat->DimmDRPresent |= 1 << i;
2116 }
2117 bytex = devwidth;
2118 if (devwidth == 0)
2119 bytex = 16;
2120 else if (devwidth == 1)
2121 bytex = 8;
2122 else if (devwidth == 2)
2123 bytex = 4;
2124
2125	byte++; /* encoded value + 1 = rank count */
2126 if (byte == 2)
2127 bytex <<= 1; /*double Addr bus load value for dual rank DIMMs*/
2128
2129 j = i & (1<<0);
2130 pDCTstat->DATAload[j] += byte; /*number of ranks on DATA bus*/
2131 pDCTstat->MAload[j] += bytex; /*number of devices on CMD/ADDR bus*/
2132 pDCTstat->MAdimms[j]++; /*number of DIMMs on A bus */
2133
2134 /* check address mirror support for unbuffered dimm */
2135 /* check number of registers on a dimm for registered dimm */
2136 byte = mctRead_SPD(smbaddr, SPD_AddressMirror);
2137 if (RegDIMMPresent & (1 << i)) {
2138 if ((byte & 3) > 1)
2139 pDCTstat->MirrPresU_NumRegR |= 1 << i;
2140 } else {
2141 if ((byte & 1) == 1)
2142 pDCTstat->MirrPresU_NumRegR |= 1 << i;
2143 }
2144	/* Get byte62: Reference Raw Card information. We don't need it now. */
Zheng Bao9fae99f2010-08-31 06:10:54 +00002145 /* byte = mctRead_SPD(smbaddr, SPD_RefRawCard); */
2146 /* Get Byte65/66 for register manufacture ID code */
2147 if ((0x97 == mctRead_SPD(smbaddr, SPD_RegManufactureID_H)) &&
2148 (0x80 == mctRead_SPD(smbaddr, SPD_RegManufactureID_L))) {
2149 if (0x16 == mctRead_SPD(smbaddr, SPD_RegManRevID))
2150 pDCTstat->RegMan2Present |= 1 << i;
2151 else
2152 pDCTstat->RegMan1Present |= 1 << i;
2153 }
Zheng Baoeb75f652010-04-23 17:32:48 +00002154	/* Get Control word values for RC3. We don't need it. */
2155 byte = mctRead_SPD(smbaddr, 70);
2156	pDCTstat->CtrlWrd3 |= (byte >> 4) << (i << 2); /* RC3 = SPD byte 70 [7:4] */
2157 /* Get Control word values for RC4, and RC5 */
2158 byte = mctRead_SPD(smbaddr, 71);
2159	pDCTstat->CtrlWrd4 |= (byte & 0x0F) << (i << 2); /* RC4 = SPD byte 71 [3:0] */
2160 pDCTstat->CtrlWrd5 |= (byte >> 4) << (i << 2); /* RC5 = SPD byte 71 [7:4] */
2161 }
2162 }
2163 }
2164 printk(BIOS_DEBUG, "\t DIMMPresence: DIMMValid=%x\n", pDCTstat->DIMMValid);
2165 printk(BIOS_DEBUG, "\t DIMMPresence: DIMMPresent=%x\n", pDCTstat->DIMMPresent);
2166 printk(BIOS_DEBUG, "\t DIMMPresence: RegDIMMPresent=%x\n", RegDIMMPresent);
2167 printk(BIOS_DEBUG, "\t DIMMPresence: DimmECCPresent=%x\n", pDCTstat->DimmECCPresent);
2168 printk(BIOS_DEBUG, "\t DIMMPresence: DimmPARPresent=%x\n", pDCTstat->DimmPARPresent);
2169 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx4Present=%x\n", pDCTstat->Dimmx4Present);
2170 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx8Present=%x\n", pDCTstat->Dimmx8Present);
2171 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx16Present=%x\n", pDCTstat->Dimmx16Present);
2172 printk(BIOS_DEBUG, "\t DIMMPresence: DimmPlPresent=%x\n", pDCTstat->DimmPlPresent);
2173 printk(BIOS_DEBUG, "\t DIMMPresence: DimmDRPresent=%x\n", pDCTstat->DimmDRPresent);
2174 printk(BIOS_DEBUG, "\t DIMMPresence: DimmQRPresent=%x\n", pDCTstat->DimmQRPresent);
2175 printk(BIOS_DEBUG, "\t DIMMPresence: DATAload[0]=%x\n", pDCTstat->DATAload[0]);
2176 printk(BIOS_DEBUG, "\t DIMMPresence: MAload[0]=%x\n", pDCTstat->MAload[0]);
2177 printk(BIOS_DEBUG, "\t DIMMPresence: MAdimms[0]=%x\n", pDCTstat->MAdimms[0]);
2178 printk(BIOS_DEBUG, "\t DIMMPresence: DATAload[1]=%x\n", pDCTstat->DATAload[1]);
2179 printk(BIOS_DEBUG, "\t DIMMPresence: MAload[1]=%x\n", pDCTstat->MAload[1]);
2180 printk(BIOS_DEBUG, "\t DIMMPresence: MAdimms[1]=%x\n", pDCTstat->MAdimms[1]);
2181
2182 if (pDCTstat->DIMMValid != 0) { /* If any DIMMs are present...*/
2183 if (RegDIMMPresent != 0) {
2184	if ((RegDIMMPresent ^ pDCTstat->DIMMValid) != 0) {
2185	/* module type DIMM mismatch (reg'ed, unbuffered) */
2186	pDCTstat->ErrStatus |= 1<<SB_DimmMismatchM;
2187	pDCTstat->ErrCode = SC_StopError;
2188	} else {
2189	/* all DIMMs are registered */
2190	pDCTstat->Status |= 1<<SB_Registered;
2191	}
2192	}
2193	if (pDCTstat->DimmECCPresent != 0) {
2194	if ((pDCTstat->DimmECCPresent ^ pDCTstat->DIMMValid) == 0) {
2195 /* all DIMMs are ECC capable */
2196 pDCTstat->Status |= 1<<SB_ECCDIMMs;
2197 }
2198 }
2199 if (pDCTstat->DimmPARPresent != 0) {
2200 if ((pDCTstat->DimmPARPresent ^ pDCTstat->DIMMValid) == 0) {
2201 /*all DIMMs are Parity capable */
2202 pDCTstat->Status |= 1<<SB_PARDIMMs;
2203 }
2204 }
2205 } else {
2206 /* no DIMMs present or no DIMMs that qualified. */
2207 pDCTstat->ErrStatus |= 1<<SB_NoDimms;
2208 pDCTstat->ErrCode = SC_StopError;
2209 }
2210
2211 printk(BIOS_DEBUG, "\t DIMMPresence: Status %x\n", pDCTstat->Status);
2212 printk(BIOS_DEBUG, "\t DIMMPresence: ErrStatus %x\n", pDCTstat->ErrStatus);
2213 printk(BIOS_DEBUG, "\t DIMMPresence: ErrCode %x\n", pDCTstat->ErrCode);
2214 printk(BIOS_DEBUG, "\t DIMMPresence: Done\n\n");
2215
2216 mctHookAfterDIMMpre();
2217
2218 return pDCTstat->ErrCode;
2219}
2220
2221static u8 Get_DIMMAddress_D(struct DCTStatStruc *pDCTstat, u8 i)
2222{
2223 u8 *p;
2224
2225 p = pDCTstat->DIMMAddr;
2226 /* mct_BeforeGetDIMMAddress(); */
2227 return p[i];
2228}
2229
2230static void mct_initDCT(struct MCTStatStruc *pMCTstat,
2231 struct DCTStatStruc *pDCTstat)
2232{
2233 u32 val;
2234 u8 err_code;
2235
2236 /* Config. DCT0 for Ganged or unganged mode */
2237 DCTInit_D(pMCTstat, pDCTstat, 0);
2238 if (pDCTstat->ErrCode == SC_FatalErr) {
2239	/* Do nothing on a fatal error; fall through to exitDCTInit below */
2240 } else {
2241 /* Configure DCT1 if unganged and enabled*/
2242 if (!pDCTstat->GangedMode) {
Kerry She108d30b2010-08-30 07:24:13 +00002243 if (pDCTstat->DIMMValidDCT[1] > 0) {
Zheng Baoeb75f652010-04-23 17:32:48 +00002244 err_code = pDCTstat->ErrCode; /* save DCT0 errors */
2245 pDCTstat->ErrCode = 0;
2246 DCTInit_D(pMCTstat, pDCTstat, 1);
2247 if (pDCTstat->ErrCode == 2) /* DCT1 is not Running */
2248 pDCTstat->ErrCode = err_code; /* Using DCT0 Error code to update pDCTstat.ErrCode */
2249 } else {
2250 val = 1 << DisDramInterface;
2251 Set_NB32(pDCTstat->dev_dct, 0x100 + 0x94, val);
2252 }
2253 }
2254 }
2255/* exitDCTInit: */
2256}
2257
2258static void mct_DramInit(struct MCTStatStruc *pMCTstat,
2259 struct DCTStatStruc *pDCTstat, u8 dct)
2260{
2261 mct_BeforeDramInit_Prod_D(pMCTstat, pDCTstat);
2262 mct_DramInit_Sw_D(pMCTstat, pDCTstat, dct);
2263 /* mct_DramInit_Hw_D(pMCTstat, pDCTstat, dct); */
2264}
2265
2266static u8 mct_setMode(struct MCTStatStruc *pMCTstat,
2267 struct DCTStatStruc *pDCTstat)
2268{
2269 u8 byte;
2270 u8 bytex;
2271 u32 val;
2272 u32 reg;
2273
2274 byte = bytex = pDCTstat->DIMMValid;
2275 bytex &= 0x55; /* CHA DIMM pop */
2276 pDCTstat->DIMMValidDCT[0] = bytex;
2277
2278	byte &= 0xAA; /* CHB DIMM pop */
2279 byte >>= 1;
2280 pDCTstat->DIMMValidDCT[1] = byte;
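	/* For illustration: DIMMValid = 0x0F (two DIMMs per channel) splits
	 * into DIMMValidDCT[0] = 0x05 and DIMMValidDCT[1] = 0x05; since the
	 * two channel populations match, ganged (128-bit) mode is a
	 * candidate in the else branch below.
	 */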
2281
2282 if (byte != bytex) {
2283 pDCTstat->ErrStatus &= ~(1 << SB_DimmMismatchO);
2284 } else {
2285 byte = mctGet_NVbits(NV_Unganged);
2286 if (byte)
2287	pDCTstat->ErrStatus |= (1 << SB_DimmMismatchO); /* Set temporarily to avoid enabling ganged mode */
2288
2289 if (!(pDCTstat->ErrStatus & (1 << SB_DimmMismatchO))) {
2290 pDCTstat->GangedMode = 1;
2291 /* valid 128-bit mode population. */
2292 pDCTstat->Status |= 1 << SB_128bitmode;
2293 reg = 0x110;
2294 val = Get_NB32(pDCTstat->dev_dct, reg);
2295 val |= 1 << DctGangEn;
2296 Set_NB32(pDCTstat->dev_dct, reg, val);
2297 }
2298 if (byte) /* NV_Unganged */
2299	pDCTstat->ErrStatus &= ~(1 << SB_DimmMismatchO); /* Clear so that there is no DIMM mismatch error */
2300 }
2301 return pDCTstat->ErrCode;
2302}
2303
2304u32 Get_NB32(u32 dev, u32 reg)
2305{
2306 return pci_read_config32(dev, reg);
2307}
2308
2309void Set_NB32(u32 dev, u32 reg, u32 val)
2310{
2311 pci_write_config32(dev, reg, val);
2312}
2313
2315u32 Get_NB32_index(u32 dev, u32 index_reg, u32 index)
2316{
2317 u32 dword;
2318
2319 Set_NB32(dev, index_reg, index);
2320 dword = Get_NB32(dev, index_reg+0x4);
2321
2322 return dword;
2323}
2324
2325void Set_NB32_index(u32 dev, u32 index_reg, u32 index, u32 data)
2326{
2327 Set_NB32(dev, index_reg, index);
2328 Set_NB32(dev, index_reg + 0x4, data);
2329}
2330
2331u32 Get_NB32_index_wait(u32 dev, u32 index_reg, u32 index)
2332{
2333	u32 dword;
2334
2337 index &= ~(1 << DctAccessWrite);
2338 Set_NB32(dev, index_reg, index);
2339 do {
2340 dword = Get_NB32(dev, index_reg);
2341 } while (!(dword & (1 << DctAccessDone)));
2342 dword = Get_NB32(dev, index_reg + 0x4);
2343
2344 return dword;
2345}
2346
2347void Set_NB32_index_wait(u32 dev, u32 index_reg, u32 index, u32 data)
2348{
2349 u32 dword;
2350
2352 Set_NB32(dev, index_reg + 0x4, data);
2353 index |= (1 << DctAccessWrite);
2354 Set_NB32(dev, index_reg, index);
2355 do {
2356 dword = Get_NB32(dev, index_reg);
2357 } while (!(dword & (1 << DctAccessDone)));
2358
2359}
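/* Usage sketch (illustrative only, not called from this flow): the two
 * helpers above implement the F2x[1,0]98/9C indexed-access handshake -
 * write the index (setting DctAccessWrite for stores), poll
 * DctAccessDone, then use the data port at offset +4. For example,
 * assuming a valid pDCTstat, DCT0's output driver compensation control
 * (index 0x00, as programmed in mct_PlatformSpec) could be read back as:
 *
 *   u32 odc = Get_NB32_index_wait(pDCTstat->dev_dct, 0x98, 0x00);
 */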
2360
Zheng Bao69436e12011-01-06 02:18:12 +00002361static u8 mct_BeforePlatformSpec(struct MCTStatStruc *pMCTstat,
2362 struct DCTStatStruc *pDCTstat, u8 dct)
2363{
2364 /* mct_checkForCxDxSupport_D */
2365 if (pDCTstat->LogicalCPUID & AMD_DR_GT_Bx) {
2366 /* 1. Write 00000000h to F2x[1,0]9C_xD08E000 */
2367 Set_NB32_index_wait(pDCTstat->dev_dct, 0x98 + dct * 0x100, 0x0D08E000, 0);
2368 /* 2. If DRAM Configuration Register[MemClkFreq] (F2x[1,0]94[2:0]) is
2369 greater than or equal to 011b (DDR-800 and higher),
2370 then write 00000080h to F2x[1,0]9C_xD02E001,
2371 else write 00000090h to F2x[1,0]9C_xD02E001. */
2372 if (pDCTstat->Speed >= 4)
2373 Set_NB32_index_wait(pDCTstat->dev_dct, 0x98 + dct * 0x100, 0xD02E001, 0x80);
2374 else
2375 Set_NB32_index_wait(pDCTstat->dev_dct, 0x98 + dct * 0x100, 0xD02E001, 0x90);
2376 }
2377 return pDCTstat->ErrCode;
2378}
2379
Zheng Baoeb75f652010-04-23 17:32:48 +00002380static u8 mct_PlatformSpec(struct MCTStatStruc *pMCTstat,
2381 struct DCTStatStruc *pDCTstat, u8 dct)
2382{
2383 /* Get platform specific config/timing values from the interface layer
2384 * and program them into DCT.
2385 */
2386
2387 u32 dev = pDCTstat->dev_dct;
2388 u32 index_reg;
2389 u8 i, i_start, i_end;
2390
2391 if (pDCTstat->GangedMode) {
2392 SyncSetting(pDCTstat);
2393 /* mct_SetupSync_D */
2394 i_start = 0;
2395 i_end = 2;
2396 } else {
2397 i_start = dct;
2398 i_end = dct + 1;
2399 }
2400 for (i=i_start; i<i_end; i++) {
2401 index_reg = 0x98 + (i * 0x100);
2402 Set_NB32_index_wait(dev, index_reg, 0x00, pDCTstat->CH_ODC_CTL[i]); /* Channel A Output Driver Compensation Control */
2403	Set_NB32_index_wait(dev, index_reg, 0x04, pDCTstat->CH_ADDR_TMG[i]); /* Channel A Address Timing Control */
2404 }
2405
2406 return pDCTstat->ErrCode;
Zheng Baoeb75f652010-04-23 17:32:48 +00002407}
2408
2409static void mct_SyncDCTsReady(struct DCTStatStruc *pDCTstat)
2410{
2411 u32 dev;
2412 u32 val;
2413
2414 if (pDCTstat->NodePresent) {
2415 dev = pDCTstat->dev_dct;
2416
2417	if ((pDCTstat->DIMMValidDCT[0]) || (pDCTstat->DIMMValidDCT[1])) { /* This Node has dram */
2418 do {
2419 val = Get_NB32(dev, 0x110);
2420 } while (!(val & (1 << DramEnabled)));
2421 }
2422 } /* Node is present */
2423}
2424
2425static void mct_AfterGetCLT(struct MCTStatStruc *pMCTstat,
2426 struct DCTStatStruc *pDCTstat, u8 dct)
2427{
2428 if (!pDCTstat->GangedMode) {
2429	if (dct == 0) {
2430 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[dct];
2431 if (pDCTstat->DIMMValidDCT[dct] == 0)
2432 pDCTstat->ErrCode = SC_StopError;
2433 } else {
2434 pDCTstat->CSPresent = 0;
2435 pDCTstat->CSTestFail = 0;
2436 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[dct];
2437 if (pDCTstat->DIMMValidDCT[dct] == 0)
2438 pDCTstat->ErrCode = SC_StopError;
2439 }
2440 }
2441}
2442
2443static u8 mct_SPDCalcWidth(struct MCTStatStruc *pMCTstat,
2444 struct DCTStatStruc *pDCTstat, u8 dct)
2445{
2446 u8 ret;
2447 u32 val;
2448
2449	if (dct == 0) {
2450 SPDCalcWidth_D(pMCTstat, pDCTstat);
2451 ret = mct_setMode(pMCTstat, pDCTstat);
2452 } else {
2453 ret = pDCTstat->ErrCode;
2454 }
2455
	/* Disable dram interface before DRAM init */
2456	if (pDCTstat->DIMMValidDCT[0] == 0) {
2457 val = Get_NB32(pDCTstat->dev_dct, 0x94);
2458 val |= 1 << DisDramInterface;
2459 Set_NB32(pDCTstat->dev_dct, 0x94, val);
2460 }
2461 if (pDCTstat->DIMMValidDCT[1] == 0) {
2462 val = Get_NB32(pDCTstat->dev_dct, 0x94 + 0x100);
2463 val |= 1 << DisDramInterface;
2464 Set_NB32(pDCTstat->dev_dct, 0x94 + 0x100, val);
2465 }
2466
2467 printk(BIOS_DEBUG, "SPDCalcWidth: Status %x\n", pDCTstat->Status);
2468 printk(BIOS_DEBUG, "SPDCalcWidth: ErrStatus %x\n", pDCTstat->ErrStatus);
2469 printk(BIOS_DEBUG, "SPDCalcWidth: ErrCode %x\n", pDCTstat->ErrCode);
2470 printk(BIOS_DEBUG, "SPDCalcWidth: Done\n");
2472
2473 return ret;
2474}
2475
2476static void mct_AfterStitchMemory(struct MCTStatStruc *pMCTstat,
2477 struct DCTStatStruc *pDCTstat, u8 dct)
2478{
2479 u32 val;
2480 u32 dword;
2481 u32 dev;
2482 u32 reg;
2483 u8 _MemHoleRemap;
2484 u32 DramHoleBase;
2485
2486 _MemHoleRemap = mctGet_NVbits(NV_MemHole);
2487 DramHoleBase = mctGet_NVbits(NV_BottomIO);
2488 DramHoleBase <<= 8;
2489	/* Scale the bottom-IO value from [31:24] to [31:16] so it has
2490	 * a granularity of 128MB, then record the 'effective' bottom of IO.
2491	 */
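	/* Worked example (assuming NV_BottomIO returns 0xE0, i.e. bottom of
	 * IO at 0xE0000000): DramHoleBase = 0xE000 in [31:16] granularity,
	 * and HoleBase = 0xE00000, the same address in the [39:8] format
	 * used for the DCT base/limit registers.
	 */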
2493 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
2494
2495 /* In unganged mode, we must add DCT0 and DCT1 to DCTSysLimit */
2496 if (!pDCTstat->GangedMode) {
2497 dev = pDCTstat->dev_dct;
2498 pDCTstat->NodeSysLimit += pDCTstat->DCTSysLimit;
2499 /* if DCT0 and DCT1 both exist, set DctSelBaseAddr[47:27] to the top of DCT0 */
2500 if (dct == 0) {
2501 if (pDCTstat->DIMMValidDCT[1] > 0) {
2502 dword = pDCTstat->DCTSysLimit + 1;
2503 dword += pDCTstat->NodeSysBase;
2504 dword >>= 8; /* scale [39:8] to [47:27],and to F2x110[31:11] */
2505 if ((dword >= DramHoleBase) && _MemHoleRemap) {
2506 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
2507 val = pMCTstat->HoleBase;
2508 val >>= 16;
2509 val = (((~val) & 0xFF) + 1);
2510 val <<= 8;
2511 dword += val;
2512 }
2513 reg = 0x110;
2514 val = Get_NB32(dev, reg);
2515 val &= 0x7F;
2516 val |= dword;
2517 val |= 3; /* Set F2x110[DctSelHiRngEn], F2x110[DctSelHi] */
2518 Set_NB32(dev, reg, val);
2519
2520 reg = 0x114;
2521 val = dword;
2522 Set_NB32(dev, reg, val);
2523 }
2524 } else {
2525 /* Program the DctSelBaseAddr value to 0
2526 if DCT 0 is disabled */
2527 if (pDCTstat->DIMMValidDCT[0] == 0) {
2528 dword = pDCTstat->NodeSysBase;
2529 dword >>= 8;
2530 if ((dword >= DramHoleBase) && _MemHoleRemap) {
2531 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
2532 val = pMCTstat->HoleBase;
2533 val >>= 8;
2534 val &= ~(0xFFFF);
2535 val |= (((~val) & 0xFFFF) + 1);
2536 dword += val;