/*
 * This file is part of the coreboot project.
 *
 * Copyright 2015 MediaTek Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

#include <arch/barrier.h>
#include <arch/io.h>
#include <assert.h>
#include <console/console.h>
#include <delay.h>
#include <soc/addressmap.h>
#include <soc/dramc_common.h>
#include <soc/dramc_register.h>
#include <soc/dramc_pi_api.h>
#include <soc/emi.h>
#include <soc/mt6391.h>
#include <soc/pll.h>
#include <soc/spm.h>
#include <string.h>
#include <types.h>

struct mem_pll {
	u8 delay;
	u8 phase;
	u8 done;
};

inline u8 is_dual_rank(u32 channel,
		const struct mt8173_sdram_params *sdram_params)
{
	/* rank count is taken from EMI_CONA[17] (cha) and EMI_CONA[16] (chb) */
	return (sdram_params->emi_set.cona & (1 << (17 - channel))) ? 1 : 0;
}

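/*
 * Pre-initialize the DDRPHY of one channel: set the DQ/DQS/DQM phase
 * selectors, hand PLL register control to the CPU, ungate the memory
 * clocks and select the internal clock path before MEMPLL setup.
 */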
static void mem_pll_pre_init(u32 channel)
{
	write32(&ch[channel].ddrphy_regs->lpddr2_3, 0x1 << 29 | 0x1 << 25 |
		0xf << 16 | 0xffff);

	write32(&ch[channel].ddrphy_regs->lpddr2_4, 0x1 << 29 | 0x1 << 25 |
		0xf << 16 | 0xffff);

	/* adjust DQS/DQM phase to get best margin */
	write32(&ch[channel].ddrphy_regs->selph12, 0x1 << 28 | 0xf << 20 |
		0x1 << 12 | 0xf << 4);
	/* adjust DQ phase to get best margin */
	write32(&ch[channel].ddrphy_regs->selph13, 0xffffffff << 0);
	write32(&ch[channel].ddrphy_regs->selph14, 0xffffffff << 0);

	/* fix OCV effect */
	write32(&ch[channel].ddrphy_regs->selph15, 0x1 << 4 | 0xf << 0);

	/* pll register control by CPU and select internal pipe path */
	write32(&ch[channel].ddrphy_regs->peri[2], 0x11 << 24 | 0x11 << 16 |
		0xff << 8 | 0x11 << 0);
	write32(&ch[channel].ddrphy_regs->peri[3], 0x11 << 24 | 0x51 << 16 |
		0x11 << 8 | 0x11 << 0);

	/* enable clock sync and spm control clock */
	write32(&ch[channel].ddrphy_regs->mempll_divider, 0x9 << 24 |
		0x1 << 15 | 0x2 << 4 | 0x1 << 1 | 0x1 << 0);
	/* pll2 enable from CPU control */
	write32(&ch[channel].ddrphy_regs->mempll05_divider, 0x1 << 27);

	/* enable chip top memory clock */
	setbits_le32(&ch[channel].ddrphy_regs->mempll_divider, 0x1 << 4);

	/* disable C/A and DQ M_CK clock gating */
	clrbits_le32(&ch[channel].ddrphy_regs->ddrphy_cg_ctrl, 0x1 << 2 |
		0x1 << 1);

	/* enable spm control clock */
	clrbits_le32(&ch[channel].ddrphy_regs->mempll_divider, 0x1 << 15 |
		0x1 << 0);
	/* enable dramc 2X mode */
	setbits_le32(&ch[channel].ao_regs->ddr2ctl, 1 << 0);

	/* select internal clock path */
	write32(&ch[channel].ddrphy_regs->peri[0], 0x21 << 24 | 0x27 << 16 |
		0x1b << 8 | 0x3 << 0);

	write32(&ch[channel].ddrphy_regs->peri[1], 0x50 << 24 | 0x96 << 16 |
		0x6 << 8 | 0x1e << 0);

	/* trigger to make memory clock correct phase */
	setbits_le32(&ch[channel].ddrphy_regs->mempll_divider, 0x1 << 24 |
		0x1 << 7);

	if (channel == CHANNEL_A) {
		/* select memory clock sync for channel A (internal source) */
		clrbits_le32(&ch[channel].ddrphy_regs->mempll_divider, 0x1 << 3);
	}
}

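/*
 * Program the MEMPLL2/3/4 parameters of one channel; all three PLLs are
 * configured with the same fbdiv/ir/ic/bp/br/bc pattern values and have
 * their autok (auto calibration) enable/load bits set.
 */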
static void mem_pll_init_set_params(u32 channel)
{
	u32 pattern1, pattern2, pattern3;
	u32 mempll_ic_3_0, mempll_bp_3_0;
	u32 mempll_fbdiv_6_0, mempll_m4pdiv_1_0;
	u32 mempll_br_1_0, mempll_bc_1_0, mempll_ir_3_0;

	mempll_fbdiv_6_0 = 0x7 << 16;
	mempll_br_1_0 = 0x1 << 10;
	mempll_bc_1_0 = 0x0 << 8;
	mempll_ir_3_0 = 0xc << 28;
	mempll_ic_3_0 = 0x6 << 8;
	mempll_bp_3_0 = 0x1 << 12;
	mempll_m4pdiv_1_0 = 0x0 << 28;

	write32(&ch[channel].ddrphy_regs->mempll[14], 0x0);

	write32(&ch[channel].ddrphy_regs->mempll[3], 0x3 << 30 |
		0x1 << 28);
	/* mempll 2 config */
	pattern1 = mempll_ir_3_0 | mempll_fbdiv_6_0 | mempll_ic_3_0;
	pattern2 = mempll_m4pdiv_1_0;
	pattern3 = mempll_bp_3_0 | mempll_br_1_0 | mempll_bc_1_0;

	/* mempll2_autok_en = 1, mempll2_autok_load = 1 */
	write32(&ch[channel].ddrphy_regs->mempll[5], 0x1 << 26 | 0x3 << 24 |
		0x1 << 23 | pattern1);
	write32(&ch[channel].ddrphy_regs->mempll[6], 0x1 << 30 | 0x3 << 26 |
		0x3 << 14 | pattern2);
	write32(&ch[channel].ddrphy_regs->mempll[7], 0x1 << 17 | 0x1 << 0 |
		pattern3);
	/* mempll 4 */
	write32(&ch[channel].ddrphy_regs->mempll[11], 0x1 << 26 | 0x3 << 24 |
		0x1 << 23 | pattern1);
	write32(&ch[channel].ddrphy_regs->mempll[12], 0x1 << 30 | 0x3 << 26 |
		0x3 << 14 | pattern2);
	write32(&ch[channel].ddrphy_regs->mempll[13], 0x1 << 0 | pattern3);

	/* mempll 3 - enable signal tie together */
	write32(&ch[channel].ddrphy_regs->mempll[8], 0x1 << 26 | 0x3 << 24 |
		0x1 << 23 | pattern1);
	write32(&ch[channel].ddrphy_regs->mempll[9], 0x1 << 30 | 0x3 << 26 |
		0x3 << 14 | pattern2);
	write32(&ch[channel].ddrphy_regs->mempll[10], 0x1 << 17 | 0x1 << 0 |
		pattern3);
}

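/*
 * Phase-sync the memory clock of one channel by reprogramming the divider
 * and toggling between the register-controlled and SPM-controlled clock
 * enables, so the divided clock restarts with a clean phase.
 */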
static void mem_pll_init_phase_sync(u32 channel)
{
	write32(&ch[channel].ddrphy_regs->mempll_divider, BIT(27) | BIT(24) |
		BIT(7) | BIT(5) | BIT(4) | BIT(0));
	/* spm control clock enable */
	clrsetbits_le32(&ch[channel].ddrphy_regs->mempll_divider, BIT(0),
			BIT(1));

	clrsetbits_le32(&ch[channel].ddrphy_regs->mempll_divider, BIT(1),
			BIT(0));
}

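/*
 * Apply the current calibration state to one MEMPLL: clear both delay
 * lines initially, then delay either the feedback or the reference clock
 * depending on which one was measured to lead.
 */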
static void pll_phase_adjust(u32 channel, struct mem_pll *mempll, int reg_offs)
{
	switch (mempll->phase) {

	case MEMPLL_INIT:
		/* initial phase: zero out RG_MEPLL(2,3,4)_(REF_DL,FB)_DL */
		clrbits_le32(&ch[channel].ddrphy_regs->mempll[reg_offs],
			     0x1f << MEMPLL_REF_DL_SHIFT |
			     0x1f << MEMPLL_FB_DL_SHIFT);
		break;

	case MEMPLL_REF_LAG:
		/* REF lags FBK, delay FBK */
		clrsetbits_le32(&ch[channel].ddrphy_regs->mempll[reg_offs],
				0x1f << MEMPLL_REF_DL_SHIFT |
				0x1f << MEMPLL_FB_DL_SHIFT,
				mempll->delay << MEMPLL_FB_DL_SHIFT);
		break;

	case MEMPLL_REF_LEAD:
		/* REF leads FBK, delay REF */
		clrsetbits_le32(&ch[channel].ddrphy_regs->mempll[reg_offs],
				0x1f << MEMPLL_REF_DL_SHIFT |
				0x1f << MEMPLL_FB_DL_SHIFT,
				mempll->delay << MEMPLL_REF_DL_SHIFT);
	}
}

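/*
 * Read the jitter meter result for one MEMPLL and update its calibration
 * state: decide whether REF lags or leads FBK, bump the delay, or mark the
 * PLL as done once the one/zero counts are balanced.
 */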
static void pll_phase_check(u32 channel, struct mem_pll *mempll, int idx)
{
	u32 value = read32(&ch[channel].ddrphy_regs->jmeter_pll_st[idx]);
	u16 one_count = (u16)((value >> 16) & 0xffff);
	u16 zero_count = (u16)(value & 0xffff);

	dramc_dbg_msg("PLL %d, phase %d, one_count %d, zero_count %d\n",
		      (idx + 2), mempll->phase, one_count, zero_count);

	switch (mempll->phase) {

	case MEMPLL_INIT:
		if ((one_count - zero_count) > JMETER_COUNT_N) {
			/* REF lags FBK */
			mempll->phase = MEMPLL_REF_LAG;
			mempll->delay++;
		} else if ((zero_count - one_count) > JMETER_COUNT_N) {
			/* REF leads FBK */
			mempll->phase = MEMPLL_REF_LEAD;
			mempll->delay++;
		} else {
			/* in-phase at initial */
			mempll->done = 1;
		}
		break;

	case MEMPLL_REF_LAG:
		if (JMETER_COUNT_N >= (one_count - zero_count)) {
			mempll->done = 1;
		} else {
			mempll->delay++;
		}
		break;

	case MEMPLL_REF_LEAD:
		if (JMETER_COUNT_N >= (zero_count - one_count)) {
			mempll->done = 1;
		} else {
			mempll->delay++;
		}
	}
}

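/*
 * Calibrate the phase of MEMPLL 2/3/4 on one channel: repeatedly adjust
 * the REF/FBK delay lines and measure with the jitter meters until all
 * three PLLs report in-phase, or die if a delay line overflows.
 */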
static void mem_pll_phase_cali(u32 channel)
{
	u32 i;

	struct mem_pll mempll[3] =
	{
		{0, 0, 0},
		{0, 0, 0},
		{0, 0, 0},
	};

	dramc_dbg_msg("[PLL_Phase_Calib] ===== PLL Phase Calibration: ");
	dramc_dbg_msg("CHANNEL %d (0: CHA, 1: CHB) =====\n", channel);

	/* 1. set jitter meter count number to 1024 for mempll 2 3 4 */
	for (i = 0; i < 3; i++)
		clrsetbits_le32(&ch[channel].ddrphy_regs->jmeter[i],
				JMETER_COUNTER_MASK,
				JMETER_COUNT << JMETER_COUNTER_SHIFT);

	while (1) {

		for (i = 0; i < 3; i++) {
			if (!mempll[i].done) {
				pll_phase_adjust(channel, &mempll[i], (i + 2) * 3);
			}
		}

		udelay(20);	/* delay 20us for external loop pll stable */

		/* 2. enable mempll 2 3 4 jitter meter */
		for (i = 0; i < 3; i++)
			setbits_le32(&ch[channel].ddrphy_regs->jmeter[i],
				     JMETER_EN_BIT);

		/* 3. wait for jitter meter complete */
		udelay(JMETER_WAIT_DONE_US);

		/* 4. check jitter meter counter value for mempll 2 3 4 */
		for (i = 0; i < 3; i++) {
			if (!mempll[i].done) {
				pll_phase_check(channel, &mempll[i], i);
			}
		}

		/* 5. disable mempll 2 3 4 jitter meter */
		for (i = 0; i < 3; i++)
			clrbits_le32(&ch[channel].ddrphy_regs->jmeter[i],
				     JMETER_EN_BIT);

		/* 6. all done early break */
		if (mempll[0].done && mempll[1].done && mempll[2].done)
			break;

		/* 7. delay line overflow break */
		for (i = 0; i < 3; i++) {
			if (mempll[i].delay >= 32) {
				die("MEMPLL calibration fail\n");
			}
		}
	}

	dramc_dbg_msg("pll done: ");

	dramc_dbg_msg("%d, %d, %d\n",
		      mempll[0].done, mempll[1].done, mempll[2].done);
	dramc_dbg_msg("pll dl: %d, %d, %d\n",
		      mempll[0].delay, mempll[1].delay, mempll[2].delay);
}

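/*
 * Bring up the MEMPLLs of both channels: DDRPHY pre-init, PLL parameter
 * setup, the power-on sequence (bias, enable, band selection, then
 * re-enable with autok off and the feedback clock switched to the outer
 * loop), an SPM-driven reset, and finally per-channel phase sync and
 * phase calibration before the memory clock mux is switched.
 */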
void mem_pll_init(const struct mt8173_sdram_params *sdram_params)
{
	u32 channel;

	/* udelay waits for PLL to stabilize in this function */
	printk(BIOS_DEBUG, "[PLL] mempll_init and cali\n");

	/* mempll pre_init for two channels */
	for (channel = 0; channel < CHANNEL_NUM; channel++)
		mem_pll_pre_init(channel);

	/* only set once in MPLL */
	mt_mem_pll_config_pre(sdram_params);

	for (channel = 0; channel < CHANNEL_NUM; channel++)
		mem_pll_init_set_params(channel);

	udelay(1);	/* wait after da_mpll_sdm_iso_en goes low */

	/* only set once in MPLL */
	mt_mem_pll_config_post();

	udelay(100);

	for (channel = 0; channel < CHANNEL_NUM; channel++) {

		/* mempll_bias_en */
		write32(&ch[channel].ddrphy_regs->mempll[3], 0xd << 28 |
			0x1 << 6);
		udelay(2);

		/* mempll2_en -> mempll4_en -> mempll3_en */
		setbits_le32(&ch[channel].ddrphy_regs->mempll[5], 1 << 0);
		setbits_le32(&ch[channel].ddrphy_regs->mempll[11], 1 << 0);
		setbits_le32(&ch[channel].ddrphy_regs->mempll[8], 1 << 0);

		udelay(100);

		/* mempll_bias_lpf_en */
		setbits_le32(&ch[channel].ddrphy_regs->mempll[3], 1 << 7);

		udelay(30);

		/* select mempll4 band register */
		setbits_le32(&ch[channel].ddrphy_regs->mempll[4], 1 << 26);
		clrbits_le32(&ch[channel].ddrphy_regs->mempll[4], 1 << 26);

		/* PLL ready */

		/* disable mempll2_en -> mempll4_en -> mempll3_en */
		clrbits_le32(&ch[channel].ddrphy_regs->mempll[5], 1 << 0);
		clrbits_le32(&ch[channel].ddrphy_regs->mempll[11], 1 << 0);
		clrbits_le32(&ch[channel].ddrphy_regs->mempll[8], 1 << 0);

		/* disable autok mempll2_en -> mempll4_en -> mempll3_en */
		clrbits_le32(&ch[channel].ddrphy_regs->mempll[5], 1 << 23);
		clrbits_le32(&ch[channel].ddrphy_regs->mempll[11], 1 << 23);
		clrbits_le32(&ch[channel].ddrphy_regs->mempll[8], 1 << 23);

		udelay(1);

		/* mempll[2->4->3]_fb_mck_sel=1 (switch to outer loop) */
		setbits_le32(&ch[channel].ddrphy_regs->mempll[6], 1 << 25);
		setbits_le32(&ch[channel].ddrphy_regs->mempll[12], 1 << 25);
		setbits_le32(&ch[channel].ddrphy_regs->mempll[9], 1 << 25);

		udelay(1);

		/* enable mempll2_en -> mempll4_en -> mempll3_en */
		setbits_le32(&ch[channel].ddrphy_regs->mempll[5], 1 << 0);
		setbits_le32(&ch[channel].ddrphy_regs->mempll[11], 1 << 0);
		setbits_le32(&ch[channel].ddrphy_regs->mempll[8], 1 << 0);
	}

	/* mempll new power-on */
	write32(&mtk_spm->poweron_config_set, 0x1 << 0 |
		SPM_PROJECT_CODE << 16);
	/* request mempll reset/pdn mode */
	setbits_le32(&mtk_spm->power_on_val0, 0x1 << 27);

	udelay(2);

	/* unrequest mempll reset/pdn mode and wait settle */
	clrbits_le32(&mtk_spm->power_on_val0, 0x1 << 27);

	udelay(31);	/* PLL ready */

	for (channel = 0; channel < CHANNEL_NUM; channel++)
		mem_pll_init_phase_sync(channel);

	udelay(1);

	/* mempll calibration for two channels */
	for (channel = 0; channel < CHANNEL_NUM; channel++)
		mem_pll_phase_cali(channel);

	div2_phase_sync();	/* phase sync for channel B */

	mt_mem_pll_mux();
}

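/*
 * Early DRAMC setup for one channel: program the TX delay selectors (CS,
 * DQS gating, address, write latency) and the 0.5T AC timing from the
 * SDRAM parameters.
 */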
void dramc_pre_init(u32 channel, const struct mt8173_sdram_params *sdram_params)
{
	/* txdly_cs, txdly_cs1 */
	write32(&ch[channel].ao_regs->selph1, 0x0);
	/* txdly_dqsgate, txdly_dqsgate_p1 */
	write32(&ch[channel].ao_regs->selph2, 0x3 << 20 | 0x2 << 12);
	/* txdly_ra* */
	write32(&ch[channel].ao_regs->selph3, 0x0);
	/* txdly_ra* */
	write32(&ch[channel].ao_regs->selph4, 0x0);

	/* setting of write latency (WL=8) */
	write32(&ch[channel].ao_regs->selph7, 0x3333 << 16 | 0x3333);
	write32(&ch[channel].ao_regs->selph8, 0x3333 << 16 | 0x3333);
	write32(&ch[channel].ao_regs->selph9, 0x3333 << 16 | 0x3333);
	write32(&ch[channel].ao_regs->selph10, 0x5555 << 16 | 0xffff);
	write32(&ch[channel].ao_regs->selph11, 0x55 << 16 | 0xff);

	write32(&ch[channel].ao_regs->selph5, 0x1 << 26 | 0x2 << 22 |
		0x1 << 20 | 0x5 << 16 | 0x5555);

	write32(&ch[channel].ao_regs->selph6_1, 0x4 << 8 | 0x3 << 4 |
		0x2 << 0);

	write32(&ch[channel].ao_regs->ac_time_05t,
		sdram_params->ac_timing.actim05t);
}

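/*
 * Issue one mode register set (MRS) command: program the rank and value,
 * then pulse the special command trigger, holding it for 'dly' microseconds.
 */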
static void mrs_write(int channel, int rank, u32 mrs_value, unsigned int dly)
{
	write32(&ch[channel].ao_regs->mrs, rank << 28 | mrs_value);

	write32(&ch[channel].ao_regs->spcmd, 0x1);
	udelay(dly);
	write32(&ch[channel].ao_regs->spcmd, 0x0);
}

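/*
 * Mode register init sequence for one rank: reset (MR63), ZQ init (MR10),
 * then MR3/MR1/MR2/MR11 with the values from the SDRAM parameter table.
 */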
static void dramc_set_mrs_value(int channel, int rank,
		const struct mt8173_sdram_params *sdram_params)
{
	/* MR63 -> Reset, wait >= 10us if not checking DAI */
	mrs_write(channel, rank, sdram_params->mrs_set.mrs_63, 10);
	/* MR10 -> ZQ Init, tZQINIT >= 1us */
	mrs_write(channel, rank, sdram_params->mrs_set.mrs_10, 1);
	/* MR3 driving strength set to max */
	mrs_write(channel, rank, sdram_params->mrs_set.mrs_3, 1);
	/* MR1 */
	mrs_write(channel, rank, sdram_params->mrs_set.mrs_1, 1);
	/* MR2 */
	mrs_write(channel, rank, sdram_params->mrs_set.mrs_2, 1);
	/* MR11 ODT disable */
	mrs_write(channel, rank, sdram_params->mrs_set.mrs_11, 1);
}

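/*
 * Main DRAMC/DDRPHY initialization for one channel: rank configuration,
 * driving strength, DQS gating delays, AC timing from the SDRAM parameters,
 * the mode register init sequence for each rank, and the default per-bit
 * RX DQ/DQS input delays from the calibration parameters.
 */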
void dramc_init(u32 channel, const struct mt8173_sdram_params *sdram_params)
{
	u32 bit, dual_rank_set;

	const struct mt8173_calib_params *calib_params;

	dual_rank_set = is_dual_rank(channel, sdram_params);
	calib_params = &sdram_params->calib_params;

	write32(&ch[channel].ddrphy_regs->peri[2], 0x1 << 12 |
		0x1 << 4);

	write32(&ch[channel].ddrphy_regs->peri[3], 0x0);

	write32(&ch[channel].ao_regs->test2_4,
		sdram_params->ac_timing.test2_4);

	write32(&ch[channel].ao_regs->clk1delay, 0x1 << 23 |
		0x1 << 22 | 0x1 << 21);

	/* rank config */
	assert((sdram_params->ac_timing.rkcfg & 0x1) == dual_rank_set);
	write32(&ch[channel].ao_regs->rkcfg,
		sdram_params->ac_timing.rkcfg);

	/* pimux */
	write32(&ch[channel].ao_regs->mckdly, 0x1 << 30 |
		0x1 << 20 | 0x1 << 4);

	write32(&ch[channel].ddrphy_regs->mckdly, 0x1 << 8);

	write32(&ch[channel].ao_regs->padctl4, 0x1 << 0);

	/* tCKEH/tCKEL extend 1T */
	write32(&ch[channel].ao_regs->dummy, 0x1 << 31 |
		0x3 << 10 | 0x1 << 4);

	/* driving control */
	write32(&ch[channel].ao_regs->iodrv6, DEFAULT_DRIVING |
		DRIVING_DS2_0 << 20 | DRIVING_DS2_0 << 4);

	write32(&ch[channel].ddrphy_regs->drvctl1, DEFAULT_DRIVING |
		DRIVING_DS2_0 << 20);

	write32(&ch[channel].ao_regs->drvctl1, DEFAULT_DRIVING |
		DRIVING_DS2_0 << 4);

	/* enable dqs signal output */
	write32(&ch[channel].ddrphy_regs->ioctl, 0x0);

	/* rank 0 dqs gating delay */
	write32(&ch[channel].ao_regs->dqsien[0], 0x40 << 24 |
		0x40 << 16 | 0x40 << 8 | 0x40 << 0);

	write32(&ch[channel].ao_regs->dqsctl1, 0x1 << 28 |
		0x5 << 24);

	write32(&ch[channel].ao_regs->dqsctl2, 0x5 << 0);
	write32(&ch[channel].ao_regs->phyctl1, 0x1 << 25);
	write32(&ch[channel].ao_regs->gddr3ctl1, 0x1 << 24);
	write32(&ch[channel].ddrphy_regs->gddr3ctl1, 0x1 << 28);
	write32(&ch[channel].ao_regs->arbctl0, 0x80 << 0);

	/* enable clock pad 0 */
	write32(&ch[channel].ao_regs->clkctl, 0x1 << 28);

	udelay(1);

	write32(&ch[channel].ao_regs->conf1,
		sdram_params->ac_timing.conf1);

	/* bit 17,18 would bypass some dummy path */
	write32(&ch[channel].ddrphy_regs->dqsgctl, 0x1 << 31 |
		0x1 << 30 | 0x1 << 17 | 0x1 << 18 |
		0x1 << 4 | 0x1 << 0);

	write32(&ch[channel].ao_regs->dqscal0, 0x0);
	write32(&ch[channel].ddrphy_regs->dqscal0, 0x0);

	write32(&ch[channel].ao_regs->actim0,
		sdram_params->ac_timing.actim);

	write32(&ch[channel].ao_regs->misctl0,
		sdram_params->ac_timing.misctl0);
	write32(&ch[channel].ddrphy_regs->misctl0,
		sdram_params->ac_timing.misctl0);

	write32(&ch[channel].ao_regs->perfctl0, 0x1 << 20);

	write32(&ch[channel].ao_regs->ddr2ctl,
		sdram_params->ac_timing.ddr2ctl);
	write32(&ch[channel].ddrphy_regs->ddr2ctl,
		sdram_params->ac_timing.ddr2ctl);

	write32(&ch[channel].ao_regs->misc, 0xb << 8 |
		0x1 << 7 | 0x1 << 6 | 0x1 << 5);

	write32(&ch[channel].ao_regs->dllconf, 0xf << 28 |
		0x1 << 24);

	write32(&ch[channel].ao_regs->actim1,
		sdram_params->ac_timing.actim1);

	write32(&ch[channel].ddrphy_regs->dqsisel, 0x0);

	/* disable ODT before ZQ calibration */
	write32(&ch[channel].ao_regs->wodt, 0x1 << 0);

	write32(&ch[channel].ao_regs->padctl4, 0x1 << 2 |
		0x1 << 0);

	udelay(200);	/* tINIT3 > 200us */

	write32(&ch[channel].ao_regs->gddr3ctl1, 0x1 << 24 |
		0x1 << 20);

	write32(&ch[channel].ddrphy_regs->gddr3ctl1, 0x1 << 28);

	/* set mode register value */
	dramc_set_mrs_value(channel, 0, sdram_params);

	if (dual_rank_set)
		dramc_set_mrs_value(channel, 1, sdram_params);

	write32(&ch[channel].ao_regs->gddr3ctl1,
		sdram_params->ac_timing.gddr3ctl1);
	write32(&ch[channel].ddrphy_regs->gddr3ctl1,
		sdram_params->ac_timing.gddr3ctl1);

	write32(&ch[channel].ao_regs->dramc_pd_ctrl,
		sdram_params->ac_timing.pd_ctrl);

	write32(&ch[channel].ao_regs->padctl4, 0x1 << 0);
	write32(&ch[channel].ao_regs->perfctl0, 0x1 << 20 | 0x1 << 0);
	write32(&ch[channel].ao_regs->zqcs, 0xa << 8 | 0x56 << 0);
	write32(&ch[channel].ddrphy_regs->padctl1, 0x0);

	write32(&ch[channel].ao_regs->test2_3,
		sdram_params->ac_timing.test2_3);

	write32(&ch[channel].ao_regs->conf2,
		sdram_params->ac_timing.conf2);

	write32(&ch[channel].ddrphy_regs->padctl2, 0x0);

	/* DISABLE_DRVREF */
	write32(&ch[channel].ao_regs->ocdk, 0x0);
	write32(&ch[channel].ddrphy_regs->ocdk, 0x0);

	write32(&ch[channel].ao_regs->r1deldly, 0x12 << 24 |
		0x12 << 16 | 0x12 << 8 | 0x12 << 0);

	write32(&ch[channel].ao_regs->padctl7, 0x0);

	/* CLKTDN, DS0TDN, DS1TDN, DS2TDN, DS3TDN */
	setbits_le32(&ch[channel].ddrphy_regs->tdsel[2], 0x1 << 31 |
		0x1 << 29 | 0x1 << 27 | 0x1 << 25 | 0x1 << 1);
	/* DISABLE_PERBANK_REFRESH */
	clrbits_le32(&ch[channel].ao_regs->rkcfg, 0x1 << 7);

	/* clear R_DMREFTHD to reduce MR4 wait refresh queue time */
	clrbits_le32(&ch[channel].ao_regs->conf2, 0x7 << 24);

	/* duty default value */
	write32(&ch[channel].ddrphy_regs->phyclkduty, 0x1 << 28 |
		0x1 << 16);

	if (!dual_rank_set) {
		/* single rank, CKE1 always off */
		setbits_le32(&ch[channel].ao_regs->gddr3ctl1, 0x1 << 21);
	}

	/* default dqs rx perbit input delay */
	write32(&ch[channel].ao_regs->r0deldly,
		calib_params->rx_dqs_dly[channel]);

	write32(&ch[channel].ao_regs->r1deldly,
		calib_params->rx_dqs_dly[channel]);

	for (bit = 0; bit < DQS_BIT_NUMBER; bit++)
		write32(&ch[channel].ao_regs->dqidly[bit],
			calib_params->rx_dq_dly[channel][bit]);
}

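/*
 * Phase sync for channel B: briefly gate its memory clock enable so the
 * divided clock restarts with a known phase.
 */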
void div2_phase_sync(void)
{
	clrbits_le32(&ch[CHANNEL_B].ddrphy_regs->mempll_divider,
		     1 << MEMCLKENB_SHIFT);
	udelay(1);

	setbits_le32(&ch[CHANNEL_B].ddrphy_regs->mempll_divider,
		     1 << MEMCLKENB_SHIFT);
}

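/*
 * Reset the DDRPHY and the DRAMC read data counter of one channel by
 * asserting and then releasing the corresponding reset bits.
 */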
void dramc_phy_reset(u32 channel)
{
	/* reset phy */
	setbits_le32(&ch[channel].ddrphy_regs->phyctl1,
		     1 << PHYCTL1_PHYRST_SHIFT);

	/* read data counter reset */
	setbits_le32(&ch[channel].ao_regs->gddr3ctl1,
		     1 << GDDR3CTL1_RDATRST_SHIFT);

	udelay(1);	/* only ~1ns is needed; udelay(1) is the shortest delay available */

	clrbits_le32(&ch[channel].ao_regs->gddr3ctl1,
		     1 << GDDR3CTL1_RDATRST_SHIFT);

	clrbits_le32(&ch[channel].ddrphy_regs->phyctl1,
		     1 << PHYCTL1_PHYRST_SHIFT);
}

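/*
 * Runtime (post-init) settings for one channel: enable hardware gating,
 * tCKE extension, DDRPHY clock gating and the perfctl0 features, then set
 * up ZQ calibration (ZQCS) differently for dual- vs. single-channel mode.
 */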
void dramc_runtime_config(u32 channel,
		const struct mt8173_sdram_params *sdram_params)
{
	setbits_le32(&ch[channel].ddrphy_regs->dqsgctl,
		     BIT(17) | BIT(18));

	/* enable hw gating */
	setbits_le32(&ch[channel].ao_regs->dqscal0,
		     1 << DQSCAL0_STBCALEN_SHIFT);

	/* at data rates of 1600Mbps and above, tCKE should be > 7 clk */
	setbits_le32(&ch[channel].ao_regs->dummy, 0x1 << 4);

	if (sdram_params->dram_freq * 2 < 1600 * MHz)
		die("set tCKE error in runtime config");

	/* DDRPHY C/A and DQ M_CK clock gating enable */
	setbits_le32(&ch[channel].ddrphy_regs->ddrphy_cg_ctrl, 0x1 << 2 |
		0x1 << 1);

	setbits_le32(&ch[channel].ao_regs->perfctl0, BIT(19) | BIT(14) |
		BIT(11) | BIT(10) | BIT(9) | BIT(8) | BIT(4) | BIT(0));
	/* ZQCS_ENABLE */
	if (sdram_params->emi_set.cona & 0x1) {
		/* dual channel, clear ZQCSCNT */
		clrbits_le32(&ch[channel].ao_regs->spcmd, 0xff << 16);
		/* set ZQCSMASK for different channels */
		if (channel == CHANNEL_A) {
			clrbits_le32(&ch[channel].ao_regs->perfctl0, 0x1 << 24);
		} else {
			setbits_le32(&ch[channel].ao_regs->perfctl0, 0x1 << 24);
		}
		/* enable ZQCSDUAL */
		setbits_le32(&ch[channel].ao_regs->perfctl0, 0x1 << 25);
	} else {
		/* single channel, set ZQCSCNT */
		setbits_le32(&ch[channel].ao_regs->spcmd, 0x8 << 16);
	}
}

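/* Hand MEMPLL and memory clock control from CPU registers over to the SPM. */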
void transfer_to_spm_control(void)
{
	u32 msk;

	msk = BIT(7) | BIT(11) | BIT(15);
	clrbits_le32(&mtk_apmixed->ap_pll_con3, msk);

	msk = BIT(0) | BIT(4) | BIT(8);
	clrbits_le32(&ch[CHANNEL_A].ddrphy_regs->peri[3], msk);

	msk = BIT(0) | BIT(8);
	clrbits_le32(&ch[CHANNEL_B].ddrphy_regs->peri[3], msk);

	msk = BIT(0) | BIT(9) | BIT(10) | BIT(11) | BIT(16) | BIT(24);
	clrbits_le32(&ch[CHANNEL_A].ddrphy_regs->peri[2], msk);
	clrbits_le32(&ch[CHANNEL_B].ddrphy_regs->peri[2], msk);
}

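/* Hand MEMPLL and memory clock control back from the SPM to CPU registers. */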
void transfer_to_reg_control(void)
{
	u32 val;

	val = BIT(7) | BIT(11) | BIT(15);
	setbits_le32(&mtk_apmixed->ap_pll_con3, val);

	val = BIT(0) | BIT(4) | BIT(8);
	setbits_le32(&ch[CHANNEL_A].ddrphy_regs->peri[3], val);

	val = BIT(0) | BIT(8);
	write32(&ch[CHANNEL_B].ddrphy_regs->peri[3], val);

	val = BIT(0) | BIT(9) | BIT(10) | BIT(11) | BIT(16) | BIT(24);
	setbits_le32(&ch[CHANNEL_A].ddrphy_regs->peri[2], val);
	setbits_le32(&ch[CHANNEL_B].ddrphy_regs->peri[2], val);
}

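/*
 * Run DRAMC test engine 2 on one channel: program the test pattern
 * (ISI/audio/xtalk), base/offset address and loop count, trigger the
 * write and/or read test, and return the CMP_ERR result (0 = pass).
 */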
u32 dramc_engine2(u32 channel, enum dram_tw_op wr, u32 test2_1, u32 test2_2,
		  u8 testaudpat, u8 log2loopcount)
{
	u32 value;

	if (log2loopcount > 15)
		die("Invalid loopcount of engine2!");

	/* Disable Test Agent1, Test Agent2 write/read */
	clrbits_le32(&ch[channel].ao_regs->conf2, CONF2_TEST1_EN |
		CONF2_TEST2R_EN | CONF2_TEST2W_EN);

	/* 1. set pattern, base address, offset address */
	write32(&ch[channel].nao_regs->test2_1, test2_1);
	write32(&ch[channel].nao_regs->test2_2, test2_2);

	/* 2. select test pattern */
	/*		TESTXTALKPAT | TESTAUDPAT
	 * ISI		      0      |     0
	 * AUD		      0      |     1
	 * XTALK	      1      |     0
	 * UNKNOWN	      1      |     1
	 */
	switch (testaudpat) {
	case XTALK:
		/* TESTAUDPAT = 0 */
		clrbits_le32(&ch[channel].ao_regs->test2_3,
			     TEST2_3_TESTAUDPAT_EN);
		/* TESTXTALKPAT = 1, select xtalk pattern
		 * TESTAUDMODE = 0, read only
		 * TESTAUDBITINV = 0, no bit inversion
		 */
		clrsetbits_le32(&ch[channel].ao_regs->test2_4,
				TEST2_4_TESTAUDBITINV_EN |
				TEST2_4_TESTAUDMODE_EN,
				TEST2_4_TESTXTALKPAT_EN);
		break;
	case AUDIO:
		/* TESTAUDPAT = 1 */
		setbits_le32(&ch[channel].ao_regs->test2_3,
			     TEST2_3_TESTAUDPAT_EN);
		/* TESTXTALKPAT = 0
		 * TESTAUDINIT = 0x11
		 * TESTAUDINC = 0x0d
		 * TESTAUDBITINV = 1
		 * TESTAUDMODE = 1
		 */
		clrsetbits_le32(&ch[channel].ao_regs->test2_4,
				TEST2_4_TESTXTALKPAT_EN |
				TEST2_4_TESTAUDINIT_MASK |
				TEST2_4_TESTAUDINC_MASK,
				TEST2_4_TESTAUDMODE_EN |
				TEST2_4_TESTAUDBITINV_EN |
				0x11 << TEST2_4_TESTAUDINIT_SHIFT |
				0xd << TEST2_4_TESTAUDINC_SHIFT);

		break;
	case ISI:
		/* TESTAUDPAT = 0 */
		clrbits_le32(&ch[channel].ao_regs->test2_3,
			     TEST2_3_TESTAUDPAT_EN);
		/* TESTXTALKPAT = 0 */
		clrbits_le32(&ch[channel].ao_regs->test2_4,
			     TEST2_4_TESTXTALKPAT_EN);
	}

	/* 3. set loop number */
	clrsetbits_le32(&ch[channel].ao_regs->test2_3, TEST2_3_TESTCNT_MASK,
			log2loopcount << TEST2_3_TESTCNT_SHIFT);

	/* 4. enable read/write test */
	if (wr == TE_OP_READ_CHECK) {
		if ((testaudpat == 1) || (testaudpat == 2)) {
			/* if audio pattern, enable read only */
			/* (disable write after read), */
			/* AUDMODE = 0x48[15] = 0 */
			clrbits_le32(&ch[channel].ao_regs->test2_4,
				     TEST2_4_TESTAUDMODE_EN);
		}

		/* enable read, 0x008[30:30] */
		setbits_le32(&ch[channel].ao_regs->conf2, CONF2_TEST2R_EN);
	} else if (wr == TE_OP_WRITE_READ_CHECK) {
		/* enable write, 0x008[31:31] */
		setbits_le32(&ch[channel].ao_regs->conf2, CONF2_TEST2W_EN);

		/* check "read data compare ready" bit */
		do {
			value = read32(&ch[channel].nao_regs->testrpt);
		} while ((value & (1 << TESTRPT_DM_CMP_CPT_SHIFT)) == 0);

		/* Disable Test Agent2 write and enable Test Agent2 read */
		clrbits_le32(&ch[channel].ao_regs->conf2, CONF2_TEST2W_EN);
		setbits_le32(&ch[channel].ao_regs->conf2, CONF2_TEST2R_EN);
	}

	/* 5. check "read data compare ready" bit */
	do {
		value = read32(&ch[channel].nao_regs->testrpt);
	} while ((value & (1 << TESTRPT_DM_CMP_CPT_SHIFT)) == 0);

	/* delay 10ns after ready check from DE suggestion (1us here) */
	udelay(1);

	/* read CMP_ERR result */
	value = read32(&ch[channel].nao_regs->cmp_err);

	/* 6. disable read */
	clrbits_le32(&ch[channel].ao_regs->conf2, CONF2_TEST2R_EN);

	/* return CMP_ERR result, pass: 0, failure: otherwise */
	return value;
}