/* SPDX-License-Identifier: GPL-2.0-only */

#include <device/mmio.h>
#include <assert.h>
#include <console/console.h>
#include <delay.h>
#include <soc/addressmap.h>
#include <soc/dramc_common.h>
#include <soc/dramc_register.h>
#include <soc/dramc_pi_api.h>
#include <soc/dramc_soc.h>
#include <soc/emi.h>
#include <soc/mt6391.h>
#include <soc/pll.h>
#include <soc/spm.h>
#include <types.h>

struct mem_pll {
	u8 delay;
	u8 phase;
	u8 done;
};

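/*
 * Rank detection helper. Per the comment inside, EMI_CONA bit 17 encodes
 * the rank count for channel A and bit 16 for channel B, so e.g. a cona
 * value with bit 17 set makes is_dual_rank(CHANNEL_A, params) return 1.
 */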
inline u8 is_dual_rank(u32 channel,
		const struct mt8173_sdram_params *sdram_params)
{
	/* judge ranks from EMI_CONA[17] (cha) and EMI_CONA[16] (chb) */
	return (sdram_params->emi_set.cona & (1 << (17 - channel))) ? 1 : 0;
}

static void mem_pll_pre_init(u32 channel)
{
	write32(&ch[channel].ddrphy_regs->lpddr2_3, 0x1 << 29 | 0x1 << 25 |
		0xf << 16 | 0xffff);

	write32(&ch[channel].ddrphy_regs->lpddr2_4, 0x1 << 29 | 0x1 << 25 |
		0xf << 16 | 0xffff);

	/* adjust DQS/DQM phase to get best margin */
	write32(&ch[channel].ddrphy_regs->selph12, 0x1 << 28 | 0xf << 20 |
		0x1 << 12 | 0xf << 4);
	/* adjust DQ phase to get best margin */
	write32(&ch[channel].ddrphy_regs->selph13, 0xffffffff << 0);
	write32(&ch[channel].ddrphy_regs->selph14, 0xffffffff << 0);

	/* fix OCV effect */
	write32(&ch[channel].ddrphy_regs->selph15, 0x1 << 4 | 0xf << 0);

	/* pll register control by CPU and select internal pipe path */
	write32(&ch[channel].ddrphy_regs->peri[2], 0x11 << 24 | 0x11 << 16 |
		0xff << 8 | 0x11 << 0);
	write32(&ch[channel].ddrphy_regs->peri[3], 0x11 << 24 | 0x51 << 16 |
		0x11 << 8 | 0x11 << 0);

	/* enable clock sync and spm control clock */
	write32(&ch[channel].ddrphy_regs->mempll_divider, 0x9 << 24 |
		0x1 << 15 |
		0x2 << 4 |
		0x1 << 1 |
		0x1 << 0);
	/* pll2 enable from CPU control */
	write32(&ch[channel].ddrphy_regs->mempll05_divider, 0x1 << 27);

	/* enable chip top memory clock */
	setbits32(&ch[channel].ddrphy_regs->mempll_divider, 0x1 << 4);

	/* disable C/A and DQ M_CK clock gating */
	clrbits32(&ch[channel].ddrphy_regs->ddrphy_cg_ctrl, 0x1 << 2 |
		0x1 << 1);

	/* enable spm control clock */
	clrbits32(&ch[channel].ddrphy_regs->mempll_divider, 0x1 << 15 |
		0x1 << 0);
	/* enable dramc 2X mode */
	setbits32(&ch[channel].ao_regs->ddr2ctl, 1 << 0);

	/* select internal clock path */
	write32(&ch[channel].ddrphy_regs->peri[0], 0x21 << 24 | 0x27 << 16 |
		0x1b << 8 | 0x3 << 0);

	write32(&ch[channel].ddrphy_regs->peri[1], 0x50 << 24 | 0x96 << 16 |
		0x6 << 8 | 0x1e << 0);

	/* trigger to make memory clock correct phase */
	setbits32(&ch[channel].ddrphy_regs->mempll_divider, 0x1 << 24 |
		0x1 << 7);

	if (channel == CHANNEL_A) {
		/* select memory clock sync for channel A (internal source) */
		clrbits32(&ch[channel].ddrphy_regs->mempll_divider, 0x1 << 3);
	}
}

static void mem_pll_init_set_params(u32 channel)
{
	u32 pattern1, pattern2, pattern3;
	u32 mempll_ic_3_0, mempll_bp_3_0;
	u32 mempll_fbdiv_6_0, mempll_m4pdiv_1_0;
	u32 mempll_br_1_0, mempll_bc_1_0, mempll_ir_3_0;

	mempll_fbdiv_6_0 = 0x7 << 16;
	mempll_br_1_0 = 0x1 << 10;
	mempll_bc_1_0 = 0x0 << 8;
	mempll_ir_3_0 = 0xc << 28;
	mempll_ic_3_0 = 0x6 << 8;
	mempll_bp_3_0 = 0x1 << 12;
	mempll_m4pdiv_1_0 = 0x0 << 28;

	write32(&ch[channel].ddrphy_regs->mempll[14], 0x0);

	write32(&ch[channel].ddrphy_regs->mempll[3], 0x3 << 30 |
		0x1 << 28);
	/* mempll 2 config */
	pattern1 = mempll_ir_3_0 | mempll_fbdiv_6_0 | mempll_ic_3_0;
	pattern2 = mempll_m4pdiv_1_0;
	pattern3 = mempll_bp_3_0 | mempll_br_1_0 | mempll_bc_1_0;

	/* mempll2_autok_en = 1, mempll2_autok_load = 1 */
	write32(&ch[channel].ddrphy_regs->mempll[5], 0x1 << 26 | 0x3 << 24 |
		0x1 << 23 | pattern1);
	write32(&ch[channel].ddrphy_regs->mempll[6], 0x1 << 30 | 0x3 << 26 |
		0x3 << 14 | pattern2);
	write32(&ch[channel].ddrphy_regs->mempll[7], 0x1 << 17 | 0x1 << 0 |
		pattern3);
	/* mempll 4 */
	write32(&ch[channel].ddrphy_regs->mempll[11], 0x1 << 26 | 0x3 << 24 |
		0x1 << 23 | pattern1);
	write32(&ch[channel].ddrphy_regs->mempll[12], 0x1 << 30 | 0x3 << 26 |
		0x3 << 14 | pattern2);
	write32(&ch[channel].ddrphy_regs->mempll[13], 0x1 << 0 | pattern3);

	/* mempll 3 - enable signal tie together */
	write32(&ch[channel].ddrphy_regs->mempll[8], 0x1 << 26 | 0x3 << 24 |
		0x1 << 23 | pattern1);
	write32(&ch[channel].ddrphy_regs->mempll[9], 0x1 << 30 | 0x3 << 26 |
		0x3 << 14 | pattern2);
	write32(&ch[channel].ddrphy_regs->mempll[10], 0x1 << 17 | 0x1 << 0 |
		pattern3);
}

static void mem_pll_init_phase_sync(u32 channel)
{
	write32(&ch[channel].ddrphy_regs->mempll_divider, BIT(27) | BIT(24) |
		BIT(7) | BIT(5) |
		BIT(4) | BIT(0));
	/* spm control clock enable */
	clrsetbits32(&ch[channel].ddrphy_regs->mempll_divider, BIT(0),
		BIT(1));

	clrsetbits32(&ch[channel].ddrphy_regs->mempll_divider, BIT(1),
		BIT(0));
}

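/*
 * MEMPLL phase calibration helpers. Each of MEMPLL 2/3/4 has a reference
 * clock delay line (REF_DL) and a feedback clock delay line (FB_DL); the
 * 0x1f masks below cover those 5-bit fields. pll_phase_adjust() programs
 * the delays for the current search state: start from zero delay
 * (MEMPLL_INIT), then delay the feedback clock when REF lags FBK
 * (MEMPLL_REF_LAG) or the reference clock when REF leads FBK
 * (MEMPLL_REF_LEAD).
 */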
static void pll_phase_adjust(u32 channel, struct mem_pll *mempll, int reg_offs)
{
	switch (mempll->phase) {
	case MEMPLL_INIT:
		/* initial phase: zero out RG_MEPLL(2,3,4)_(REF_DL,FB)_DL */
		clrbits32(&ch[channel].ddrphy_regs->mempll[reg_offs],
			  0x1f << MEMPLL_REF_DL_SHIFT |
			  0x1f << MEMPLL_FB_DL_SHIFT);
		break;

	case MEMPLL_REF_LAG:
		/* REF lag FBK, delay FBK */
		clrsetbits32(&ch[channel].ddrphy_regs->mempll[reg_offs],
			     0x1f << MEMPLL_REF_DL_SHIFT |
			     0x1f << MEMPLL_FB_DL_SHIFT,
			     mempll->delay << MEMPLL_FB_DL_SHIFT);
		break;

	case MEMPLL_REF_LEAD:
		/* REF lead FBK, delay REF */
		clrsetbits32(&ch[channel].ddrphy_regs->mempll[reg_offs],
			     0x1f << MEMPLL_REF_DL_SHIFT |
			     0x1f << MEMPLL_FB_DL_SHIFT,
			     mempll->delay << MEMPLL_REF_DL_SHIFT);
	}
}

static void pll_phase_check(u32 channel, struct mem_pll *mempll, int idx)
{
	u32 value = read32(&ch[channel].ddrphy_regs->jmeter_pll_st[idx]);
	u16 one_count = (u16)((value >> 16) & 0xffff);
	u16 zero_count = (u16)(value & 0xffff);

	dramc_dbg("PLL %d, phase %d, one_count %d, zero_count %d\n",
		  (idx + 2), mempll->phase, one_count, zero_count);

	switch (mempll->phase) {
	case MEMPLL_INIT:
		if ((one_count - zero_count) > JMETER_COUNT_N) {
			/* REF lag FBK */
			mempll->phase = MEMPLL_REF_LAG;
			mempll->delay++;
		} else if ((zero_count - one_count) > JMETER_COUNT_N) {
			/* REF lead FBK */
			mempll->phase = MEMPLL_REF_LEAD;
			mempll->delay++;
		} else {
			/* in-phase at initial */
			mempll->done = 1;
		}
		break;

	case MEMPLL_REF_LAG:
		if (JMETER_COUNT_N >= (one_count - zero_count)) {
			mempll->done = 1;
		} else {
			mempll->delay++;
		}
		break;

	case MEMPLL_REF_LEAD:
		if (JMETER_COUNT_N >= (zero_count - one_count)) {
			mempll->done = 1;
		} else {
			mempll->delay++;
		}
	}
}

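/*
 * Per-channel phase calibration loop. The jitter meter samples the
 * REF/FBK relationship over JMETER_COUNT (1024) clocks and reports how
 * many samples were 1 (one_count) and 0 (zero_count); a large imbalance
 * means the two clocks are out of phase. Delays are stepped one unit per
 * iteration until all three PLLs report counts within JMETER_COUNT_N,
 * and the boot dies if a 5-bit delay line would overflow (delay >= 32).
 */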
static void mem_pll_phase_cali(u32 channel)
{
	u32 i;

	struct mem_pll mempll[3] =
	{
		{0, 0, 0},
		{0, 0, 0},
		{0, 0, 0},
	};

	dramc_dbg("[PLL_Phase_Calib] ===== PLL Phase Calibration: ");
	dramc_dbg("CHANNEL %d (0: CHA, 1: CHB) =====\n", channel);

	/* 1. set jitter meter count number to 1024 for mempll 2 3 4 */
	for (i = 0; i < 3; i++)
		clrsetbits32(&ch[channel].ddrphy_regs->jmeter[i],
			     JMETER_COUNTER_MASK,
			     JMETER_COUNT << JMETER_COUNTER_SHIFT);

	while (1) {
		for (i = 0; i < 3; i++) {
			if (!mempll[i].done) {
				pll_phase_adjust(channel, &mempll[i], (i + 2) * 3);
			}
		}

		udelay(20); /* delay 20us for external loop pll stable */

		/* 2. enable mempll 2 3 4 jitter meter */
		for (i = 0; i < 3; i++)
			setbits32(&ch[channel].ddrphy_regs->jmeter[i],
				  JMETER_EN_BIT);

		/* 3. wait for jitter meter complete */
		udelay(JMETER_WAIT_DONE_US);

		/* 4. check jitter meter counter value for mempll 2 3 4 */
		for (i = 0; i < 3; i++) {
			if (!mempll[i].done) {
				pll_phase_check(channel, &mempll[i], i);
			}
		}

		/* 5. disable mempll 2 3 4 jitter meter */
		for (i = 0; i < 3; i++)
			clrbits32(&ch[channel].ddrphy_regs->jmeter[i],
				  JMETER_EN_BIT);

		/* 6. all done early break */
		if (mempll[0].done && mempll[1].done && mempll[2].done)
			break;

		/* 7. delay line overflow break */
		for (i = 0; i < 3; i++) {
			if (mempll[i].delay >= 32) {
				die("MEMPLL calibration fail\n");
			}
		}
	}

	dramc_dbg("pll done: ");

	dramc_dbg("%d, %d, %d\n",
		  mempll[0].done, mempll[1].done, mempll[2].done);
	dramc_dbg("pll dl: %d, %d, %d\n",
		  mempll[0].delay, mempll[1].delay, mempll[2].delay);
}

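/*
 * Top-level MEMPLL bring-up for both channels: pre-init register setup,
 * PLL parameter programming, bias/LPF enable with settling delays, a
 * power-on cycle through the SPM, then per-channel phase sync and phase
 * calibration before switching the memory clock mux.
 */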
void mem_pll_init(const struct mt8173_sdram_params *sdram_params)
{
	u32 channel;

	/* udelay waits for PLL to stabilize in this function */
	printk(BIOS_DEBUG, "[PLL] mempll_init and cali\n");

	/* mempll pre_init for two channels */
	for (channel = 0; channel < CHANNEL_NUM; channel++)
		mem_pll_pre_init(channel);

	/* only set once in MPLL */
	mt_mem_pll_config_pre(sdram_params);

	for (channel = 0; channel < CHANNEL_NUM; channel++)
		mem_pll_init_set_params(channel);

	udelay(1); /* wait after da_mpll_sdm_iso_en goes low */

	/* only set once in MPLL */
	mt_mem_pll_config_post();

	udelay(100);

	for (channel = 0; channel < CHANNEL_NUM; channel++) {
		/* mempll_bias_en */
		write32(&ch[channel].ddrphy_regs->mempll[3], 0xd << 28 |
			0x1 << 6);
		udelay(2);

		/* mempll2_en -> mempll4_en -> mempll3_en */
		setbits32(&ch[channel].ddrphy_regs->mempll[5], 1 << 0);
		setbits32(&ch[channel].ddrphy_regs->mempll[11], 1 << 0);
		setbits32(&ch[channel].ddrphy_regs->mempll[8], 1 << 0);

		udelay(100);

		/* mempll_bias_lpf_en */
		setbits32(&ch[channel].ddrphy_regs->mempll[3], 1 << 7);

		udelay(30);

		/* select mempll4 band register */
		setbits32(&ch[channel].ddrphy_regs->mempll[4], 1 << 26);
		clrbits32(&ch[channel].ddrphy_regs->mempll[4], 1 << 26);

		/* PLL ready */

		/* disable mempll2_en -> mempll4_en -> mempll3_en */
		clrbits32(&ch[channel].ddrphy_regs->mempll[5], 1 << 0);
		clrbits32(&ch[channel].ddrphy_regs->mempll[11], 1 << 0);
		clrbits32(&ch[channel].ddrphy_regs->mempll[8], 1 << 0);

		/* disable autok mempll2_en -> mempll4_en -> mempll3_en */
		clrbits32(&ch[channel].ddrphy_regs->mempll[5], 1 << 23);
		clrbits32(&ch[channel].ddrphy_regs->mempll[11], 1 << 23);
		clrbits32(&ch[channel].ddrphy_regs->mempll[8], 1 << 23);

		udelay(1);

		/* mempll[2->4->3]_fb_mck_sel=1 (switch to outer loop) */
		setbits32(&ch[channel].ddrphy_regs->mempll[6], 1 << 25);
		setbits32(&ch[channel].ddrphy_regs->mempll[12], 1 << 25);
		setbits32(&ch[channel].ddrphy_regs->mempll[9], 1 << 25);

		udelay(1);

		/* enable mempll2_en -> mempll4_en -> mempll3_en */
		setbits32(&ch[channel].ddrphy_regs->mempll[5], 1 << 0);
		setbits32(&ch[channel].ddrphy_regs->mempll[11], 1 << 0);
		setbits32(&ch[channel].ddrphy_regs->mempll[8], 1 << 0);
	}

	/* mempll new power-on */
	write32(&mtk_spm->poweron_config_set, 0x1 << 0 |
		SPM_PROJECT_CODE << 16);
	/* request mempll reset/pdn mode */
	setbits32(&mtk_spm->power_on_val0, 0x1 << 27);

	udelay(2);

	/* unrequest mempll reset/pdn mode and wait settle */
	clrbits32(&mtk_spm->power_on_val0, 0x1 << 27);

	udelay(31); /* PLL ready */

	for (channel = 0; channel < CHANNEL_NUM; channel++)
		mem_pll_init_phase_sync(channel);

	udelay(1);

	/* mempll calibration for two channels */
	for (channel = 0; channel < CHANNEL_NUM; channel++)
		mem_pll_phase_cali(channel);

	div2_phase_sync(); /* phase sync for channel B */

	mt_mem_pll_mux();
}

void dramc_pre_init(u32 channel, const struct mt8173_sdram_params *sdram_params)
{
	/* txdly_cs, txdly_cs1 */
	write32(&ch[channel].ao_regs->selph1, 0x0);
	/* txdly_dqsgate, txdly_dqsgate_p1 */
	write32(&ch[channel].ao_regs->selph2, 0x3 << 20 | 0x2 << 12);
	/* txldy_ra* */
	write32(&ch[channel].ao_regs->selph3, 0x0);
	/* txldy_ra* */
	write32(&ch[channel].ao_regs->selph4, 0x0);

	/* setting of write latency (WL=8) */
	write32(&ch[channel].ao_regs->selph7, 0x3333 << 16 | 0x3333);
	write32(&ch[channel].ao_regs->selph8, 0x3333 << 16 | 0x3333);
	write32(&ch[channel].ao_regs->selph9, 0x3333 << 16 | 0x3333);
	write32(&ch[channel].ao_regs->selph10, 0x5555 << 16 | 0xffff);
	write32(&ch[channel].ao_regs->selph11, 0x55 << 16 | 0xff);

	write32(&ch[channel].ao_regs->selph5, 0x1 << 26 | 0x2 << 22 |
		0x1 << 20 | 0x5 << 16 |
		0x5555);

	write32(&ch[channel].ao_regs->selph6_1, 0x4 << 8 | 0x3 << 4 |
		0x2 << 0);

	write32(&ch[channel].ao_regs->ac_time_05t,
		sdram_params->ac_timing.actim05t);
}

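/*
 * Issue one mode-register-set command to the given rank: program the MRS
 * value, pulse the SPCMD trigger, and wait dly microseconds for the
 * command to complete. Illustrative use (mirroring dramc_set_mrs_value()
 * below, where the actual values come from sdram_params):
 *
 *	mrs_write(channel, 0, sdram_params->mrs_set.mrs_63, 10);
 */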
static void mrs_write(int channel, int rank, u32 mrs_value, unsigned int dly)
{
	write32(&ch[channel].ao_regs->mrs, rank << 28 | mrs_value);

	write32(&ch[channel].ao_regs->spcmd, 0x1);
	udelay(dly);
	write32(&ch[channel].ao_regs->spcmd, 0x0);
}

static void dramc_set_mrs_value(int channel, int rank,
				const struct mt8173_sdram_params *sdram_params)
{
	/* MR63 -> Reset, Wait >=10us if not check DAI */
	mrs_write(channel, rank, sdram_params->mrs_set.mrs_63, 10);
	/* MR10 -> ZQ Init, tZQINIT>=1us */
	mrs_write(channel, rank, sdram_params->mrs_set.mrs_10, 1);
	/* MR3 driving strength set to max */
	mrs_write(channel, rank, sdram_params->mrs_set.mrs_3, 1);
	/* MR1 */
	mrs_write(channel, rank, sdram_params->mrs_set.mrs_1, 1);
	/* MR2 */
	mrs_write(channel, rank, sdram_params->mrs_set.mrs_2, 1);
	/* MR11 ODT disable */
	mrs_write(channel, rank, sdram_params->mrs_set.mrs_11, 1);
}

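/*
 * Per-channel DRAM controller init: rank configuration, pad driving,
 * DQS gating delays and AC timings from sdram_params, the mode-register
 * sequence above for each populated rank, and finally the default
 * per-bit RX delays from the calibration parameters.
 */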
void dramc_init(u32 channel, const struct mt8173_sdram_params *sdram_params)
{
	u32 bit, dual_rank_set;

	const struct mt8173_calib_params *calib_params;

	dual_rank_set = is_dual_rank(channel, sdram_params);
	calib_params = &sdram_params->calib_params;

	write32(&ch[channel].ddrphy_regs->peri[2], 0x1 << 12 |
		0x1 << 4);

	write32(&ch[channel].ddrphy_regs->peri[3], 0x0);

	write32(&ch[channel].ao_regs->test2_4,
		sdram_params->ac_timing.test2_4);

	write32(&ch[channel].ao_regs->clk1delay, 0x1 << 23 |
		0x1 << 22 |
		0x1 << 21);

	/* rank config */
	assert((sdram_params->ac_timing.rkcfg & 0x1) == dual_rank_set);
	write32(&ch[channel].ao_regs->rkcfg,
		sdram_params->ac_timing.rkcfg);

	/* pimux */
	write32(&ch[channel].ao_regs->mckdly, 0x1 << 30 |
		0x1 << 20 |
		0x1 << 4);

	write32(&ch[channel].ddrphy_regs->mckdly, 0x1 << 8);

	write32(&ch[channel].ao_regs->padctl4, 0x1 << 0);

	/* tCKEH/tCKEL extend 1T */
	write32(&ch[channel].ao_regs->dummy, 0x1 << 31 |
		0x3 << 10 |
		0x1 << 4);

	/* driving control */
	write32(&ch[channel].ao_regs->iodrv6, DEFAULT_DRIVING |
		DRIVING_DS2_0 << 20 |
		DRIVING_DS2_0 << 4);

	write32(&ch[channel].ddrphy_regs->drvctl1, DEFAULT_DRIVING |
		DRIVING_DS2_0 << 20);

	write32(&ch[channel].ao_regs->drvctl1, DEFAULT_DRIVING |
		DRIVING_DS2_0 << 4);

	/* enable dqs signal output */
	write32(&ch[channel].ddrphy_regs->ioctl, 0x0);

	/* rank 0 dqs gating delay */
	write32(&ch[channel].ao_regs->dqsien[0], 0x40 << 24 |
		0x40 << 16 |
		0x40 << 8 |
		0x40 << 0);

	write32(&ch[channel].ao_regs->dqsctl1, 0x1 << 28 |
		0x5 << 24);

	write32(&ch[channel].ao_regs->dqsctl2, 0x5 << 0);
	write32(&ch[channel].ao_regs->phyctl1, 0x1 << 25);
	write32(&ch[channel].ao_regs->gddr3ctl1, 0x1 << 24);
	write32(&ch[channel].ddrphy_regs->gddr3ctl1, 0x1 << 28);
	write32(&ch[channel].ao_regs->arbctl0, 0x80 << 0);

	/* enable clock pad 0 */
	write32(&ch[channel].ao_regs->clkctl, 0x1 << 28);

	udelay(1);

	write32(&ch[channel].ao_regs->conf1,
		sdram_params->ac_timing.conf1);

	/* bit 17,18 would bypass some dummy path */
	write32(&ch[channel].ddrphy_regs->dqsgctl, 0x1 << 31 |
		0x1 << 30 |
		0x1 << 17 |
		0x1 << 18 |
		0x1 << 4 |
		0x1 << 0);

	write32(&ch[channel].ao_regs->dqscal0, 0x0);
	write32(&ch[channel].ddrphy_regs->dqscal0, 0x0);

	write32(&ch[channel].ao_regs->actim0,
		sdram_params->ac_timing.actim);

	write32(&ch[channel].ao_regs->misctl0,
		sdram_params->ac_timing.misctl0);
	write32(&ch[channel].ddrphy_regs->misctl0,
		sdram_params->ac_timing.misctl0);

	write32(&ch[channel].ao_regs->perfctl0, 0x1 << 20);

	write32(&ch[channel].ao_regs->ddr2ctl,
		sdram_params->ac_timing.ddr2ctl);
	write32(&ch[channel].ddrphy_regs->ddr2ctl,
		sdram_params->ac_timing.ddr2ctl);

	write32(&ch[channel].ao_regs->misc, 0xb << 8 |
		0x1 << 7 |
		0x1 << 6 |
		0x1 << 5);

	write32(&ch[channel].ao_regs->dllconf, 0xf << 28 |
		0x1 << 24);

	write32(&ch[channel].ao_regs->actim1,
		sdram_params->ac_timing.actim1);

	write32(&ch[channel].ddrphy_regs->dqsisel, 0x0);

	/* disable ODT before ZQ calibration */
	write32(&ch[channel].ao_regs->wodt, 0x1 << 0);

	write32(&ch[channel].ao_regs->padctl4, 0x1 << 2 |
		0x1 << 0);

	udelay(200); /* tINIT3 > 200us */

	write32(&ch[channel].ao_regs->gddr3ctl1, 0x1 << 24 |
		0x1 << 20);

	write32(&ch[channel].ddrphy_regs->gddr3ctl1, 0x1 << 28);

	/* set mode register value */
	dramc_set_mrs_value(channel, 0, sdram_params);

	if (dual_rank_set)
		dramc_set_mrs_value(channel, 1, sdram_params);

	write32(&ch[channel].ao_regs->gddr3ctl1,
		sdram_params->ac_timing.gddr3ctl1);
	write32(&ch[channel].ddrphy_regs->gddr3ctl1,
		sdram_params->ac_timing.gddr3ctl1);

	write32(&ch[channel].ao_regs->dramc_pd_ctrl,
		sdram_params->ac_timing.pd_ctrl);

	write32(&ch[channel].ao_regs->padctl4, 0x1 << 0);
	write32(&ch[channel].ao_regs->perfctl0, 0x1 << 20 | 0x1 << 0);
	write32(&ch[channel].ao_regs->zqcs, 0xa << 8 | 0x56 << 0);
	write32(&ch[channel].ddrphy_regs->padctl1, 0x0);

	write32(&ch[channel].ao_regs->test2_3,
		sdram_params->ac_timing.test2_3);

	write32(&ch[channel].ao_regs->conf2,
		sdram_params->ac_timing.conf2);

	write32(&ch[channel].ddrphy_regs->padctl2, 0x0);

	/* DISABLE_DRVREF */
	write32(&ch[channel].ao_regs->ocdk, 0x0);
	write32(&ch[channel].ddrphy_regs->ocdk, 0x0);

	write32(&ch[channel].ao_regs->r1deldly, 0x12 << 24 |
		0x12 << 16 |
		0x12 << 8 |
		0x12 << 0);

	write32(&ch[channel].ao_regs->padctl7, 0x0);

	/* CLKTDN, DS0TDN, DS1TDN, DS2TDN, DS3TDN */
	setbits32(&ch[channel].ddrphy_regs->tdsel[2], 0x1 << 31 |
		0x1 << 29 |
		0x1 << 27 |
		0x1 << 25 |
		0x1 << 1);
	/* DISABLE_PERBANK_REFRESH */
	clrbits32(&ch[channel].ao_regs->rkcfg, 0x1 << 7);

	/* clear R_DMREFTHD to reduce MR4 wait refresh queue time */
	clrbits32(&ch[channel].ao_regs->conf2, 0x7 << 24);

	/* duty default value */
	write32(&ch[channel].ddrphy_regs->phyclkduty, 0x1 << 28 |
		0x1 << 16);

	if (!dual_rank_set) {
		/* single rank, CKE1 always off */
		setbits32(&ch[channel].ao_regs->gddr3ctl1, 0x1 << 21);
	}

	/* default dqs rx perbit input delay */
	write32(&ch[channel].ao_regs->r0deldly,
		calib_params->rx_dqs_dly[channel]);

	write32(&ch[channel].ao_regs->r1deldly,
		calib_params->rx_dqs_dly[channel]);

	for (bit = 0; bit < DQS_BIT_NUMBER; bit++)
		write32(&ch[channel].ao_regs->dqidly[bit],
			calib_params->rx_dq_dly[channel][bit]);
}

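/*
 * Phase sync for channel B (called at the end of mem_pll_init): briefly
 * drop and re-assert the channel B memory clock enable so its divided
 * clock restarts in step with channel A.
 */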
void div2_phase_sync(void)
{
	clrbits32(&ch[CHANNEL_B].ddrphy_regs->mempll_divider,
		  1 << MEMCLKENB_SHIFT);
	udelay(1);

	setbits32(&ch[CHANNEL_B].ddrphy_regs->mempll_divider,
		  1 << MEMCLKENB_SHIFT);
}

void dramc_phy_reset(u32 channel)
{
	/* reset phy */
	setbits32(&ch[channel].ddrphy_regs->phyctl1,
		  1 << PHYCTL1_PHYRST_SHIFT);

	/* read data counter reset */
	setbits32(&ch[channel].ao_regs->gddr3ctl1,
		  1 << GDDR3CTL1_RDATRST_SHIFT);

	udelay(1); /* delay 1ns */

	clrbits32(&ch[channel].ao_regs->gddr3ctl1,
		  1 << GDDR3CTL1_RDATRST_SHIFT);

	clrbits32(&ch[channel].ddrphy_regs->phyctl1,
		  1 << PHYCTL1_PHYRST_SHIFT);
}

void dramc_runtime_config(u32 channel,
			  const struct mt8173_sdram_params *sdram_params)
{
	setbits32(&ch[channel].ddrphy_regs->dqsgctl,
		  BIT(17) | BIT(18));

	/* enable hw gating */
	setbits32(&ch[channel].ao_regs->dqscal0,
		  1 << DQSCAL0_STBCALEN_SHIFT);

	/* if frequency >1600, tCKE should >7 clk */
	setbits32(&ch[channel].ao_regs->dummy, 0x1 << 4);

	if (sdram_params->dram_freq * 2 < 1600 * MHz)
		die("set tCKE error in runtime config");

	/* DDRPHY C/A and DQ M_CK clock gating enable */
	setbits32(&ch[channel].ddrphy_regs->ddrphy_cg_ctrl, 0x1 << 2 |
		0x1 << 1);

	setbits32(&ch[channel].ao_regs->perfctl0, BIT(19) | BIT(14) |
		BIT(11) | BIT(10) |
		BIT(9) | BIT(8) |
		BIT(4) | BIT(0));
	/* ZQCS_ENABLE */
	if (sdram_params->emi_set.cona & 0x1) {
		/* dual channel, clear ZQCSCNT */
		clrbits32(&ch[channel].ao_regs->spcmd, 0xff << 16);
		/* set ZQCSMASK for different channels */
		if (channel == CHANNEL_A) {
			clrbits32(&ch[channel].ao_regs->perfctl0, 0x1 << 24);
		} else {
			setbits32(&ch[channel].ao_regs->perfctl0, 0x1 << 24);
		}

		/* enable ZQCSDUAL */
		setbits32(&ch[channel].ao_regs->perfctl0, 0x1 << 25);
	} else {
		/* single channel, set ZQCSCNT */
		setbits32(&ch[channel].ao_regs->spcmd, 0x8 << 16);
	}
}

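/*
 * Hand MEMPLL-related controls over to SPM by clearing the register-mode
 * control bits in AP_PLL_CON3 and the DDRPHY peri registers;
 * transfer_to_reg_control() below sets them again to return control to
 * CPU register mode.
 */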
void transfer_to_spm_control(void)
{
	u32 msk;

	msk = BIT(7) | BIT(11) | BIT(15);
	clrbits32(&mtk_apmixed->ap_pll_con3, msk);

	msk = BIT(0) | BIT(4) | BIT(8);
	clrbits32(&ch[CHANNEL_A].ddrphy_regs->peri[3], msk);

	msk = BIT(0) | BIT(8);
	clrbits32(&ch[CHANNEL_B].ddrphy_regs->peri[3], msk);

	msk = BIT(0) | BIT(9) | BIT(10) | BIT(11) | BIT(16) | BIT(24);
	clrbits32(&ch[CHANNEL_A].ddrphy_regs->peri[2], msk);
	clrbits32(&ch[CHANNEL_B].ddrphy_regs->peri[2], msk);
}

void transfer_to_reg_control(void)
{
	u32 val;

	val = BIT(7) | BIT(11) | BIT(15);
	setbits32(&mtk_apmixed->ap_pll_con3, val);

	val = BIT(0) | BIT(4) | BIT(8);
	setbits32(&ch[CHANNEL_A].ddrphy_regs->peri[3], val);

	val = BIT(0) | BIT(8);
	write32(&ch[CHANNEL_B].ddrphy_regs->peri[3], val);

	val = BIT(0) | BIT(9) | BIT(10) | BIT(11) | BIT(16) | BIT(24);
	setbits32(&ch[CHANNEL_A].ddrphy_regs->peri[2], val);
	setbits32(&ch[CHANNEL_B].ddrphy_regs->peri[2], val);
}

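/*
 * Test Agent 2 engine: optionally writes and then reads back a test
 * pattern (ISI, audio or crosstalk) over the region described by
 * test2_1/test2_2 and returns the CMP_ERR compare result (0 means pass).
 * Illustrative call with placeholder base/offset values (test2_1_val and
 * test2_2_val are not names defined in this file):
 *
 *	err = dramc_engine2(channel, TE_OP_WRITE_READ_CHECK,
 *			    test2_1_val, test2_2_val, XTALK, 0);
 */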
u32 dramc_engine2(u32 channel, enum dram_tw_op wr, u32 test2_1, u32 test2_2,
		  u8 testaudpat, u8 log2loopcount)
{
	u32 value;

	if (log2loopcount > 15)
		die("Invalid loopcount of engine2!");

	/* Disable Test Agent1, Test Agent2 write/read */
	clrbits32(&ch[channel].ao_regs->conf2, CONF2_TEST1_EN |
		CONF2_TEST2R_EN |
		CONF2_TEST2W_EN);

	/* 1. set pattern, base address, offset address */
	write32(&ch[channel].nao_regs->test2_1, test2_1);
	write32(&ch[channel].nao_regs->test2_2, test2_2);

	/* 2. select test pattern */
	/* TESTXTALKPAT | TESTAUDPAT
	 *   ISI       0 | 0
	 *   AUD       0 | 1
	 *   XTALK     1 | 0
	 *   UNKNOWN   1 | 1
	 */
	switch (testaudpat) {
	case XTALK:
		/* TESTAUDPAT = 0 */
		clrbits32(&ch[channel].ao_regs->test2_3,
			  TEST2_3_TESTAUDPAT_EN);
		/* TESTXTALKPAT = 1, select xtalk pattern
		 * TESTAUDMODE = 0, read only
		 * TESTAUDBITINV = 0, no bit inversion
		 */
		clrsetbits32(&ch[channel].ao_regs->test2_4,
			     TEST2_4_TESTAUDBITINV_EN |
			     TEST2_4_TESTAUDMODE_EN,
			     TEST2_4_TESTXTALKPAT_EN);
		break;
	case AUDIO:
		/* TESTAUDPAT = 1 */
		setbits32(&ch[channel].ao_regs->test2_3,
			  TEST2_3_TESTAUDPAT_EN);
		/* TESTXTALKPAT = 0
		 * TESTAUDINIT = 0x11
		 * TESTAUDINC = 0x0d
		 * TESTAUDBITINV = 1
		 * TESTAUDMODE = 1
		 */
		clrsetbits32(&ch[channel].ao_regs->test2_4,
			     TEST2_4_TESTXTALKPAT_EN |
			     TEST2_4_TESTAUDINIT_MASK |
			     TEST2_4_TESTAUDINC_MASK,
			     TEST2_4_TESTAUDMODE_EN |
			     TEST2_4_TESTAUDBITINV_EN |
			     0x11 << TEST2_4_TESTAUDINIT_SHIFT |
			     0xd << TEST2_4_TESTAUDINC_SHIFT);

		break;
	case ISI:
		/* TESTAUDPAT = 0 */
		clrbits32(&ch[channel].ao_regs->test2_3,
			  TEST2_3_TESTAUDPAT_EN);
		/* TESTXTALKPAT = 0 */
		clrbits32(&ch[channel].ao_regs->test2_4,
			  TEST2_4_TESTXTALKPAT_EN);
	}

	/* 3. set loop number */
	clrsetbits32(&ch[channel].ao_regs->test2_3, TEST2_3_TESTCNT_MASK,
		     log2loopcount << TEST2_3_TESTCNT_SHIFT);

	/* 4. enable read/write test */
	if (wr == TE_OP_READ_CHECK) {
		if ((testaudpat == 1) || (testaudpat == 2)) {
			/* if audio pattern, enable read only */
			/* (disable write after read), */
			/* AUDMODE=0x48[15]=0 */
			clrbits32(&ch[channel].ao_regs->test2_4,
				  TEST2_4_TESTAUDMODE_EN);
		}

		/* enable read, 0x008[30:30] */
		setbits32(&ch[channel].ao_regs->conf2, CONF2_TEST2R_EN);
	} else if (wr == TE_OP_WRITE_READ_CHECK) {
		/* enable write, 0x008[31:31] */
		setbits32(&ch[channel].ao_regs->conf2, CONF2_TEST2W_EN);

		/* check "read data compare ready" bit */
		do {
			value = read32(&ch[channel].nao_regs->testrpt);
		} while ((value & (1 << TESTRPT_DM_CMP_CPT_SHIFT)) == 0);

		/* Disable Test Agent2 write and enable Test Agent2 read */
		clrbits32(&ch[channel].ao_regs->conf2, CONF2_TEST2W_EN);
		setbits32(&ch[channel].ao_regs->conf2, CONF2_TEST2R_EN);
	}

	/* 5 check "read data compare ready" bit */
	do {
		value = read32(&ch[channel].nao_regs->testrpt);
	} while ((value & (1 << TESTRPT_DM_CMP_CPT_SHIFT)) == 0);

	/* delay 10ns after ready check from DE suggestion (1us here) */
	udelay(1);

	/* read CMP_ERR result */
	value = read32(&ch[channel].nao_regs->cmp_err);

	/* 6 disable read */
	clrbits32(&ch[channel].ao_regs->conf2, CONF2_TEST2R_EN);

	/* return CMP_ERR result, pass: 0, failure: otherwise */
	return value;
}