// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2014-2020, NVIDIA CORPORATION. All rights reserved.
 */

#include <linux/kernel.h>
#include <linux/io.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/of.h>

#include <soc/tegra/mc.h>

#include "tegra210-emc.h"
#include "tegra210-mc.h"

/*
 * Enable flags for specifying verbosity.
 */
#define INFO            (1 << 0)
#define STEPS           (1 << 1)
#define SUB_STEPS       (1 << 2)
#define PRELOCK         (1 << 3)
#define PRELOCK_STEPS   (1 << 4)
#define ACTIVE_EN       (1 << 5)
#define PRAMP_UP        (1 << 6)
#define PRAMP_DN        (1 << 7)
#define EMA_WRITES      (1 << 10)
#define EMA_UPDATES     (1 << 11)
#define PER_TRAIN       (1 << 16)
#define CC_PRINT        (1 << 17)
#define CCFIFO          (1 << 29)
#define REGS            (1 << 30)
#define REG_LISTS       (1 << 31)
#define emc_dbg(emc, flags, ...) dev_dbg(emc->dev, __VA_ARGS__)

#define DVFS_CLOCK_CHANGE_VERSION	21021
#define EMC_PRELOCK_VERSION		2101

enum {
	DVFS_SEQUENCE = 1,
	WRITE_TRAINING_SEQUENCE = 2,
	PERIODIC_TRAINING_SEQUENCE = 3,
	DVFS_PT1 = 10,
	DVFS_UPDATE = 11,
	TRAINING_PT1 = 12,
	TRAINING_UPDATE = 13,
	PERIODIC_TRAINING_UPDATE = 14
};
/*
 * PTFV defines - basically just indexes into the per-table PTFV array.
 */
#define PTFV_DQSOSC_MOVAVG_C0D0U0_INDEX		0
#define PTFV_DQSOSC_MOVAVG_C0D0U1_INDEX		1
#define PTFV_DQSOSC_MOVAVG_C0D1U0_INDEX		2
#define PTFV_DQSOSC_MOVAVG_C0D1U1_INDEX		3
#define PTFV_DQSOSC_MOVAVG_C1D0U0_INDEX		4
#define PTFV_DQSOSC_MOVAVG_C1D0U1_INDEX		5
#define PTFV_DQSOSC_MOVAVG_C1D1U0_INDEX		6
#define PTFV_DQSOSC_MOVAVG_C1D1U1_INDEX		7
#define PTFV_DVFS_SAMPLES_INDEX			9
#define PTFV_MOVAVG_WEIGHT_INDEX		10
#define PTFV_CONFIG_CTRL_INDEX			11

#define PTFV_CONFIG_CTRL_USE_PREVIOUS_EMA	(1 << 0)
/*
 * Do arithmetic in fixed point.
 */
#define MOVAVG_PRECISION_FACTOR		100

/*
 * The division portion of the average operation.
 */
#define __AVERAGE_PTFV(dev)						\
	({ next->ptfv_list[(dev)] =					\
	   next->ptfv_list[(dev)] /					\
	   next->ptfv_list[PTFV_DVFS_SAMPLES_INDEX]; })

/*
 * Convert val to fixed point and add it to the temporary average.
 */
#define __INCREMENT_PTFV(dev, val)					\
	({ next->ptfv_list[(dev)] +=					\
	   ((val) * MOVAVG_PRECISION_FACTOR); })

/*
 * Convert a moving average back to integral form and return the value.
 */
#define __MOVAVG_AC(timing, dev)					\
	((timing)->ptfv_list[(dev)] /					\
	 MOVAVG_PRECISION_FACTOR)
/* Weighted update. */
#define __WEIGHTED_UPDATE_PTFV(dev, nval)				\
	({								\
		int w = PTFV_MOVAVG_WEIGHT_INDEX;			\
		int dqs = (dev);					\
									\
		next->ptfv_list[dqs] =					\
			((nval * MOVAVG_PRECISION_FACTOR) +		\
			 (next->ptfv_list[dqs] *			\
			  next->ptfv_list[w])) /			\
			(next->ptfv_list[w] + 1);			\
									\
		emc_dbg(emc, EMA_UPDATES, "%s: (s=%u) EMA: %u\n",	\
			__stringify(dev), nval, next->ptfv_list[dqs]);	\
	})

/* Access a particular average. */
#define __MOVAVG(timing, dev)						\
	((timing)->ptfv_list[(dev)])
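
/*
 * Illustration of the fixed-point bookkeeping above (numbers made up, not
 * from any particular timing table): with PTFV_DVFS_SAMPLES = 3 and measured
 * delays of 20, 21 and 22, __INCREMENT_PTFV() accumulates
 * 2000 + 2100 + 2200 = 6300, __AVERAGE_PTFV() divides by the sample count to
 * give 2100, and __MOVAVG_AC() strips the x100 precision factor to recover
 * an integral delay of 21. The weighted (EMA) update computes
 * (nval * 100 + avg * w) / (w + 1); e.g. w = 4, avg = 2100, nval = 26 gives
 * (2600 + 8400) / 5 = 2200.
 */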
static bool tegra210_emc_compare_update_delay(struct tegra210_emc_timing *timing,
					      u32 measured, u32 idx)
{
	u32 *curr = &timing->current_dram_clktree[idx];
	u32 rate_mhz = timing->rate / 1000;
	u32 tmdel;

	tmdel = abs(*curr - measured);

	if (tmdel * 128 * rate_mhz / 1000000 > timing->tree_margin) {
		*curr = measured;
		return true;
	}

	return false;
}
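
/*
 * Example of the margin check above (illustrative values only): at
 * rate_mhz = 1600 and a measured drift of tmdel = 3, the scaled drift is
 * 3 * 128 * 1600 / 1000000 = 0, so the stored delay is kept; tmdel = 10
 * scales to 2, which only triggers an update if tree_margin is below 2.
 */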
static void tegra210_emc_get_clktree_delay(struct tegra210_emc *emc,
					   u32 delay[DRAM_CLKTREE_NUM])
{
	struct tegra210_emc_timing *curr = emc->last;
	u32 rate_mhz = curr->rate / 1000;
	u32 msb, lsb, dqsosc, delay_us;
	unsigned int c, d, idx;
	unsigned long clocks;

	clocks = tegra210_emc_actual_osc_clocks(curr->run_clocks);
	delay_us = 2 + (clocks / rate_mhz);

	tegra210_emc_start_periodic_compensation(emc);
	udelay(delay_us);

	for (d = 0; d < emc->num_devices; d++) {
		/* Read DQSOSC from MRR18/19 */
		msb = tegra210_emc_mrr_read(emc, 2 - d, 19);
		lsb = tegra210_emc_mrr_read(emc, 2 - d, 18);

		for (c = 0; c < emc->num_channels; c++) {
			/* C[c]D[d]U[0] */
			idx = c * 4 + d * 2;

			dqsosc = (msb & 0x00ff) << 8;
			dqsosc |= (lsb & 0x00ff) >> 0;

			/* Check for unpopulated channels */
			if (dqsosc)
				delay[idx] = (clocks * 1000000) /
					     (rate_mhz * 2 * dqsosc);

			/* C[c]D[d]U[1] */
			idx++;

			dqsosc = (msb & 0xff00) << 0;
			dqsosc |= (lsb & 0xff00) >> 8;

			/* Check for unpopulated channels */
			if (dqsosc)
				delay[idx] = (clocks * 1000000) /
					     (rate_mhz * 2 * dqsosc);

			msb >>= 16;
			lsb >>= 16;
		}
	}
}
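
/*
 * The delay formula above converts a DQSOSC count into a clock-tree delay:
 * the oscillator runs for 'clocks' EMC cycles, so the delay is
 * clocks / (rate_mhz * 2 * dqsosc), scaled by 10^6 so the result stays an
 * integer. Illustrative numbers: clocks = 1024, rate_mhz = 1600 and
 * dqsosc = 160 give (1024 * 1000000) / (1600 * 2 * 160) = 2000.
 */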
static bool periodic_compensation_handler(struct tegra210_emc *emc, u32 type,
					  struct tegra210_emc_timing *last,
					  struct tegra210_emc_timing *next)
{
#define __COPY_EMA(nt, lt, dev)						\
	({ __MOVAVG(nt, dev) = __MOVAVG(lt, dev) *			\
	   (nt)->ptfv_list[PTFV_DVFS_SAMPLES_INDEX]; })

	u32 i, samples = next->ptfv_list[PTFV_DVFS_SAMPLES_INDEX];
	u32 delay[DRAM_CLKTREE_NUM], idx;
	bool over = false;

	if (!next->periodic_training)
		return false;

	if (type == DVFS_SEQUENCE) {
		if (last->periodic_training &&
		    (next->ptfv_list[PTFV_CONFIG_CTRL_INDEX] &
		     PTFV_CONFIG_CTRL_USE_PREVIOUS_EMA)) {
			/*
			 * If the previous frequency was using periodic
			 * calibration then we can reuse the previous
			 * frequency's EMA data.
			 */
			for (idx = 0; idx < DRAM_CLKTREE_NUM; idx++)
				__COPY_EMA(next, last, idx);
		} else {
			/* Reset the EMA. */
			for (idx = 0; idx < DRAM_CLKTREE_NUM; idx++)
				__MOVAVG(next, idx) = 0;

			for (i = 0; i < samples; i++) {
				/* Generate next sample of data. */
				tegra210_emc_get_clktree_delay(emc, delay);

				for (idx = 0; idx < DRAM_CLKTREE_NUM; idx++)
					__INCREMENT_PTFV(idx, delay[idx]);
			}
		}

		for (idx = 0; idx < DRAM_CLKTREE_NUM; idx++) {
			/* Do the division part of the moving average */
			__AVERAGE_PTFV(idx);
			over |= tegra210_emc_compare_update_delay(next,
					__MOVAVG_AC(next, idx), idx);
		}
	}

	if (type == PERIODIC_TRAINING_SEQUENCE) {
		tegra210_emc_get_clktree_delay(emc, delay);

		for (idx = 0; idx < DRAM_CLKTREE_NUM; idx++) {
			__WEIGHTED_UPDATE_PTFV(idx, delay[idx]);
			over |= tegra210_emc_compare_update_delay(next,
					__MOVAVG_AC(next, idx), idx);
		}
	}

	return over;
}
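
/*
 * A sketch of how the two paths above differ: on a DVFS_SEQUENCE the handler
 * rebuilds the average from scratch by taking PTFV_DVFS_SAMPLES fresh
 * measurements (or inherits the previous EMA when
 * PTFV_CONFIG_CTRL_USE_PREVIOUS_EMA is set), while a
 * PERIODIC_TRAINING_SEQUENCE takes a single measurement and folds it into
 * the existing average with weight PTFV_MOVAVG_WEIGHT. Either way the return
 * value only flips to true if some clock-tree delay drifted past
 * tree_margin.
 */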
static u32 tegra210_emc_r21021_periodic_compensation(struct tegra210_emc *emc)
{
	u32 emc_cfg, emc_cfg_o, emc_cfg_update, value;
	static const u32 list[] = {
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3,
		EMC_DATA_BRLSHFT_0,
		EMC_DATA_BRLSHFT_1
	};
	struct tegra210_emc_timing *last = emc->last;
	unsigned int items = ARRAY_SIZE(list), i;

	if (last->periodic_training) {
		emc_dbg(emc, PER_TRAIN, "Periodic training starting\n");

		value = emc_readl(emc, EMC_DBG);
		emc_cfg_o = emc_readl(emc, EMC_CFG);
		emc_cfg = emc_cfg_o & ~(EMC_CFG_DYN_SELF_REF |
					EMC_CFG_DRAM_ACPD |
					EMC_CFG_DRAM_CLKSTOP_PD);

		/*
		 * 1. Power optimizations should be off.
		 */
		emc_writel(emc, emc_cfg, EMC_CFG);

		/* Does emc_timing_update() for above changes. */
		tegra210_emc_dll_disable(emc);

		for (i = 0; i < emc->num_channels; i++)
			tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
						     EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK,
						     0);

		for (i = 0; i < emc->num_channels; i++)
			tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
						     EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK,
						     0);

		emc_cfg_update = value = emc_readl(emc, EMC_CFG_UPDATE);
		value &= ~EMC_CFG_UPDATE_UPDATE_DLL_IN_UPDATE_MASK;
		value |= (2 << EMC_CFG_UPDATE_UPDATE_DLL_IN_UPDATE_SHIFT);
		emc_writel(emc, value, EMC_CFG_UPDATE);

		/*
		 * 2. osc kick off - this assumes training and dvfs have set
		 *    correct value in dram_timings.
		 *
		 * 3. Let dram capture its clock tree delays.
		 *
		 * 4. Check delta wrt previous values (save value if margin
		 *    exceeds what is set in table).
		 */
		if (periodic_compensation_handler(emc, PERIODIC_TRAINING_SEQUENCE,
						  last, last)) {
			/*
			 * 5. Apply compensation w.r.t. trained values (if
			 *    clock tree has drifted more than the set margin).
			 */
			for (i = 0; i < items; i++) {
				value = tegra210_emc_compensate(last, list[i]);
				emc_dbg(emc, EMA_WRITES, "0x%08x <= 0x%08x\n",
					list[i], value);
				emc_writel(emc, value, list[i]);
			}
		}

		emc_writel(emc, emc_cfg_o, EMC_CFG);

		/*
		 * 6. Timing update actually applies the new trimmers.
		 */
		tegra210_emc_timing_update(emc);

		/* 6.1. Restore the UPDATE_DLL_IN_UPDATE field. */
		emc_writel(emc, emc_cfg_update, EMC_CFG_UPDATE);

		/* 6.2. Restore the DLL. */
		tegra210_emc_dll_enable(emc);
	}

	return 0;
}
/*
 * Do the clock change sequence.
 */
static void tegra210_emc_r21021_set_clock(struct tegra210_emc *emc, u32 clksrc)
{
	/* state variables */
	static bool fsp_for_next_freq;
	/* constant configuration parameters */
	const bool save_restore_clkstop_pd = true;
	const u32 zqcal_before_cc_cutoff = 2400;
	const bool cya_allow_ref_cc = false;
	const bool cya_issue_pc_ref = false;
	const bool opt_cc_short_zcal = true;
	const bool ref_b4_sref_en = false;
	const u32 tZQCAL_lpddr4 = 1000000;
	const bool opt_short_zcal = true;
	const bool opt_do_sw_qrst = true;
	const u32 opt_dvfs_mode = MAN_SR;
	/*
	 * This is the timing table for the source frequency. It does _not_
	 * necessarily correspond to the actual timing values in the EMC at the
	 * moment. If the boot BCT differs from the table then this can happen.
	 * However, we need it for accessing the dram_timings (which are not
	 * really registers) array for the current frequency.
	 */
	struct tegra210_emc_timing *fake, *last = emc->last, *next = emc->next;
	u32 tRTM, RP_war, R2P_war, TRPab_war, deltaTWATM, W2P_war, tRPST;
	u32 mr13_flip_fspwr, mr13_flip_fspop, ramp_up_wait, ramp_down_wait;
	u32 zq_wait_long, zq_latch_dvfs_wait_time, tZQCAL_lpddr4_fc_adj;
	u32 emc_auto_cal_config, auto_cal_en, emc_cfg, emc_sel_dpd_ctrl;
	u32 tFC_lpddr4 = 1000 * next->dram_timings[T_FC_LPDDR4];
	u32 bg_reg_mode_change, enable_bglp_reg, enable_bg_reg;
	bool opt_zcal_en_cc = false, is_lpddr3 = false;
	bool compensate_trimmer_applicable = false;
	u32 emc_dbg, emc_cfg_pipe_clk, emc_pin;
	u32 src_clk_period, dst_clk_period; /* in picoseconds */
	bool shared_zq_resistor = false;
	u32 value, dram_type;
	u32 opt_dll_mode = 0;
	unsigned long delay;
	unsigned int i;

	emc_dbg(emc, INFO, "Running clock change.\n");
	/* XXX fake == last */
	fake = tegra210_emc_find_timing(emc, last->rate * 1000UL);
	fsp_for_next_freq = !fsp_for_next_freq;

	value = emc_readl(emc, EMC_FBIO_CFG5) & EMC_FBIO_CFG5_DRAM_TYPE_MASK;
	dram_type = value >> EMC_FBIO_CFG5_DRAM_TYPE_SHIFT;

	if (last->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX] & BIT(31))
		shared_zq_resistor = true;

	if ((next->burst_regs[EMC_ZCAL_INTERVAL_INDEX] != 0 &&
	     last->burst_regs[EMC_ZCAL_INTERVAL_INDEX] == 0) ||
	    dram_type == DRAM_TYPE_LPDDR4)
		opt_zcal_en_cc = true;

	if (dram_type == DRAM_TYPE_DDR3)
		opt_dll_mode = tegra210_emc_get_dll_state(next);

	if ((next->burst_regs[EMC_FBIO_CFG5_INDEX] & BIT(25)) &&
	    (dram_type == DRAM_TYPE_LPDDR2))
		is_lpddr3 = true;

	emc_readl(emc, EMC_CFG);
	emc_readl(emc, EMC_AUTO_CAL_CONFIG);
	src_clk_period = 1000000000 / last->rate;
	dst_clk_period = 1000000000 / next->rate;
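
	/*
	 * Rates are in kHz, so 10^9 / rate yields a period in picoseconds;
	 * e.g. a (hypothetical) 204000 kHz source clock gives
	 * 1000000000 / 204000 = 4901 ps.
	 */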
	if (dst_clk_period <= zqcal_before_cc_cutoff)
		tZQCAL_lpddr4_fc_adj = tZQCAL_lpddr4 - tFC_lpddr4;
	else
		tZQCAL_lpddr4_fc_adj = tZQCAL_lpddr4;

	tZQCAL_lpddr4_fc_adj /= dst_clk_period;

	emc_dbg = emc_readl(emc, EMC_DBG);
	emc_pin = emc_readl(emc, EMC_PIN);
	emc_cfg_pipe_clk = emc_readl(emc, EMC_CFG_PIPE_CLK);

	emc_cfg = next->burst_regs[EMC_CFG_INDEX];
	emc_cfg &= ~(EMC_CFG_DYN_SELF_REF | EMC_CFG_DRAM_ACPD |
		     EMC_CFG_DRAM_CLKSTOP_SR | EMC_CFG_DRAM_CLKSTOP_PD);
	emc_sel_dpd_ctrl = next->emc_sel_dpd_ctrl;
	emc_sel_dpd_ctrl &= ~(EMC_SEL_DPD_CTRL_CLK_SEL_DPD_EN |
			      EMC_SEL_DPD_CTRL_CA_SEL_DPD_EN |
			      EMC_SEL_DPD_CTRL_RESET_SEL_DPD_EN |
			      EMC_SEL_DPD_CTRL_ODT_SEL_DPD_EN |
			      EMC_SEL_DPD_CTRL_DATA_SEL_DPD_EN);
	emc_dbg(emc, INFO, "Clock change version: %d\n",
		DVFS_CLOCK_CHANGE_VERSION);
	emc_dbg(emc, INFO, "DRAM type = %d\n", dram_type);
	emc_dbg(emc, INFO, "DRAM dev #: %u\n", emc->num_devices);
	emc_dbg(emc, INFO, "Next EMC clksrc: 0x%08x\n", clksrc);
	emc_dbg(emc, INFO, "DLL clksrc: 0x%08x\n", next->dll_clk_src);
	emc_dbg(emc, INFO, "last rate: %u, next rate %u\n", last->rate,
		next->rate);
	emc_dbg(emc, INFO, "last period: %u, next period: %u\n",
		src_clk_period, dst_clk_period);
	emc_dbg(emc, INFO, "  shared_zq_resistor: %d\n", !!shared_zq_resistor);
	emc_dbg(emc, INFO, "  num_channels: %u\n", emc->num_channels);
	emc_dbg(emc, INFO, "  opt_dll_mode: %d\n", opt_dll_mode);
	/*
	 * Step 1:
	 *   Pre DVFS SW sequence.
	 */
	emc_dbg(emc, STEPS, "Step 1\n");
	emc_dbg(emc, STEPS, "Step 1.1: Disable DLL temporarily.\n");

	value = emc_readl(emc, EMC_CFG_DIG_DLL);
	value &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
	emc_writel(emc, value, EMC_CFG_DIG_DLL);

	tegra210_emc_timing_update(emc);

	for (i = 0; i < emc->num_channels; i++)
		tegra210_emc_wait_for_update(emc, i, EMC_CFG_DIG_DLL,
					     EMC_CFG_DIG_DLL_CFG_DLL_EN, 0);

	emc_dbg(emc, STEPS, "Step 1.2: Disable AUTOCAL temporarily.\n");

	emc_auto_cal_config = next->emc_auto_cal_config;
	auto_cal_en = emc_auto_cal_config & EMC_AUTO_CAL_CONFIG_AUTO_CAL_ENABLE;
	emc_auto_cal_config &= ~EMC_AUTO_CAL_CONFIG_AUTO_CAL_START;
	emc_auto_cal_config |= EMC_AUTO_CAL_CONFIG_AUTO_CAL_MEASURE_STALL;
	emc_auto_cal_config |= EMC_AUTO_CAL_CONFIG_AUTO_CAL_UPDATE_STALL;
	emc_auto_cal_config |= auto_cal_en;
	emc_writel(emc, emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
	emc_readl(emc, EMC_AUTO_CAL_CONFIG); /* Flush write. */

	emc_dbg(emc, STEPS, "Step 1.3: Disable other power features.\n");

	tegra210_emc_set_shadow_bypass(emc, ACTIVE);
	emc_writel(emc, emc_cfg, EMC_CFG);
	emc_writel(emc, emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
	tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
	if (next->periodic_training) {
		tegra210_emc_reset_dram_clktree_values(next);

		for (i = 0; i < emc->num_channels; i++)
			tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
						     EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK,
						     0);

		for (i = 0; i < emc->num_channels; i++)
			tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
						     EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK,
						     0);

		if (periodic_compensation_handler(emc, DVFS_SEQUENCE, fake, next))
			compensate_trimmer_applicable = true;
	}

	emc_writel(emc, EMC_INTSTATUS_CLKCHANGE_COMPLETE, EMC_INTSTATUS);
	tegra210_emc_set_shadow_bypass(emc, ACTIVE);
	emc_writel(emc, emc_cfg, EMC_CFG);
	emc_writel(emc, emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
	emc_writel(emc, emc_cfg_pipe_clk | EMC_CFG_PIPE_CLK_CLK_ALWAYS_ON,
		   EMC_CFG_PIPE_CLK);
	emc_writel(emc, next->emc_fdpd_ctrl_cmd_no_ramp &
		   ~EMC_FDPD_CTRL_CMD_NO_RAMP_CMD_DPD_NO_RAMP_ENABLE,
		   EMC_FDPD_CTRL_CMD_NO_RAMP);

	bg_reg_mode_change =
		((next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
		  EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD) ^
		 (last->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
		  EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD)) ||
		((next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
		  EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD) ^
		 (last->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
		  EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD));
	enable_bglp_reg =
		(next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
		 EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD) == 0;
	enable_bg_reg =
		(next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
		 EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD) == 0;

	if (bg_reg_mode_change) {
		if (enable_bg_reg)
			emc_writel(emc, last->burst_regs
				   [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
				   ~EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD,
				   EMC_PMACRO_BG_BIAS_CTRL_0);

		if (enable_bglp_reg)
			emc_writel(emc, last->burst_regs
				   [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
				   ~EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD,
				   EMC_PMACRO_BG_BIAS_CTRL_0);
	}
	/* Check if we need to turn on VREF generator. */
	if ((((last->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
	       EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF) == 0) &&
	     ((next->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
	       EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF) == 1)) ||
	    (((last->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
	       EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) == 0) &&
	     ((next->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
	       EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) != 0))) {
		u32 pad_tx_ctrl =
			next->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
		u32 last_pad_tx_ctrl =
			last->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
		u32 next_dq_e_ivref, next_dqs_e_ivref;

		next_dqs_e_ivref = pad_tx_ctrl &
				   EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF;
		next_dq_e_ivref = pad_tx_ctrl &
				  EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF;
		value = (last_pad_tx_ctrl &
			 ~EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF &
			 ~EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) |
			next_dq_e_ivref | next_dqs_e_ivref;
		emc_writel(emc, value, EMC_PMACRO_DATA_PAD_TX_CTRL);
		udelay(1);
	} else if (bg_reg_mode_change) {
		udelay(1);
	}

	tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
	/*
	 * Step 2:
	 *   Prelock the DLL.
	 */
	emc_dbg(emc, STEPS, "Step 2\n");

	if (next->burst_regs[EMC_CFG_DIG_DLL_INDEX] &
	    EMC_CFG_DIG_DLL_CFG_DLL_EN) {
		emc_dbg(emc, INFO, "Prelock enabled for target frequency.\n");
		value = tegra210_emc_dll_prelock(emc, clksrc);
		emc_dbg(emc, INFO, "DLL out: 0x%03x\n", value);
	} else {
		emc_dbg(emc, INFO, "Disabling DLL for target frequency.\n");
		tegra210_emc_dll_disable(emc);
	}
	/*
	 * Step 3:
	 *   Prepare autocal for the clock change.
	 */
	emc_dbg(emc, STEPS, "Step 3\n");

	tegra210_emc_set_shadow_bypass(emc, ACTIVE);
	emc_writel(emc, next->emc_auto_cal_config2, EMC_AUTO_CAL_CONFIG2);
	emc_writel(emc, next->emc_auto_cal_config3, EMC_AUTO_CAL_CONFIG3);
	emc_writel(emc, next->emc_auto_cal_config4, EMC_AUTO_CAL_CONFIG4);
	emc_writel(emc, next->emc_auto_cal_config5, EMC_AUTO_CAL_CONFIG5);
	emc_writel(emc, next->emc_auto_cal_config6, EMC_AUTO_CAL_CONFIG6);
	emc_writel(emc, next->emc_auto_cal_config7, EMC_AUTO_CAL_CONFIG7);
	emc_writel(emc, next->emc_auto_cal_config8, EMC_AUTO_CAL_CONFIG8);
	tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);

	emc_auto_cal_config |= (EMC_AUTO_CAL_CONFIG_AUTO_CAL_COMPUTE_START |
				auto_cal_en);
	emc_writel(emc, emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
	/*
	 * Step 4:
	 *   Update EMC_CFG. (??)
	 */
	emc_dbg(emc, STEPS, "Step 4\n");

	if (src_clk_period > 50000 && dram_type == DRAM_TYPE_LPDDR4)
		ccfifo_writel(emc, 1, EMC_SELF_REF, 0);
	else
		emc_writel(emc, next->emc_cfg_2, EMC_CFG_2);
	/*
	 * Step 5:
	 *   Prepare reference variables for ZQCAL regs.
	 */
	emc_dbg(emc, STEPS, "Step 5\n");

	if (dram_type == DRAM_TYPE_LPDDR4)
		zq_wait_long = max((u32)1, div_o3(1000000, dst_clk_period));
	else if (dram_type == DRAM_TYPE_LPDDR2 || is_lpddr3)
		zq_wait_long = max(next->min_mrs_wait,
				   div_o3(360000, dst_clk_period)) + 4;
	else if (dram_type == DRAM_TYPE_DDR3)
		zq_wait_long = max((u32)256,
				   div_o3(320000, dst_clk_period) + 2);
	else
		zq_wait_long = 0;

	/*
	 * Step 6:
	 *   Training code - removed.
	 */
	emc_dbg(emc, STEPS, "Step 6\n");
	/*
	 * Step 7:
	 *   Program FSP reference registers and send MRWs to new FSPWR.
	 */
	emc_dbg(emc, STEPS, "Step 7\n");
	emc_dbg(emc, SUB_STEPS, "Step 7.1: Bug 200024907 - Patch RP R2P");

	/* WAR 200024907 */
	if (dram_type == DRAM_TYPE_LPDDR4) {
		u32 nRTP = 16;

		if (src_clk_period >= 1000000 / 1866) /* 535.91 ps */
			nRTP = 14;

		if (src_clk_period >= 1000000 / 1600) /* 625.00 ps */
			nRTP = 12;

		if (src_clk_period >= 1000000 / 1333) /* 750.19 ps */
			nRTP = 10;

		if (src_clk_period >= 1000000 / 1066) /* 938.09 ps */
			nRTP = 8;

		deltaTWATM = max_t(u32, div_o3(7500, src_clk_period), 8);

		/*
		 * Originally there was a + .5 in the tRPST calculation.
		 * However since we can't do FP in the kernel and the tRTM
		 * computation was in a floating point ceiling function, adding
		 * one to tRTP should be ok. There is no other source of
		 * non-integer values, so the result was always going to be
		 * something of the form: f_ceil(N + .5) = N + 1;
		 */
		tRPST = (last->emc_mrw & 0x80) >> 7;
		tRTM = fake->dram_timings[RL] + div_o3(3600, src_clk_period) +
			max_t(u32, div_o3(7500, src_clk_period), 8) + tRPST +
			1 + nRTP;

		emc_dbg(emc, INFO, "tRTM = %u, EMC_RP = %u\n", tRTM,
			next->burst_regs[EMC_RP_INDEX]);

		if (last->burst_regs[EMC_RP_INDEX] < tRTM) {
			if (tRTM > (last->burst_regs[EMC_R2P_INDEX] +
				    last->burst_regs[EMC_RP_INDEX])) {
				R2P_war = tRTM - last->burst_regs[EMC_RP_INDEX];
				RP_war = last->burst_regs[EMC_RP_INDEX];
				TRPab_war = last->burst_regs[EMC_TRPAB_INDEX];

				if (R2P_war > 63) {
					RP_war = R2P_war +
						 last->burst_regs[EMC_RP_INDEX] - 63;

					if (TRPab_war < RP_war)
						TRPab_war = RP_war;

					R2P_war = 63;
				}
			} else {
				R2P_war = last->burst_regs[EMC_R2P_INDEX];
				RP_war = last->burst_regs[EMC_RP_INDEX];
				TRPab_war = last->burst_regs[EMC_TRPAB_INDEX];
			}

			if (RP_war < deltaTWATM) {
				W2P_war = last->burst_regs[EMC_W2P_INDEX]
					  + deltaTWATM - RP_war;
				if (W2P_war > 63) {
					RP_war = RP_war + W2P_war - 63;
					if (TRPab_war < RP_war)
						TRPab_war = RP_war;
					W2P_war = 63;
				}
			} else {
				W2P_war = last->burst_regs[EMC_W2P_INDEX];
			}

			if ((last->burst_regs[EMC_W2P_INDEX] ^ W2P_war) ||
			    (last->burst_regs[EMC_R2P_INDEX] ^ R2P_war) ||
			    (last->burst_regs[EMC_RP_INDEX] ^ RP_war) ||
			    (last->burst_regs[EMC_TRPAB_INDEX] ^ TRPab_war)) {
				emc_writel(emc, RP_war, EMC_RP);
				emc_writel(emc, R2P_war, EMC_R2P);
				emc_writel(emc, W2P_war, EMC_W2P);
				emc_writel(emc, TRPab_war, EMC_TRPAB);
			}

			tegra210_emc_timing_update(emc);
		} else {
			emc_dbg(emc, INFO, "Skipped WAR\n");
		}
	}
	if (!fsp_for_next_freq) {
		mr13_flip_fspwr = (next->emc_mrw3 & 0xffffff3f) | 0x80;
		mr13_flip_fspop = (next->emc_mrw3 & 0xffffff3f) | 0x00;
	} else {
		mr13_flip_fspwr = (next->emc_mrw3 & 0xffffff3f) | 0x40;
		mr13_flip_fspop = (next->emc_mrw3 & 0xffffff3f) | 0xc0;
	}

	if (dram_type == DRAM_TYPE_LPDDR4) {
		emc_writel(emc, mr13_flip_fspwr, EMC_MRW3);
		emc_writel(emc, next->emc_mrw, EMC_MRW);
		emc_writel(emc, next->emc_mrw2, EMC_MRW2);
	}
	/*
	 * Step 8:
	 *   Program the shadow registers.
	 */
	emc_dbg(emc, STEPS, "Step 8\n");
	emc_dbg(emc, SUB_STEPS, "Writing burst_regs\n");

	for (i = 0; i < next->num_burst; i++) {
		const u16 *offsets = emc->offsets->burst;
		u16 offset;

		if (!offsets[i])
			continue;

		value = next->burst_regs[i];
		offset = offsets[i];

		if (dram_type != DRAM_TYPE_LPDDR4 &&
		    (offset == EMC_MRW6 || offset == EMC_MRW7 ||
		     offset == EMC_MRW8 || offset == EMC_MRW9 ||
		     offset == EMC_MRW10 || offset == EMC_MRW11 ||
		     offset == EMC_MRW12 || offset == EMC_MRW13 ||
		     offset == EMC_MRW14 || offset == EMC_MRW15 ||
		     offset == EMC_TRAINING_CTRL))
			continue;

		/* Pain... And suffering. */
		if (offset == EMC_CFG) {
			value &= ~EMC_CFG_DRAM_ACPD;
			value &= ~EMC_CFG_DYN_SELF_REF;

			if (dram_type == DRAM_TYPE_LPDDR4) {
				value &= ~EMC_CFG_DRAM_CLKSTOP_SR;
				value &= ~EMC_CFG_DRAM_CLKSTOP_PD;
			}
		} else if (offset == EMC_MRS_WAIT_CNT &&
			   dram_type == DRAM_TYPE_LPDDR2 &&
			   opt_zcal_en_cc && !opt_cc_short_zcal &&
			   opt_short_zcal) {
			value = (value & ~(EMC_MRS_WAIT_CNT_SHORT_WAIT_MASK <<
					   EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT)) |
				((zq_wait_long & EMC_MRS_WAIT_CNT_SHORT_WAIT_MASK) <<
				 EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT);
		} else if (offset == EMC_ZCAL_WAIT_CNT &&
			   dram_type == DRAM_TYPE_DDR3 && opt_zcal_en_cc &&
			   !opt_cc_short_zcal && opt_short_zcal) {
			value = (value & ~(EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK <<
					   EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_SHIFT)) |
				((zq_wait_long & EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK) <<
				 EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT);
		} else if (offset == EMC_ZCAL_INTERVAL && opt_zcal_en_cc) {
			value = 0; /* EMC_ZCAL_INTERVAL reset value. */
		} else if (offset == EMC_PMACRO_AUTOCAL_CFG_COMMON) {
			value |= EMC_PMACRO_AUTOCAL_CFG_COMMON_E_CAL_BYPASS_DVFS;
		} else if (offset == EMC_PMACRO_DATA_PAD_TX_CTRL) {
			value &= ~(EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSP_TX_E_DCC |
				   EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSN_TX_E_DCC |
				   EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_TX_E_DCC |
				   EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_CMD_TX_E_DCC);
		} else if (offset == EMC_PMACRO_CMD_PAD_TX_CTRL) {
			value |= EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_DRVFORCEON;
			value &= ~(EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSP_TX_E_DCC |
				   EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSN_TX_E_DCC |
				   EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_E_DCC |
				   EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_CMD_TX_E_DCC);
		} else if (offset == EMC_PMACRO_BRICK_CTRL_RFU1) {
			value &= 0xf800f800;
		} else if (offset == EMC_PMACRO_COMMON_PAD_TX_CTRL) {
			value &= 0xfffffff0;
		}

		emc_writel(emc, value, offset);
	}
	/* SW addition: do EMC refresh adjustment here. */
	tegra210_emc_adjust_timing(emc, next);

	if (dram_type == DRAM_TYPE_LPDDR4) {
		value = (23 << EMC_MRW_MRW_MA_SHIFT) |
			(next->run_clocks & EMC_MRW_MRW_OP_MASK);
		emc_writel(emc, value, EMC_MRW);
	}
	/* Per channel burst registers. */
	emc_dbg(emc, SUB_STEPS, "Writing burst_regs_per_ch\n");

	for (i = 0; i < next->num_burst_per_ch; i++) {
		const struct tegra210_emc_per_channel_regs *burst =
				emc->offsets->burst_per_channel;

		if (!burst[i].offset)
			continue;

		if (dram_type != DRAM_TYPE_LPDDR4 &&
		    (burst[i].offset == EMC_MRW6 ||
		     burst[i].offset == EMC_MRW7 ||
		     burst[i].offset == EMC_MRW8 ||
		     burst[i].offset == EMC_MRW9 ||
		     burst[i].offset == EMC_MRW10 ||
		     burst[i].offset == EMC_MRW11 ||
		     burst[i].offset == EMC_MRW12 ||
		     burst[i].offset == EMC_MRW13 ||
		     burst[i].offset == EMC_MRW14 ||
		     burst[i].offset == EMC_MRW15))
			continue;

		/* Filter out second channel if not in DUAL_CHANNEL mode. */
		if (emc->num_channels < 2 && burst[i].bank >= 1)
			continue;

		emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
			next->burst_reg_per_ch[i], burst[i].offset);
		emc_channel_writel(emc, burst[i].bank,
				   next->burst_reg_per_ch[i],
				   burst[i].offset);
	}
	/* Vref regs. */
	emc_dbg(emc, SUB_STEPS, "Writing vref_regs\n");

	for (i = 0; i < next->vref_num; i++) {
		const struct tegra210_emc_per_channel_regs *vref =
				emc->offsets->vref_per_channel;

		if (!vref[i].offset)
			continue;

		if (emc->num_channels < 2 && vref[i].bank >= 1)
			continue;

		emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
			next->vref_perch_regs[i], vref[i].offset);
		emc_channel_writel(emc, vref[i].bank, next->vref_perch_regs[i],
				   vref[i].offset);
	}
	/* Trimmers. */
	emc_dbg(emc, SUB_STEPS, "Writing trim_regs\n");

	for (i = 0; i < next->num_trim; i++) {
		const u16 *offsets = emc->offsets->trim;

		if (!offsets[i])
			continue;

		if (compensate_trimmer_applicable &&
		    (offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0 ||
		     offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1 ||
		     offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2 ||
		     offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3 ||
		     offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0 ||
		     offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1 ||
		     offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2 ||
		     offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3 ||
		     offsets[i] == EMC_DATA_BRLSHFT_0 ||
		     offsets[i] == EMC_DATA_BRLSHFT_1)) {
			value = tegra210_emc_compensate(next, offsets[i]);
			emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
				value, offsets[i]);
			emc_dbg(emc, EMA_WRITES, "0x%08x <= 0x%08x\n",
				(u32)(u64)offsets[i], value);
			emc_writel(emc, value, offsets[i]);
		} else {
			emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
				next->trim_regs[i], offsets[i]);
			emc_writel(emc, next->trim_regs[i], offsets[i]);
		}
	}
	/* Per channel trimmers. */
	emc_dbg(emc, SUB_STEPS, "Writing trim_regs_per_ch\n");

	for (i = 0; i < next->num_trim_per_ch; i++) {
		const struct tegra210_emc_per_channel_regs *trim =
				&emc->offsets->trim_per_channel[0];
		unsigned int offset;

		if (!trim[i].offset)
			continue;

		if (emc->num_channels < 2 && trim[i].bank >= 1)
			continue;

		offset = trim[i].offset;

		if (compensate_trimmer_applicable &&
		    (offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0 ||
		     offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1 ||
		     offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2 ||
		     offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3 ||
		     offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0 ||
		     offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1 ||
		     offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2 ||
		     offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3 ||
		     offset == EMC_DATA_BRLSHFT_0 ||
		     offset == EMC_DATA_BRLSHFT_1)) {
			value = tegra210_emc_compensate(next, offset);
			emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
				value, offset);
			emc_dbg(emc, EMA_WRITES, "0x%08x <= 0x%08x\n", offset,
				value);
			emc_channel_writel(emc, trim[i].bank, value, offset);
		} else {
			emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
				next->trim_perch_regs[i], offset);
			emc_channel_writel(emc, trim[i].bank,
					   next->trim_perch_regs[i], offset);
		}
	}
	emc_dbg(emc, SUB_STEPS, "Writing burst_mc_regs\n");

	for (i = 0; i < next->num_mc_regs; i++) {
		const u16 *offsets = emc->offsets->burst_mc;
		u32 *values = next->burst_mc_regs;

		emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
			values[i], offsets[i]);
		mc_writel(emc->mc, values[i], offsets[i]);
	}

	/* Registers to be programmed on the faster clock. */
	if (next->rate < last->rate) {
		const u16 *la = emc->offsets->la_scale;

		emc_dbg(emc, SUB_STEPS, "Writing la_scale_regs\n");

		for (i = 0; i < next->num_up_down; i++) {
			emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
				next->la_scale_regs[i], la[i]);
			mc_writel(emc->mc, next->la_scale_regs[i], la[i]);
		}
	}

	/* Flush all the burst register writes. */
	mc_readl(emc->mc, MC_EMEM_ADR_CFG);
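
	/*
	 * Ordering note (inferred from the two write sites): the latency
	 * allowance scaling registers must be programmed while the faster of
	 * the two clocks is running. When scaling down they are written here,
	 * before the rate switch; when scaling up they are deferred to step
	 * 25, after the new clock is active.
	 */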
	/*
	 * Step 9:
	 *   LPDDR4 section A.
	 */
	emc_dbg(emc, STEPS, "Step 9\n");

	value = next->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX];
	value &= ~EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK;

	if (dram_type == DRAM_TYPE_LPDDR4) {
		emc_writel(emc, 0, EMC_ZCAL_INTERVAL);
		emc_writel(emc, value, EMC_ZCAL_WAIT_CNT);

		value = emc_dbg | (EMC_DBG_WRITE_MUX_ACTIVE |
				   EMC_DBG_WRITE_ACTIVE_ONLY);

		emc_writel(emc, value, EMC_DBG);
		emc_writel(emc, 0, EMC_ZCAL_INTERVAL);
		emc_writel(emc, emc_dbg, EMC_DBG);
	}
	/*
	 * Step 10:
	 *   LPDDR4 and DDR3 common section.
	 */
	emc_dbg(emc, STEPS, "Step 10\n");

	if (opt_dvfs_mode == MAN_SR || dram_type == DRAM_TYPE_LPDDR4) {
		if (dram_type == DRAM_TYPE_LPDDR4)
			ccfifo_writel(emc, 0x101, EMC_SELF_REF, 0);
		else
			ccfifo_writel(emc, 0x1, EMC_SELF_REF, 0);

		if (dram_type == DRAM_TYPE_LPDDR4 &&
		    dst_clk_period <= zqcal_before_cc_cutoff) {
			ccfifo_writel(emc, mr13_flip_fspwr ^ 0x40, EMC_MRW3, 0);
			ccfifo_writel(emc, (next->burst_regs[EMC_MRW6_INDEX] &
					    0xFFFF3F3F) |
					   (last->burst_regs[EMC_MRW6_INDEX] &
					    0x0000C0C0), EMC_MRW6, 0);
			ccfifo_writel(emc, (next->burst_regs[EMC_MRW14_INDEX] &
					    0xFFFF0707) |
					   (last->burst_regs[EMC_MRW14_INDEX] &
					    0x00003838), EMC_MRW14, 0);

			if (emc->num_devices > 1) {
				ccfifo_writel(emc,
					(next->burst_regs[EMC_MRW7_INDEX] &
					 0xFFFF3F3F) |
					(last->burst_regs[EMC_MRW7_INDEX] &
					 0x0000C0C0), EMC_MRW7, 0);
				ccfifo_writel(emc,
					(next->burst_regs[EMC_MRW15_INDEX] &
					 0xFFFF0707) |
					(last->burst_regs[EMC_MRW15_INDEX] &
					 0x00003838), EMC_MRW15, 0);
			}

			if (opt_zcal_en_cc) {
				if (emc->num_devices < 2)
					ccfifo_writel(emc,
						2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT
						| EMC_ZQ_CAL_ZQ_CAL_CMD,
						EMC_ZQ_CAL, 0);
				else if (shared_zq_resistor)
					ccfifo_writel(emc,
						2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT
						| EMC_ZQ_CAL_ZQ_CAL_CMD,
						EMC_ZQ_CAL, 0);
				else
					ccfifo_writel(emc,
						EMC_ZQ_CAL_ZQ_CAL_CMD,
						EMC_ZQ_CAL, 0);
			}
		}
	}
	if (dram_type == DRAM_TYPE_LPDDR4) {
		value = (1000 * fake->dram_timings[T_RP]) / src_clk_period;
		ccfifo_writel(emc, mr13_flip_fspop | 0x8, EMC_MRW3, value);
		ccfifo_writel(emc, 0, 0, tFC_lpddr4 / src_clk_period);
	}

	if (dram_type == DRAM_TYPE_LPDDR4 || opt_dvfs_mode != MAN_SR) {
		delay = 30;

		if (cya_allow_ref_cc) {
			delay += (1000 * fake->dram_timings[T_RP]) /
				 src_clk_period;
			delay += 4000 * fake->dram_timings[T_RFC];
		}

		ccfifo_writel(emc, emc_pin & ~(EMC_PIN_PIN_CKE_PER_DEV |
					       EMC_PIN_PIN_CKEB |
					       EMC_PIN_PIN_CKE),
			      EMC_PIN, delay);
	}

	/* calculate reference delay multiplier */
	value = 1;

	if (ref_b4_sref_en)
		value++;

	if (cya_allow_ref_cc)
		value++;

	if (cya_issue_pc_ref)
		value++;

	if (dram_type != DRAM_TYPE_LPDDR4) {
		delay = ((1000 * fake->dram_timings[T_RP] / src_clk_period) +
			 (1000 * fake->dram_timings[T_RFC] / src_clk_period));
		delay = value * delay + 20;
	} else {
		delay = 0;
	}
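
	/*
	 * Illustration of the multiplier above (made-up numbers): with the
	 * cya_* flags and ref_b4_sref_en all false, value stays 1, so for
	 * T_RP + T_RFC worth of roughly 300 source-clock cycles the wait
	 * becomes 1 * 300 + 20 = 320 cycles before the CFG_SYNC write below.
	 */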
	/*
	 * Step 11:
	 *   Ramp down.
	 */
	emc_dbg(emc, STEPS, "Step 11\n");

	ccfifo_writel(emc, 0x0, EMC_CFG_SYNC, delay);

	value = emc_dbg | EMC_DBG_WRITE_MUX_ACTIVE | EMC_DBG_WRITE_ACTIVE_ONLY;
	ccfifo_writel(emc, value, EMC_DBG, 0);

	ramp_down_wait = tegra210_emc_dvfs_power_ramp_down(emc, src_clk_period,
							   0);

	/*
	 * Step 12:
	 *   And finally - trigger the clock change.
	 */
	emc_dbg(emc, STEPS, "Step 12\n");

	ccfifo_writel(emc, 1, EMC_STALL_THEN_EXE_AFTER_CLKCHANGE, 0);
	value &= ~EMC_DBG_WRITE_ACTIVE_ONLY;
	ccfifo_writel(emc, value, EMC_DBG, 0);

	/*
	 * Step 13:
	 *   Ramp up.
	 */
	emc_dbg(emc, STEPS, "Step 13\n");

	ramp_up_wait = tegra210_emc_dvfs_power_ramp_up(emc, dst_clk_period, 0);
	ccfifo_writel(emc, emc_dbg, EMC_DBG, 0);
	/*
	 * Step 14:
	 *   Bringup CKE pins.
	 */
	emc_dbg(emc, STEPS, "Step 14\n");

	if (dram_type == DRAM_TYPE_LPDDR4) {
		value = emc_pin | EMC_PIN_PIN_CKE;

		if (emc->num_devices <= 1)
			value &= ~(EMC_PIN_PIN_CKEB | EMC_PIN_PIN_CKE_PER_DEV);
		else
			value |= EMC_PIN_PIN_CKEB | EMC_PIN_PIN_CKE_PER_DEV;

		ccfifo_writel(emc, value, EMC_PIN, 0);
	}
	/*
	 * Step 15: (two step 15s ??)
	 *   Calculate zqlatch wait time; has dependency on ramping times.
	 */
	emc_dbg(emc, STEPS, "Step 15\n");

	if (dst_clk_period <= zqcal_before_cc_cutoff) {
		s32 t = (s32)(ramp_up_wait + ramp_down_wait) /
			(s32)dst_clk_period;
		zq_latch_dvfs_wait_time = (s32)tZQCAL_lpddr4_fc_adj - t;
	} else {
		zq_latch_dvfs_wait_time = tZQCAL_lpddr4_fc_adj -
			div_o3(1000 * next->dram_timings[T_PDEX],
			       dst_clk_period);
	}

	emc_dbg(emc, INFO, "tZQCAL_lpddr4_fc_adj = %u\n", tZQCAL_lpddr4_fc_adj);
	emc_dbg(emc, INFO, "dst_clk_period = %u\n",
		dst_clk_period);
	emc_dbg(emc, INFO, "next->dram_timings[T_PDEX] = %u\n",
		next->dram_timings[T_PDEX]);
	emc_dbg(emc, INFO, "zq_latch_dvfs_wait_time = %d\n",
		max_t(s32, 0, zq_latch_dvfs_wait_time));

	if (dram_type == DRAM_TYPE_LPDDR4 && opt_zcal_en_cc) {
		delay = div_o3(1000 * next->dram_timings[T_PDEX],
			       dst_clk_period);

		if (emc->num_devices < 2) {
			if (dst_clk_period > zqcal_before_cc_cutoff)
				ccfifo_writel(emc,
					      2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
					      EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
					      delay);

			value = (mr13_flip_fspop & 0xfffffff7) | 0x0c000000;
			ccfifo_writel(emc, value, EMC_MRW3, delay);
			ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
			ccfifo_writel(emc, 0, EMC_REF, 0);
			ccfifo_writel(emc, 2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
				      EMC_ZQ_CAL_ZQ_LATCH_CMD,
				      EMC_ZQ_CAL,
				      max_t(s32, 0, zq_latch_dvfs_wait_time));
		} else if (shared_zq_resistor) {
			if (dst_clk_period > zqcal_before_cc_cutoff)
				ccfifo_writel(emc,
					      2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
					      EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
					      delay);

			ccfifo_writel(emc, 2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
				      EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
				      max_t(s32, 0, zq_latch_dvfs_wait_time) +
				      div_o3(1000 * next->dram_timings[T_PDEX],
					     dst_clk_period));
			ccfifo_writel(emc, 1UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
				      EMC_ZQ_CAL_ZQ_LATCH_CMD,
				      EMC_ZQ_CAL, 0);

			value = (mr13_flip_fspop & 0xfffffff7) | 0x0c000000;
			ccfifo_writel(emc, value, EMC_MRW3, 0);
			ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
			ccfifo_writel(emc, 0, EMC_REF, 0);

			ccfifo_writel(emc, 1UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
				      EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
				      tZQCAL_lpddr4 / dst_clk_period);
		} else {
			if (dst_clk_period > zqcal_before_cc_cutoff)
				ccfifo_writel(emc, EMC_ZQ_CAL_ZQ_CAL_CMD,
					      EMC_ZQ_CAL, delay);

			value = (mr13_flip_fspop & 0xfffffff7) | 0x0c000000;
			ccfifo_writel(emc, value, EMC_MRW3, delay);
			ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
			ccfifo_writel(emc, 0, EMC_REF, 0);

			ccfifo_writel(emc, EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
				      max_t(s32, 0, zq_latch_dvfs_wait_time));
		}
	}

	/* WAR: delay for zqlatch */
	ccfifo_writel(emc, 0, 0, 10);
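
	/*
	 * Sketch of the zqlatch accounting above (illustrative numbers): with
	 * tZQCAL_lpddr4 = 1000000 ps, tFC_lpddr4 = 200000 ps and
	 * dst_clk_period = 1000 ps, tZQCAL_lpddr4_fc_adj is
	 * (1000000 - 200000) / 1000 = 800 cycles; the ramp up/down time
	 * already spent is then subtracted, so the ZQ latch is only delayed
	 * for whatever remains of the calibration window.
	 */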
	/*
	 * Step 16:
	 *   LPDDR4 Conditional Training Kickoff. Removed.
	 */

	/*
	 * Step 17:
	 *   MANSR exit self refresh.
	 */
	emc_dbg(emc, STEPS, "Step 17\n");

	if (opt_dvfs_mode == MAN_SR && dram_type != DRAM_TYPE_LPDDR4)
		ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
	/*
	 * Step 18:
	 *   Send MRWs to LPDDR3/DDR3.
	 */
	emc_dbg(emc, STEPS, "Step 18\n");

	if (dram_type == DRAM_TYPE_LPDDR2) {
		ccfifo_writel(emc, next->emc_mrw2, EMC_MRW2, 0);
		ccfifo_writel(emc, next->emc_mrw, EMC_MRW, 0);
		if (is_lpddr3)
			ccfifo_writel(emc, next->emc_mrw4, EMC_MRW4, 0);
	} else if (dram_type == DRAM_TYPE_DDR3) {
		if (opt_dll_mode)
			ccfifo_writel(emc, next->emc_emrs &
				      ~EMC_EMRS_USE_EMRS_LONG_CNT, EMC_EMRS, 0);
		ccfifo_writel(emc, next->emc_emrs2 &
			      ~EMC_EMRS2_USE_EMRS2_LONG_CNT, EMC_EMRS2, 0);
		ccfifo_writel(emc, next->emc_mrs |
			      EMC_EMRS_USE_EMRS_LONG_CNT, EMC_MRS, 0);
	}
	/*
	 * Step 19:
	 *   ZQCAL for LPDDR3/DDR3
	 */
	emc_dbg(emc, STEPS, "Step 19\n");

	if (opt_zcal_en_cc) {
		if (dram_type == DRAM_TYPE_LPDDR2) {
			value = opt_cc_short_zcal ? 90000 : 360000;
			value = div_o3(value, dst_clk_period);
			value = value <<
				EMC_MRS_WAIT_CNT2_MRS_EXT2_WAIT_CNT_SHIFT |
				value <<
				EMC_MRS_WAIT_CNT2_MRS_EXT1_WAIT_CNT_SHIFT;
			ccfifo_writel(emc, value, EMC_MRS_WAIT_CNT2, 0);

			value = opt_cc_short_zcal ? 0x56 : 0xab;
			ccfifo_writel(emc, 2 << EMC_MRW_MRW_DEV_SELECTN_SHIFT |
					   EMC_MRW_USE_MRW_EXT_CNT |
					   10 << EMC_MRW_MRW_MA_SHIFT |
					   value << EMC_MRW_MRW_OP_SHIFT,
				      EMC_MRW, 0);

			if (emc->num_devices > 1) {
				value = 1 << EMC_MRW_MRW_DEV_SELECTN_SHIFT |
					EMC_MRW_USE_MRW_EXT_CNT |
					10 << EMC_MRW_MRW_MA_SHIFT |
					value << EMC_MRW_MRW_OP_SHIFT;
				ccfifo_writel(emc, value, EMC_MRW, 0);
			}
		} else if (dram_type == DRAM_TYPE_DDR3) {
			value = opt_cc_short_zcal ? 0 : EMC_ZQ_CAL_LONG;

			ccfifo_writel(emc, value |
					   2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
					   EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
				      0);

			if (emc->num_devices > 1) {
				value = value | 1 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
						EMC_ZQ_CAL_ZQ_CAL_CMD;
				ccfifo_writel(emc, value, EMC_ZQ_CAL, 0);
			}
		}
	}
	if (bg_reg_mode_change) {
		tegra210_emc_set_shadow_bypass(emc, ACTIVE);

		if (ramp_up_wait <= 1250000)
			delay = (1250000 - ramp_up_wait) / dst_clk_period;
		else
			delay = 0;

		ccfifo_writel(emc,
			      next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX],
			      EMC_PMACRO_BG_BIAS_CTRL_0, delay);
		tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
	}
	/*
	 * Step 20:
	 *   Issue ref and optional QRST.
	 */
	emc_dbg(emc, STEPS, "Step 20\n");

	if (dram_type != DRAM_TYPE_LPDDR4)
		ccfifo_writel(emc, 0, EMC_REF, 0);

	if (opt_do_sw_qrst) {
		ccfifo_writel(emc, 1, EMC_ISSUE_QRST, 0);
		ccfifo_writel(emc, 0, EMC_ISSUE_QRST, 2);
	}
	/*
	 * Step 21:
	 *   Restore ZCAL and ZCAL interval.
	 */
	emc_dbg(emc, STEPS, "Step 21\n");

	if (save_restore_clkstop_pd || opt_zcal_en_cc) {
		ccfifo_writel(emc, emc_dbg | EMC_DBG_WRITE_MUX_ACTIVE,
			      EMC_DBG, 0);

		if (opt_zcal_en_cc && dram_type != DRAM_TYPE_LPDDR4)
			ccfifo_writel(emc, next->burst_regs[EMC_ZCAL_INTERVAL_INDEX],
				      EMC_ZCAL_INTERVAL, 0);

		if (save_restore_clkstop_pd)
			ccfifo_writel(emc, next->burst_regs[EMC_CFG_INDEX] &
					   ~EMC_CFG_DYN_SELF_REF,
				      EMC_CFG, 0);

		ccfifo_writel(emc, emc_dbg, EMC_DBG, 0);
	}
	/*
	 * Step 22:
	 *   Restore EMC_CFG_PIPE_CLK.
	 */
	emc_dbg(emc, STEPS, "Step 22\n");

	ccfifo_writel(emc, emc_cfg_pipe_clk, EMC_CFG_PIPE_CLK, 0);

	if (bg_reg_mode_change) {
		if (enable_bg_reg)
			emc_writel(emc,
				   next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
					~EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD,
				   EMC_PMACRO_BG_BIAS_CTRL_0);
		else
			emc_writel(emc,
				   next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
					~EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD,
				   EMC_PMACRO_BG_BIAS_CTRL_0);
	}
	/*
	 * Step 23:
	 */
	emc_dbg(emc, STEPS, "Step 23\n");

	value = emc_readl(emc, EMC_CFG_DIG_DLL);
	value |= EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_TRAFFIC;
	value &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_RW_UNTIL_LOCK;
	value &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_UNTIL_LOCK;
	value &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
	value = (value & ~EMC_CFG_DIG_DLL_CFG_DLL_MODE_MASK) |
		(2 << EMC_CFG_DIG_DLL_CFG_DLL_MODE_SHIFT);
	emc_writel(emc, value, EMC_CFG_DIG_DLL);

	tegra210_emc_do_clock_change(emc, clksrc);
	/*
	 * Step 24:
	 *   Save training results. Removed.
	 */

	/*
	 * Step 25:
	 *   Program MC updown registers.
	 */
	emc_dbg(emc, STEPS, "Step 25\n");

	if (next->rate > last->rate) {
		for (i = 0; i < next->num_up_down; i++)
			mc_writel(emc->mc, next->la_scale_regs[i],
				  emc->offsets->la_scale[i]);

		tegra210_emc_timing_update(emc);
	}
	/*
	 * Step 26:
	 *   Restore ZCAL registers.
	 */
	emc_dbg(emc, STEPS, "Step 26\n");

	if (dram_type == DRAM_TYPE_LPDDR4) {
		tegra210_emc_set_shadow_bypass(emc, ACTIVE);
		emc_writel(emc, next->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX],
			   EMC_ZCAL_WAIT_CNT);
		emc_writel(emc, next->burst_regs[EMC_ZCAL_INTERVAL_INDEX],
			   EMC_ZCAL_INTERVAL);
		tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
	}

	if (dram_type != DRAM_TYPE_LPDDR4 && opt_zcal_en_cc &&
	    !opt_short_zcal && opt_cc_short_zcal) {
		udelay(2);

		tegra210_emc_set_shadow_bypass(emc, ACTIVE);
		if (dram_type == DRAM_TYPE_LPDDR2)
			emc_writel(emc, next->burst_regs[EMC_MRS_WAIT_CNT_INDEX],
				   EMC_MRS_WAIT_CNT);
		else if (dram_type == DRAM_TYPE_DDR3)
			emc_writel(emc, next->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX],
				   EMC_ZCAL_WAIT_CNT);
		tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
	}
	/*
	 * Step 27:
	 *   Restore EMC_CFG, FDPD registers.
	 */
	emc_dbg(emc, STEPS, "Step 27\n");

	tegra210_emc_set_shadow_bypass(emc, ACTIVE);
	emc_writel(emc, next->burst_regs[EMC_CFG_INDEX], EMC_CFG);
	tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
	emc_writel(emc, next->emc_fdpd_ctrl_cmd_no_ramp,
		   EMC_FDPD_CTRL_CMD_NO_RAMP);
	emc_writel(emc, next->emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
	/*
	 * Step 28:
	 *   Training recover. Removed.
	 */
	emc_dbg(emc, STEPS, "Step 28\n");

	tegra210_emc_set_shadow_bypass(emc, ACTIVE);
	emc_writel(emc,
		   next->burst_regs[EMC_PMACRO_AUTOCAL_CFG_COMMON_INDEX],
		   EMC_PMACRO_AUTOCAL_CFG_COMMON);
	tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
	/*
	 * Step 29:
	 *   Power fix WAR.
	 */
	emc_dbg(emc, STEPS, "Step 29\n");

	emc_writel(emc, EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE0 |
		   EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE1 |
		   EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE2 |
		   EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE3 |
		   EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE4 |
		   EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE5 |
		   EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE6 |
		   EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE7,
		   EMC_PMACRO_CFG_PM_GLOBAL_0);
	emc_writel(emc, EMC_PMACRO_TRAINING_CTRL_0_CH0_TRAINING_E_WRPTR,
		   EMC_PMACRO_TRAINING_CTRL_0);
	emc_writel(emc, EMC_PMACRO_TRAINING_CTRL_1_CH1_TRAINING_E_WRPTR,
		   EMC_PMACRO_TRAINING_CTRL_1);
	emc_writel(emc, 0, EMC_PMACRO_CFG_PM_GLOBAL_0);
1492 * Re-enable autocal.
1494 emc_dbg(emc
, STEPS
, "Step 30: Re-enable DLL and AUTOCAL\n");
1496 if (next
->burst_regs
[EMC_CFG_DIG_DLL_INDEX
] & EMC_CFG_DIG_DLL_CFG_DLL_EN
) {
1497 value
= emc_readl(emc
, EMC_CFG_DIG_DLL
);
1498 value
|= EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_TRAFFIC
;
1499 value
|= EMC_CFG_DIG_DLL_CFG_DLL_EN
;
1500 value
&= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_RW_UNTIL_LOCK
;
1501 value
&= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_UNTIL_LOCK
;
1502 value
= (value
& ~EMC_CFG_DIG_DLL_CFG_DLL_MODE_MASK
) |
1503 (2 << EMC_CFG_DIG_DLL_CFG_DLL_MODE_SHIFT
);
1504 emc_writel(emc
, value
, EMC_CFG_DIG_DLL
);
1505 tegra210_emc_timing_update(emc
);
1508 emc_writel(emc
, next
->emc_auto_cal_config
, EMC_AUTO_CAL_CONFIG
);
const struct tegra210_emc_sequence tegra210_emc_r21021 = {
	.revision = 0x7,
	.set_clock = tegra210_emc_r21021_set_clock,
	.periodic_compensation = tegra210_emc_r21021_periodic_compensation,
};