Merge branch 'next' into for-linus
[linux-2.6-microblaze.git] / drivers / memory / tegra / tegra210-emc-cc-r21021.c
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (c) 2014-2020, NVIDIA CORPORATION.  All rights reserved.
4  */
5
6 #include <linux/kernel.h>
7 #include <linux/io.h>
8 #include <linux/clk.h>
9 #include <linux/delay.h>
10 #include <linux/of.h>
11
12 #include <soc/tegra/mc.h>
13
14 #include "tegra210-emc.h"
15 #include "tegra210-mc.h"
16
17 /*
18  * Enable flags for specifying verbosity.
19  */
20 #define INFO            (1 << 0)
21 #define STEPS           (1 << 1)
22 #define SUB_STEPS       (1 << 2)
23 #define PRELOCK         (1 << 3)
24 #define PRELOCK_STEPS   (1 << 4)
25 #define ACTIVE_EN       (1 << 5)
26 #define PRAMP_UP        (1 << 6)
27 #define PRAMP_DN        (1 << 7)
28 #define EMA_WRITES      (1 << 10)
29 #define EMA_UPDATES     (1 << 11)
30 #define PER_TRAIN       (1 << 16)
31 #define CC_PRINT        (1 << 17)
32 #define CCFIFO          (1 << 29)
33 #define REGS            (1 << 30)
34 #define REG_LISTS       (1 << 31)
35
36 #define emc_dbg(emc, flags, ...) dev_dbg(emc->dev, __VA_ARGS__)
37
38 #define DVFS_CLOCK_CHANGE_VERSION       21021
39 #define EMC_PRELOCK_VERSION             2101
40
41 enum {
42         DVFS_SEQUENCE = 1,
43         WRITE_TRAINING_SEQUENCE = 2,
44         PERIODIC_TRAINING_SEQUENCE = 3,
45         DVFS_PT1 = 10,
46         DVFS_UPDATE = 11,
47         TRAINING_PT1 = 12,
48         TRAINING_UPDATE = 13,
49         PERIODIC_TRAINING_UPDATE = 14
50 };
51
52 /*
53  * PTFV defines - basically just indexes into the per table PTFV array.
54  */
55 #define PTFV_DQSOSC_MOVAVG_C0D0U0_INDEX         0
56 #define PTFV_DQSOSC_MOVAVG_C0D0U1_INDEX         1
57 #define PTFV_DQSOSC_MOVAVG_C0D1U0_INDEX         2
58 #define PTFV_DQSOSC_MOVAVG_C0D1U1_INDEX         3
59 #define PTFV_DQSOSC_MOVAVG_C1D0U0_INDEX         4
60 #define PTFV_DQSOSC_MOVAVG_C1D0U1_INDEX         5
61 #define PTFV_DQSOSC_MOVAVG_C1D1U0_INDEX         6
62 #define PTFV_DQSOSC_MOVAVG_C1D1U1_INDEX         7
63 #define PTFV_DVFS_SAMPLES_INDEX                 9
64 #define PTFV_MOVAVG_WEIGHT_INDEX                10
65 #define PTFV_CONFIG_CTRL_INDEX                  11
66
67 #define PTFV_CONFIG_CTRL_USE_PREVIOUS_EMA       (1 << 0)
68
69 /*
70  * Do arithmetic in fixed point.
71  */
72 #define MOVAVG_PRECISION_FACTOR         100
73
74 /*
75  * The division portion of the average operation.
76  */
77 #define __AVERAGE_PTFV(dev)                                             \
78         ({ next->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX] =      \
79            next->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX] /      \
80            next->ptfv_list[PTFV_DVFS_SAMPLES_INDEX]; })
81
82 /*
83  * Convert val to fixed point and add it to the temporary average.
84  */
85 #define __INCREMENT_PTFV(dev, val)                                      \
86         ({ next->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX] +=     \
87            ((val) * MOVAVG_PRECISION_FACTOR); })
88
89 /*
90  * Convert a moving average back to integral form and return the value.
91  */
92 #define __MOVAVG_AC(timing, dev)                                        \
93         ((timing)->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX] /    \
94          MOVAVG_PRECISION_FACTOR)
95
96 /* Weighted update. */
97 #define __WEIGHTED_UPDATE_PTFV(dev, nval)                               \
98         do {                                                            \
99                 int w = PTFV_MOVAVG_WEIGHT_INDEX;                       \
100                 int dqs = PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX;         \
101                                                                         \
102                 next->ptfv_list[dqs] =                                  \
103                         ((nval * MOVAVG_PRECISION_FACTOR) +             \
104                          (next->ptfv_list[dqs] *                        \
105                           next->ptfv_list[w])) /                        \
106                         (next->ptfv_list[w] + 1);                       \
107                                                                         \
108                 emc_dbg(emc, EMA_UPDATES, "%s: (s=%lu) EMA: %u\n",      \
109                         __stringify(dev), nval, next->ptfv_list[dqs]);  \
110         } while (0)
111
112 /* Access a particular average. */
113 #define __MOVAVG(timing, dev)                      \
114         ((timing)->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX])
115
116 static u32 update_clock_tree_delay(struct tegra210_emc *emc, int type)
117 {
118         bool periodic_training_update = type == PERIODIC_TRAINING_UPDATE;
119         struct tegra210_emc_timing *last = emc->last;
120         struct tegra210_emc_timing *next = emc->next;
121         u32 last_timing_rate_mhz = last->rate / 1000;
122         u32 next_timing_rate_mhz = next->rate / 1000;
123         bool dvfs_update = type == DVFS_UPDATE;
124         s32 tdel = 0, tmdel = 0, adel = 0;
125         bool dvfs_pt1 = type == DVFS_PT1;
126         unsigned long cval = 0;
127         u32 temp[2][2], value;
128         unsigned int i;
129
130         /*
131          * Dev0 MSB.
132          */
133         if (dvfs_pt1 || periodic_training_update) {
134                 value = tegra210_emc_mrr_read(emc, 2, 19);
135
136                 for (i = 0; i < emc->num_channels; i++) {
137                         temp[i][0] = (value & 0x00ff) << 8;
138                         temp[i][1] = (value & 0xff00) << 0;
139                         value >>= 16;
140                 }
141
142                 /*
143                  * Dev0 LSB.
144                  */
145                 value = tegra210_emc_mrr_read(emc, 2, 18);
146
147                 for (i = 0; i < emc->num_channels; i++) {
148                         temp[i][0] |= (value & 0x00ff) >> 0;
149                         temp[i][1] |= (value & 0xff00) >> 8;
150                         value >>= 16;
151                 }
152         }
153
154         if (dvfs_pt1 || periodic_training_update) {
155                 cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
156                 cval *= 1000000;
157                 cval /= last_timing_rate_mhz * 2 * temp[0][0];
158         }
159
160         if (dvfs_pt1)
161                 __INCREMENT_PTFV(C0D0U0, cval);
162         else if (dvfs_update)
163                 __AVERAGE_PTFV(C0D0U0);
164         else if (periodic_training_update)
165                 __WEIGHTED_UPDATE_PTFV(C0D0U0, cval);
166
167         if (dvfs_update || periodic_training_update) {
168                 tdel = next->current_dram_clktree[C0D0U0] -
169                                 __MOVAVG_AC(next, C0D0U0);
170                 tmdel = (tdel < 0) ? -1 * tdel : tdel;
171                 adel = tmdel;
172
173                 if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
174                     next->tree_margin)
175                         next->current_dram_clktree[C0D0U0] =
176                                 __MOVAVG_AC(next, C0D0U0);
177         }
178
179         if (dvfs_pt1 || periodic_training_update) {
180                 cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
181                 cval *= 1000000;
182                 cval /= last_timing_rate_mhz * 2 * temp[0][1];
183         }
184
185         if (dvfs_pt1)
186                 __INCREMENT_PTFV(C0D0U1, cval);
187         else if (dvfs_update)
188                 __AVERAGE_PTFV(C0D0U1);
189         else if (periodic_training_update)
190                 __WEIGHTED_UPDATE_PTFV(C0D0U1, cval);
191
192         if (dvfs_update || periodic_training_update) {
193                 tdel = next->current_dram_clktree[C0D0U1] -
194                                 __MOVAVG_AC(next, C0D0U1);
195                 tmdel = (tdel < 0) ? -1 * tdel : tdel;
196
197                 if (tmdel > adel)
198                         adel = tmdel;
199
200                 if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
201                     next->tree_margin)
202                         next->current_dram_clktree[C0D0U1] =
203                                 __MOVAVG_AC(next, C0D0U1);
204         }
205
206         if (emc->num_channels > 1) {
207                 if (dvfs_pt1 || periodic_training_update) {
208                         cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
209                         cval *= 1000000;
210                         cval /= last_timing_rate_mhz * 2 * temp[1][0];
211                 }
212
213                 if (dvfs_pt1)
214                         __INCREMENT_PTFV(C1D0U0, cval);
215                 else if (dvfs_update)
216                         __AVERAGE_PTFV(C1D0U0);
217                 else if (periodic_training_update)
218                         __WEIGHTED_UPDATE_PTFV(C1D0U0, cval);
219
220                 if (dvfs_update || periodic_training_update) {
221                         tdel = next->current_dram_clktree[C1D0U0] -
222                                         __MOVAVG_AC(next, C1D0U0);
223                         tmdel = (tdel < 0) ? -1 * tdel : tdel;
224
225                         if (tmdel > adel)
226                                 adel = tmdel;
227
228                         if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
229                             next->tree_margin)
230                                 next->current_dram_clktree[C1D0U0] =
231                                         __MOVAVG_AC(next, C1D0U0);
232                 }
233
234                 if (dvfs_pt1 || periodic_training_update) {
235                         cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
236                         cval *= 1000000;
237                         cval /= last_timing_rate_mhz * 2 * temp[1][1];
238                 }
239
240                 if (dvfs_pt1)
241                         __INCREMENT_PTFV(C1D0U1, cval);
242                 else if (dvfs_update)
243                         __AVERAGE_PTFV(C1D0U1);
244                 else if (periodic_training_update)
245                         __WEIGHTED_UPDATE_PTFV(C1D0U1, cval);
246
247                 if (dvfs_update || periodic_training_update) {
248                         tdel = next->current_dram_clktree[C1D0U1] -
249                                         __MOVAVG_AC(next, C1D0U1);
250                         tmdel = (tdel < 0) ? -1 * tdel : tdel;
251
252                         if (tmdel > adel)
253                                 adel = tmdel;
254
255                         if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
256                             next->tree_margin)
257                                 next->current_dram_clktree[C1D0U1] =
258                                         __MOVAVG_AC(next, C1D0U1);
259                 }
260         }
261
262         if (emc->num_devices < 2)
263                 goto done;
264
265         /*
266          * Dev1 MSB.
267          */
268         if (dvfs_pt1 || periodic_training_update) {
269                 value = tegra210_emc_mrr_read(emc, 1, 19);
270
271                 for (i = 0; i < emc->num_channels; i++) {
272                         temp[i][0] = (value & 0x00ff) << 8;
273                         temp[i][1] = (value & 0xff00) << 0;
274                         value >>= 16;
275                 }
276
277                 /*
278                  * Dev1 LSB.
279                  */
280                 value = tegra210_emc_mrr_read(emc, 2, 18);
281
282                 for (i = 0; i < emc->num_channels; i++) {
283                         temp[i][0] |= (value & 0x00ff) >> 0;
284                         temp[i][1] |= (value & 0xff00) >> 8;
285                         value >>= 16;
286                 }
287         }
288
289         if (dvfs_pt1 || periodic_training_update) {
290                 cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
291                 cval *= 1000000;
292                 cval /= last_timing_rate_mhz * 2 * temp[0][0];
293         }
294
295         if (dvfs_pt1)
296                 __INCREMENT_PTFV(C0D1U0, cval);
297         else if (dvfs_update)
298                 __AVERAGE_PTFV(C0D1U0);
299         else if (periodic_training_update)
300                 __WEIGHTED_UPDATE_PTFV(C0D1U0, cval);
301
302         if (dvfs_update || periodic_training_update) {
303                 tdel = next->current_dram_clktree[C0D1U0] -
304                                 __MOVAVG_AC(next, C0D1U0);
305                 tmdel = (tdel < 0) ? -1 * tdel : tdel;
306
307                 if (tmdel > adel)
308                         adel = tmdel;
309
310                 if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
311                     next->tree_margin)
312                         next->current_dram_clktree[C0D1U0] =
313                                 __MOVAVG_AC(next, C0D1U0);
314         }
315
316         if (dvfs_pt1 || periodic_training_update) {
317                 cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
318                 cval *= 1000000;
319                 cval /= last_timing_rate_mhz * 2 * temp[0][1];
320         }
321
322         if (dvfs_pt1)
323                 __INCREMENT_PTFV(C0D1U1, cval);
324         else if (dvfs_update)
325                 __AVERAGE_PTFV(C0D1U1);
326         else if (periodic_training_update)
327                 __WEIGHTED_UPDATE_PTFV(C0D1U1, cval);
328
329         if (dvfs_update || periodic_training_update) {
330                 tdel = next->current_dram_clktree[C0D1U1] -
331                                 __MOVAVG_AC(next, C0D1U1);
332                 tmdel = (tdel < 0) ? -1 * tdel : tdel;
333
334                 if (tmdel > adel)
335                         adel = tmdel;
336
337                 if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
338                     next->tree_margin)
339                         next->current_dram_clktree[C0D1U1] =
340                                 __MOVAVG_AC(next, C0D1U1);
341         }
342
343         if (emc->num_channels > 1) {
344                 if (dvfs_pt1 || periodic_training_update) {
345                         cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
346                         cval *= 1000000;
347                         cval /= last_timing_rate_mhz * 2 * temp[1][0];
348                 }
349
350                 if (dvfs_pt1)
351                         __INCREMENT_PTFV(C1D1U0, cval);
352                 else if (dvfs_update)
353                         __AVERAGE_PTFV(C1D1U0);
354                 else if (periodic_training_update)
355                         __WEIGHTED_UPDATE_PTFV(C1D1U0, cval);
356
357                 if (dvfs_update || periodic_training_update) {
358                         tdel = next->current_dram_clktree[C1D1U0] -
359                                         __MOVAVG_AC(next, C1D1U0);
360                         tmdel = (tdel < 0) ? -1 * tdel : tdel;
361
362                         if (tmdel > adel)
363                                 adel = tmdel;
364
365                         if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
366                             next->tree_margin)
367                                 next->current_dram_clktree[C1D1U0] =
368                                         __MOVAVG_AC(next, C1D1U0);
369                 }
370
371                 if (dvfs_pt1 || periodic_training_update) {
372                         cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
373                         cval *= 1000000;
374                         cval /= last_timing_rate_mhz * 2 * temp[1][1];
375                 }
376
377                 if (dvfs_pt1)
378                         __INCREMENT_PTFV(C1D1U1, cval);
379                 else if (dvfs_update)
380                         __AVERAGE_PTFV(C1D1U1);
381                 else if (periodic_training_update)
382                         __WEIGHTED_UPDATE_PTFV(C1D1U1, cval);
383
384                 if (dvfs_update || periodic_training_update) {
385                         tdel = next->current_dram_clktree[C1D1U1] -
386                                         __MOVAVG_AC(next, C1D1U1);
387                         tmdel = (tdel < 0) ? -1 * tdel : tdel;
388
389                         if (tmdel > adel)
390                                 adel = tmdel;
391
392                         if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
393                             next->tree_margin)
394                                 next->current_dram_clktree[C1D1U1] =
395                                         __MOVAVG_AC(next, C1D1U1);
396                 }
397         }
398
399 done:
400         return adel;
401 }
402
403 static u32 periodic_compensation_handler(struct tegra210_emc *emc, u32 type,
404                                          struct tegra210_emc_timing *last,
405                                          struct tegra210_emc_timing *next)
406 {
407 #define __COPY_EMA(nt, lt, dev)                                         \
408         ({ __MOVAVG(nt, dev) = __MOVAVG(lt, dev) *                      \
409            (nt)->ptfv_list[PTFV_DVFS_SAMPLES_INDEX]; })
410
411         u32 i, adel = 0, samples = next->ptfv_list[PTFV_DVFS_SAMPLES_INDEX];
412         u32 delay;
413
414         delay = tegra210_emc_actual_osc_clocks(last->run_clocks);
415         delay *= 1000;
416         delay = 2 + (delay / last->rate);
417
418         if (!next->periodic_training)
419                 return 0;
420
421         if (type == DVFS_SEQUENCE) {
422                 if (last->periodic_training &&
423                     (next->ptfv_list[PTFV_CONFIG_CTRL_INDEX] &
424                      PTFV_CONFIG_CTRL_USE_PREVIOUS_EMA)) {
425                         /*
426                          * If the previous frequency was using periodic
427                          * calibration then we can reuse the previous
428                          * frequencies EMA data.
429                          */
430                         __COPY_EMA(next, last, C0D0U0);
431                         __COPY_EMA(next, last, C0D0U1);
432                         __COPY_EMA(next, last, C1D0U0);
433                         __COPY_EMA(next, last, C1D0U1);
434                         __COPY_EMA(next, last, C0D1U0);
435                         __COPY_EMA(next, last, C0D1U1);
436                         __COPY_EMA(next, last, C1D1U0);
437                         __COPY_EMA(next, last, C1D1U1);
438                 } else {
439                         /* Reset the EMA.*/
440                         __MOVAVG(next, C0D0U0) = 0;
441                         __MOVAVG(next, C0D0U1) = 0;
442                         __MOVAVG(next, C1D0U0) = 0;
443                         __MOVAVG(next, C1D0U1) = 0;
444                         __MOVAVG(next, C0D1U0) = 0;
445                         __MOVAVG(next, C0D1U1) = 0;
446                         __MOVAVG(next, C1D1U0) = 0;
447                         __MOVAVG(next, C1D1U1) = 0;
448
449                         for (i = 0; i < samples; i++) {
450                                 tegra210_emc_start_periodic_compensation(emc);
451                                 udelay(delay);
452
453                                 /*
454                                  * Generate next sample of data.
455                                  */
456                                 adel = update_clock_tree_delay(emc, DVFS_PT1);
457                         }
458                 }
459
460                 /*
461                  * Seems like it should be part of the
462                  * 'if (last_timing->periodic_training)' conditional
463                  * since is already done for the else clause.
464                  */
465                 adel = update_clock_tree_delay(emc, DVFS_UPDATE);
466         }
467
468         if (type == PERIODIC_TRAINING_SEQUENCE) {
469                 tegra210_emc_start_periodic_compensation(emc);
470                 udelay(delay);
471
472                 adel = update_clock_tree_delay(emc, PERIODIC_TRAINING_UPDATE);
473         }
474
475         return adel;
476 }
477
478 static u32 tegra210_emc_r21021_periodic_compensation(struct tegra210_emc *emc)
479 {
480         u32 emc_cfg, emc_cfg_o, emc_cfg_update, del, value;
481         u32 list[] = {
482                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0,
483                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1,
484                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2,
485                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3,
486                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0,
487                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1,
488                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2,
489                 EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3,
490                 EMC_DATA_BRLSHFT_0,
491                 EMC_DATA_BRLSHFT_1
492         };
493         struct tegra210_emc_timing *last = emc->last;
494         unsigned int items = ARRAY_SIZE(list), i;
495         unsigned long delay;
496
497         if (last->periodic_training) {
498                 emc_dbg(emc, PER_TRAIN, "Periodic training starting\n");
499
500                 value = emc_readl(emc, EMC_DBG);
501                 emc_cfg_o = emc_readl(emc, EMC_CFG);
502                 emc_cfg = emc_cfg_o & ~(EMC_CFG_DYN_SELF_REF |
503                                         EMC_CFG_DRAM_ACPD |
504                                         EMC_CFG_DRAM_CLKSTOP_PD |
505                                         EMC_CFG_DRAM_CLKSTOP_PD);
506
507
508                 /*
509                  * 1. Power optimizations should be off.
510                  */
511                 emc_writel(emc, emc_cfg, EMC_CFG);
512
513                 /* Does emc_timing_update() for above changes. */
514                 tegra210_emc_dll_disable(emc);
515
516                 for (i = 0; i < emc->num_channels; i++)
517                         tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
518                                                      EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK,
519                                                      0);
520
521                 for (i = 0; i < emc->num_channels; i++)
522                         tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
523                                                      EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK,
524                                                      0);
525
526                 emc_cfg_update = value = emc_readl(emc, EMC_CFG_UPDATE);
527                 value &= ~EMC_CFG_UPDATE_UPDATE_DLL_IN_UPDATE_MASK;
528                 value |= (2 << EMC_CFG_UPDATE_UPDATE_DLL_IN_UPDATE_SHIFT);
529                 emc_writel(emc, value, EMC_CFG_UPDATE);
530
531                 /*
532                  * 2. osc kick off - this assumes training and dvfs have set
533                  *    correct MR23.
534                  */
535                 tegra210_emc_start_periodic_compensation(emc);
536
537                 /*
538                  * 3. Let dram capture its clock tree delays.
539                  */
540                 delay = tegra210_emc_actual_osc_clocks(last->run_clocks);
541                 delay *= 1000;
542                 delay /= last->rate + 1;
543                 udelay(delay);
544
545                 /*
546                  * 4. Check delta wrt previous values (save value if margin
547                  *    exceeds what is set in table).
548                  */
549                 del = periodic_compensation_handler(emc,
550                                                     PERIODIC_TRAINING_SEQUENCE,
551                                                     last, last);
552
553                 /*
554                  * 5. Apply compensation w.r.t. trained values (if clock tree
555                  *    has drifted more than the set margin).
556                  */
557                 if (last->tree_margin < ((del * 128 * (last->rate / 1000)) / 1000000)) {
558                         for (i = 0; i < items; i++) {
559                                 value = tegra210_emc_compensate(last, list[i]);
560                                 emc_dbg(emc, EMA_WRITES, "0x%08x <= 0x%08x\n",
561                                         list[i], value);
562                                 emc_writel(emc, value, list[i]);
563                         }
564                 }
565
566                 emc_writel(emc, emc_cfg_o, EMC_CFG);
567
568                 /*
569                  * 6. Timing update actally applies the new trimmers.
570                  */
571                 tegra210_emc_timing_update(emc);
572
573                 /* 6.1. Restore the UPDATE_DLL_IN_UPDATE field. */
574                 emc_writel(emc, emc_cfg_update, EMC_CFG_UPDATE);
575
576                 /* 6.2. Restore the DLL. */
577                 tegra210_emc_dll_enable(emc);
578         }
579
580         return 0;
581 }
582
583 /*
584  * Do the clock change sequence.
585  */
586 static void tegra210_emc_r21021_set_clock(struct tegra210_emc *emc, u32 clksrc)
587 {
588         /* state variables */
589         static bool fsp_for_next_freq;
590         /* constant configuration parameters */
591         const bool save_restore_clkstop_pd = true;
592         const u32 zqcal_before_cc_cutoff = 2400;
593         const bool cya_allow_ref_cc = false;
594         const bool cya_issue_pc_ref = false;
595         const bool opt_cc_short_zcal = true;
596         const bool ref_b4_sref_en = false;
597         const u32 tZQCAL_lpddr4 = 1000000;
598         const bool opt_short_zcal = true;
599         const bool opt_do_sw_qrst = true;
600         const u32 opt_dvfs_mode = MAN_SR;
601         /*
602          * This is the timing table for the source frequency. It does _not_
603          * necessarily correspond to the actual timing values in the EMC at the
604          * moment. If the boot BCT differs from the table then this can happen.
605          * However, we need it for accessing the dram_timings (which are not
606          * really registers) array for the current frequency.
607          */
608         struct tegra210_emc_timing *fake, *last = emc->last, *next = emc->next;
609         u32 tRTM, RP_war, R2P_war, TRPab_war, deltaTWATM, W2P_war, tRPST;
610         u32 mr13_flip_fspwr, mr13_flip_fspop, ramp_up_wait, ramp_down_wait;
611         u32 zq_wait_long, zq_latch_dvfs_wait_time, tZQCAL_lpddr4_fc_adj;
612         u32 emc_auto_cal_config, auto_cal_en, emc_cfg, emc_sel_dpd_ctrl;
613         u32 tFC_lpddr4 = 1000 * next->dram_timings[T_FC_LPDDR4];
614         u32 bg_reg_mode_change, enable_bglp_reg, enable_bg_reg;
615         bool opt_zcal_en_cc = false, is_lpddr3 = false;
616         bool compensate_trimmer_applicable = false;
617         u32 emc_dbg, emc_cfg_pipe_clk, emc_pin;
618         u32 src_clk_period, dst_clk_period; /* in picoseconds */
619         bool shared_zq_resistor = false;
620         u32 value, dram_type;
621         u32 opt_dll_mode = 0;
622         unsigned long delay;
623         unsigned int i;
624
625         emc_dbg(emc, INFO, "Running clock change.\n");
626
627         /* XXX fake == last */
628         fake = tegra210_emc_find_timing(emc, last->rate * 1000UL);
629         fsp_for_next_freq = !fsp_for_next_freq;
630
631         value = emc_readl(emc, EMC_FBIO_CFG5) & EMC_FBIO_CFG5_DRAM_TYPE_MASK;
632         dram_type = value >> EMC_FBIO_CFG5_DRAM_TYPE_SHIFT;
633
634         if (last->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX] & BIT(31))
635                 shared_zq_resistor = true;
636
637         if ((next->burst_regs[EMC_ZCAL_INTERVAL_INDEX] != 0 &&
638              last->burst_regs[EMC_ZCAL_INTERVAL_INDEX] == 0) ||
639             dram_type == DRAM_TYPE_LPDDR4)
640                 opt_zcal_en_cc = true;
641
642         if (dram_type == DRAM_TYPE_DDR3)
643                 opt_dll_mode = tegra210_emc_get_dll_state(next);
644
645         if ((next->burst_regs[EMC_FBIO_CFG5_INDEX] & BIT(25)) &&
646             (dram_type == DRAM_TYPE_LPDDR2))
647                 is_lpddr3 = true;
648
649         emc_readl(emc, EMC_CFG);
650         emc_readl(emc, EMC_AUTO_CAL_CONFIG);
651
652         src_clk_period = 1000000000 / last->rate;
653         dst_clk_period = 1000000000 / next->rate;
654
655         if (dst_clk_period <= zqcal_before_cc_cutoff)
656                 tZQCAL_lpddr4_fc_adj = tZQCAL_lpddr4 - tFC_lpddr4;
657         else
658                 tZQCAL_lpddr4_fc_adj = tZQCAL_lpddr4;
659
660         tZQCAL_lpddr4_fc_adj /= dst_clk_period;
661
662         emc_dbg = emc_readl(emc, EMC_DBG);
663         emc_pin = emc_readl(emc, EMC_PIN);
664         emc_cfg_pipe_clk = emc_readl(emc, EMC_CFG_PIPE_CLK);
665
666         emc_cfg = next->burst_regs[EMC_CFG_INDEX];
667         emc_cfg &= ~(EMC_CFG_DYN_SELF_REF | EMC_CFG_DRAM_ACPD |
668                      EMC_CFG_DRAM_CLKSTOP_SR | EMC_CFG_DRAM_CLKSTOP_PD);
669         emc_sel_dpd_ctrl = next->emc_sel_dpd_ctrl;
670         emc_sel_dpd_ctrl &= ~(EMC_SEL_DPD_CTRL_CLK_SEL_DPD_EN |
671                               EMC_SEL_DPD_CTRL_CA_SEL_DPD_EN |
672                               EMC_SEL_DPD_CTRL_RESET_SEL_DPD_EN |
673                               EMC_SEL_DPD_CTRL_ODT_SEL_DPD_EN |
674                               EMC_SEL_DPD_CTRL_DATA_SEL_DPD_EN);
675
676         emc_dbg(emc, INFO, "Clock change version: %d\n",
677                 DVFS_CLOCK_CHANGE_VERSION);
678         emc_dbg(emc, INFO, "DRAM type = %d\n", dram_type);
679         emc_dbg(emc, INFO, "DRAM dev #: %u\n", emc->num_devices);
680         emc_dbg(emc, INFO, "Next EMC clksrc: 0x%08x\n", clksrc);
681         emc_dbg(emc, INFO, "DLL clksrc:      0x%08x\n", next->dll_clk_src);
682         emc_dbg(emc, INFO, "last rate: %u, next rate %u\n", last->rate,
683                 next->rate);
684         emc_dbg(emc, INFO, "last period: %u, next period: %u\n",
685                 src_clk_period, dst_clk_period);
686         emc_dbg(emc, INFO, "  shared_zq_resistor: %d\n", !!shared_zq_resistor);
687         emc_dbg(emc, INFO, "  num_channels: %u\n", emc->num_channels);
688         emc_dbg(emc, INFO, "  opt_dll_mode: %d\n", opt_dll_mode);
689
690         /*
691          * Step 1:
692          *   Pre DVFS SW sequence.
693          */
694         emc_dbg(emc, STEPS, "Step 1\n");
695         emc_dbg(emc, STEPS, "Step 1.1: Disable DLL temporarily.\n");
696
697         value = emc_readl(emc, EMC_CFG_DIG_DLL);
698         value &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
699         emc_writel(emc, value, EMC_CFG_DIG_DLL);
700
701         tegra210_emc_timing_update(emc);
702
703         for (i = 0; i < emc->num_channels; i++)
704                 tegra210_emc_wait_for_update(emc, i, EMC_CFG_DIG_DLL,
705                                              EMC_CFG_DIG_DLL_CFG_DLL_EN, 0);
706
707         emc_dbg(emc, STEPS, "Step 1.2: Disable AUTOCAL temporarily.\n");
708
709         emc_auto_cal_config = next->emc_auto_cal_config;
710         auto_cal_en = emc_auto_cal_config & EMC_AUTO_CAL_CONFIG_AUTO_CAL_ENABLE;
711         emc_auto_cal_config &= ~EMC_AUTO_CAL_CONFIG_AUTO_CAL_START;
712         emc_auto_cal_config |= EMC_AUTO_CAL_CONFIG_AUTO_CAL_MEASURE_STALL;
713         emc_auto_cal_config |= EMC_AUTO_CAL_CONFIG_AUTO_CAL_UPDATE_STALL;
714         emc_auto_cal_config |= auto_cal_en;
715         emc_writel(emc, emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
716         emc_readl(emc, EMC_AUTO_CAL_CONFIG); /* Flush write. */
717
718         emc_dbg(emc, STEPS, "Step 1.3: Disable other power features.\n");
719
720         tegra210_emc_set_shadow_bypass(emc, ACTIVE);
721         emc_writel(emc, emc_cfg, EMC_CFG);
722         emc_writel(emc, emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
723         tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
724
725         if (next->periodic_training) {
726                 tegra210_emc_reset_dram_clktree_values(next);
727
728                 for (i = 0; i < emc->num_channels; i++)
729                         tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
730                                                      EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK,
731                                                      0);
732
733                 for (i = 0; i < emc->num_channels; i++)
734                         tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
735                                                      EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK,
736                                                      0);
737
738                 tegra210_emc_start_periodic_compensation(emc);
739
740                 delay = 1000 * tegra210_emc_actual_osc_clocks(last->run_clocks);
741                 udelay((delay / last->rate) + 2);
742
743                 value = periodic_compensation_handler(emc, DVFS_SEQUENCE, fake,
744                                                       next);
745                 value = (value * 128 * next->rate / 1000) / 1000000;
746
747                 if (next->periodic_training && value > next->tree_margin)
748                         compensate_trimmer_applicable = true;
749         }
750
751         emc_writel(emc, EMC_INTSTATUS_CLKCHANGE_COMPLETE, EMC_INTSTATUS);
752         tegra210_emc_set_shadow_bypass(emc, ACTIVE);
753         emc_writel(emc, emc_cfg, EMC_CFG);
754         emc_writel(emc, emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
755         emc_writel(emc, emc_cfg_pipe_clk | EMC_CFG_PIPE_CLK_CLK_ALWAYS_ON,
756                    EMC_CFG_PIPE_CLK);
757         emc_writel(emc, next->emc_fdpd_ctrl_cmd_no_ramp &
758                         ~EMC_FDPD_CTRL_CMD_NO_RAMP_CMD_DPD_NO_RAMP_ENABLE,
759                    EMC_FDPD_CTRL_CMD_NO_RAMP);
760
761         bg_reg_mode_change =
762                 ((next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
763                   EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD) ^
764                  (last->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
765                   EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD)) ||
766                 ((next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
767                   EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD) ^
768                  (last->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
769                   EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD));
770         enable_bglp_reg =
771                 (next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
772                  EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD) == 0;
773         enable_bg_reg =
774                 (next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
775                  EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD) == 0;
776
777         if (bg_reg_mode_change) {
778                 if (enable_bg_reg)
779                         emc_writel(emc, last->burst_regs
780                                    [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
781                                    ~EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD,
782                                    EMC_PMACRO_BG_BIAS_CTRL_0);
783
784                 if (enable_bglp_reg)
785                         emc_writel(emc, last->burst_regs
786                                    [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
787                                    ~EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD,
788                                    EMC_PMACRO_BG_BIAS_CTRL_0);
789         }
790
791         /* Check if we need to turn on VREF generator. */
792         if ((((last->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
793                EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF) == 0) &&
794              ((next->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
795                EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF) == 1)) ||
796             (((last->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
797                EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) == 0) &&
798              ((next->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
799                EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) != 0))) {
800                 u32 pad_tx_ctrl =
801                     next->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
802                 u32 last_pad_tx_ctrl =
803                     last->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
804                 u32 next_dq_e_ivref, next_dqs_e_ivref;
805
806                 next_dqs_e_ivref = pad_tx_ctrl &
807                                    EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF;
808                 next_dq_e_ivref = pad_tx_ctrl &
809                                   EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF;
810                 value = (last_pad_tx_ctrl &
811                                 ~EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF &
812                                 ~EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) |
813                         next_dq_e_ivref | next_dqs_e_ivref;
814                 emc_writel(emc, value, EMC_PMACRO_DATA_PAD_TX_CTRL);
815                 udelay(1);
816         } else if (bg_reg_mode_change) {
817                 udelay(1);
818         }
819
820         tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
821
822         /*
823          * Step 2:
824          *   Prelock the DLL.
825          */
826         emc_dbg(emc, STEPS, "Step 2\n");
827
828         if (next->burst_regs[EMC_CFG_DIG_DLL_INDEX] &
829             EMC_CFG_DIG_DLL_CFG_DLL_EN) {
830                 emc_dbg(emc, INFO, "Prelock enabled for target frequency.\n");
831                 value = tegra210_emc_dll_prelock(emc, clksrc);
832                 emc_dbg(emc, INFO, "DLL out: 0x%03x\n", value);
833         } else {
834                 emc_dbg(emc, INFO, "Disabling DLL for target frequency.\n");
835                 tegra210_emc_dll_disable(emc);
836         }
837
838         /*
839          * Step 3:
840          *   Prepare autocal for the clock change.
841          */
842         emc_dbg(emc, STEPS, "Step 3\n");
843
844         tegra210_emc_set_shadow_bypass(emc, ACTIVE);
845         emc_writel(emc, next->emc_auto_cal_config2, EMC_AUTO_CAL_CONFIG2);
846         emc_writel(emc, next->emc_auto_cal_config3, EMC_AUTO_CAL_CONFIG3);
847         emc_writel(emc, next->emc_auto_cal_config4, EMC_AUTO_CAL_CONFIG4);
848         emc_writel(emc, next->emc_auto_cal_config5, EMC_AUTO_CAL_CONFIG5);
849         emc_writel(emc, next->emc_auto_cal_config6, EMC_AUTO_CAL_CONFIG6);
850         emc_writel(emc, next->emc_auto_cal_config7, EMC_AUTO_CAL_CONFIG7);
851         emc_writel(emc, next->emc_auto_cal_config8, EMC_AUTO_CAL_CONFIG8);
852         tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
853
854         emc_auto_cal_config |= (EMC_AUTO_CAL_CONFIG_AUTO_CAL_COMPUTE_START |
855                                 auto_cal_en);
856         emc_writel(emc, emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
857
858         /*
859          * Step 4:
860          *   Update EMC_CFG. (??)
861          */
862         emc_dbg(emc, STEPS, "Step 4\n");
863
864         if (src_clk_period > 50000 && dram_type == DRAM_TYPE_LPDDR4)
865                 ccfifo_writel(emc, 1, EMC_SELF_REF, 0);
866         else
867                 emc_writel(emc, next->emc_cfg_2, EMC_CFG_2);
868
869         /*
870          * Step 5:
871          *   Prepare reference variables for ZQCAL regs.
872          */
873         emc_dbg(emc, STEPS, "Step 5\n");
874
875         if (dram_type == DRAM_TYPE_LPDDR4)
876                 zq_wait_long = max((u32)1, div_o3(1000000, dst_clk_period));
877         else if (dram_type == DRAM_TYPE_LPDDR2 || is_lpddr3)
878                 zq_wait_long = max(next->min_mrs_wait,
879                                    div_o3(360000, dst_clk_period)) + 4;
880         else if (dram_type == DRAM_TYPE_DDR3)
881                 zq_wait_long = max((u32)256,
882                                    div_o3(320000, dst_clk_period) + 2);
883         else
884                 zq_wait_long = 0;
885
886         /*
887          * Step 6:
888          *   Training code - removed.
889          */
890         emc_dbg(emc, STEPS, "Step 6\n");
891
892         /*
893          * Step 7:
894          *   Program FSP reference registers and send MRWs to new FSPWR.
895          */
896         emc_dbg(emc, STEPS, "Step 7\n");
897         emc_dbg(emc, SUB_STEPS, "Step 7.1: Bug 200024907 - Patch RP R2P");
898
899         /* WAR 200024907 */
900         if (dram_type == DRAM_TYPE_LPDDR4) {
901                 u32 nRTP = 16;
902
903                 if (src_clk_period >= 1000000 / 1866) /* 535.91 ps */
904                         nRTP = 14;
905
906                 if (src_clk_period >= 1000000 / 1600) /* 625.00 ps */
907                         nRTP = 12;
908
909                 if (src_clk_period >= 1000000 / 1333) /* 750.19 ps */
910                         nRTP = 10;
911
912                 if (src_clk_period >= 1000000 / 1066) /* 938.09 ps */
913                         nRTP = 8;
914
915                 deltaTWATM = max_t(u32, div_o3(7500, src_clk_period), 8);
916
917                 /*
918                  * Originally there was a + .5 in the tRPST calculation.
919                  * However since we can't do FP in the kernel and the tRTM
920                  * computation was in a floating point ceiling function, adding
921                  * one to tRTP should be ok. There is no other source of non
922                  * integer values, so the result was always going to be
923                  * something for the form: f_ceil(N + .5) = N + 1;
924                  */
925                 tRPST = (last->emc_mrw & 0x80) >> 7;
926                 tRTM = fake->dram_timings[RL] + div_o3(3600, src_clk_period) +
927                         max_t(u32, div_o3(7500, src_clk_period), 8) + tRPST +
928                         1 + nRTP;
929
930                 emc_dbg(emc, INFO, "tRTM = %u, EMC_RP = %u\n", tRTM,
931                         next->burst_regs[EMC_RP_INDEX]);
932
933                 if (last->burst_regs[EMC_RP_INDEX] < tRTM) {
934                         if (tRTM > (last->burst_regs[EMC_R2P_INDEX] +
935                                     last->burst_regs[EMC_RP_INDEX])) {
936                                 R2P_war = tRTM - last->burst_regs[EMC_RP_INDEX];
937                                 RP_war = last->burst_regs[EMC_RP_INDEX];
938                                 TRPab_war = last->burst_regs[EMC_TRPAB_INDEX];
939
940                                 if (R2P_war > 63) {
941                                         RP_war = R2P_war +
942                                                  last->burst_regs[EMC_RP_INDEX] - 63;
943
944                                         if (TRPab_war < RP_war)
945                                                 TRPab_war = RP_war;
946
947                                         R2P_war = 63;
948                                 }
949                         } else {
950                                 R2P_war = last->burst_regs[EMC_R2P_INDEX];
951                                 RP_war = last->burst_regs[EMC_RP_INDEX];
952                                 TRPab_war = last->burst_regs[EMC_TRPAB_INDEX];
953                         }
954
955                         if (RP_war < deltaTWATM) {
956                                 W2P_war = last->burst_regs[EMC_W2P_INDEX]
957                                           + deltaTWATM - RP_war;
958                                 if (W2P_war > 63) {
959                                         RP_war = RP_war + W2P_war - 63;
960                                         if (TRPab_war < RP_war)
961                                                 TRPab_war = RP_war;
962                                         W2P_war = 63;
963                                 }
964                         } else {
965                                 W2P_war = last->burst_regs[
966                                           EMC_W2P_INDEX];
967                         }
968
969                         if ((last->burst_regs[EMC_W2P_INDEX] ^ W2P_war) ||
970                             (last->burst_regs[EMC_R2P_INDEX] ^ R2P_war) ||
971                             (last->burst_regs[EMC_RP_INDEX] ^ RP_war) ||
972                             (last->burst_regs[EMC_TRPAB_INDEX] ^ TRPab_war)) {
973                                 emc_writel(emc, RP_war, EMC_RP);
974                                 emc_writel(emc, R2P_war, EMC_R2P);
975                                 emc_writel(emc, W2P_war, EMC_W2P);
976                                 emc_writel(emc, TRPab_war, EMC_TRPAB);
977                         }
978
979                         tegra210_emc_timing_update(emc);
980                 } else {
981                         emc_dbg(emc, INFO, "Skipped WAR\n");
982                 }
983         }
984
985         if (!fsp_for_next_freq) {
986                 mr13_flip_fspwr = (next->emc_mrw3 & 0xffffff3f) | 0x80;
987                 mr13_flip_fspop = (next->emc_mrw3 & 0xffffff3f) | 0x00;
988         } else {
989                 mr13_flip_fspwr = (next->emc_mrw3 & 0xffffff3f) | 0x40;
990                 mr13_flip_fspop = (next->emc_mrw3 & 0xffffff3f) | 0xc0;
991         }
992
993         if (dram_type == DRAM_TYPE_LPDDR4) {
994                 emc_writel(emc, mr13_flip_fspwr, EMC_MRW3);
995                 emc_writel(emc, next->emc_mrw, EMC_MRW);
996                 emc_writel(emc, next->emc_mrw2, EMC_MRW2);
997         }
998
999         /*
1000          * Step 8:
1001          *   Program the shadow registers.
1002          */
1003         emc_dbg(emc, STEPS, "Step 8\n");
1004         emc_dbg(emc, SUB_STEPS, "Writing burst_regs\n");
1005
1006         for (i = 0; i < next->num_burst; i++) {
1007                 const u16 *offsets = emc->offsets->burst;
1008                 u16 offset;
1009
1010                 if (!offsets[i])
1011                         continue;
1012
1013                 value = next->burst_regs[i];
1014                 offset = offsets[i];
1015
1016                 if (dram_type != DRAM_TYPE_LPDDR4 &&
1017                     (offset == EMC_MRW6 || offset == EMC_MRW7 ||
1018                      offset == EMC_MRW8 || offset == EMC_MRW9 ||
1019                      offset == EMC_MRW10 || offset == EMC_MRW11 ||
1020                      offset == EMC_MRW12 || offset == EMC_MRW13 ||
1021                      offset == EMC_MRW14 || offset == EMC_MRW15 ||
1022                      offset == EMC_TRAINING_CTRL))
1023                         continue;
1024
1025                 /* Pain... And suffering. */
1026                 if (offset == EMC_CFG) {
1027                         value &= ~EMC_CFG_DRAM_ACPD;
1028                         value &= ~EMC_CFG_DYN_SELF_REF;
1029
1030                         if (dram_type == DRAM_TYPE_LPDDR4) {
1031                                 value &= ~EMC_CFG_DRAM_CLKSTOP_SR;
1032                                 value &= ~EMC_CFG_DRAM_CLKSTOP_PD;
1033                         }
1034                 } else if (offset == EMC_MRS_WAIT_CNT &&
1035                            dram_type == DRAM_TYPE_LPDDR2 &&
1036                            opt_zcal_en_cc && !opt_cc_short_zcal &&
1037                            opt_short_zcal) {
1038                         value = (value & ~(EMC_MRS_WAIT_CNT_SHORT_WAIT_MASK <<
1039                                            EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT)) |
1040                                 ((zq_wait_long & EMC_MRS_WAIT_CNT_SHORT_WAIT_MASK) <<
1041                                                  EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT);
1042                 } else if (offset == EMC_ZCAL_WAIT_CNT &&
1043                            dram_type == DRAM_TYPE_DDR3 && opt_zcal_en_cc &&
1044                            !opt_cc_short_zcal && opt_short_zcal) {
1045                         value = (value & ~(EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK <<
1046                                            EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_SHIFT)) |
1047                                 ((zq_wait_long & EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK) <<
1048                                                  EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT);
1049                 } else if (offset == EMC_ZCAL_INTERVAL && opt_zcal_en_cc) {
1050                         value = 0; /* EMC_ZCAL_INTERVAL reset value. */
1051                 } else if (offset == EMC_PMACRO_AUTOCAL_CFG_COMMON) {
1052                         value |= EMC_PMACRO_AUTOCAL_CFG_COMMON_E_CAL_BYPASS_DVFS;
1053                 } else if (offset == EMC_PMACRO_DATA_PAD_TX_CTRL) {
1054                         value &= ~(EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSP_TX_E_DCC |
1055                                    EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSN_TX_E_DCC |
1056                                    EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_TX_E_DCC |
1057                                    EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_CMD_TX_E_DCC);
1058                 } else if (offset == EMC_PMACRO_CMD_PAD_TX_CTRL) {
1059                         value |= EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_DRVFORCEON;
1060                         value &= ~(EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSP_TX_E_DCC |
1061                                    EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSN_TX_E_DCC |
1062                                    EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_E_DCC |
1063                                    EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_CMD_TX_E_DCC);
1064                 } else if (offset == EMC_PMACRO_BRICK_CTRL_RFU1) {
1065                         value &= 0xf800f800;
1066                 } else if (offset == EMC_PMACRO_COMMON_PAD_TX_CTRL) {
1067                         value &= 0xfffffff0;
1068                 }
1069
1070                 emc_writel(emc, value, offset);
1071         }
1072
1073         /* SW addition: do EMC refresh adjustment here. */
1074         tegra210_emc_adjust_timing(emc, next);
1075
1076         if (dram_type == DRAM_TYPE_LPDDR4) {
1077                 value = (23 << EMC_MRW_MRW_MA_SHIFT) |
1078                         (next->run_clocks & EMC_MRW_MRW_OP_MASK);
1079                 emc_writel(emc, value, EMC_MRW);
1080         }
1081
1082         /* Per channel burst registers. */
1083         emc_dbg(emc, SUB_STEPS, "Writing burst_regs_per_ch\n");
1084
1085         for (i = 0; i < next->num_burst_per_ch; i++) {
1086                 const struct tegra210_emc_per_channel_regs *burst =
1087                                 emc->offsets->burst_per_channel;
1088
1089                 if (!burst[i].offset)
1090                         continue;
1091
1092                 if (dram_type != DRAM_TYPE_LPDDR4 &&
1093                     (burst[i].offset == EMC_MRW6 ||
1094                      burst[i].offset == EMC_MRW7 ||
1095                      burst[i].offset == EMC_MRW8 ||
1096                      burst[i].offset == EMC_MRW9 ||
1097                      burst[i].offset == EMC_MRW10 ||
1098                      burst[i].offset == EMC_MRW11 ||
1099                      burst[i].offset == EMC_MRW12 ||
1100                      burst[i].offset == EMC_MRW13 ||
1101                      burst[i].offset == EMC_MRW14 ||
1102                      burst[i].offset == EMC_MRW15))
1103                         continue;
1104
1105                 /* Filter out second channel if not in DUAL_CHANNEL mode. */
1106                 if (emc->num_channels < 2 && burst[i].bank >= 1)
1107                         continue;
1108
1109                 emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1110                         next->burst_reg_per_ch[i], burst[i].offset);
1111                 emc_channel_writel(emc, burst[i].bank,
1112                                    next->burst_reg_per_ch[i],
1113                                    burst[i].offset);
1114         }
1115
1116         /* Vref regs. */
1117         emc_dbg(emc, SUB_STEPS, "Writing vref_regs\n");
1118
1119         for (i = 0; i < next->vref_num; i++) {
1120                 const struct tegra210_emc_per_channel_regs *vref =
1121                                         emc->offsets->vref_per_channel;
1122
1123                 if (!vref[i].offset)
1124                         continue;
1125
1126                 if (emc->num_channels < 2 && vref[i].bank >= 1)
1127                         continue;
1128
1129                 emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1130                         next->vref_perch_regs[i], vref[i].offset);
1131                 emc_channel_writel(emc, vref[i].bank, next->vref_perch_regs[i],
1132                                    vref[i].offset);
1133         }
1134
1135         /* Trimmers. */
1136         emc_dbg(emc, SUB_STEPS, "Writing trim_regs\n");
1137
1138         for (i = 0; i < next->num_trim; i++) {
1139                 const u16 *offsets = emc->offsets->trim;
1140
1141                 if (!offsets[i])
1142                         continue;
1143
1144                 if (compensate_trimmer_applicable &&
1145                     (offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0 ||
1146                      offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1 ||
1147                      offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2 ||
1148                      offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3 ||
1149                      offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0 ||
1150                      offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1 ||
1151                      offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2 ||
1152                      offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3 ||
1153                      offsets[i] == EMC_DATA_BRLSHFT_0 ||
1154                      offsets[i] == EMC_DATA_BRLSHFT_1)) {
1155                         value = tegra210_emc_compensate(next, offsets[i]);
1156                         emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1157                                 value, offsets[i]);
1158                         emc_dbg(emc, EMA_WRITES, "0x%08x <= 0x%08x\n",
1159                                 (u32)(u64)offsets[i], value);
1160                         emc_writel(emc, value, offsets[i]);
1161                 } else {
1162                         emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1163                                 next->trim_regs[i], offsets[i]);
1164                         emc_writel(emc, next->trim_regs[i], offsets[i]);
1165                 }
1166         }
1167
1168         /* Per channel trimmers. */
1169         emc_dbg(emc, SUB_STEPS, "Writing trim_regs_per_ch\n");
1170
1171         for (i = 0; i < next->num_trim_per_ch; i++) {
1172                 const struct tegra210_emc_per_channel_regs *trim =
1173                                 &emc->offsets->trim_per_channel[0];
1174                 unsigned int offset;
1175
1176                 if (!trim[i].offset)
1177                         continue;
1178
1179                 if (emc->num_channels < 2 && trim[i].bank >= 1)
1180                         continue;
1181
1182                 offset = trim[i].offset;
1183
1184                 if (compensate_trimmer_applicable &&
1185                     (offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0 ||
1186                      offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1 ||
1187                      offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2 ||
1188                      offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3 ||
1189                      offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0 ||
1190                      offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1 ||
1191                      offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2 ||
1192                      offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3 ||
1193                      offset == EMC_DATA_BRLSHFT_0 ||
1194                      offset == EMC_DATA_BRLSHFT_1)) {
1195                         value = tegra210_emc_compensate(next, offset);
1196                         emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1197                                 value, offset);
1198                         emc_dbg(emc, EMA_WRITES, "0x%08x <= 0x%08x\n", offset,
1199                                 value);
1200                         emc_channel_writel(emc, trim[i].bank, value, offset);
1201                 } else {
1202                         emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1203                                 next->trim_perch_regs[i], offset);
1204                         emc_channel_writel(emc, trim[i].bank,
1205                                            next->trim_perch_regs[i], offset);
1206                 }
1207         }
1208
1209         emc_dbg(emc, SUB_STEPS, "Writing burst_mc_regs\n");
1210
1211         for (i = 0; i < next->num_mc_regs; i++) {
1212                 const u16 *offsets = emc->offsets->burst_mc;
1213                 u32 *values = next->burst_mc_regs;
1214
1215                 emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1216                         values[i], offsets[i]);
1217                 mc_writel(emc->mc, values[i], offsets[i]);
1218         }
1219
1220         /* Registers to be programmed on the faster clock. */
1221         if (next->rate < last->rate) {
1222                 const u16 *la = emc->offsets->la_scale;
1223
1224                 emc_dbg(emc, SUB_STEPS, "Writing la_scale_regs\n");
1225
1226                 for (i = 0; i < next->num_up_down; i++) {
1227                         emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
1228                                 next->la_scale_regs[i], la[i]);
1229                         mc_writel(emc->mc, next->la_scale_regs[i], la[i]);
1230                 }
1231         }
1232
1233         /* Flush all the burst register writes. */
1234         mc_readl(emc->mc, MC_EMEM_ADR_CFG);
1235
1236         /*
1237          * Step 9:
1238          *   LPDDR4 section A.
1239          */
1240         emc_dbg(emc, STEPS, "Step 9\n");
1241
1242         value = next->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX];
1243         value &= ~EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK;
1244
1245         if (dram_type == DRAM_TYPE_LPDDR4) {
1246                 emc_writel(emc, 0, EMC_ZCAL_INTERVAL);
1247                 emc_writel(emc, value, EMC_ZCAL_WAIT_CNT);
1248
1249                 value = emc_dbg | (EMC_DBG_WRITE_MUX_ACTIVE |
1250                                    EMC_DBG_WRITE_ACTIVE_ONLY);
1251
1252                 emc_writel(emc, value, EMC_DBG);
1253                 emc_writel(emc, 0, EMC_ZCAL_INTERVAL);
1254                 emc_writel(emc, emc_dbg, EMC_DBG);
1255         }
1256
1257         /*
1258          * Step 10:
1259          *   LPDDR4 and DDR3 common section.
1260          */
1261         emc_dbg(emc, STEPS, "Step 10\n");
1262
1263         if (opt_dvfs_mode == MAN_SR || dram_type == DRAM_TYPE_LPDDR4) {
1264                 if (dram_type == DRAM_TYPE_LPDDR4)
1265                         ccfifo_writel(emc, 0x101, EMC_SELF_REF, 0);
1266                 else
1267                         ccfifo_writel(emc, 0x1, EMC_SELF_REF, 0);
1268
1269                 if (dram_type == DRAM_TYPE_LPDDR4 &&
1270                     dst_clk_period <= zqcal_before_cc_cutoff) {
1271                         ccfifo_writel(emc, mr13_flip_fspwr ^ 0x40, EMC_MRW3, 0);
1272                         ccfifo_writel(emc, (next->burst_regs[EMC_MRW6_INDEX] &
1273                                                 0xFFFF3F3F) |
1274                                            (last->burst_regs[EMC_MRW6_INDEX] &
1275                                                 0x0000C0C0), EMC_MRW6, 0);
1276                         ccfifo_writel(emc, (next->burst_regs[EMC_MRW14_INDEX] &
1277                                                 0xFFFF0707) |
1278                                            (last->burst_regs[EMC_MRW14_INDEX] &
1279                                                 0x00003838), EMC_MRW14, 0);
1280
1281                         if (emc->num_devices > 1) {
1282                                 ccfifo_writel(emc,
1283                                       (next->burst_regs[EMC_MRW7_INDEX] &
1284                                        0xFFFF3F3F) |
1285                                       (last->burst_regs[EMC_MRW7_INDEX] &
1286                                        0x0000C0C0), EMC_MRW7, 0);
1287                                 ccfifo_writel(emc,
1288                                      (next->burst_regs[EMC_MRW15_INDEX] &
1289                                       0xFFFF0707) |
1290                                      (last->burst_regs[EMC_MRW15_INDEX] &
1291                                       0x00003838), EMC_MRW15, 0);
1292                         }
1293
1294                         if (opt_zcal_en_cc) {
1295                                 if (emc->num_devices < 2)
1296                                         ccfifo_writel(emc,
1297                                                 2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT
1298                                                 | EMC_ZQ_CAL_ZQ_CAL_CMD,
1299                                                 EMC_ZQ_CAL, 0);
1300                                 else if (shared_zq_resistor)
1301                                         ccfifo_writel(emc,
1302                                                 2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT
1303                                                 | EMC_ZQ_CAL_ZQ_CAL_CMD,
1304                                                 EMC_ZQ_CAL, 0);
1305                                 else
1306                                         ccfifo_writel(emc,
1307                                                       EMC_ZQ_CAL_ZQ_CAL_CMD,
1308                                                       EMC_ZQ_CAL, 0);
1309                         }
1310                 }
1311         }
1312
1313         if (dram_type == DRAM_TYPE_LPDDR4) {
1314                 value = (1000 * fake->dram_timings[T_RP]) / src_clk_period;
1315                 ccfifo_writel(emc, mr13_flip_fspop | 0x8, EMC_MRW3, value);
1316                 ccfifo_writel(emc, 0, 0, tFC_lpddr4 / src_clk_period);
1317         }
1318
1319         if (dram_type == DRAM_TYPE_LPDDR4 || opt_dvfs_mode != MAN_SR) {
1320                 delay = 30;
1321
1322                 if (cya_allow_ref_cc) {
1323                         delay += (1000 * fake->dram_timings[T_RP]) /
1324                                         src_clk_period;
1325                         delay += 4000 * fake->dram_timings[T_RFC];
1326                 }
1327
1328                 ccfifo_writel(emc, emc_pin & ~(EMC_PIN_PIN_CKE_PER_DEV |
1329                                                EMC_PIN_PIN_CKEB |
1330                                                EMC_PIN_PIN_CKE),
1331                               EMC_PIN, delay);
1332         }
1333
1334         /* calculate reference delay multiplier */
1335         value = 1;
1336
1337         if (ref_b4_sref_en)
1338                 value++;
1339
1340         if (cya_allow_ref_cc)
1341                 value++;
1342
1343         if (cya_issue_pc_ref)
1344                 value++;
1345
1346         if (dram_type != DRAM_TYPE_LPDDR4) {
1347                 delay = ((1000 * fake->dram_timings[T_RP] / src_clk_period) +
1348                          (1000 * fake->dram_timings[T_RFC] / src_clk_period));
1349                 delay = value * delay + 20;
1350         } else {
1351                 delay = 0;
1352         }
1353
1354         /*
1355          * Step 11:
1356          *   Ramp down.
1357          */
1358         emc_dbg(emc, STEPS, "Step 11\n");
1359
1360         ccfifo_writel(emc, 0x0, EMC_CFG_SYNC, delay);
1361
1362         value = emc_dbg | EMC_DBG_WRITE_MUX_ACTIVE | EMC_DBG_WRITE_ACTIVE_ONLY;
1363         ccfifo_writel(emc, value, EMC_DBG, 0);
1364
1365         ramp_down_wait = tegra210_emc_dvfs_power_ramp_down(emc, src_clk_period,
1366                                                            0);
1367
1368         /*
1369          * Step 12:
1370          *   And finally - trigger the clock change.
1371          */
1372         emc_dbg(emc, STEPS, "Step 12\n");
1373
1374         ccfifo_writel(emc, 1, EMC_STALL_THEN_EXE_AFTER_CLKCHANGE, 0);
1375         value &= ~EMC_DBG_WRITE_ACTIVE_ONLY;
1376         ccfifo_writel(emc, value, EMC_DBG, 0);
1377
1378         /*
1379          * Step 13:
1380          *   Ramp up.
1381          */
1382         emc_dbg(emc, STEPS, "Step 13\n");
1383
1384         ramp_up_wait = tegra210_emc_dvfs_power_ramp_up(emc, dst_clk_period, 0);
1385         ccfifo_writel(emc, emc_dbg, EMC_DBG, 0);
1386
1387         /*
1388          * Step 14:
1389          *   Bringup CKE pins.
1390          */
1391         emc_dbg(emc, STEPS, "Step 14\n");
1392
1393         if (dram_type == DRAM_TYPE_LPDDR4) {
1394                 value = emc_pin | EMC_PIN_PIN_CKE;
1395
1396                 if (emc->num_devices <= 1)
1397                         value &= ~(EMC_PIN_PIN_CKEB | EMC_PIN_PIN_CKE_PER_DEV);
1398                 else
1399                         value |= EMC_PIN_PIN_CKEB | EMC_PIN_PIN_CKE_PER_DEV;
1400
1401                 ccfifo_writel(emc, value, EMC_PIN, 0);
1402         }
1403
1404         /*
1405          * Step 15: (two step 15s ??)
1406          *   Calculate zqlatch wait time; has dependency on ramping times.
1407          */
1408         emc_dbg(emc, STEPS, "Step 15\n");
1409
1410         if (dst_clk_period <= zqcal_before_cc_cutoff) {
1411                 s32 t = (s32)(ramp_up_wait + ramp_down_wait) /
1412                         (s32)dst_clk_period;
1413                 zq_latch_dvfs_wait_time = (s32)tZQCAL_lpddr4_fc_adj - t;
1414         } else {
1415                 zq_latch_dvfs_wait_time = tZQCAL_lpddr4_fc_adj -
1416                         div_o3(1000 * next->dram_timings[T_PDEX],
1417                                dst_clk_period);
1418         }
1419
1420         emc_dbg(emc, INFO, "tZQCAL_lpddr4_fc_adj = %u\n", tZQCAL_lpddr4_fc_adj);
1421         emc_dbg(emc, INFO, "dst_clk_period = %u\n",
1422                 dst_clk_period);
1423         emc_dbg(emc, INFO, "next->dram_timings[T_PDEX] = %u\n",
1424                 next->dram_timings[T_PDEX]);
1425         emc_dbg(emc, INFO, "zq_latch_dvfs_wait_time = %d\n",
1426                 max_t(s32, 0, zq_latch_dvfs_wait_time));
1427
1428         if (dram_type == DRAM_TYPE_LPDDR4 && opt_zcal_en_cc) {
1429                 delay = div_o3(1000 * next->dram_timings[T_PDEX],
1430                                dst_clk_period);
1431
1432                 if (emc->num_devices < 2) {
1433                         if (dst_clk_period > zqcal_before_cc_cutoff)
1434                                 ccfifo_writel(emc,
1435                                               2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1436                                               EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
1437                                               delay);
1438
1439                         value = (mr13_flip_fspop & 0xfffffff7) | 0x0c000000;
1440                         ccfifo_writel(emc, value, EMC_MRW3, delay);
1441                         ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
1442                         ccfifo_writel(emc, 0, EMC_REF, 0);
1443                         ccfifo_writel(emc, 2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1444                                       EMC_ZQ_CAL_ZQ_LATCH_CMD,
1445                                       EMC_ZQ_CAL,
1446                                       max_t(s32, 0, zq_latch_dvfs_wait_time));
1447                 } else if (shared_zq_resistor) {
1448                         if (dst_clk_period > zqcal_before_cc_cutoff)
1449                                 ccfifo_writel(emc,
1450                                               2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1451                                               EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
1452                                               delay);
1453
1454                         ccfifo_writel(emc, 2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1455                                       EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
1456                                       max_t(s32, 0, zq_latch_dvfs_wait_time) +
1457                                         delay);
1458                         ccfifo_writel(emc, 1UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1459                                       EMC_ZQ_CAL_ZQ_LATCH_CMD,
1460                                       EMC_ZQ_CAL, 0);
1461
1462                         value = (mr13_flip_fspop & 0xfffffff7) | 0x0c000000;
1463                         ccfifo_writel(emc, value, EMC_MRW3, 0);
1464                         ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
1465                         ccfifo_writel(emc, 0, EMC_REF, 0);
1466
1467                         ccfifo_writel(emc, 1UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1468                                       EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
1469                                       tZQCAL_lpddr4 / dst_clk_period);
1470                 } else {
1471                         if (dst_clk_period > zqcal_before_cc_cutoff)
1472                                 ccfifo_writel(emc, EMC_ZQ_CAL_ZQ_CAL_CMD,
1473                                               EMC_ZQ_CAL, delay);
1474
1475                         value = (mr13_flip_fspop & 0xfffffff7) | 0x0c000000;
1476                         ccfifo_writel(emc, value, EMC_MRW3, delay);
1477                         ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
1478                         ccfifo_writel(emc, 0, EMC_REF, 0);
1479
1480                         ccfifo_writel(emc, EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
1481                                       max_t(s32, 0, zq_latch_dvfs_wait_time));
1482                 }
1483         }
1484
1485         /* WAR: delay for zqlatch */
1486         ccfifo_writel(emc, 0, 0, 10);
1487
1488         /*
1489          * Step 16:
1490          *   LPDDR4 Conditional Training Kickoff. Removed.
1491          */
1492
1493         /*
1494          * Step 17:
1495          *   MANSR exit self refresh.
1496          */
1497         emc_dbg(emc, STEPS, "Step 17\n");
1498
1499         if (opt_dvfs_mode == MAN_SR && dram_type != DRAM_TYPE_LPDDR4)
1500                 ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
1501
1502         /*
1503          * Step 18:
1504          *   Send MRWs to LPDDR3/DDR3.
1505          */
1506         emc_dbg(emc, STEPS, "Step 18\n");
1507
1508         if (dram_type == DRAM_TYPE_LPDDR2) {
1509                 ccfifo_writel(emc, next->emc_mrw2, EMC_MRW2, 0);
1510                 ccfifo_writel(emc, next->emc_mrw,  EMC_MRW,  0);
1511                 if (is_lpddr3)
1512                         ccfifo_writel(emc, next->emc_mrw4, EMC_MRW4, 0);
1513         } else if (dram_type == DRAM_TYPE_DDR3) {
1514                 if (opt_dll_mode)
1515                         ccfifo_writel(emc, next->emc_emrs &
1516                                       ~EMC_EMRS_USE_EMRS_LONG_CNT, EMC_EMRS, 0);
1517                 ccfifo_writel(emc, next->emc_emrs2 &
1518                               ~EMC_EMRS2_USE_EMRS2_LONG_CNT, EMC_EMRS2, 0);
1519                 ccfifo_writel(emc, next->emc_mrs |
1520                               EMC_EMRS_USE_EMRS_LONG_CNT, EMC_MRS, 0);
1521         }
1522
1523         /*
1524          * Step 19:
1525          *   ZQCAL for LPDDR3/DDR3
1526          */
1527         emc_dbg(emc, STEPS, "Step 19\n");
1528
1529         if (opt_zcal_en_cc) {
1530                 if (dram_type == DRAM_TYPE_LPDDR2) {
1531                         value = opt_cc_short_zcal ? 90000 : 360000;
1532                         value = div_o3(value, dst_clk_period);
1533                         value = value <<
1534                                 EMC_MRS_WAIT_CNT2_MRS_EXT2_WAIT_CNT_SHIFT |
1535                                 value <<
1536                                 EMC_MRS_WAIT_CNT2_MRS_EXT1_WAIT_CNT_SHIFT;
1537                         ccfifo_writel(emc, value, EMC_MRS_WAIT_CNT2, 0);
1538
1539                         value = opt_cc_short_zcal ? 0x56 : 0xab;
1540                         ccfifo_writel(emc, 2 << EMC_MRW_MRW_DEV_SELECTN_SHIFT |
1541                                            EMC_MRW_USE_MRW_EXT_CNT |
1542                                            10 << EMC_MRW_MRW_MA_SHIFT |
1543                                            value << EMC_MRW_MRW_OP_SHIFT,
1544                                       EMC_MRW, 0);
1545
1546                         if (emc->num_devices > 1) {
1547                                 value = 1 << EMC_MRW_MRW_DEV_SELECTN_SHIFT |
1548                                         EMC_MRW_USE_MRW_EXT_CNT |
1549                                         10 << EMC_MRW_MRW_MA_SHIFT |
1550                                         value << EMC_MRW_MRW_OP_SHIFT;
1551                                 ccfifo_writel(emc, value, EMC_MRW, 0);
1552                         }
1553                 } else if (dram_type == DRAM_TYPE_DDR3) {
1554                         value = opt_cc_short_zcal ? 0 : EMC_ZQ_CAL_LONG;
1555
1556                         ccfifo_writel(emc, value |
1557                                            2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1558                                            EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
1559                                            0);
1560
1561                         if (emc->num_devices > 1) {
1562                                 value = value | 1 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
1563                                                 EMC_ZQ_CAL_ZQ_CAL_CMD;
1564                                 ccfifo_writel(emc, value, EMC_ZQ_CAL, 0);
1565                         }
1566                 }
1567         }
1568
1569         if (bg_reg_mode_change) {
1570                 tegra210_emc_set_shadow_bypass(emc, ACTIVE);
1571
1572                 if (ramp_up_wait <= 1250000)
1573                         delay = (1250000 - ramp_up_wait) / dst_clk_period;
1574                 else
1575                         delay = 0;
1576
1577                 ccfifo_writel(emc,
1578                               next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX],
1579                               EMC_PMACRO_BG_BIAS_CTRL_0, delay);
1580                 tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
1581         }
1582
1583         /*
1584          * Step 20:
1585          *   Issue ref and optional QRST.
1586          */
1587         emc_dbg(emc, STEPS, "Step 20\n");
1588
1589         if (dram_type != DRAM_TYPE_LPDDR4)
1590                 ccfifo_writel(emc, 0, EMC_REF, 0);
1591
1592         if (opt_do_sw_qrst) {
1593                 ccfifo_writel(emc, 1, EMC_ISSUE_QRST, 0);
1594                 ccfifo_writel(emc, 0, EMC_ISSUE_QRST, 2);
1595         }
1596
1597         /*
1598          * Step 21:
1599          *   Restore ZCAL and ZCAL interval.
1600          */
1601         emc_dbg(emc, STEPS, "Step 21\n");
1602
1603         if (save_restore_clkstop_pd || opt_zcal_en_cc) {
1604                 ccfifo_writel(emc, emc_dbg | EMC_DBG_WRITE_MUX_ACTIVE,
1605                               EMC_DBG, 0);
1606                 if (opt_zcal_en_cc && dram_type != DRAM_TYPE_LPDDR4)
1607                         ccfifo_writel(emc, next->burst_regs[EMC_ZCAL_INTERVAL_INDEX],
1608                                       EMC_ZCAL_INTERVAL, 0);
1609
1610                 if (save_restore_clkstop_pd)
1611                         ccfifo_writel(emc, next->burst_regs[EMC_CFG_INDEX] &
1612                                                 ~EMC_CFG_DYN_SELF_REF,
1613                                       EMC_CFG, 0);
1614                 ccfifo_writel(emc, emc_dbg, EMC_DBG, 0);
1615         }
1616
1617         /*
1618          * Step 22:
1619          *   Restore EMC_CFG_PIPE_CLK.
1620          */
1621         emc_dbg(emc, STEPS, "Step 22\n");
1622
1623         ccfifo_writel(emc, emc_cfg_pipe_clk, EMC_CFG_PIPE_CLK, 0);
1624
1625         if (bg_reg_mode_change) {
1626                 if (enable_bg_reg)
1627                         emc_writel(emc,
1628                                    next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1629                                         ~EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD,
1630                                    EMC_PMACRO_BG_BIAS_CTRL_0);
1631                 else
1632                         emc_writel(emc,
1633                                    next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
1634                                         ~EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD,
1635                                    EMC_PMACRO_BG_BIAS_CTRL_0);
1636         }
1637
1638         /*
1639          * Step 23:
1640          */
1641         emc_dbg(emc, STEPS, "Step 23\n");
1642
1643         value = emc_readl(emc, EMC_CFG_DIG_DLL);
1644         value |= EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_TRAFFIC;
1645         value &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_RW_UNTIL_LOCK;
1646         value &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_UNTIL_LOCK;
1647         value &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
1648         value = (value & ~EMC_CFG_DIG_DLL_CFG_DLL_MODE_MASK) |
1649                 (2 << EMC_CFG_DIG_DLL_CFG_DLL_MODE_SHIFT);
1650         emc_writel(emc, value, EMC_CFG_DIG_DLL);
1651
1652         tegra210_emc_do_clock_change(emc, clksrc);
1653
1654         /*
1655          * Step 24:
1656          *   Save training results. Removed.
1657          */
1658
1659         /*
1660          * Step 25:
1661          *   Program MC updown registers.
1662          */
1663         emc_dbg(emc, STEPS, "Step 25\n");
1664
1665         if (next->rate > last->rate) {
1666                 for (i = 0; i < next->num_up_down; i++)
1667                         mc_writel(emc->mc, next->la_scale_regs[i],
1668                                   emc->offsets->la_scale[i]);
1669
1670                 tegra210_emc_timing_update(emc);
1671         }
1672
1673         /*
1674          * Step 26:
1675          *   Restore ZCAL registers.
1676          */
1677         emc_dbg(emc, STEPS, "Step 26\n");
1678
1679         if (dram_type == DRAM_TYPE_LPDDR4) {
1680                 tegra210_emc_set_shadow_bypass(emc, ACTIVE);
1681                 emc_writel(emc, next->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX],
1682                            EMC_ZCAL_WAIT_CNT);
1683                 emc_writel(emc, next->burst_regs[EMC_ZCAL_INTERVAL_INDEX],
1684                            EMC_ZCAL_INTERVAL);
1685                 tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
1686         }
1687
1688         if (dram_type != DRAM_TYPE_LPDDR4 && opt_zcal_en_cc &&
1689             !opt_short_zcal && opt_cc_short_zcal) {
1690                 udelay(2);
1691
1692                 tegra210_emc_set_shadow_bypass(emc, ACTIVE);
1693                 if (dram_type == DRAM_TYPE_LPDDR2)
1694                         emc_writel(emc, next->burst_regs[EMC_MRS_WAIT_CNT_INDEX],
1695                                    EMC_MRS_WAIT_CNT);
1696                 else if (dram_type == DRAM_TYPE_DDR3)
1697                         emc_writel(emc, next->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX],
1698                                    EMC_ZCAL_WAIT_CNT);
1699                 tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
1700         }
1701
1702         /*
1703          * Step 27:
1704          *   Restore EMC_CFG, FDPD registers.
1705          */
1706         emc_dbg(emc, STEPS, "Step 27\n");
1707
1708         tegra210_emc_set_shadow_bypass(emc, ACTIVE);
1709         emc_writel(emc, next->burst_regs[EMC_CFG_INDEX], EMC_CFG);
1710         tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
1711         emc_writel(emc, next->emc_fdpd_ctrl_cmd_no_ramp,
1712                    EMC_FDPD_CTRL_CMD_NO_RAMP);
1713         emc_writel(emc, next->emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
1714
1715         /*
1716          * Step 28:
1717          *   Training recover. Removed.
1718          */
1719         emc_dbg(emc, STEPS, "Step 28\n");
1720
1721         tegra210_emc_set_shadow_bypass(emc, ACTIVE);
1722         emc_writel(emc,
1723                    next->burst_regs[EMC_PMACRO_AUTOCAL_CFG_COMMON_INDEX],
1724                    EMC_PMACRO_AUTOCAL_CFG_COMMON);
1725         tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
1726
1727         /*
1728          * Step 29:
1729          *   Power fix WAR.
1730          */
1731         emc_dbg(emc, STEPS, "Step 29\n");
1732
1733         emc_writel(emc, EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE0 |
1734                    EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE1 |
1735                    EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE2 |
1736                    EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE3 |
1737                    EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE4 |
1738                    EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE5 |
1739                    EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE6 |
1740                    EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE7,
1741                    EMC_PMACRO_CFG_PM_GLOBAL_0);
1742         emc_writel(emc, EMC_PMACRO_TRAINING_CTRL_0_CH0_TRAINING_E_WRPTR,
1743                    EMC_PMACRO_TRAINING_CTRL_0);
1744         emc_writel(emc, EMC_PMACRO_TRAINING_CTRL_1_CH1_TRAINING_E_WRPTR,
1745                    EMC_PMACRO_TRAINING_CTRL_1);
1746         emc_writel(emc, 0, EMC_PMACRO_CFG_PM_GLOBAL_0);
1747
1748         /*
1749          * Step 30:
1750          *   Re-enable autocal.
1751          */
1752         emc_dbg(emc, STEPS, "Step 30: Re-enable DLL and AUTOCAL\n");
1753
1754         if (next->burst_regs[EMC_CFG_DIG_DLL_INDEX] & EMC_CFG_DIG_DLL_CFG_DLL_EN) {
1755                 value = emc_readl(emc, EMC_CFG_DIG_DLL);
1756                 value |=  EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_TRAFFIC;
1757                 value |=  EMC_CFG_DIG_DLL_CFG_DLL_EN;
1758                 value &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_RW_UNTIL_LOCK;
1759                 value &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_UNTIL_LOCK;
1760                 value = (value & ~EMC_CFG_DIG_DLL_CFG_DLL_MODE_MASK) |
1761                         (2 << EMC_CFG_DIG_DLL_CFG_DLL_MODE_SHIFT);
1762                 emc_writel(emc, value, EMC_CFG_DIG_DLL);
1763                 tegra210_emc_timing_update(emc);
1764         }
1765
1766         emc_writel(emc, next->emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
1767
1768         /* Done! Yay. */
1769 }
1770
1771 const struct tegra210_emc_sequence tegra210_emc_r21021 = {
1772         .revision = 0x7,
1773         .set_clock = tegra210_emc_r21021_set_clock,
1774         .periodic_compensation = tegra210_emc_r21021_periodic_compensation,
1775 };