  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * Copyright (c) 2014-2020, NVIDIA CORPORATION. All rights reserved.
  4. */
  5. #include <linux/kernel.h>
  6. #include <linux/io.h>
  7. #include <linux/clk.h>
  8. #include <linux/delay.h>
  9. #include <linux/of.h>
  10. #include <soc/tegra/mc.h>
  11. #include "tegra210-emc.h"
  12. #include "tegra210-mc.h"
  13. /*
  14. * Enable flags for specifying verbosity.
  15. */
  16. #define INFO (1 << 0)
  17. #define STEPS (1 << 1)
  18. #define SUB_STEPS (1 << 2)
  19. #define PRELOCK (1 << 3)
  20. #define PRELOCK_STEPS (1 << 4)
  21. #define ACTIVE_EN (1 << 5)
  22. #define PRAMP_UP (1 << 6)
  23. #define PRAMP_DN (1 << 7)
  24. #define EMA_WRITES (1 << 10)
  25. #define EMA_UPDATES (1 << 11)
  26. #define PER_TRAIN (1 << 16)
  27. #define CC_PRINT (1 << 17)
  28. #define CCFIFO (1 << 29)
  29. #define REGS (1 << 30)
  30. #define REG_LISTS (1 << 31)
  31. #define emc_dbg(emc, flags, ...) dev_dbg(emc->dev, __VA_ARGS__)
  32. #define DVFS_CLOCK_CHANGE_VERSION 21021
  33. #define EMC_PRELOCK_VERSION 2101
  34. enum {
  35. DVFS_SEQUENCE = 1,
  36. WRITE_TRAINING_SEQUENCE = 2,
  37. PERIODIC_TRAINING_SEQUENCE = 3,
  38. DVFS_PT1 = 10,
  39. DVFS_UPDATE = 11,
  40. TRAINING_PT1 = 12,
  41. TRAINING_UPDATE = 13,
  42. PERIODIC_TRAINING_UPDATE = 14
  43. };
  44. /*
  45. * PTFV defines - basically just indexes into the per table PTFV array.
  46. */
  47. #define PTFV_DQSOSC_MOVAVG_C0D0U0_INDEX 0
  48. #define PTFV_DQSOSC_MOVAVG_C0D0U1_INDEX 1
  49. #define PTFV_DQSOSC_MOVAVG_C0D1U0_INDEX 2
  50. #define PTFV_DQSOSC_MOVAVG_C0D1U1_INDEX 3
  51. #define PTFV_DQSOSC_MOVAVG_C1D0U0_INDEX 4
  52. #define PTFV_DQSOSC_MOVAVG_C1D0U1_INDEX 5
  53. #define PTFV_DQSOSC_MOVAVG_C1D1U0_INDEX 6
  54. #define PTFV_DQSOSC_MOVAVG_C1D1U1_INDEX 7
  55. #define PTFV_DVFS_SAMPLES_INDEX 9
  56. #define PTFV_MOVAVG_WEIGHT_INDEX 10
  57. #define PTFV_CONFIG_CTRL_INDEX 11
  58. #define PTFV_CONFIG_CTRL_USE_PREVIOUS_EMA (1 << 0)
  59. /*
  60. * Do arithmetic in fixed point.
  61. */
  62. #define MOVAVG_PRECISION_FACTOR 100
  63. /*
  64. * The division portion of the average operation.
  65. */
  66. #define __AVERAGE_PTFV(dev) \
  67. ({ next->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX] = \
  68. next->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX] / \
  69. next->ptfv_list[PTFV_DVFS_SAMPLES_INDEX]; })
  70. /*
  71. * Convert val to fixed point and add it to the temporary average.
  72. */
  73. #define __INCREMENT_PTFV(dev, val) \
  74. ({ next->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX] += \
  75. ((val) * MOVAVG_PRECISION_FACTOR); })
  76. /*
  77. * Convert a moving average back to integral form and return the value.
  78. */
  79. #define __MOVAVG_AC(timing, dev) \
  80. ((timing)->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX] / \
  81. MOVAVG_PRECISION_FACTOR)
  82. /* Weighted update. */
  83. #define __WEIGHTED_UPDATE_PTFV(dev, nval) \
  84. do { \
  85. int w = PTFV_MOVAVG_WEIGHT_INDEX; \
  86. int dqs = PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX; \
  87. \
  88. next->ptfv_list[dqs] = \
  89. ((nval * MOVAVG_PRECISION_FACTOR) + \
  90. (next->ptfv_list[dqs] * \
  91. next->ptfv_list[w])) / \
  92. (next->ptfv_list[w] + 1); \
  93. \
  94. emc_dbg(emc, EMA_UPDATES, "%s: (s=%lu) EMA: %u\n", \
  95. __stringify(dev), nval, next->ptfv_list[dqs]); \
  96. } while (0)
  97. /* Access a particular average. */
  98. #define __MOVAVG(timing, dev) \
  99. ((timing)->ptfv_list[PTFV_DQSOSC_MOVAVG_ ## dev ## _INDEX])
  100. static u32 update_clock_tree_delay(struct tegra210_emc *emc, int type)
  101. {
  102. bool periodic_training_update = type == PERIODIC_TRAINING_UPDATE;
  103. struct tegra210_emc_timing *last = emc->last;
  104. struct tegra210_emc_timing *next = emc->next;
  105. u32 last_timing_rate_mhz = last->rate / 1000;
  106. u32 next_timing_rate_mhz = next->rate / 1000;
  107. bool dvfs_update = type == DVFS_UPDATE;
  108. s32 tdel = 0, tmdel = 0, adel = 0;
  109. bool dvfs_pt1 = type == DVFS_PT1;
  110. unsigned long cval = 0;
  111. u32 temp[2][2], value;
  112. unsigned int i;
  113. /*
  114. * Dev0 MSB.
  115. */
  116. if (dvfs_pt1 || periodic_training_update) {
  117. value = tegra210_emc_mrr_read(emc, 2, 19);
  118. for (i = 0; i < emc->num_channels; i++) {
  119. temp[i][0] = (value & 0x00ff) << 8;
  120. temp[i][1] = (value & 0xff00) << 0;
  121. value >>= 16;
  122. }
  123. /*
  124. * Dev0 LSB.
  125. */
  126. value = tegra210_emc_mrr_read(emc, 2, 18);
  127. for (i = 0; i < emc->num_channels; i++) {
  128. temp[i][0] |= (value & 0x00ff) >> 0;
  129. temp[i][1] |= (value & 0xff00) >> 8;
  130. value >>= 16;
  131. }
  132. }
  133. if (dvfs_pt1 || periodic_training_update) {
  134. cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
  135. cval *= 1000000;
  136. cval /= last_timing_rate_mhz * 2 * temp[0][0];
  137. }
  138. if (dvfs_pt1)
  139. __INCREMENT_PTFV(C0D0U0, cval);
  140. else if (dvfs_update)
  141. __AVERAGE_PTFV(C0D0U0);
  142. else if (periodic_training_update)
  143. __WEIGHTED_UPDATE_PTFV(C0D0U0, cval);
  144. if (dvfs_update || periodic_training_update) {
  145. tdel = next->current_dram_clktree[C0D0U0] -
  146. __MOVAVG_AC(next, C0D0U0);
  147. tmdel = (tdel < 0) ? -1 * tdel : tdel;
  148. adel = tmdel;
  149. if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
  150. next->tree_margin)
  151. next->current_dram_clktree[C0D0U0] =
  152. __MOVAVG_AC(next, C0D0U0);
  153. }
  154. if (dvfs_pt1 || periodic_training_update) {
  155. cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
  156. cval *= 1000000;
  157. cval /= last_timing_rate_mhz * 2 * temp[0][1];
  158. }
  159. if (dvfs_pt1)
  160. __INCREMENT_PTFV(C0D0U1, cval);
  161. else if (dvfs_update)
  162. __AVERAGE_PTFV(C0D0U1);
  163. else if (periodic_training_update)
  164. __WEIGHTED_UPDATE_PTFV(C0D0U1, cval);
  165. if (dvfs_update || periodic_training_update) {
  166. tdel = next->current_dram_clktree[C0D0U1] -
  167. __MOVAVG_AC(next, C0D0U1);
  168. tmdel = (tdel < 0) ? -1 * tdel : tdel;
  169. if (tmdel > adel)
  170. adel = tmdel;
  171. if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
  172. next->tree_margin)
  173. next->current_dram_clktree[C0D0U1] =
  174. __MOVAVG_AC(next, C0D0U1);
  175. }
  176. if (emc->num_channels > 1) {
  177. if (dvfs_pt1 || periodic_training_update) {
  178. cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
  179. cval *= 1000000;
  180. cval /= last_timing_rate_mhz * 2 * temp[1][0];
  181. }
  182. if (dvfs_pt1)
  183. __INCREMENT_PTFV(C1D0U0, cval);
  184. else if (dvfs_update)
  185. __AVERAGE_PTFV(C1D0U0);
  186. else if (periodic_training_update)
  187. __WEIGHTED_UPDATE_PTFV(C1D0U0, cval);
  188. if (dvfs_update || periodic_training_update) {
  189. tdel = next->current_dram_clktree[C1D0U0] -
  190. __MOVAVG_AC(next, C1D0U0);
  191. tmdel = (tdel < 0) ? -1 * tdel : tdel;
  192. if (tmdel > adel)
  193. adel = tmdel;
  194. if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
  195. next->tree_margin)
  196. next->current_dram_clktree[C1D0U0] =
  197. __MOVAVG_AC(next, C1D0U0);
  198. }
  199. if (dvfs_pt1 || periodic_training_update) {
  200. cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
  201. cval *= 1000000;
  202. cval /= last_timing_rate_mhz * 2 * temp[1][1];
  203. }
  204. if (dvfs_pt1)
  205. __INCREMENT_PTFV(C1D0U1, cval);
  206. else if (dvfs_update)
  207. __AVERAGE_PTFV(C1D0U1);
  208. else if (periodic_training_update)
  209. __WEIGHTED_UPDATE_PTFV(C1D0U1, cval);
  210. if (dvfs_update || periodic_training_update) {
  211. tdel = next->current_dram_clktree[C1D0U1] -
  212. __MOVAVG_AC(next, C1D0U1);
  213. tmdel = (tdel < 0) ? -1 * tdel : tdel;
  214. if (tmdel > adel)
  215. adel = tmdel;
  216. if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
  217. next->tree_margin)
  218. next->current_dram_clktree[C1D0U1] =
  219. __MOVAVG_AC(next, C1D0U1);
  220. }
  221. }
  222. if (emc->num_devices < 2)
  223. goto done;
  224. /*
  225. * Dev1 MSB.
  226. */
  227. if (dvfs_pt1 || periodic_training_update) {
  228. value = tegra210_emc_mrr_read(emc, 1, 19);
  229. for (i = 0; i < emc->num_channels; i++) {
  230. temp[i][0] = (value & 0x00ff) << 8;
  231. temp[i][1] = (value & 0xff00) << 0;
  232. value >>= 16;
  233. }
  234. /*
  235. * Dev1 LSB.
  236. */
  237. value = tegra210_emc_mrr_read(emc, 2, 18);
  238. for (i = 0; i < emc->num_channels; i++) {
  239. temp[i][0] |= (value & 0x00ff) >> 0;
  240. temp[i][1] |= (value & 0xff00) >> 8;
  241. value >>= 16;
  242. }
  243. }
  244. if (dvfs_pt1 || periodic_training_update) {
  245. cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
  246. cval *= 1000000;
  247. cval /= last_timing_rate_mhz * 2 * temp[0][0];
  248. }
  249. if (dvfs_pt1)
  250. __INCREMENT_PTFV(C0D1U0, cval);
  251. else if (dvfs_update)
  252. __AVERAGE_PTFV(C0D1U0);
  253. else if (periodic_training_update)
  254. __WEIGHTED_UPDATE_PTFV(C0D1U0, cval);
  255. if (dvfs_update || periodic_training_update) {
  256. tdel = next->current_dram_clktree[C0D1U0] -
  257. __MOVAVG_AC(next, C0D1U0);
  258. tmdel = (tdel < 0) ? -1 * tdel : tdel;
  259. if (tmdel > adel)
  260. adel = tmdel;
  261. if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
  262. next->tree_margin)
  263. next->current_dram_clktree[C0D1U0] =
  264. __MOVAVG_AC(next, C0D1U0);
  265. }
  266. if (dvfs_pt1 || periodic_training_update) {
  267. cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
  268. cval *= 1000000;
  269. cval /= last_timing_rate_mhz * 2 * temp[0][1];
  270. }
  271. if (dvfs_pt1)
  272. __INCREMENT_PTFV(C0D1U1, cval);
  273. else if (dvfs_update)
  274. __AVERAGE_PTFV(C0D1U1);
  275. else if (periodic_training_update)
  276. __WEIGHTED_UPDATE_PTFV(C0D1U1, cval);
  277. if (dvfs_update || periodic_training_update) {
  278. tdel = next->current_dram_clktree[C0D1U1] -
  279. __MOVAVG_AC(next, C0D1U1);
  280. tmdel = (tdel < 0) ? -1 * tdel : tdel;
  281. if (tmdel > adel)
  282. adel = tmdel;
  283. if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
  284. next->tree_margin)
  285. next->current_dram_clktree[C0D1U1] =
  286. __MOVAVG_AC(next, C0D1U1);
  287. }
  288. if (emc->num_channels > 1) {
  289. if (dvfs_pt1 || periodic_training_update) {
  290. cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
  291. cval *= 1000000;
  292. cval /= last_timing_rate_mhz * 2 * temp[1][0];
  293. }
  294. if (dvfs_pt1)
  295. __INCREMENT_PTFV(C1D1U0, cval);
  296. else if (dvfs_update)
  297. __AVERAGE_PTFV(C1D1U0);
  298. else if (periodic_training_update)
  299. __WEIGHTED_UPDATE_PTFV(C1D1U0, cval);
  300. if (dvfs_update || periodic_training_update) {
  301. tdel = next->current_dram_clktree[C1D1U0] -
  302. __MOVAVG_AC(next, C1D1U0);
  303. tmdel = (tdel < 0) ? -1 * tdel : tdel;
  304. if (tmdel > adel)
  305. adel = tmdel;
  306. if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
  307. next->tree_margin)
  308. next->current_dram_clktree[C1D1U0] =
  309. __MOVAVG_AC(next, C1D1U0);
  310. }
  311. if (dvfs_pt1 || periodic_training_update) {
  312. cval = tegra210_emc_actual_osc_clocks(last->run_clocks);
  313. cval *= 1000000;
  314. cval /= last_timing_rate_mhz * 2 * temp[1][1];
  315. }
  316. if (dvfs_pt1)
  317. __INCREMENT_PTFV(C1D1U1, cval);
  318. else if (dvfs_update)
  319. __AVERAGE_PTFV(C1D1U1);
  320. else if (periodic_training_update)
  321. __WEIGHTED_UPDATE_PTFV(C1D1U1, cval);
  322. if (dvfs_update || periodic_training_update) {
  323. tdel = next->current_dram_clktree[C1D1U1] -
  324. __MOVAVG_AC(next, C1D1U1);
  325. tmdel = (tdel < 0) ? -1 * tdel : tdel;
  326. if (tmdel > adel)
  327. adel = tmdel;
  328. if (tmdel * 128 * next_timing_rate_mhz / 1000000 >
  329. next->tree_margin)
  330. next->current_dram_clktree[C1D1U1] =
  331. __MOVAVG_AC(next, C1D1U1);
  332. }
  333. }
  334. done:
  335. return adel;
  336. }
  337. static u32 periodic_compensation_handler(struct tegra210_emc *emc, u32 type,
  338. struct tegra210_emc_timing *last,
  339. struct tegra210_emc_timing *next)
  340. {
  341. #define __COPY_EMA(nt, lt, dev) \
  342. ({ __MOVAVG(nt, dev) = __MOVAVG(lt, dev) * \
  343. (nt)->ptfv_list[PTFV_DVFS_SAMPLES_INDEX]; })
  344. u32 i, adel = 0, samples = next->ptfv_list[PTFV_DVFS_SAMPLES_INDEX];
  345. u32 delay;
  346. delay = tegra210_emc_actual_osc_clocks(last->run_clocks);
  347. delay *= 1000;
  348. delay = 2 + (delay / last->rate);
  349. if (!next->periodic_training)
  350. return 0;
  351. if (type == DVFS_SEQUENCE) {
  352. if (last->periodic_training &&
  353. (next->ptfv_list[PTFV_CONFIG_CTRL_INDEX] &
  354. PTFV_CONFIG_CTRL_USE_PREVIOUS_EMA)) {
  355. /*
  356. * If the previous frequency was using periodic
  357. * calibration then we can reuse the previous
  358. * frequencies EMA data.
  359. */
  360. __COPY_EMA(next, last, C0D0U0);
  361. __COPY_EMA(next, last, C0D0U1);
  362. __COPY_EMA(next, last, C1D0U0);
  363. __COPY_EMA(next, last, C1D0U1);
  364. __COPY_EMA(next, last, C0D1U0);
  365. __COPY_EMA(next, last, C0D1U1);
  366. __COPY_EMA(next, last, C1D1U0);
  367. __COPY_EMA(next, last, C1D1U1);
  368. } else {
  369. /* Reset the EMA.*/
  370. __MOVAVG(next, C0D0U0) = 0;
  371. __MOVAVG(next, C0D0U1) = 0;
  372. __MOVAVG(next, C1D0U0) = 0;
  373. __MOVAVG(next, C1D0U1) = 0;
  374. __MOVAVG(next, C0D1U0) = 0;
  375. __MOVAVG(next, C0D1U1) = 0;
  376. __MOVAVG(next, C1D1U0) = 0;
  377. __MOVAVG(next, C1D1U1) = 0;
  378. for (i = 0; i < samples; i++) {
  379. tegra210_emc_start_periodic_compensation(emc);
  380. udelay(delay);
  381. /*
  382. * Generate next sample of data.
  383. */
  384. adel = update_clock_tree_delay(emc, DVFS_PT1);
  385. }
  386. }
  387. /*
  388. * Seems like it should be part of the
  389. * 'if (last_timing->periodic_training)' conditional
  390. * since is already done for the else clause.
  391. */
  392. adel = update_clock_tree_delay(emc, DVFS_UPDATE);
  393. }
  394. if (type == PERIODIC_TRAINING_SEQUENCE) {
  395. tegra210_emc_start_periodic_compensation(emc);
  396. udelay(delay);
  397. adel = update_clock_tree_delay(emc, PERIODIC_TRAINING_UPDATE);
  398. }
  399. return adel;
  400. }
/*
 * tegra210_emc_r21021_periodic_compensation() - periodically re-trim the
 * output DLLs while running at a fixed frequency.
 * @emc: EMC instance
 *
 * Only acts when the current timing uses periodic training. Temporarily
 * disables DRAM power-saving features and the DLL, kicks off an
 * oscillator run, and — if the measured clock tree drift exceeds the
 * table margin — rewrites the long-DQ trimmer and barrel-shift
 * registers, then restores the saved configuration.
 *
 * Return: always 0.
 */
static u32 tegra210_emc_r21021_periodic_compensation(struct tegra210_emc *emc)
{
	u32 emc_cfg, emc_cfg_o, emc_cfg_update, del, value;
	/* Trimmer registers rewritten when compensation is applied. */
	static const u32 list[] = {
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2,
		EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3,
		EMC_DATA_BRLSHFT_0,
		EMC_DATA_BRLSHFT_1
	};
	struct tegra210_emc_timing *last = emc->last;
	unsigned int items = ARRAY_SIZE(list), i;
	unsigned long delay;

	if (last->periodic_training) {
		emc_dbg(emc, PER_TRAIN, "Periodic training starting\n");

		/*
		 * NOTE(review): the EMC_DBG read result is never used —
		 * presumably a leftover or a deliberate read to settle the
		 * interface; confirm before removing.
		 */
		value = emc_readl(emc, EMC_DBG);
		emc_cfg_o = emc_readl(emc, EMC_CFG);
		/* Mask out the power-saving features for the duration. */
		emc_cfg = emc_cfg_o & ~(EMC_CFG_DYN_SELF_REF |
					EMC_CFG_DRAM_ACPD |
					EMC_CFG_DRAM_CLKSTOP_PD);


		/*
		 * 1. Power optimizations should be off.
		 */
		emc_writel(emc, emc_cfg, EMC_CFG);

		/* Does emc_timing_update() for above changes. */
		tegra210_emc_dll_disable(emc);

		/* Wait for all channels to leave powerdown/self-refresh. */
		for (i = 0; i < emc->num_channels; i++)
			tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
						     EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK,
						     0);

		for (i = 0; i < emc->num_channels; i++)
			tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
						     EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK,
						     0);

		/* Save EMC_CFG_UPDATE and force UPDATE_DLL_IN_UPDATE to 2. */
		emc_cfg_update = value = emc_readl(emc, EMC_CFG_UPDATE);
		value &= ~EMC_CFG_UPDATE_UPDATE_DLL_IN_UPDATE_MASK;
		value |= (2 << EMC_CFG_UPDATE_UPDATE_DLL_IN_UPDATE_SHIFT);
		emc_writel(emc, value, EMC_CFG_UPDATE);

		/*
		 * 2. osc kick off - this assumes training and dvfs have set
		 *    correct MR23.
		 */
		tegra210_emc_start_periodic_compensation(emc);

		/*
		 * 3. Let dram capture its clock tree delays.
		 */
		delay = tegra210_emc_actual_osc_clocks(last->run_clocks);
		delay *= 1000;
		delay /= last->rate + 1;
		udelay(delay);

		/*
		 * 4. Check delta wrt previous values (save value if margin
		 *    exceeds what is set in table).
		 */
		del = periodic_compensation_handler(emc,
						    PERIODIC_TRAINING_SEQUENCE,
						    last, last);

		/*
		 * 5. Apply compensation w.r.t. trained values (if clock tree
		 *    has drifted more than the set margin).
		 */
		if (last->tree_margin < ((del * 128 * (last->rate / 1000)) / 1000000)) {
			for (i = 0; i < items; i++) {
				value = tegra210_emc_compensate(last, list[i]);
				emc_dbg(emc, EMA_WRITES, "0x%08x <= 0x%08x\n",
					list[i], value);
				emc_writel(emc, value, list[i]);
			}
		}

		/* Restore the original EMC_CFG (power features back on). */
		emc_writel(emc, emc_cfg_o, EMC_CFG);

		/*
		 * 6. Timing update actally applies the new trimmers.
		 */
		tegra210_emc_timing_update(emc);

		/* 6.1. Restore the UPDATE_DLL_IN_UPDATE field. */
		emc_writel(emc, emc_cfg_update, EMC_CFG_UPDATE);

		/* 6.2. Restore the DLL. */
		tegra210_emc_dll_enable(emc);
	}

	return 0;
}
  487. /*
  488. * Do the clock change sequence.
  489. */
  490. static void tegra210_emc_r21021_set_clock(struct tegra210_emc *emc, u32 clksrc)
  491. {
  492. /* state variables */
  493. static bool fsp_for_next_freq;
  494. /* constant configuration parameters */
  495. const bool save_restore_clkstop_pd = true;
  496. const u32 zqcal_before_cc_cutoff = 2400;
  497. const bool cya_allow_ref_cc = false;
  498. const bool cya_issue_pc_ref = false;
  499. const bool opt_cc_short_zcal = true;
  500. const bool ref_b4_sref_en = false;
  501. const u32 tZQCAL_lpddr4 = 1000000;
  502. const bool opt_short_zcal = true;
  503. const bool opt_do_sw_qrst = true;
  504. const u32 opt_dvfs_mode = MAN_SR;
  505. /*
  506. * This is the timing table for the source frequency. It does _not_
  507. * necessarily correspond to the actual timing values in the EMC at the
  508. * moment. If the boot BCT differs from the table then this can happen.
  509. * However, we need it for accessing the dram_timings (which are not
  510. * really registers) array for the current frequency.
  511. */
  512. struct tegra210_emc_timing *fake, *last = emc->last, *next = emc->next;
  513. u32 tRTM, RP_war, R2P_war, TRPab_war, deltaTWATM, W2P_war, tRPST;
  514. u32 mr13_flip_fspwr, mr13_flip_fspop, ramp_up_wait, ramp_down_wait;
  515. u32 zq_wait_long, zq_latch_dvfs_wait_time, tZQCAL_lpddr4_fc_adj;
  516. u32 emc_auto_cal_config, auto_cal_en, emc_cfg, emc_sel_dpd_ctrl;
  517. u32 tFC_lpddr4 = 1000 * next->dram_timings[T_FC_LPDDR4];
  518. u32 bg_reg_mode_change, enable_bglp_reg, enable_bg_reg;
  519. bool opt_zcal_en_cc = false, is_lpddr3 = false;
  520. bool compensate_trimmer_applicable = false;
  521. u32 emc_dbg, emc_cfg_pipe_clk, emc_pin;
  522. u32 src_clk_period, dst_clk_period; /* in picoseconds */
  523. bool shared_zq_resistor = false;
  524. u32 value, dram_type;
  525. u32 opt_dll_mode = 0;
  526. unsigned long delay;
  527. unsigned int i;
  528. emc_dbg(emc, INFO, "Running clock change.\n");
  529. /* XXX fake == last */
  530. fake = tegra210_emc_find_timing(emc, last->rate * 1000UL);
  531. fsp_for_next_freq = !fsp_for_next_freq;
  532. value = emc_readl(emc, EMC_FBIO_CFG5) & EMC_FBIO_CFG5_DRAM_TYPE_MASK;
  533. dram_type = value >> EMC_FBIO_CFG5_DRAM_TYPE_SHIFT;
  534. if (last->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX] & BIT(31))
  535. shared_zq_resistor = true;
  536. if ((next->burst_regs[EMC_ZCAL_INTERVAL_INDEX] != 0 &&
  537. last->burst_regs[EMC_ZCAL_INTERVAL_INDEX] == 0) ||
  538. dram_type == DRAM_TYPE_LPDDR4)
  539. opt_zcal_en_cc = true;
  540. if (dram_type == DRAM_TYPE_DDR3)
  541. opt_dll_mode = tegra210_emc_get_dll_state(next);
  542. if ((next->burst_regs[EMC_FBIO_CFG5_INDEX] & BIT(25)) &&
  543. (dram_type == DRAM_TYPE_LPDDR2))
  544. is_lpddr3 = true;
  545. emc_readl(emc, EMC_CFG);
  546. emc_readl(emc, EMC_AUTO_CAL_CONFIG);
  547. src_clk_period = 1000000000 / last->rate;
  548. dst_clk_period = 1000000000 / next->rate;
  549. if (dst_clk_period <= zqcal_before_cc_cutoff)
  550. tZQCAL_lpddr4_fc_adj = tZQCAL_lpddr4 - tFC_lpddr4;
  551. else
  552. tZQCAL_lpddr4_fc_adj = tZQCAL_lpddr4;
  553. tZQCAL_lpddr4_fc_adj /= dst_clk_period;
  554. emc_dbg = emc_readl(emc, EMC_DBG);
  555. emc_pin = emc_readl(emc, EMC_PIN);
  556. emc_cfg_pipe_clk = emc_readl(emc, EMC_CFG_PIPE_CLK);
  557. emc_cfg = next->burst_regs[EMC_CFG_INDEX];
  558. emc_cfg &= ~(EMC_CFG_DYN_SELF_REF | EMC_CFG_DRAM_ACPD |
  559. EMC_CFG_DRAM_CLKSTOP_SR | EMC_CFG_DRAM_CLKSTOP_PD);
  560. emc_sel_dpd_ctrl = next->emc_sel_dpd_ctrl;
  561. emc_sel_dpd_ctrl &= ~(EMC_SEL_DPD_CTRL_CLK_SEL_DPD_EN |
  562. EMC_SEL_DPD_CTRL_CA_SEL_DPD_EN |
  563. EMC_SEL_DPD_CTRL_RESET_SEL_DPD_EN |
  564. EMC_SEL_DPD_CTRL_ODT_SEL_DPD_EN |
  565. EMC_SEL_DPD_CTRL_DATA_SEL_DPD_EN);
  566. emc_dbg(emc, INFO, "Clock change version: %d\n",
  567. DVFS_CLOCK_CHANGE_VERSION);
  568. emc_dbg(emc, INFO, "DRAM type = %d\n", dram_type);
  569. emc_dbg(emc, INFO, "DRAM dev #: %u\n", emc->num_devices);
  570. emc_dbg(emc, INFO, "Next EMC clksrc: 0x%08x\n", clksrc);
  571. emc_dbg(emc, INFO, "DLL clksrc: 0x%08x\n", next->dll_clk_src);
  572. emc_dbg(emc, INFO, "last rate: %u, next rate %u\n", last->rate,
  573. next->rate);
  574. emc_dbg(emc, INFO, "last period: %u, next period: %u\n",
  575. src_clk_period, dst_clk_period);
  576. emc_dbg(emc, INFO, " shared_zq_resistor: %d\n", !!shared_zq_resistor);
  577. emc_dbg(emc, INFO, " num_channels: %u\n", emc->num_channels);
  578. emc_dbg(emc, INFO, " opt_dll_mode: %d\n", opt_dll_mode);
  579. /*
  580. * Step 1:
  581. * Pre DVFS SW sequence.
  582. */
  583. emc_dbg(emc, STEPS, "Step 1\n");
  584. emc_dbg(emc, STEPS, "Step 1.1: Disable DLL temporarily.\n");
  585. value = emc_readl(emc, EMC_CFG_DIG_DLL);
  586. value &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
  587. emc_writel(emc, value, EMC_CFG_DIG_DLL);
  588. tegra210_emc_timing_update(emc);
  589. for (i = 0; i < emc->num_channels; i++)
  590. tegra210_emc_wait_for_update(emc, i, EMC_CFG_DIG_DLL,
  591. EMC_CFG_DIG_DLL_CFG_DLL_EN, 0);
  592. emc_dbg(emc, STEPS, "Step 1.2: Disable AUTOCAL temporarily.\n");
  593. emc_auto_cal_config = next->emc_auto_cal_config;
  594. auto_cal_en = emc_auto_cal_config & EMC_AUTO_CAL_CONFIG_AUTO_CAL_ENABLE;
  595. emc_auto_cal_config &= ~EMC_AUTO_CAL_CONFIG_AUTO_CAL_START;
  596. emc_auto_cal_config |= EMC_AUTO_CAL_CONFIG_AUTO_CAL_MEASURE_STALL;
  597. emc_auto_cal_config |= EMC_AUTO_CAL_CONFIG_AUTO_CAL_UPDATE_STALL;
  598. emc_auto_cal_config |= auto_cal_en;
  599. emc_writel(emc, emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
  600. emc_readl(emc, EMC_AUTO_CAL_CONFIG); /* Flush write. */
  601. emc_dbg(emc, STEPS, "Step 1.3: Disable other power features.\n");
  602. tegra210_emc_set_shadow_bypass(emc, ACTIVE);
  603. emc_writel(emc, emc_cfg, EMC_CFG);
  604. emc_writel(emc, emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
  605. tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
  606. if (next->periodic_training) {
  607. tegra210_emc_reset_dram_clktree_values(next);
  608. for (i = 0; i < emc->num_channels; i++)
  609. tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
  610. EMC_EMC_STATUS_DRAM_IN_POWERDOWN_MASK,
  611. 0);
  612. for (i = 0; i < emc->num_channels; i++)
  613. tegra210_emc_wait_for_update(emc, i, EMC_EMC_STATUS,
  614. EMC_EMC_STATUS_DRAM_IN_SELF_REFRESH_MASK,
  615. 0);
  616. tegra210_emc_start_periodic_compensation(emc);
  617. delay = 1000 * tegra210_emc_actual_osc_clocks(last->run_clocks);
  618. udelay((delay / last->rate) + 2);
  619. value = periodic_compensation_handler(emc, DVFS_SEQUENCE, fake,
  620. next);
  621. value = (value * 128 * next->rate / 1000) / 1000000;
  622. if (next->periodic_training && value > next->tree_margin)
  623. compensate_trimmer_applicable = true;
  624. }
  625. emc_writel(emc, EMC_INTSTATUS_CLKCHANGE_COMPLETE, EMC_INTSTATUS);
  626. tegra210_emc_set_shadow_bypass(emc, ACTIVE);
  627. emc_writel(emc, emc_cfg, EMC_CFG);
  628. emc_writel(emc, emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
  629. emc_writel(emc, emc_cfg_pipe_clk | EMC_CFG_PIPE_CLK_CLK_ALWAYS_ON,
  630. EMC_CFG_PIPE_CLK);
  631. emc_writel(emc, next->emc_fdpd_ctrl_cmd_no_ramp &
  632. ~EMC_FDPD_CTRL_CMD_NO_RAMP_CMD_DPD_NO_RAMP_ENABLE,
  633. EMC_FDPD_CTRL_CMD_NO_RAMP);
  634. bg_reg_mode_change =
  635. ((next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
  636. EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD) ^
  637. (last->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
  638. EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD)) ||
  639. ((next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
  640. EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD) ^
  641. (last->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
  642. EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD));
  643. enable_bglp_reg =
  644. (next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
  645. EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD) == 0;
  646. enable_bg_reg =
  647. (next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
  648. EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD) == 0;
  649. if (bg_reg_mode_change) {
  650. if (enable_bg_reg)
  651. emc_writel(emc, last->burst_regs
  652. [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
  653. ~EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD,
  654. EMC_PMACRO_BG_BIAS_CTRL_0);
  655. if (enable_bglp_reg)
  656. emc_writel(emc, last->burst_regs
  657. [EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
  658. ~EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD,
  659. EMC_PMACRO_BG_BIAS_CTRL_0);
  660. }
  661. /* Check if we need to turn on VREF generator. */
  662. if ((((last->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
  663. EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF) == 0) &&
  664. ((next->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
  665. EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF) == 1)) ||
  666. (((last->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
  667. EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) == 0) &&
  668. ((next->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX] &
  669. EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) != 0))) {
  670. u32 pad_tx_ctrl =
  671. next->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
  672. u32 last_pad_tx_ctrl =
  673. last->burst_regs[EMC_PMACRO_DATA_PAD_TX_CTRL_INDEX];
  674. u32 next_dq_e_ivref, next_dqs_e_ivref;
  675. next_dqs_e_ivref = pad_tx_ctrl &
  676. EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF;
  677. next_dq_e_ivref = pad_tx_ctrl &
  678. EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF;
  679. value = (last_pad_tx_ctrl &
  680. ~EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_E_IVREF &
  681. ~EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQS_E_IVREF) |
  682. next_dq_e_ivref | next_dqs_e_ivref;
  683. emc_writel(emc, value, EMC_PMACRO_DATA_PAD_TX_CTRL);
  684. udelay(1);
  685. } else if (bg_reg_mode_change) {
  686. udelay(1);
  687. }
  688. tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
  689. /*
  690. * Step 2:
  691. * Prelock the DLL.
  692. */
  693. emc_dbg(emc, STEPS, "Step 2\n");
  694. if (next->burst_regs[EMC_CFG_DIG_DLL_INDEX] &
  695. EMC_CFG_DIG_DLL_CFG_DLL_EN) {
  696. emc_dbg(emc, INFO, "Prelock enabled for target frequency.\n");
  697. value = tegra210_emc_dll_prelock(emc, clksrc);
  698. emc_dbg(emc, INFO, "DLL out: 0x%03x\n", value);
  699. } else {
  700. emc_dbg(emc, INFO, "Disabling DLL for target frequency.\n");
  701. tegra210_emc_dll_disable(emc);
  702. }
  703. /*
  704. * Step 3:
  705. * Prepare autocal for the clock change.
  706. */
  707. emc_dbg(emc, STEPS, "Step 3\n");
  708. tegra210_emc_set_shadow_bypass(emc, ACTIVE);
  709. emc_writel(emc, next->emc_auto_cal_config2, EMC_AUTO_CAL_CONFIG2);
  710. emc_writel(emc, next->emc_auto_cal_config3, EMC_AUTO_CAL_CONFIG3);
  711. emc_writel(emc, next->emc_auto_cal_config4, EMC_AUTO_CAL_CONFIG4);
  712. emc_writel(emc, next->emc_auto_cal_config5, EMC_AUTO_CAL_CONFIG5);
  713. emc_writel(emc, next->emc_auto_cal_config6, EMC_AUTO_CAL_CONFIG6);
  714. emc_writel(emc, next->emc_auto_cal_config7, EMC_AUTO_CAL_CONFIG7);
  715. emc_writel(emc, next->emc_auto_cal_config8, EMC_AUTO_CAL_CONFIG8);
  716. tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
  717. emc_auto_cal_config |= (EMC_AUTO_CAL_CONFIG_AUTO_CAL_COMPUTE_START |
  718. auto_cal_en);
  719. emc_writel(emc, emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
  720. /*
  721. * Step 4:
  722. * Update EMC_CFG. (??)
  723. */
  724. emc_dbg(emc, STEPS, "Step 4\n");
  725. if (src_clk_period > 50000 && dram_type == DRAM_TYPE_LPDDR4)
  726. ccfifo_writel(emc, 1, EMC_SELF_REF, 0);
  727. else
  728. emc_writel(emc, next->emc_cfg_2, EMC_CFG_2);
  729. /*
  730. * Step 5:
  731. * Prepare reference variables for ZQCAL regs.
  732. */
  733. emc_dbg(emc, STEPS, "Step 5\n");
  734. if (dram_type == DRAM_TYPE_LPDDR4)
  735. zq_wait_long = max((u32)1, div_o3(1000000, dst_clk_period));
  736. else if (dram_type == DRAM_TYPE_LPDDR2 || is_lpddr3)
  737. zq_wait_long = max(next->min_mrs_wait,
  738. div_o3(360000, dst_clk_period)) + 4;
  739. else if (dram_type == DRAM_TYPE_DDR3)
  740. zq_wait_long = max((u32)256,
  741. div_o3(320000, dst_clk_period) + 2);
  742. else
  743. zq_wait_long = 0;
  744. /*
  745. * Step 6:
  746. * Training code - removed.
  747. */
  748. emc_dbg(emc, STEPS, "Step 6\n");
  749. /*
  750. * Step 7:
  751. * Program FSP reference registers and send MRWs to new FSPWR.
  752. */
  753. emc_dbg(emc, STEPS, "Step 7\n");
  754. emc_dbg(emc, SUB_STEPS, "Step 7.1: Bug 200024907 - Patch RP R2P");
  755. /* WAR 200024907 */
  756. if (dram_type == DRAM_TYPE_LPDDR4) {
  757. u32 nRTP = 16;
  758. if (src_clk_period >= 1000000 / 1866) /* 535.91 ps */
  759. nRTP = 14;
  760. if (src_clk_period >= 1000000 / 1600) /* 625.00 ps */
  761. nRTP = 12;
  762. if (src_clk_period >= 1000000 / 1333) /* 750.19 ps */
  763. nRTP = 10;
  764. if (src_clk_period >= 1000000 / 1066) /* 938.09 ps */
  765. nRTP = 8;
  766. deltaTWATM = max_t(u32, div_o3(7500, src_clk_period), 8);
  767. /*
  768. * Originally there was a + .5 in the tRPST calculation.
  769. * However since we can't do FP in the kernel and the tRTM
  770. * computation was in a floating point ceiling function, adding
  771. * one to tRTP should be ok. There is no other source of non
  772. * integer values, so the result was always going to be
  773. * something for the form: f_ceil(N + .5) = N + 1;
  774. */
  775. tRPST = (last->emc_mrw & 0x80) >> 7;
  776. tRTM = fake->dram_timings[RL] + div_o3(3600, src_clk_period) +
  777. max_t(u32, div_o3(7500, src_clk_period), 8) + tRPST +
  778. 1 + nRTP;
  779. emc_dbg(emc, INFO, "tRTM = %u, EMC_RP = %u\n", tRTM,
  780. next->burst_regs[EMC_RP_INDEX]);
  781. if (last->burst_regs[EMC_RP_INDEX] < tRTM) {
  782. if (tRTM > (last->burst_regs[EMC_R2P_INDEX] +
  783. last->burst_regs[EMC_RP_INDEX])) {
  784. R2P_war = tRTM - last->burst_regs[EMC_RP_INDEX];
  785. RP_war = last->burst_regs[EMC_RP_INDEX];
  786. TRPab_war = last->burst_regs[EMC_TRPAB_INDEX];
  787. if (R2P_war > 63) {
  788. RP_war = R2P_war +
  789. last->burst_regs[EMC_RP_INDEX] - 63;
  790. if (TRPab_war < RP_war)
  791. TRPab_war = RP_war;
  792. R2P_war = 63;
  793. }
  794. } else {
  795. R2P_war = last->burst_regs[EMC_R2P_INDEX];
  796. RP_war = last->burst_regs[EMC_RP_INDEX];
  797. TRPab_war = last->burst_regs[EMC_TRPAB_INDEX];
  798. }
  799. if (RP_war < deltaTWATM) {
  800. W2P_war = last->burst_regs[EMC_W2P_INDEX]
  801. + deltaTWATM - RP_war;
  802. if (W2P_war > 63) {
  803. RP_war = RP_war + W2P_war - 63;
  804. if (TRPab_war < RP_war)
  805. TRPab_war = RP_war;
  806. W2P_war = 63;
  807. }
  808. } else {
  809. W2P_war = last->burst_regs[
  810. EMC_W2P_INDEX];
  811. }
  812. if ((last->burst_regs[EMC_W2P_INDEX] ^ W2P_war) ||
  813. (last->burst_regs[EMC_R2P_INDEX] ^ R2P_war) ||
  814. (last->burst_regs[EMC_RP_INDEX] ^ RP_war) ||
  815. (last->burst_regs[EMC_TRPAB_INDEX] ^ TRPab_war)) {
  816. emc_writel(emc, RP_war, EMC_RP);
  817. emc_writel(emc, R2P_war, EMC_R2P);
  818. emc_writel(emc, W2P_war, EMC_W2P);
  819. emc_writel(emc, TRPab_war, EMC_TRPAB);
  820. }
  821. tegra210_emc_timing_update(emc);
  822. } else {
  823. emc_dbg(emc, INFO, "Skipped WAR\n");
  824. }
  825. }
  826. if (!fsp_for_next_freq) {
  827. mr13_flip_fspwr = (next->emc_mrw3 & 0xffffff3f) | 0x80;
  828. mr13_flip_fspop = (next->emc_mrw3 & 0xffffff3f) | 0x00;
  829. } else {
  830. mr13_flip_fspwr = (next->emc_mrw3 & 0xffffff3f) | 0x40;
  831. mr13_flip_fspop = (next->emc_mrw3 & 0xffffff3f) | 0xc0;
  832. }
  833. if (dram_type == DRAM_TYPE_LPDDR4) {
  834. emc_writel(emc, mr13_flip_fspwr, EMC_MRW3);
  835. emc_writel(emc, next->emc_mrw, EMC_MRW);
  836. emc_writel(emc, next->emc_mrw2, EMC_MRW2);
  837. }
  838. /*
  839. * Step 8:
  840. * Program the shadow registers.
  841. */
  842. emc_dbg(emc, STEPS, "Step 8\n");
  843. emc_dbg(emc, SUB_STEPS, "Writing burst_regs\n");
  844. for (i = 0; i < next->num_burst; i++) {
  845. const u16 *offsets = emc->offsets->burst;
  846. u16 offset;
  847. if (!offsets[i])
  848. continue;
  849. value = next->burst_regs[i];
  850. offset = offsets[i];
  851. if (dram_type != DRAM_TYPE_LPDDR4 &&
  852. (offset == EMC_MRW6 || offset == EMC_MRW7 ||
  853. offset == EMC_MRW8 || offset == EMC_MRW9 ||
  854. offset == EMC_MRW10 || offset == EMC_MRW11 ||
  855. offset == EMC_MRW12 || offset == EMC_MRW13 ||
  856. offset == EMC_MRW14 || offset == EMC_MRW15 ||
  857. offset == EMC_TRAINING_CTRL))
  858. continue;
  859. /* Pain... And suffering. */
  860. if (offset == EMC_CFG) {
  861. value &= ~EMC_CFG_DRAM_ACPD;
  862. value &= ~EMC_CFG_DYN_SELF_REF;
  863. if (dram_type == DRAM_TYPE_LPDDR4) {
  864. value &= ~EMC_CFG_DRAM_CLKSTOP_SR;
  865. value &= ~EMC_CFG_DRAM_CLKSTOP_PD;
  866. }
  867. } else if (offset == EMC_MRS_WAIT_CNT &&
  868. dram_type == DRAM_TYPE_LPDDR2 &&
  869. opt_zcal_en_cc && !opt_cc_short_zcal &&
  870. opt_short_zcal) {
  871. value = (value & ~(EMC_MRS_WAIT_CNT_SHORT_WAIT_MASK <<
  872. EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT)) |
  873. ((zq_wait_long & EMC_MRS_WAIT_CNT_SHORT_WAIT_MASK) <<
  874. EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT);
  875. } else if (offset == EMC_ZCAL_WAIT_CNT &&
  876. dram_type == DRAM_TYPE_DDR3 && opt_zcal_en_cc &&
  877. !opt_cc_short_zcal && opt_short_zcal) {
  878. value = (value & ~(EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK <<
  879. EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_SHIFT)) |
  880. ((zq_wait_long & EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK) <<
  881. EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT);
  882. } else if (offset == EMC_ZCAL_INTERVAL && opt_zcal_en_cc) {
  883. value = 0; /* EMC_ZCAL_INTERVAL reset value. */
  884. } else if (offset == EMC_PMACRO_AUTOCAL_CFG_COMMON) {
  885. value |= EMC_PMACRO_AUTOCAL_CFG_COMMON_E_CAL_BYPASS_DVFS;
  886. } else if (offset == EMC_PMACRO_DATA_PAD_TX_CTRL) {
  887. value &= ~(EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSP_TX_E_DCC |
  888. EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQSN_TX_E_DCC |
  889. EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_DQ_TX_E_DCC |
  890. EMC_PMACRO_DATA_PAD_TX_CTRL_DATA_CMD_TX_E_DCC);
  891. } else if (offset == EMC_PMACRO_CMD_PAD_TX_CTRL) {
  892. value |= EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_DRVFORCEON;
  893. value &= ~(EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSP_TX_E_DCC |
  894. EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQSN_TX_E_DCC |
  895. EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_DQ_TX_E_DCC |
  896. EMC_PMACRO_CMD_PAD_TX_CTRL_CMD_CMD_TX_E_DCC);
  897. } else if (offset == EMC_PMACRO_BRICK_CTRL_RFU1) {
  898. value &= 0xf800f800;
  899. } else if (offset == EMC_PMACRO_COMMON_PAD_TX_CTRL) {
  900. value &= 0xfffffff0;
  901. }
  902. emc_writel(emc, value, offset);
  903. }
  904. /* SW addition: do EMC refresh adjustment here. */
  905. tegra210_emc_adjust_timing(emc, next);
  906. if (dram_type == DRAM_TYPE_LPDDR4) {
  907. value = (23 << EMC_MRW_MRW_MA_SHIFT) |
  908. (next->run_clocks & EMC_MRW_MRW_OP_MASK);
  909. emc_writel(emc, value, EMC_MRW);
  910. }
  911. /* Per channel burst registers. */
  912. emc_dbg(emc, SUB_STEPS, "Writing burst_regs_per_ch\n");
  913. for (i = 0; i < next->num_burst_per_ch; i++) {
  914. const struct tegra210_emc_per_channel_regs *burst =
  915. emc->offsets->burst_per_channel;
  916. if (!burst[i].offset)
  917. continue;
  918. if (dram_type != DRAM_TYPE_LPDDR4 &&
  919. (burst[i].offset == EMC_MRW6 ||
  920. burst[i].offset == EMC_MRW7 ||
  921. burst[i].offset == EMC_MRW8 ||
  922. burst[i].offset == EMC_MRW9 ||
  923. burst[i].offset == EMC_MRW10 ||
  924. burst[i].offset == EMC_MRW11 ||
  925. burst[i].offset == EMC_MRW12 ||
  926. burst[i].offset == EMC_MRW13 ||
  927. burst[i].offset == EMC_MRW14 ||
  928. burst[i].offset == EMC_MRW15))
  929. continue;
  930. /* Filter out second channel if not in DUAL_CHANNEL mode. */
  931. if (emc->num_channels < 2 && burst[i].bank >= 1)
  932. continue;
  933. emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
  934. next->burst_reg_per_ch[i], burst[i].offset);
  935. emc_channel_writel(emc, burst[i].bank,
  936. next->burst_reg_per_ch[i],
  937. burst[i].offset);
  938. }
  939. /* Vref regs. */
  940. emc_dbg(emc, SUB_STEPS, "Writing vref_regs\n");
  941. for (i = 0; i < next->vref_num; i++) {
  942. const struct tegra210_emc_per_channel_regs *vref =
  943. emc->offsets->vref_per_channel;
  944. if (!vref[i].offset)
  945. continue;
  946. if (emc->num_channels < 2 && vref[i].bank >= 1)
  947. continue;
  948. emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
  949. next->vref_perch_regs[i], vref[i].offset);
  950. emc_channel_writel(emc, vref[i].bank, next->vref_perch_regs[i],
  951. vref[i].offset);
  952. }
  953. /* Trimmers. */
  954. emc_dbg(emc, SUB_STEPS, "Writing trim_regs\n");
  955. for (i = 0; i < next->num_trim; i++) {
  956. const u16 *offsets = emc->offsets->trim;
  957. if (!offsets[i])
  958. continue;
  959. if (compensate_trimmer_applicable &&
  960. (offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0 ||
  961. offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1 ||
  962. offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2 ||
  963. offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3 ||
  964. offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0 ||
  965. offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1 ||
  966. offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2 ||
  967. offsets[i] == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3 ||
  968. offsets[i] == EMC_DATA_BRLSHFT_0 ||
  969. offsets[i] == EMC_DATA_BRLSHFT_1)) {
  970. value = tegra210_emc_compensate(next, offsets[i]);
  971. emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
  972. value, offsets[i]);
  973. emc_dbg(emc, EMA_WRITES, "0x%08x <= 0x%08x\n",
  974. (u32)(u64)offsets[i], value);
  975. emc_writel(emc, value, offsets[i]);
  976. } else {
  977. emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
  978. next->trim_regs[i], offsets[i]);
  979. emc_writel(emc, next->trim_regs[i], offsets[i]);
  980. }
  981. }
  982. /* Per channel trimmers. */
  983. emc_dbg(emc, SUB_STEPS, "Writing trim_regs_per_ch\n");
  984. for (i = 0; i < next->num_trim_per_ch; i++) {
  985. const struct tegra210_emc_per_channel_regs *trim =
  986. &emc->offsets->trim_per_channel[0];
  987. unsigned int offset;
  988. if (!trim[i].offset)
  989. continue;
  990. if (emc->num_channels < 2 && trim[i].bank >= 1)
  991. continue;
  992. offset = trim[i].offset;
  993. if (compensate_trimmer_applicable &&
  994. (offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_0 ||
  995. offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_1 ||
  996. offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2 ||
  997. offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3 ||
  998. offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_0 ||
  999. offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_1 ||
  1000. offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2 ||
  1001. offset == EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3 ||
  1002. offset == EMC_DATA_BRLSHFT_0 ||
  1003. offset == EMC_DATA_BRLSHFT_1)) {
  1004. value = tegra210_emc_compensate(next, offset);
  1005. emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
  1006. value, offset);
  1007. emc_dbg(emc, EMA_WRITES, "0x%08x <= 0x%08x\n", offset,
  1008. value);
  1009. emc_channel_writel(emc, trim[i].bank, value, offset);
  1010. } else {
  1011. emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
  1012. next->trim_perch_regs[i], offset);
  1013. emc_channel_writel(emc, trim[i].bank,
  1014. next->trim_perch_regs[i], offset);
  1015. }
  1016. }
  1017. emc_dbg(emc, SUB_STEPS, "Writing burst_mc_regs\n");
  1018. for (i = 0; i < next->num_mc_regs; i++) {
  1019. const u16 *offsets = emc->offsets->burst_mc;
  1020. u32 *values = next->burst_mc_regs;
  1021. emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
  1022. values[i], offsets[i]);
  1023. mc_writel(emc->mc, values[i], offsets[i]);
  1024. }
  1025. /* Registers to be programmed on the faster clock. */
  1026. if (next->rate < last->rate) {
  1027. const u16 *la = emc->offsets->la_scale;
  1028. emc_dbg(emc, SUB_STEPS, "Writing la_scale_regs\n");
  1029. for (i = 0; i < next->num_up_down; i++) {
  1030. emc_dbg(emc, REG_LISTS, "(%u) 0x%08x => 0x%08x\n", i,
  1031. next->la_scale_regs[i], la[i]);
  1032. mc_writel(emc->mc, next->la_scale_regs[i], la[i]);
  1033. }
  1034. }
  1035. /* Flush all the burst register writes. */
  1036. mc_readl(emc->mc, MC_EMEM_ADR_CFG);
  1037. /*
  1038. * Step 9:
  1039. * LPDDR4 section A.
  1040. */
  1041. emc_dbg(emc, STEPS, "Step 9\n");
  1042. value = next->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX];
  1043. value &= ~EMC_ZCAL_WAIT_CNT_ZCAL_WAIT_CNT_MASK;
  1044. if (dram_type == DRAM_TYPE_LPDDR4) {
  1045. emc_writel(emc, 0, EMC_ZCAL_INTERVAL);
  1046. emc_writel(emc, value, EMC_ZCAL_WAIT_CNT);
  1047. value = emc_dbg | (EMC_DBG_WRITE_MUX_ACTIVE |
  1048. EMC_DBG_WRITE_ACTIVE_ONLY);
  1049. emc_writel(emc, value, EMC_DBG);
  1050. emc_writel(emc, 0, EMC_ZCAL_INTERVAL);
  1051. emc_writel(emc, emc_dbg, EMC_DBG);
  1052. }
  1053. /*
  1054. * Step 10:
  1055. * LPDDR4 and DDR3 common section.
  1056. */
  1057. emc_dbg(emc, STEPS, "Step 10\n");
  1058. if (opt_dvfs_mode == MAN_SR || dram_type == DRAM_TYPE_LPDDR4) {
  1059. if (dram_type == DRAM_TYPE_LPDDR4)
  1060. ccfifo_writel(emc, 0x101, EMC_SELF_REF, 0);
  1061. else
  1062. ccfifo_writel(emc, 0x1, EMC_SELF_REF, 0);
  1063. if (dram_type == DRAM_TYPE_LPDDR4 &&
  1064. dst_clk_period <= zqcal_before_cc_cutoff) {
  1065. ccfifo_writel(emc, mr13_flip_fspwr ^ 0x40, EMC_MRW3, 0);
  1066. ccfifo_writel(emc, (next->burst_regs[EMC_MRW6_INDEX] &
  1067. 0xFFFF3F3F) |
  1068. (last->burst_regs[EMC_MRW6_INDEX] &
  1069. 0x0000C0C0), EMC_MRW6, 0);
  1070. ccfifo_writel(emc, (next->burst_regs[EMC_MRW14_INDEX] &
  1071. 0xFFFF0707) |
  1072. (last->burst_regs[EMC_MRW14_INDEX] &
  1073. 0x00003838), EMC_MRW14, 0);
  1074. if (emc->num_devices > 1) {
  1075. ccfifo_writel(emc,
  1076. (next->burst_regs[EMC_MRW7_INDEX] &
  1077. 0xFFFF3F3F) |
  1078. (last->burst_regs[EMC_MRW7_INDEX] &
  1079. 0x0000C0C0), EMC_MRW7, 0);
  1080. ccfifo_writel(emc,
  1081. (next->burst_regs[EMC_MRW15_INDEX] &
  1082. 0xFFFF0707) |
  1083. (last->burst_regs[EMC_MRW15_INDEX] &
  1084. 0x00003838), EMC_MRW15, 0);
  1085. }
  1086. if (opt_zcal_en_cc) {
  1087. if (emc->num_devices < 2)
  1088. ccfifo_writel(emc,
  1089. 2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT
  1090. | EMC_ZQ_CAL_ZQ_CAL_CMD,
  1091. EMC_ZQ_CAL, 0);
  1092. else if (shared_zq_resistor)
  1093. ccfifo_writel(emc,
  1094. 2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT
  1095. | EMC_ZQ_CAL_ZQ_CAL_CMD,
  1096. EMC_ZQ_CAL, 0);
  1097. else
  1098. ccfifo_writel(emc,
  1099. EMC_ZQ_CAL_ZQ_CAL_CMD,
  1100. EMC_ZQ_CAL, 0);
  1101. }
  1102. }
  1103. }
  1104. if (dram_type == DRAM_TYPE_LPDDR4) {
  1105. value = (1000 * fake->dram_timings[T_RP]) / src_clk_period;
  1106. ccfifo_writel(emc, mr13_flip_fspop | 0x8, EMC_MRW3, value);
  1107. ccfifo_writel(emc, 0, 0, tFC_lpddr4 / src_clk_period);
  1108. }
  1109. if (dram_type == DRAM_TYPE_LPDDR4 || opt_dvfs_mode != MAN_SR) {
  1110. delay = 30;
  1111. if (cya_allow_ref_cc) {
  1112. delay += (1000 * fake->dram_timings[T_RP]) /
  1113. src_clk_period;
  1114. delay += 4000 * fake->dram_timings[T_RFC];
  1115. }
  1116. ccfifo_writel(emc, emc_pin & ~(EMC_PIN_PIN_CKE_PER_DEV |
  1117. EMC_PIN_PIN_CKEB |
  1118. EMC_PIN_PIN_CKE),
  1119. EMC_PIN, delay);
  1120. }
  1121. /* calculate reference delay multiplier */
  1122. value = 1;
  1123. if (ref_b4_sref_en)
  1124. value++;
  1125. if (cya_allow_ref_cc)
  1126. value++;
  1127. if (cya_issue_pc_ref)
  1128. value++;
  1129. if (dram_type != DRAM_TYPE_LPDDR4) {
  1130. delay = ((1000 * fake->dram_timings[T_RP] / src_clk_period) +
  1131. (1000 * fake->dram_timings[T_RFC] / src_clk_period));
  1132. delay = value * delay + 20;
  1133. } else {
  1134. delay = 0;
  1135. }
  1136. /*
  1137. * Step 11:
  1138. * Ramp down.
  1139. */
  1140. emc_dbg(emc, STEPS, "Step 11\n");
  1141. ccfifo_writel(emc, 0x0, EMC_CFG_SYNC, delay);
  1142. value = emc_dbg | EMC_DBG_WRITE_MUX_ACTIVE | EMC_DBG_WRITE_ACTIVE_ONLY;
  1143. ccfifo_writel(emc, value, EMC_DBG, 0);
  1144. ramp_down_wait = tegra210_emc_dvfs_power_ramp_down(emc, src_clk_period,
  1145. 0);
  1146. /*
  1147. * Step 12:
  1148. * And finally - trigger the clock change.
  1149. */
  1150. emc_dbg(emc, STEPS, "Step 12\n");
  1151. ccfifo_writel(emc, 1, EMC_STALL_THEN_EXE_AFTER_CLKCHANGE, 0);
  1152. value &= ~EMC_DBG_WRITE_ACTIVE_ONLY;
  1153. ccfifo_writel(emc, value, EMC_DBG, 0);
  1154. /*
  1155. * Step 13:
  1156. * Ramp up.
  1157. */
  1158. emc_dbg(emc, STEPS, "Step 13\n");
  1159. ramp_up_wait = tegra210_emc_dvfs_power_ramp_up(emc, dst_clk_period, 0);
  1160. ccfifo_writel(emc, emc_dbg, EMC_DBG, 0);
  1161. /*
  1162. * Step 14:
  1163. * Bringup CKE pins.
  1164. */
  1165. emc_dbg(emc, STEPS, "Step 14\n");
  1166. if (dram_type == DRAM_TYPE_LPDDR4) {
  1167. value = emc_pin | EMC_PIN_PIN_CKE;
  1168. if (emc->num_devices <= 1)
  1169. value &= ~(EMC_PIN_PIN_CKEB | EMC_PIN_PIN_CKE_PER_DEV);
  1170. else
  1171. value |= EMC_PIN_PIN_CKEB | EMC_PIN_PIN_CKE_PER_DEV;
  1172. ccfifo_writel(emc, value, EMC_PIN, 0);
  1173. }
  1174. /*
  1175. * Step 15: (two step 15s ??)
  1176. * Calculate zqlatch wait time; has dependency on ramping times.
  1177. */
  1178. emc_dbg(emc, STEPS, "Step 15\n");
  1179. if (dst_clk_period <= zqcal_before_cc_cutoff) {
  1180. s32 t = (s32)(ramp_up_wait + ramp_down_wait) /
  1181. (s32)dst_clk_period;
  1182. zq_latch_dvfs_wait_time = (s32)tZQCAL_lpddr4_fc_adj - t;
  1183. } else {
  1184. zq_latch_dvfs_wait_time = tZQCAL_lpddr4_fc_adj -
  1185. div_o3(1000 * next->dram_timings[T_PDEX],
  1186. dst_clk_period);
  1187. }
  1188. emc_dbg(emc, INFO, "tZQCAL_lpddr4_fc_adj = %u\n", tZQCAL_lpddr4_fc_adj);
  1189. emc_dbg(emc, INFO, "dst_clk_period = %u\n",
  1190. dst_clk_period);
  1191. emc_dbg(emc, INFO, "next->dram_timings[T_PDEX] = %u\n",
  1192. next->dram_timings[T_PDEX]);
  1193. emc_dbg(emc, INFO, "zq_latch_dvfs_wait_time = %d\n",
  1194. max_t(s32, 0, zq_latch_dvfs_wait_time));
  1195. if (dram_type == DRAM_TYPE_LPDDR4 && opt_zcal_en_cc) {
  1196. delay = div_o3(1000 * next->dram_timings[T_PDEX],
  1197. dst_clk_period);
  1198. if (emc->num_devices < 2) {
  1199. if (dst_clk_period > zqcal_before_cc_cutoff)
  1200. ccfifo_writel(emc,
  1201. 2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
  1202. EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
  1203. delay);
  1204. value = (mr13_flip_fspop & 0xfffffff7) | 0x0c000000;
  1205. ccfifo_writel(emc, value, EMC_MRW3, delay);
  1206. ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
  1207. ccfifo_writel(emc, 0, EMC_REF, 0);
  1208. ccfifo_writel(emc, 2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
  1209. EMC_ZQ_CAL_ZQ_LATCH_CMD,
  1210. EMC_ZQ_CAL,
  1211. max_t(s32, 0, zq_latch_dvfs_wait_time));
  1212. } else if (shared_zq_resistor) {
  1213. if (dst_clk_period > zqcal_before_cc_cutoff)
  1214. ccfifo_writel(emc,
  1215. 2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
  1216. EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
  1217. delay);
  1218. ccfifo_writel(emc, 2UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
  1219. EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
  1220. max_t(s32, 0, zq_latch_dvfs_wait_time) +
  1221. delay);
  1222. ccfifo_writel(emc, 1UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
  1223. EMC_ZQ_CAL_ZQ_LATCH_CMD,
  1224. EMC_ZQ_CAL, 0);
  1225. value = (mr13_flip_fspop & 0xfffffff7) | 0x0c000000;
  1226. ccfifo_writel(emc, value, EMC_MRW3, 0);
  1227. ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
  1228. ccfifo_writel(emc, 0, EMC_REF, 0);
  1229. ccfifo_writel(emc, 1UL << EMC_ZQ_CAL_DEV_SEL_SHIFT |
  1230. EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
  1231. tZQCAL_lpddr4 / dst_clk_period);
  1232. } else {
  1233. if (dst_clk_period > zqcal_before_cc_cutoff)
  1234. ccfifo_writel(emc, EMC_ZQ_CAL_ZQ_CAL_CMD,
  1235. EMC_ZQ_CAL, delay);
  1236. value = (mr13_flip_fspop & 0xfffffff7) | 0x0c000000;
  1237. ccfifo_writel(emc, value, EMC_MRW3, delay);
  1238. ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
  1239. ccfifo_writel(emc, 0, EMC_REF, 0);
  1240. ccfifo_writel(emc, EMC_ZQ_CAL_ZQ_LATCH_CMD, EMC_ZQ_CAL,
  1241. max_t(s32, 0, zq_latch_dvfs_wait_time));
  1242. }
  1243. }
  1244. /* WAR: delay for zqlatch */
  1245. ccfifo_writel(emc, 0, 0, 10);
  1246. /*
  1247. * Step 16:
  1248. * LPDDR4 Conditional Training Kickoff. Removed.
  1249. */
  1250. /*
  1251. * Step 17:
  1252. * MANSR exit self refresh.
  1253. */
  1254. emc_dbg(emc, STEPS, "Step 17\n");
  1255. if (opt_dvfs_mode == MAN_SR && dram_type != DRAM_TYPE_LPDDR4)
  1256. ccfifo_writel(emc, 0, EMC_SELF_REF, 0);
  1257. /*
  1258. * Step 18:
  1259. * Send MRWs to LPDDR3/DDR3.
  1260. */
  1261. emc_dbg(emc, STEPS, "Step 18\n");
  1262. if (dram_type == DRAM_TYPE_LPDDR2) {
  1263. ccfifo_writel(emc, next->emc_mrw2, EMC_MRW2, 0);
  1264. ccfifo_writel(emc, next->emc_mrw, EMC_MRW, 0);
  1265. if (is_lpddr3)
  1266. ccfifo_writel(emc, next->emc_mrw4, EMC_MRW4, 0);
  1267. } else if (dram_type == DRAM_TYPE_DDR3) {
  1268. if (opt_dll_mode)
  1269. ccfifo_writel(emc, next->emc_emrs &
  1270. ~EMC_EMRS_USE_EMRS_LONG_CNT, EMC_EMRS, 0);
  1271. ccfifo_writel(emc, next->emc_emrs2 &
  1272. ~EMC_EMRS2_USE_EMRS2_LONG_CNT, EMC_EMRS2, 0);
  1273. ccfifo_writel(emc, next->emc_mrs |
  1274. EMC_EMRS_USE_EMRS_LONG_CNT, EMC_MRS, 0);
  1275. }
  1276. /*
  1277. * Step 19:
  1278. * ZQCAL for LPDDR3/DDR3
  1279. */
  1280. emc_dbg(emc, STEPS, "Step 19\n");
  1281. if (opt_zcal_en_cc) {
  1282. if (dram_type == DRAM_TYPE_LPDDR2) {
  1283. value = opt_cc_short_zcal ? 90000 : 360000;
  1284. value = div_o3(value, dst_clk_period);
  1285. value = value <<
  1286. EMC_MRS_WAIT_CNT2_MRS_EXT2_WAIT_CNT_SHIFT |
  1287. value <<
  1288. EMC_MRS_WAIT_CNT2_MRS_EXT1_WAIT_CNT_SHIFT;
  1289. ccfifo_writel(emc, value, EMC_MRS_WAIT_CNT2, 0);
  1290. value = opt_cc_short_zcal ? 0x56 : 0xab;
  1291. ccfifo_writel(emc, 2 << EMC_MRW_MRW_DEV_SELECTN_SHIFT |
  1292. EMC_MRW_USE_MRW_EXT_CNT |
  1293. 10 << EMC_MRW_MRW_MA_SHIFT |
  1294. value << EMC_MRW_MRW_OP_SHIFT,
  1295. EMC_MRW, 0);
  1296. if (emc->num_devices > 1) {
  1297. value = 1 << EMC_MRW_MRW_DEV_SELECTN_SHIFT |
  1298. EMC_MRW_USE_MRW_EXT_CNT |
  1299. 10 << EMC_MRW_MRW_MA_SHIFT |
  1300. value << EMC_MRW_MRW_OP_SHIFT;
  1301. ccfifo_writel(emc, value, EMC_MRW, 0);
  1302. }
  1303. } else if (dram_type == DRAM_TYPE_DDR3) {
  1304. value = opt_cc_short_zcal ? 0 : EMC_ZQ_CAL_LONG;
  1305. ccfifo_writel(emc, value |
  1306. 2 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
  1307. EMC_ZQ_CAL_ZQ_CAL_CMD, EMC_ZQ_CAL,
  1308. 0);
  1309. if (emc->num_devices > 1) {
  1310. value = value | 1 << EMC_ZQ_CAL_DEV_SEL_SHIFT |
  1311. EMC_ZQ_CAL_ZQ_CAL_CMD;
  1312. ccfifo_writel(emc, value, EMC_ZQ_CAL, 0);
  1313. }
  1314. }
  1315. }
  1316. if (bg_reg_mode_change) {
  1317. tegra210_emc_set_shadow_bypass(emc, ACTIVE);
  1318. if (ramp_up_wait <= 1250000)
  1319. delay = (1250000 - ramp_up_wait) / dst_clk_period;
  1320. else
  1321. delay = 0;
  1322. ccfifo_writel(emc,
  1323. next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX],
  1324. EMC_PMACRO_BG_BIAS_CTRL_0, delay);
  1325. tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
  1326. }
  1327. /*
  1328. * Step 20:
  1329. * Issue ref and optional QRST.
  1330. */
  1331. emc_dbg(emc, STEPS, "Step 20\n");
  1332. if (dram_type != DRAM_TYPE_LPDDR4)
  1333. ccfifo_writel(emc, 0, EMC_REF, 0);
  1334. if (opt_do_sw_qrst) {
  1335. ccfifo_writel(emc, 1, EMC_ISSUE_QRST, 0);
  1336. ccfifo_writel(emc, 0, EMC_ISSUE_QRST, 2);
  1337. }
  1338. /*
  1339. * Step 21:
  1340. * Restore ZCAL and ZCAL interval.
  1341. */
  1342. emc_dbg(emc, STEPS, "Step 21\n");
  1343. if (save_restore_clkstop_pd || opt_zcal_en_cc) {
  1344. ccfifo_writel(emc, emc_dbg | EMC_DBG_WRITE_MUX_ACTIVE,
  1345. EMC_DBG, 0);
  1346. if (opt_zcal_en_cc && dram_type != DRAM_TYPE_LPDDR4)
  1347. ccfifo_writel(emc, next->burst_regs[EMC_ZCAL_INTERVAL_INDEX],
  1348. EMC_ZCAL_INTERVAL, 0);
  1349. if (save_restore_clkstop_pd)
  1350. ccfifo_writel(emc, next->burst_regs[EMC_CFG_INDEX] &
  1351. ~EMC_CFG_DYN_SELF_REF,
  1352. EMC_CFG, 0);
  1353. ccfifo_writel(emc, emc_dbg, EMC_DBG, 0);
  1354. }
  1355. /*
  1356. * Step 22:
  1357. * Restore EMC_CFG_PIPE_CLK.
  1358. */
  1359. emc_dbg(emc, STEPS, "Step 22\n");
  1360. ccfifo_writel(emc, emc_cfg_pipe_clk, EMC_CFG_PIPE_CLK, 0);
  1361. if (bg_reg_mode_change) {
  1362. if (enable_bg_reg)
  1363. emc_writel(emc,
  1364. next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
  1365. ~EMC_PMACRO_BG_BIAS_CTRL_0_BGLP_E_PWRD,
  1366. EMC_PMACRO_BG_BIAS_CTRL_0);
  1367. else
  1368. emc_writel(emc,
  1369. next->burst_regs[EMC_PMACRO_BG_BIAS_CTRL_0_INDEX] &
  1370. ~EMC_PMACRO_BG_BIAS_CTRL_0_BG_E_PWRD,
  1371. EMC_PMACRO_BG_BIAS_CTRL_0);
  1372. }
  1373. /*
  1374. * Step 23:
  1375. */
  1376. emc_dbg(emc, STEPS, "Step 23\n");
  1377. value = emc_readl(emc, EMC_CFG_DIG_DLL);
  1378. value |= EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_TRAFFIC;
  1379. value &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_RW_UNTIL_LOCK;
  1380. value &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_UNTIL_LOCK;
  1381. value &= ~EMC_CFG_DIG_DLL_CFG_DLL_EN;
  1382. value = (value & ~EMC_CFG_DIG_DLL_CFG_DLL_MODE_MASK) |
  1383. (2 << EMC_CFG_DIG_DLL_CFG_DLL_MODE_SHIFT);
  1384. emc_writel(emc, value, EMC_CFG_DIG_DLL);
  1385. tegra210_emc_do_clock_change(emc, clksrc);
  1386. /*
  1387. * Step 24:
  1388. * Save training results. Removed.
  1389. */
  1390. /*
  1391. * Step 25:
  1392. * Program MC updown registers.
  1393. */
  1394. emc_dbg(emc, STEPS, "Step 25\n");
  1395. if (next->rate > last->rate) {
  1396. for (i = 0; i < next->num_up_down; i++)
  1397. mc_writel(emc->mc, next->la_scale_regs[i],
  1398. emc->offsets->la_scale[i]);
  1399. tegra210_emc_timing_update(emc);
  1400. }
  1401. /*
  1402. * Step 26:
  1403. * Restore ZCAL registers.
  1404. */
  1405. emc_dbg(emc, STEPS, "Step 26\n");
  1406. if (dram_type == DRAM_TYPE_LPDDR4) {
  1407. tegra210_emc_set_shadow_bypass(emc, ACTIVE);
  1408. emc_writel(emc, next->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX],
  1409. EMC_ZCAL_WAIT_CNT);
  1410. emc_writel(emc, next->burst_regs[EMC_ZCAL_INTERVAL_INDEX],
  1411. EMC_ZCAL_INTERVAL);
  1412. tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
  1413. }
  1414. if (dram_type != DRAM_TYPE_LPDDR4 && opt_zcal_en_cc &&
  1415. !opt_short_zcal && opt_cc_short_zcal) {
  1416. udelay(2);
  1417. tegra210_emc_set_shadow_bypass(emc, ACTIVE);
  1418. if (dram_type == DRAM_TYPE_LPDDR2)
  1419. emc_writel(emc, next->burst_regs[EMC_MRS_WAIT_CNT_INDEX],
  1420. EMC_MRS_WAIT_CNT);
  1421. else if (dram_type == DRAM_TYPE_DDR3)
  1422. emc_writel(emc, next->burst_regs[EMC_ZCAL_WAIT_CNT_INDEX],
  1423. EMC_ZCAL_WAIT_CNT);
  1424. tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
  1425. }
  1426. /*
  1427. * Step 27:
  1428. * Restore EMC_CFG, FDPD registers.
  1429. */
  1430. emc_dbg(emc, STEPS, "Step 27\n");
  1431. tegra210_emc_set_shadow_bypass(emc, ACTIVE);
  1432. emc_writel(emc, next->burst_regs[EMC_CFG_INDEX], EMC_CFG);
  1433. tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
  1434. emc_writel(emc, next->emc_fdpd_ctrl_cmd_no_ramp,
  1435. EMC_FDPD_CTRL_CMD_NO_RAMP);
  1436. emc_writel(emc, next->emc_sel_dpd_ctrl, EMC_SEL_DPD_CTRL);
  1437. /*
  1438. * Step 28:
  1439. * Training recover. Removed.
  1440. */
  1441. emc_dbg(emc, STEPS, "Step 28\n");
  1442. tegra210_emc_set_shadow_bypass(emc, ACTIVE);
  1443. emc_writel(emc,
  1444. next->burst_regs[EMC_PMACRO_AUTOCAL_CFG_COMMON_INDEX],
  1445. EMC_PMACRO_AUTOCAL_CFG_COMMON);
  1446. tegra210_emc_set_shadow_bypass(emc, ASSEMBLY);
  1447. /*
  1448. * Step 29:
  1449. * Power fix WAR.
  1450. */
  1451. emc_dbg(emc, STEPS, "Step 29\n");
  1452. emc_writel(emc, EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE0 |
  1453. EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE1 |
  1454. EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE2 |
  1455. EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE3 |
  1456. EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE4 |
  1457. EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE5 |
  1458. EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE6 |
  1459. EMC_PMACRO_CFG_PM_GLOBAL_0_DISABLE_CFG_BYTE7,
  1460. EMC_PMACRO_CFG_PM_GLOBAL_0);
  1461. emc_writel(emc, EMC_PMACRO_TRAINING_CTRL_0_CH0_TRAINING_E_WRPTR,
  1462. EMC_PMACRO_TRAINING_CTRL_0);
  1463. emc_writel(emc, EMC_PMACRO_TRAINING_CTRL_1_CH1_TRAINING_E_WRPTR,
  1464. EMC_PMACRO_TRAINING_CTRL_1);
  1465. emc_writel(emc, 0, EMC_PMACRO_CFG_PM_GLOBAL_0);
  1466. /*
  1467. * Step 30:
  1468. * Re-enable autocal.
  1469. */
  1470. emc_dbg(emc, STEPS, "Step 30: Re-enable DLL and AUTOCAL\n");
  1471. if (next->burst_regs[EMC_CFG_DIG_DLL_INDEX] & EMC_CFG_DIG_DLL_CFG_DLL_EN) {
  1472. value = emc_readl(emc, EMC_CFG_DIG_DLL);
  1473. value |= EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_TRAFFIC;
  1474. value |= EMC_CFG_DIG_DLL_CFG_DLL_EN;
  1475. value &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_RW_UNTIL_LOCK;
  1476. value &= ~EMC_CFG_DIG_DLL_CFG_DLL_STALL_ALL_UNTIL_LOCK;
  1477. value = (value & ~EMC_CFG_DIG_DLL_CFG_DLL_MODE_MASK) |
  1478. (2 << EMC_CFG_DIG_DLL_CFG_DLL_MODE_SHIFT);
  1479. emc_writel(emc, value, EMC_CFG_DIG_DLL);
  1480. tegra210_emc_timing_update(emc);
  1481. }
  1482. emc_writel(emc, next->emc_auto_cal_config, EMC_AUTO_CAL_CONFIG);
  1483. /* Done! Yay. */
  1484. }
  1485. const struct tegra210_emc_sequence tegra210_emc_r21021 = {
  1486. .revision = 0x7,
  1487. .set_clock = tegra210_emc_r21021_set_clock,
  1488. .periodic_compensation = tegra210_emc_r21021_periodic_compensation,
  1489. };