/* ti-emif-sram-pm.S */
  1. /* SPDX-License-Identifier: GPL-2.0-only */
  2. /*
  3. * Low level PM code for TI EMIF
  4. *
  5. * Copyright (C) 2016-2017 Texas Instruments Incorporated - http://www.ti.com/
  6. * Dave Gerlach
  7. */
  8. #include <linux/linkage.h>
  9. #include <asm/assembler.h>
  10. #include <asm/memory.h>
  11. #include "emif.h"
  12. #include "ti-emif-asm-offsets.h"
  13. #define EMIF_POWER_MGMT_WAIT_SELF_REFRESH_8192_CYCLES 0x00a0
  14. #define EMIF_POWER_MGMT_SR_TIMER_MASK 0x00f0
  15. #define EMIF_POWER_MGMT_SELF_REFRESH_MODE 0x0200
  16. #define EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK 0x0700
  17. #define EMIF_SDCFG_TYPE_DDR2 0x2 << SDRAM_TYPE_SHIFT
  18. #define EMIF_SDCFG_TYPE_DDR3 0x3 << SDRAM_TYPE_SHIFT
  19. #define EMIF_STATUS_READY 0x4
  20. #define AM43XX_EMIF_PHY_CTRL_REG_COUNT 0x120
  21. #define EMIF_AM437X_REGISTERS 0x1
	.arm				/* this code runs in ARM (not Thumb) state */
	.align 3			/* 8-byte align the SRAM-resident blob */

/* Start marker for the whole block of code that gets copied to SRAM */
ENTRY(ti_emif_sram)
  25. /*
  26. * void ti_emif_save_context(void)
  27. *
  28. * Used during suspend to save the context of all required EMIF registers
  29. * to local memory if the EMIF is going to lose context during the sleep
  30. * transition. Operates on the VIRTUAL address of the EMIF.
  31. */
ENTRY(ti_emif_save_context)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/*
	 * Register roles:
	 *   r4 = ti_emif_pm_sram_data parameter block (PC-relative)
	 *   r0 = EMIF base address (virtual)
	 *   r2 = register save area (virtual)
	 *   r1 = scratch for each register value copied
	 */
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_VIRT_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_VIRT_OFFSET]

	/* Save EMIF configuration, one register at a time, into the save area */
	ldr	r1, [r0, #EMIF_SDRAM_CONFIG]
	str	r1, [r2, #EMIF_SDCFG_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_REFRESH_CONTROL]
	str	r1, [r2, #EMIF_REF_CTRL_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_TIMING_1]
	str	r1, [r2, #EMIF_TIMING1_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_TIMING_2]
	str	r1, [r2, #EMIF_TIMING2_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_TIMING_3]
	str	r1, [r2, #EMIF_TIMING3_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]
	str	r1, [r2, #EMIF_PMCR_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_POWER_MANAGEMENT_CTRL_SHDW]
	str	r1, [r2, #EMIF_PMCR_SHDW_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_OUTPUT_IMPEDANCE_CALIBRATION_CONFIG]
	str	r1, [r2, #EMIF_ZQCFG_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_DDR_PHY_CTRL_1]
	str	r1, [r2, #EMIF_DDR_PHY_CTLR_1_OFFSET]

	ldr	r1, [r0, #EMIF_COS_CONFIG]
	str	r1, [r2, #EMIF_COS_CONFIG_OFFSET]

	ldr	r1, [r0, #EMIF_PRIORITY_TO_CLASS_OF_SERVICE_MAPPING]
	str	r1, [r2, #EMIF_PRIORITY_TO_COS_MAPPING_OFFSET]

	ldr	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_1_MAPPING]
	str	r1, [r2, #EMIF_CONNECT_ID_SERV_1_MAP_OFFSET]

	ldr	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_2_MAPPING]
	str	r1, [r2, #EMIF_CONNECT_ID_SERV_2_MAP_OFFSET]

	ldr	r1, [r0, #EMIF_OCP_CONFIG]
	str	r1, [r2, #EMIF_OCP_CONFIG_VAL_OFFSET]

	/* The registers below only exist with the AM43xx register layout */
	ldr	r5, [r4, #EMIF_PM_CONFIG_OFFSET]
	cmp	r5, #EMIF_SRAM_AM43_REG_LAYOUT
	bne	emif_skip_save_extra_regs

	ldr	r1, [r0, #EMIF_READ_WRITE_LEVELING_RAMP_CONTROL]
	str	r1, [r2, #EMIF_RD_WR_LEVEL_RAMP_CTRL_OFFSET]

	ldr	r1, [r0, #EMIF_READ_WRITE_EXECUTION_THRESHOLD]
	str	r1, [r2, #EMIF_RD_WR_EXEC_THRESH_OFFSET]

	ldr	r1, [r0, #EMIF_LPDDR2_NVM_TIMING]
	str	r1, [r2, #EMIF_LPDDR2_NVM_TIM_OFFSET]

	ldr	r1, [r0, #EMIF_LPDDR2_NVM_TIMING_SHDW]
	str	r1, [r2, #EMIF_LPDDR2_NVM_TIM_SHDW_OFFSET]

	ldr	r1, [r0, #EMIF_DLL_CALIB_CTRL]
	str	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_DLL_CALIB_CTRL_SHDW]
	str	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_SHDW_OFFSET]

	/* Loop and save entire block of emif phy regs */
	mov	r5, #0x0		@ r5 = byte offset into the PHY block
	add	r4, r2, #EMIF_EXT_PHY_CTRL_VALS_OFFSET	@ r4 = PHY save area
	add	r3, r0, #EMIF_EXT_PHY_CTRL_1		@ r3 = first ext PHY ctrl reg
ddr_phy_ctrl_save:
	ldr	r1, [r3, r5]
	str	r1, [r4, r5]
	add	r5, r5, #0x4		@ one 32-bit register at a time
	cmp	r5, #AM43XX_EMIF_PHY_CTRL_REG_COUNT	@ 0x120 bytes = 72 regs
	bne	ddr_phy_ctrl_save

emif_skip_save_extra_regs:
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(ti_emif_save_context)
  94. /*
  95. * void ti_emif_restore_context(void)
  96. *
  97. * Used during resume to restore the context of all required EMIF registers
  98. * from local memory after the EMIF has lost context during a sleep transition.
  99. * Operates on the PHYSICAL address of the EMIF.
  100. */
ENTRY(ti_emif_restore_context)
	/*
	 * Register roles:
	 *   r4 = ti_emif_pm_sram_data parameter block (PC-relative)
	 *   r0 = EMIF base address (physical)
	 *   r2 = saved register area (physical)
	 *   r1 = scratch for each value restored
	 *
	 * NOTE(review): unlike the save path, no registers are pushed here
	 * (clobbers r1-r5) — presumably no usable stack exists this early in
	 * resume; confirm against the caller before changing this.
	 */
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_PHYS_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_PHYS_OFFSET]

	/* Config EMIF Timings; each *_SHDW shadow register gets the same value */
	ldr	r1, [r2, #EMIF_DDR_PHY_CTLR_1_OFFSET]
	str	r1, [r0, #EMIF_DDR_PHY_CTRL_1]
	str	r1, [r0, #EMIF_DDR_PHY_CTRL_1_SHDW]

	ldr	r1, [r2, #EMIF_TIMING1_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_TIMING_1]
	str	r1, [r0, #EMIF_SDRAM_TIMING_1_SHDW]

	ldr	r1, [r2, #EMIF_TIMING2_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_TIMING_2]
	str	r1, [r0, #EMIF_SDRAM_TIMING_2_SHDW]

	ldr	r1, [r2, #EMIF_TIMING3_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_TIMING_3]
	str	r1, [r0, #EMIF_SDRAM_TIMING_3_SHDW]

	ldr	r1, [r2, #EMIF_REF_CTRL_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_REFRESH_CONTROL]
	str	r1, [r0, #EMIF_SDRAM_REFRESH_CTRL_SHDW]

	ldr	r1, [r2, #EMIF_PMCR_VAL_OFFSET]
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	ldr	r1, [r2, #EMIF_PMCR_SHDW_VAL_OFFSET]
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CTRL_SHDW]

	ldr	r1, [r2, #EMIF_COS_CONFIG_OFFSET]
	str	r1, [r0, #EMIF_COS_CONFIG]

	ldr	r1, [r2, #EMIF_PRIORITY_TO_COS_MAPPING_OFFSET]
	str	r1, [r0, #EMIF_PRIORITY_TO_CLASS_OF_SERVICE_MAPPING]

	ldr	r1, [r2, #EMIF_CONNECT_ID_SERV_1_MAP_OFFSET]
	str	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_1_MAPPING]

	ldr	r1, [r2, #EMIF_CONNECT_ID_SERV_2_MAP_OFFSET]
	str	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_2_MAPPING]

	ldr	r1, [r2, #EMIF_OCP_CONFIG_VAL_OFFSET]
	str	r1, [r0, #EMIF_OCP_CONFIG]

	/* The registers below only exist with the AM43xx register layout */
	ldr	r5, [r4, #EMIF_PM_CONFIG_OFFSET]
	cmp	r5, #EMIF_SRAM_AM43_REG_LAYOUT
	bne	emif_skip_restore_extra_regs

	ldr	r1, [r2, #EMIF_RD_WR_LEVEL_RAMP_CTRL_OFFSET]
	str	r1, [r0, #EMIF_READ_WRITE_LEVELING_RAMP_CONTROL]

	ldr	r1, [r2, #EMIF_RD_WR_EXEC_THRESH_OFFSET]
	str	r1, [r0, #EMIF_READ_WRITE_EXECUTION_THRESHOLD]

	ldr	r1, [r2, #EMIF_LPDDR2_NVM_TIM_OFFSET]
	str	r1, [r0, #EMIF_LPDDR2_NVM_TIMING]

	ldr	r1, [r2, #EMIF_LPDDR2_NVM_TIM_SHDW_OFFSET]
	str	r1, [r0, #EMIF_LPDDR2_NVM_TIMING_SHDW]

	ldr	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_OFFSET]
	str	r1, [r0, #EMIF_DLL_CALIB_CTRL]

	ldr	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_SHDW_OFFSET]
	str	r1, [r0, #EMIF_DLL_CALIB_CTRL_SHDW]

	ldr	r1, [r2, #EMIF_ZQCFG_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_OUTPUT_IMPEDANCE_CALIBRATION_CONFIG]

	/* Loop and restore entire block of emif phy regs */
	mov	r5, #0x0		@ r5 = byte offset into the PHY block
	/* Load ti_emif_regs_amx3 + EMIF_EXT_PHY_CTRL_VALS_OFFSET for address
	 * to phy register save space
	 */
	add	r3, r2, #EMIF_EXT_PHY_CTRL_VALS_OFFSET	@ r3 = saved PHY values
	add	r4, r0, #EMIF_EXT_PHY_CTRL_1		@ r4 = first ext PHY ctrl reg
ddr_phy_ctrl_restore:
	ldr	r1, [r3, r5]
	str	r1, [r4, r5]
	add	r5, r5, #0x4		@ one 32-bit register at a time
	cmp	r5, #AM43XX_EMIF_PHY_CTRL_REG_COUNT	@ 0x120 bytes = 72 regs
	bne	ddr_phy_ctrl_restore

emif_skip_restore_extra_regs:
	/*
	 * Output impedence calib needed only for DDR3
	 * but since the initial state of this will be
	 * disabled for DDR2 no harm in restoring the
	 * old configuration
	 */
	ldr	r1, [r2, #EMIF_ZQCFG_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_OUTPUT_IMPEDANCE_CALIBRATION_CONFIG]

	/* Write to sdcfg last for DDR2 only */
	ldr	r1, [r2, #EMIF_SDCFG_VAL_OFFSET]
	and	r2, r1, #SDRAM_TYPE_MASK	@ r2 = SDRAM type field (r2 no longer needed as base)
	cmp	r2, #EMIF_SDCFG_TYPE_DDR2
	streq	r1, [r0, #EMIF_SDRAM_CONFIG]	@ only written when type == DDR2

	mov	pc, lr
ENDPROC(ti_emif_restore_context)
  181. /*
  182. * void ti_emif_run_hw_leveling(void)
  183. *
  184. * Used during resume to run hardware leveling again and restore the
  185. * configuration of the EMIF PHY, only for DDR3.
  186. */
ENTRY(ti_emif_run_hw_leveling)
	/* r0 = EMIF base address (physical); clobbers r1-r4 */
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_PHYS_OFFSET]

	/* Precompute leveling-control value with the start bit set */
	ldr	r3, [r0, #EMIF_READ_WRITE_LEVELING_CONTROL]
	orr	r3, r3, #RDWRLVLFULL_START

	/* Hardware leveling is started only when the SDRAM type is DDR3 */
	ldr	r2, [r0, #EMIF_SDRAM_CONFIG]
	and	r2, r2, #SDRAM_TYPE_MASK
	cmp	r2, #EMIF_SDCFG_TYPE_DDR3
	bne	skip_hwlvl

	str	r3, [r0, #EMIF_READ_WRITE_LEVELING_CONTROL]	@ kick off leveling

	/*
	 * If EMIF registers are touched during initial stage of HW
	 * leveling sequence there will be an L3 NOC timeout error issued
	 * as the EMIF will not respond, which is not fatal, but it is
	 * avoidable. This small wait loop is enough time for this condition
	 * to clear, even at worst case of CPU running at max speed of 1Ghz.
	 */
	mov	r2, #0x2000		@ fixed busy-wait iteration count
1:
	subs	r2, r2, #0x1
	bne	1b

	/* Bit clears when operation is complete */
2:	ldr	r1, [r0, #EMIF_READ_WRITE_LEVELING_CONTROL]
	tst	r1, #RDWRLVLFULL_START
	bne	2b			@ poll until the start bit self-clears

skip_hwlvl:
	mov	pc, lr
ENDPROC(ti_emif_run_hw_leveling)
  215. /*
  216. * void ti_emif_enter_sr(void)
  217. *
  218. * Programs the EMIF to tell the SDRAM to enter into self-refresh
  219. * mode during a sleep transition. Operates on the VIRTUAL address
  220. * of the EMIF.
  221. */
ENTRY(ti_emif_enter_sr)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/* r0 = EMIF base (virtual), r2 = register save area (virtual) */
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_VIRT_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_VIRT_OFFSET]

	/* Replace the LP mode field of PWR_MGMT_CTRL with self-refresh */
	ldr	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK	@ clear mode field
	orr	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE	@ select self-refresh
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(ti_emif_enter_sr)
  233. /*
  234. * void ti_emif_exit_sr(void)
  235. *
  236. * Programs the EMIF to tell the SDRAM to exit self-refresh mode
  237. * after a sleep transition. Operates on the PHYSICAL address of
  238. * the EMIF.
  239. */
ENTRY(ti_emif_exit_sr)
	/*
	 * r0 = EMIF base (physical), r2 = saved register area (physical).
	 * NOTE(review): no registers are saved here (clobbers r1, r2, r4) —
	 * presumably called before a usable stack exists; confirm with caller.
	 */
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_PHYS_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_PHYS_OFFSET]

	/*
	 * Toggle EMIF to exit refresh mode:
	 * if EMIF lost context, PWR_MGT_CTRL is currently 0, writing disable
	 * (0x0), wont do diddly squat! so do a toggle from SR(0x2) to disable
	 * (0x0) here.
	 * *If* EMIF did not lose context, nothing broken as we write the same
	 * value(0x2) to reg before we write a disable (0x0).
	 */
	ldr	r1, [r2, #EMIF_PMCR_VAL_OFFSET]			@ saved PMCR value
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK
	orr	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE	@ first write: SR mode
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK	@ second write: disable
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	/* Wait for EMIF to become ready */
1:	ldr	r1, [r0, #EMIF_STATUS]
	tst	r1, #EMIF_STATUS_READY
	beq	1b			@ poll until READY bit is set

	mov	pc, lr
ENDPROC(ti_emif_exit_sr)
  264. /*
  265. * void ti_emif_abort_sr(void)
  266. *
  267. * Disables self-refresh after a failed transition to a low-power
  268. * state so the kernel can jump back to DDR and follow abort path.
  269. * Operates on the VIRTUAL address of the EMIF.
  270. */
ENTRY(ti_emif_abort_sr)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/* r0 = EMIF base (virtual), r2 = register save area (virtual) */
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_VIRT_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_VIRT_OFFSET]

	/* Write back the saved PMCR with the self-refresh mode field cleared */
	ldr	r1, [r2, #EMIF_PMCR_VAL_OFFSET]
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	/* Wait for EMIF to become ready */
1:	ldr	r1, [r0, #EMIF_STATUS]
	tst	r1, #EMIF_STATUS_READY
	beq	1b			@ poll until READY bit is set

	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(ti_emif_abort_sr)
	.align 3
/*
 * Parameter block shared with the C side: EMIF base addresses (virtual and
 * physical), register save space, and layout config, accessed above via the
 * EMIF_PM_*_OFFSET constants.  Reserved here, filled in at runtime.
 */
ENTRY(ti_emif_pm_sram_data)
	.space EMIF_PM_DATA_SIZE

/* Total size of the SRAM-resident code+data, measured from ti_emif_save_context */
ENTRY(ti_emif_sram_sz)
	.word	. - ti_emif_save_context