/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <[email protected]>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <[email protected]>
 */
#include <linux/linkage.h>
#include <asm/assembler.h>

#include "omap34xx.h"
#include "iomap.h"
#include "cm3xxx.h"
#include "prm3xxx.h"
#include "sdrc.h"
#include "sram.h"
#include "control.h"
/*
 * Register access definitions
 */
#define SDRC_SCRATCHPAD_SEM_OFFS	0xc
#define SDRC_SCRATCHPAD_SEM_V	OMAP343X_SCRATCHPAD_REGADDR\
					(SDRC_SCRATCHPAD_SEM_OFFS)
#define PM_PREPWSTST_CORE_P	OMAP3430_PRM_BASE + CORE_MOD +\
					OMAP3430_PM_PREPWSTST
#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
#define CM_IDLEST_CKGEN_V	OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
#define SRAM_BASE_P		OMAP3_SRAM_PA
#define CONTROL_STAT		OMAP343X_CTRL_BASE + OMAP343X_CONTROL_STATUS
#define CONTROL_MEM_RTA_CTRL	(OMAP343X_CTRL_BASE +\
					OMAP36XX_CONTROL_MEM_RTA_CTRL)

/* Move this once a correct place is available */
#define SCRATCHPAD_MEM_OFFS	0x310
#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE +\
					OMAP343X_CONTROL_MEM_WKUP +\
					SCRATCHPAD_MEM_OFFS)

#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
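
/*
 * Naming convention used above (inferred from how the symbols are built):
 * the _P suffix denotes a physical address, the _V suffix a statically
 * mapped virtual address. The _P variants are used on paths that may run
 * with the MMU off, the _V variants only while the MMU is on.
 */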

/*
 * This file needs to be built unconditionally as ARM to interoperate correctly
 * with non-Thumb-2-capable firmware.
 */
	.arm

/*
 * API functions
 */

	.text
/*
 * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
 * This function sets up a flag that will allow for this toggling to take
 * place on 3630. Hopefully a future revision will not need this.
 */
ENTRY(enable_omap3630_toggle_l2_on_restore)
	stmfd	sp!, {lr}		@ save registers on stack
	/* Setup so that we will disable and enable l2 */
	mov	r1, #0x1
	adr	r3, l2dis_3630_offset
	ldr	r2, [r3]		@ value for offset
	str	r1, [r2, r3]		@ write to l2dis_3630
	ldmfd	sp!, {pc}		@ restore regs and return
ENDPROC(enable_omap3630_toggle_l2_on_restore)
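
/*
 * Note: l2dis_3630 lives in .data and is reached through a stored
 * PC-relative offset (l2dis_3630_offset) rather than an absolute address,
 * so the same access pattern also works in the restore path below, which
 * may execute from a physical alias with the MMU off.
 */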

/*
 * Function to call ROM code to save the secure RAM context.
 *
 * r0 = physical address of the parameters
 */
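/*
 * PPA/SMC calling convention, as set up by the code below (the exact
 * meaning of the task-id and flag values is ROM-defined):
 *	r12    = secure service ID (copied from r0)
 *	r1     = task ID for the ROM code
 *	r2, r6 = flags
 *	r3     = physical address of the parameter block
 *	smc #1 enters the secure monitor
 */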
	.arch armv7-a
	.arch_extension sec
ENTRY(save_secure_ram_context)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack
	mov	r3, r0			@ physical address of parameters
	mov	r0, #25			@ set service ID for PPA
	mov	r12, r0			@ copy secure service ID in r12
	mov	r1, #0			@ set task ID for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	dsb				@ data synchronization barrier
	dmb				@ data memory barrier
	smc	#1			@ call SMI monitor (smi #1)
	nop
	nop
	nop
	nop
	ldmfd	sp!, {r4 - r11, pc}
ENDPROC(save_secure_ram_context)
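
/*
 * Caller-side sketch (hedged; see pm34xx.c / omap-secure.c for the real
 * code, the prototype and storage symbol below are illustrative):
 *
 *	extern int save_secure_ram_context(u32 *addr);
 *
 *	// Pass the *physical* address of the parameter block describing
 *	// where the ROM should dump the secure RAM contents.
 *	ret = save_secure_ram_context(__pa(omap3_secure_ram_storage));
 *
 * PPA service 25 then saves the secure RAM contents into that buffer.
 */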

/*
 * ======================
 * == Idle entry point ==
 * ======================
 */

/*
 * Forces OMAP into idle state
 *
 * omap34xx_cpu_suspend() - This bit of code saves the CPU context if needed
 * and executes the WFI instruction. Calling WFI effectively changes the
 * power domain states to the desired target power states.
 *
 * Notes:
 * - only the minimum set of functions gets copied to internal SRAM at boot
 *   and after wake-up from OFF mode, cf. omap_push_sram_idle. The function
 *   pointers in SDRAM or SRAM are called depending on the desired low power
 *   target state.
 * - when the OMAP wakes up it continues at different execution points
 *   depending on the low power mode (non-OFF vs OFF modes),
 *   cf. 'Resume path for xxx mode' comments.
 */
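/*
 * A minimal sketch of how the WFI code below is pushed to SRAM (hedged;
 * the real code lives in pm34xx.c and uses the sram.c helpers):
 *
 *	void (*omap3_do_wfi_sram)(void);
 *
 *	void omap_push_sram_idle(void)
 *	{
 *		omap3_do_wfi_sram = omap_sram_push(omap3_do_wfi, omap3_do_wfi_sz);
 *	}
 */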
	.align	3
ENTRY(omap34xx_cpu_suspend)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/*
	 * r0 contains information about saving context:
	 *   0 - No context lost
	 *   1 - Only L1 and logic lost
	 *   2 - Only L2 lost (even if L1 is retained, we clean it along with L2)
	 *   3 - Both L1 and L2 lost and logic lost
	 */

	/*
	 * For OFF mode: save context and jump to WFI in SDRAM (omap3_do_wfi)
	 * For non-OFF modes: jump to the WFI code in SRAM (omap3_do_wfi_sram)
	 */
	ldr	r4, omap3_do_wfi_sram_addr
	ldr	r5, [r4]
	cmp	r0, #0x0		@ If no context save required,
	bxeq	r5			@  jump to the WFI code in SRAM

	/* Otherwise fall through to the save context code */
save_context_wfi:
	/*
	 * Jump out to the kernel flush routine:
	 *  - reusing that code is better
	 *  - it executes in a cached space so is faster than a refetch per block
	 *  - should be faster and will change with the kernel
	 *  - 'might' have to copy address, load and jump to it
	 * Flush all data from the L1 data cache before disabling
	 * SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	mov	lr, pc
	bx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. Clearing SCTLR.C makes all data accesses
	 * strongly ordered, so they no longer hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

	/*
	 * Invalidate the L1 data cache. Even though only an invalidate is
	 * necessary, the exported flush API is used here. A clean of an
	 * already-clean cache is almost a NOP.
	 */
	ldr	r1, kernel_flush
	blx	r1

	b	omap3_do_wfi
ENDPROC(omap34xx_cpu_suspend)
omap3_do_wfi_sram_addr:
	.word	omap3_do_wfi_sram
kernel_flush:
	.word	v7_flush_dcache_all
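/*
 * Note: kernel_flush holds the address of v7_flush_dcache_all, and
 * omap3_do_wfi_sram_addr holds the address of the C pointer that
 * omap_push_sram_idle() aims at the SRAM copy of omap3_do_wfi.
 */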

/* ===================================
 * == WFI instruction => Enter idle ==
 * ===================================
 */

/*
 * Do WFI instruction
 * Includes the resume path for non-OFF modes
 *
 * This code gets copied to internal SRAM and is accessible
 * from both SDRAM and SRAM:
 * - executed from SRAM for non-off modes (omap3_do_wfi_sram),
 * - executed from SDRAM for OFF mode (omap3_do_wfi).
 */
	.align	3
ENTRY(omap3_do_wfi)
	ldr	r4, sdrc_power		@ read the SDRC_POWER register
	ldr	r5, [r4]		@ read the contents of SDRC_POWER
	orr	r5, r5, #0x40		@ enable self refresh on idle req
	str	r5, [r4]		@ write back to SDRC_POWER register

	/* Data synchronization barrier and data memory barrier */
	dsb
	dmb

	/*
	 * ===================================
	 * == WFI instruction => Enter idle ==
	 * ===================================
	 */
	wfi				@ wait for interrupt

	/*
	 * ===================================
	 * == Resume path for non-OFF modes ==
	 * ===================================
	 */
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/*
	 * This function implements the erratum ID i581 WA:
	 * SDRC state restore before accessing the SDRAM
	 *
	 * Only used at return from non-OFF mode. For OFF
	 * mode the ROM code configures the SDRC and
	 * the DPLL before calling the restore code directly
	 * from DDR.
	 */

/* Make sure SDRC accesses are ok */
wait_sdrc_ok:

/* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */
	ldr	r4, cm_idlest_ckgen
wait_dpll3_lock:
	ldr	r5, [r4]
	tst	r5, #1
	beq	wait_dpll3_lock

	ldr	r4, cm_idlest1_core
wait_sdrc_ready:
	ldr	r5, [r4]
	tst	r5, #0x2
	bne	wait_sdrc_ready
	/* allow DLL powerdown upon hw idle req */
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40
	str	r5, [r4]

is_dll_in_lock_mode:
	/* Is dll in lock mode? */
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	tst	r5, #0x4
	bne	exit_nonoff_modes	@ Return if locked
	/* wait till dll locks */
wait_dll_lock_timed:
	ldr	r4, sdrc_dlla_status
	/* Wait 20us for lock */
	mov	r6, #8
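	@ bounded poll: give up and kick the DLL after 8 reads of DLLA_STATUS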
wait_dll_lock:
	subs	r6, r6, #0x1
	beq	kick_dll
	ldr	r5, [r4]
	and	r5, r5, #0x4
	cmp	r5, #0x4
	bne	wait_dll_lock
	b	exit_nonoff_modes	@ Return when locked

	/* disable/reenable DLL if not locked */
kick_dll:
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	mov	r6, r5
	bic	r6, #(1<<3)		@ disable dll
	str	r6, [r4]
	dsb
	orr	r6, r6, #(1<<3)		@ enable dll
	str	r6, [r4]
	dsb
	b	wait_dll_lock_timed

exit_nonoff_modes:
	/* Re-enable C-bit if needed */
	mrc	p15, 0, r0, c1, c0, 0
	tst	r0, #(1 << 2)		@ Is the C bit enabled?
	orreq	r0, r0, #(1 << 2)	@ Enable the C bit if cleared
	mcreq	p15, 0, r0, c1, c0, 0
	isb

	/*
	 * ===================================
	 * == Exit point from non-OFF modes ==
	 * ===================================
	 */
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(omap3_do_wfi)
sdrc_power:
	.word	SDRC_POWER_V
cm_idlest1_core:
	.word	CM_IDLEST1_CORE_V
cm_idlest_ckgen:
	.word	CM_IDLEST_CKGEN_V
sdrc_dlla_status:
	.word	SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
	.word	SDRC_DLLA_CTRL_V
ENTRY(omap3_do_wfi_sz)
	.word	. - omap3_do_wfi
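/*
 * omap3_do_wfi_sz is the size of the block above; it is what
 * omap_push_sram_idle (cf. the notes at the top) passes to the SRAM
 * push helper when copying omap3_do_wfi into internal SRAM.
 */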

/*
 * ==============================
 * == Resume path for OFF mode ==
 * ==============================
 */

/*
 * The restore_* functions are called by the ROM code
 * when back from WFI in OFF mode.
 * Cf. the get_*restore_pointer functions.
 *
 * restore_es3: applies to 34xx >= ES3.0
 * restore_3630: applies to 36xx
 * restore: common code for 3xxx
 *
 * Note: when back from CORE and MPU OFF mode we are running
 * from SDRAM, with the MMU off and the caches and branch prediction
 * disabled. The SRAM contents have also been cleared.
 */
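/*
 * Hedged sketch of how the ROM finds these entry points: the scratchpad
 * save code (omap3_save_scratchpad_contents() in control.c) stores the
 * physical address of the matching restore_* routine, and the ROM jumps
 * there on wake-up from OFF. Roughly:
 *
 *	if (cpu_is_omap3630())
 *		restore_ptr = __pa_symbol(omap3_restore_3630);
 *	else if (omap_rev() >= OMAP3430_REV_ES3_0)
 *		restore_ptr = __pa_symbol(omap3_restore_es3);
 *	else
 *		restore_ptr = __pa_symbol(omap3_restore);
 */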
ENTRY(omap3_restore_es3)
	ldr	r5, pm_prepwstst_core_p
	ldr	r4, [r5]
	and	r4, r4, #0x3
	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
	bne	omap3_restore	@ Fall through to OMAP3 common code
	adr	r0, es3_sdrc_fix
	ldr	r1, sram_base
	ldr	r2, es3_sdrc_fix_sz
	mov	r2, r2, ror #2
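	@ es3_sdrc_fix_sz is in bytes and word-aligned, so ror #2 == lsr #2
	@ here: convert the byte count into a word count for the copy loop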
copy_to_sram:
	ldmia	r0!, {r3}	@ val = *src
	stmia	r1!, {r3}	@ *dst = val
	subs	r2, r2, #0x1	@ num_words--
	bne	copy_to_sram
	ldr	r1, sram_base
	blx	r1
	b	omap3_restore	@ Fall through to OMAP3 common code
ENDPROC(omap3_restore_es3)
ENTRY(omap3_restore_3630)
	ldr	r1, pm_prepwstst_core_p
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if previous power state of CORE is OFF
	bne	omap3_restore	@ Fall through to OMAP3 common code
	/* Disable RTA before giving control */
	ldr	r1, control_mem_rta
	mov	r2, #OMAP36XX_RTA_DISABLE
	str	r2, [r1]
ENDPROC(omap3_restore_3630)

	/* Fall through to common code for the remaining logic */

ENTRY(omap3_restore)
	/*
	 * Read the pwstctrl register to check the reason for MPU reset.
	 * This tells us what was lost.
	 */
	ldr	r1, pm_pwstctrl_mpu
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if target power state was OFF or RET
	bne	logic_l1_restore

	adr	r1, l2dis_3630_offset	@ address for offset
	ldr	r0, [r1]		@ value for offset
	ldr	r0, [r1, r0]		@ value at l2dis_3630
	cmp	r0, #0x1	@ should we disable L2 on 3630?
	bne	skipl2dis
	mrc	p15, 0, r0, c1, c0, 1
	bic	r0, r0, #2	@ disable L2 cache
	mcr	p15, 0, r0, c1, c0, 1
skipl2dis:
	ldr	r0, control_stat
	ldr	r1, [r0]
	and	r1, #0x700
	cmp	r1, #0x300
	beq	l2_inv_gp
	adr	r0, l2_inv_api_params_offset
	ldr	r3, [r0]
	add	r3, r3, r0	@ r3 points to dummy parameters
	mov	r0, #40		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task ID for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	dsb			@ data synchronization barrier
	dmb			@ data memory barrier
	smc	#1		@ call SMI monitor (smi #1)
	/* Write to Aux control register to set some bits */
	mov	r0, #42		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task ID for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]	@ r3 points to parameters
	dsb			@ data synchronization barrier
	dmb			@ data memory barrier
	smc	#1		@ call SMI monitor (smi #1)
#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
	/* Restore L2 aux control register */
	@ set service ID for PPA
	mov	r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
	mov	r12, r0		@ copy service ID in r12
	mov	r1, #0		@ set task ID for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	adds	r3, r3, #8	@ r3 points to parameters
	dsb			@ data synchronization barrier
	dmb			@ data memory barrier
	smc	#1		@ call SMI monitor (smi #1)
#endif
	b	logic_l1_restore

	.align
l2_inv_api_params_offset:
	.long	l2_inv_api_params - .
l2_inv_gp:
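	/*
	 * On GP devices the ROM monitor is entered with smc #0; r12 selects
	 * the ROM API to run and r0 carries its argument. Here the code
	 * invalidates L2 (r12 = 1) and then writes two register values
	 * fetched from the scratchpad save area (r12 = 3 and 2).
	 */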
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1	@ set up to invalidate L2
	smc	#0		@ Call SMI monitor (smieq)
	/* Write to Aux control register to set some bits */
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	ldr	r0, [r3, #4]
	mov	r12, #0x3
	smc	#0		@ Call SMI monitor (smieq)
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	ldr	r0, [r3, #12]
	mov	r12, #0x2
	smc	#0		@ Call SMI monitor (smieq)
logic_l1_restore:
	adr	r0, l2dis_3630_offset	@ address for offset
	ldr	r1, [r0]		@ value for offset
	ldr	r1, [r0, r1]		@ value at l2dis_3630
	cmp	r1, #0x1		@ Test if L2 re-enable needed on 3630
	bne	skipl2reen
	mrc	p15, 0, r1, c1, c0, 1
	orr	r1, r1, #2		@ re-enable L2 cache
	mcr	p15, 0, r1, c1, c0, 1
skipl2reen:

	/* Now branch to the common CPU resume function */
	b	cpu_resume
ENDPROC(omap3_restore)
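/*
 * cpu_resume is the generic ARM resume entry point (arch/arm/kernel/sleep.S);
 * it restores the state saved by cpu_suspend() and re-enables the MMU before
 * returning to the kernel proper.
 */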

	.ltorg

/*
 * Local variables
 */
pm_prepwstst_core_p:
	.word	PM_PREPWSTST_CORE_P
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sram_base:
	.word	SRAM_BASE_P + 0x8000
control_stat:
	.word	CONTROL_STAT
control_mem_rta:
	.word	CONTROL_MEM_RTA_CTRL
l2dis_3630_offset:
	.long	l2dis_3630 - .

	.data
	.align	2
l2dis_3630:
	.word	0

	.data
	.align	2
l2_inv_api_params:
	.word	0x1, 0x00

/*
 * Internal functions
 */

/*
 * This function implements the erratum ID i443 WA, applies to 34xx >= ES3.0
 * Copied to and run from SRAM in order to reconfigure the SDRC parameters.
 */
	.text
	.align	3
ENTRY(es3_sdrc_fix)
	ldr	r4, sdrc_syscfg		@ get config addr
	ldr	r5, [r4]		@ get value
	tst	r5, #0x100		@ is part access blocked
	it	eq
	biceq	r5, r5, #0x100		@ clear bit if set
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_mr_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_0	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	ldr	r4, sdrc_mr_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_1	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	bx	lr

/*
 * Local variables
 */
	.align
sdrc_syscfg:
	.word	SDRC_SYSCONFIG_P
sdrc_mr_0:
	.word	SDRC_MR_0_P
sdrc_emr2_0:
	.word	SDRC_EMR2_0_P
sdrc_manual_0:
	.word	SDRC_MANUAL_0_P
sdrc_mr_1:
	.word	SDRC_MR_1_P
sdrc_emr2_1:
	.word	SDRC_EMR2_1_P
sdrc_manual_1:
	.word	SDRC_MANUAL_1_P
ENDPROC(es3_sdrc_fix)
ENTRY(es3_sdrc_fix_sz)
	.word	. - es3_sdrc_fix
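/*
 * es3_sdrc_fix_sz is the size, in bytes, of es3_sdrc_fix (including its
 * literal pool); omap3_restore_es3 above converts it to a word count and
 * uses it to copy the fix into SRAM before running it.
 */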