/* SPDX-License-Identifier: GPL-2.0
 *
 * arch/sh/kernel/cpu/sh4a/sleep-sh_mobile.S
 *
 * Sleep mode and Standby modes support for SuperH Mobile
 *
 * Copyright (C) 2009 Magnus Damm
 */
#include <linux/sys.h>
#include <linux/errno.h>
#include <linux/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/suspend.h>

/*
 * Kernel mode register usage, see entry.S:
 *	k0	scratch
 *	k1	scratch
 */
#define k0	r0
#define k1	r1
/* manage self-refresh and enter standby mode. must be self-contained.
 * this code will be copied to on-chip memory and executed from there.
 *
 * Entry (from C, offsets defined via asm-offsets / asm/suspend.h):
 *   r4 = SUSP_SH_* mode flags (stored at SH_SLEEP_MODE)
 *   r5 = base address of the on-chip memory page that holds this code
 *        and its data area; every @(SH_SLEEP_*, r5) access below is an
 *        offset into that page
 *
 * NOTE on SH delayed branches: the instruction immediately after
 * bsr/bra/jsr/rts executes in the delay slot, i.e. BEFORE control
 * transfers.  The recurring "bsr helper / mov #index, r0" pairs below
 * use the delay slot to load the helper's argument.
 */
	.balign 4
ENTRY(sh_mobile_sleep_enter_start)

	/* save mode flags */
	mov.l	r4, @(SH_SLEEP_MODE, r5)

	/* save original vbr */
	stc	vbr, r0
	mov.l	r0, @(SH_SLEEP_VBR, r5)

	/* point vbr to our on-chip memory page, so the resume/exception
	 * path can find the data area through vbr */
	ldc	r5, vbr

	/* save return address (restored into spc by the resume code) */
	sts	pr, r0
	mov.l	r0, @(SH_SLEEP_SPC, r5)

	/* save sr (restored into sr and ssr by the resume code) */
	stc	sr, r0
	mov.l	r0, @(SH_SLEEP_SR, r5)

	/* save general purpose registers to stack if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_REGS, r0
	bt	skip_regs_save

	/* high registers r8-r14 and pr are common to both banks */
	sts.l	pr, @-r15
	mov.l	r14, @-r15
	mov.l	r13, @-r15
	mov.l	r12, @-r15
	mov.l	r11, @-r15
	mov.l	r10, @-r15
	mov.l	r9, @-r15
	mov.l	r8, @-r15

	/* make sure bank0 is selected, save low registers.
	 * set_sr computes sr = (sr & r9) | r10; here r9 = ~RB, r10 = 0,
	 * which clears the register-bank bit.  r10 is loaded in the
	 * delay slot. */
	mov.l	rb_bit, r9
	not	r9, r9
	bsr	set_sr
	mov	#0, r10

	bsr	save_low_regs
	nop

	/* switch to bank 1, save low registers.
	 * r9 = -1 (all mask bits set, loaded in the delay slot),
	 * r10 = RB, so set_sr ORs the bank bit into sr. */
	mov.l	rb_bit, r10
	bsr	set_sr
	mov	#-1, r9

	bsr	save_low_regs
	nop

	/* switch back to bank 0 (clear RB again) */
	mov.l	rb_bit, r9
	not	r9, r9
	bsr	set_sr
	mov	#0, r10

skip_regs_save:
	/* save sp, also set sp to internal ram so the helpers below run
	 * without touching external memory */
	mov.l	r15, @(SH_SLEEP_SP, r5)
	mov	r5, r15

	/* save stbcr (register index passed in r0 via the delay slot) */
	bsr	save_register
	mov	#SH_SLEEP_REG_STBCR, r0

	/* save mmu and cache context if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_MMU, r0
	bt	skip_mmu_save_disable

	/* save mmu state: one save_register call per register, the
	 * index always loaded in the bsr delay slot */
	bsr	save_register
	mov	#SH_SLEEP_REG_PTEH, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_PTEL, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_TTB, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_TEA, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_MMUCR, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_PTEA, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_PASCR, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_IRMCR, r0

	/* invalidate TLBs and disable the MMU: write 4 to MMUCR.
	 * NOTE(review): presumably 4 = TI (TLB invalidate) with AT
	 * (address translation enable) cleared — confirm against the
	 * SH-4A MMUCR bit layout. */
	bsr	get_register
	mov	#SH_SLEEP_REG_MMUCR, r0
	mov	#4, r1
	mov.l	r1, @r0
	/* icbi after the store — presumably serializes the register
	 * write before execution continues; confirm */
	icbi	@r0

	/* save cache registers and disable caches (CCR = 0) */
	bsr	save_register
	mov	#SH_SLEEP_REG_CCR, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_RAMCR, r0

	bsr	get_register
	mov	#SH_SLEEP_REG_CCR, r0
	mov	#0, r1
	mov.l	r1, @r0
	icbi	@r0

skip_mmu_save_disable:
	/* call self-refresh entering code if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_SF, r0
	bt	skip_set_sf

	mov.l	@(SH_SLEEP_SF_PRE, r5), r0
	jsr	@r0
	nop

skip_set_sf:
	/* select the STBCR mode value in r1, then fall into do_sleep.
	 * The "mov #imm, r1" after each bra executes in the delay slot. */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_STANDBY, r0
	bt	test_rstandby

	/* set mode to "software standby mode" */
	bra	do_sleep
	mov	#0x80, r1

test_rstandby:
	tst	#SUSP_SH_RSTANDBY, r0
	bt	test_ustandby

	/* setup BAR register with the resume address so the CPU can
	 * restart from sh_mobile_sleep_resume_start */
	bsr	get_register
	mov	#SH_SLEEP_REG_BAR, r0
	mov.l	@(SH_SLEEP_RESUME, r5), r1
	mov.l	r1, @r0

	/* set mode to "r-standby mode" */
	bra	do_sleep
	mov	#0x20, r1

test_ustandby:
	tst	#SUSP_SH_USTANDBY, r0
	bt	force_sleep

	/* set mode to "u-standby mode" */
	bra	do_sleep
	mov	#0x10, r1

force_sleep:
	/* set mode to "sleep mode" */
	mov	#0x00, r1

do_sleep:
	/* setup and enter selected standby mode: write mode bits (r1)
	 * to STBCR, then execute sleep.  Loop in case of a spurious
	 * wakeup that does not go through the resume vector. */
	bsr	get_register
	mov	#SH_SLEEP_REG_STBCR, r0
	mov.l	r1, @r0
again:
	sleep
	bra	again
	nop

/* save_register: copy one hardware register value into the data area.
 * In:      r0 = SH_SLEEP_REG_* index
 * Does:    addr = *(r5 + SH_SLEEP_BASE_ADDR + r0);
 *          *(r5 + SH_SLEEP_BASE_DATA + r0) = *addr;
 * Clobbers: r1.  r0 is restored to the bare index on return. */
save_register:
	add	#SH_SLEEP_BASE_ADDR, r0
	mov.l	@(r0, r5), r1
	add	#-SH_SLEEP_BASE_ADDR, r0
	mov.l	@r1, r1
	add	#SH_SLEEP_BASE_DATA, r0
	mov.l	r1, @(r0, r5)
	add	#-SH_SLEEP_BASE_DATA, r0
	rts
	nop

/* get_register: look up a hardware register address from the data area.
 * In:  r0 = SH_SLEEP_REG_* index
 * Out: r0 = *(r5 + SH_SLEEP_BASE_ADDR + r0)  (the register's address) */
get_register:
	add	#SH_SLEEP_BASE_ADDR, r0
	mov.l	@(r0, r5), r0
	rts
	nop

/* set_sr: sr = (sr & r9) | r10.  Used to toggle the RB bank bit.
 * Clobbers r8. */
set_sr:
	stc	sr, r8
	and	r9, r8
	or	r10, r8
	ldc	r8, sr
	rts
	nop

/* save_low_regs: push r7..r0 of the currently selected bank.
 * The final r0 push sits in the rts delay slot, so it executes
 * before the return. */
save_low_regs:
	mov.l	r7, @-r15
	mov.l	r6, @-r15
	mov.l	r5, @-r15
	mov.l	r4, @-r15
	mov.l	r3, @-r15
	mov.l	r2, @-r15
	mov.l	r1, @-r15
	rts
	mov.l	r0, @-r15

	.balign 4
rb_bit:	.long	0x20000000 ! RB=1
ENTRY(sh_mobile_sleep_enter_end)
	.balign 4
/* Resume path, entered after wakeup from standby.  Runs from the same
 * on-chip memory page as the enter code; undoes everything the enter
 * path saved, then returns to the suspended caller via rte. */
ENTRY(sh_mobile_sleep_resume_start)

	/* figure out start address: bsr loads pr with the address of the
	 * instruction after its delay slot (label 0), then the low bits
	 * are masked off with ~0x7ff (literal at 1f) to get the base of
	 * this page.  NOTE(review): assumes the page holding this code
	 * and its data area is 2 KiB aligned — confirm against the copy
	 * code that places it in on-chip memory. */
	bsr	0f
	nop
0:
	sts	pr, k1
	mov.l	1f, k0
	and	k0, k1

	/* store pointer to data area in VBR */
	ldc	k1, vbr

	/* setup sr with saved sr: restores the register bank and mode
	 * bits that were in effect at suspend time */
	mov.l	@(SH_SLEEP_SR, k1), k0
	ldc	k0, sr

	/* now: user register set!  k0/k1 (bank regs) are gone; reload
	 * the data-area pointer from vbr into r5 */
	stc	vbr, r5

	/* setup spc with return address to c code (pr saved at suspend) */
	mov.l	@(SH_SLEEP_SPC, r5), r0
	ldc	r0, spc

	/* restore vbr */
	mov.l	@(SH_SLEEP_VBR, r5), r0
	ldc	r0, vbr

	/* setup ssr with saved sr; rte below will move it into sr */
	mov.l	@(SH_SLEEP_SR, r5), r0
	ldc	r0, ssr

	/* restore sp */
	mov.l	@(SH_SLEEP_SP, r5), r15

	/* restore sleep mode register (index in the bsr delay slot, as
	 * in the enter path) */
	bsr	restore_register
	mov	#SH_SLEEP_REG_STBCR, r0

	/* call self-refresh resume code if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_SF, r0
	bt	skip_restore_sf

	mov.l	@(SH_SLEEP_SF_POST, r5), r0
	jsr	@r0
	nop

skip_restore_sf:
	/* restore mmu and cache state if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_MMU, r0
	bt	skip_restore_mmu

	/* restore mmu state; MMUCR last, since writing it re-enables
	 * translation */
	bsr	restore_register
	mov	#SH_SLEEP_REG_PTEH, r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_PTEL, r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_TTB, r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_TEA, r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_PTEA, r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_PASCR, r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_IRMCR, r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_MMUCR, r0
	/* icbi after each control-register write — presumably serializes
	 * the store before continuing; confirm (same idiom as the enter
	 * path).  restore_register leaves the register address in r0. */
	icbi	@r0

	/* restore cache settings */
	bsr	restore_register
	mov	#SH_SLEEP_REG_RAMCR, r0
	icbi	@r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_CCR, r0
	icbi	@r0

skip_restore_mmu:
	/* restore general purpose registers if needed (reverse order of
	 * the enter path: bank 1 low regs, bank 0 low regs, then the
	 * shared high regs and pr) */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_REGS, r0
	bt	skip_restore_regs

	/* switch to bank 1, restore low registers.
	 * _set_sr computes sr = (sr & r9) | r10; r9 = -1 is loaded in
	 * the delay slot, r10 = RB sets the bank bit. */
	mov.l	_rb_bit, r10
	bsr	_set_sr
	mov	#-1, r9

	bsr	restore_low_regs
	nop

	/* switch to bank0, restore low registers (r9 = ~RB, r10 = 0) */
	mov.l	_rb_bit, r9
	not	r9, r9
	bsr	_set_sr
	mov	#0, r10

	bsr	restore_low_regs
	nop

	/* restore the rest of the registers */
	mov.l	@r15+, r8
	mov.l	@r15+, r9
	mov.l	@r15+, r10
	mov.l	@r15+, r11
	mov.l	@r15+, r12
	mov.l	@r15+, r13
	mov.l	@r15+, r14
	lds.l	@r15+, pr

skip_restore_regs:
	/* return to the suspended caller: rte jumps to spc with sr taken
	 * from ssr (both set up above) */
	rte
	nop

/* restore_register: write a saved value back to a hardware register.
 * In:      r0 = SH_SLEEP_REG_* index
 * Does:    *(*(r5 + SH_SLEEP_BASE_ADDR + r0)) =
 *              *(r5 + SH_SLEEP_BASE_DATA + r0);
 * Out:     r0 = the hardware register address (used by icbi callers).
 * Clobbers: r1. */
restore_register:
	add	#SH_SLEEP_BASE_DATA, r0
	mov.l	@(r0, r5), r1
	add	#-SH_SLEEP_BASE_DATA, r0
	add	#SH_SLEEP_BASE_ADDR, r0
	mov.l	@(r0, r5), r0
	mov.l	r1, @r0
	rts
	nop

/* _set_sr: sr = (sr & r9) | r10; toggles the RB bank bit.  Clobbers r8.
 * Local copy of set_sr — the resume page must be self-contained. */
_set_sr:
	stc	sr, r8
	and	r9, r8
	or	r10, r8
	ldc	r8, sr
	rts
	nop

/* restore_low_regs: pop r0..r7 of the currently selected bank.
 * The r7 pop sits in the rts delay slot. */
restore_low_regs:
	mov.l	@r15+, r0
	mov.l	@r15+, r1
	mov.l	@r15+, r2
	mov.l	@r15+, r3
	mov.l	@r15+, r4
	mov.l	@r15+, r5
	mov.l	@r15+, r6
	rts
	mov.l	@r15+, r7

	.balign 4
_rb_bit:	.long	0x20000000 ! RB=1
1:	.long	~0x7ff
ENTRY(sh_mobile_sleep_resume_end)