/*
 * swsusp_32.S — 32-bit PowerPC suspend-to-disk (swsusp) low-level
 * context save/restore and page-restore code.
 */
  1. /* SPDX-License-Identifier: GPL-2.0 */
  2. #include <linux/threads.h>
  3. #include <asm/processor.h>
  4. #include <asm/page.h>
  5. #include <asm/cputable.h>
  6. #include <asm/thread_info.h>
  7. #include <asm/ppc_asm.h>
  8. #include <asm/asm-offsets.h>
  9. #include <asm/mmu.h>
  10. #include <asm/feature-fixups.h>
  11. /*
  12. * Structure for storing CPU registers on the save area.
  13. */
  14. #define SL_SP 0
  15. #define SL_PC 4
  16. #define SL_MSR 8
  17. #define SL_SDR1 0xc
  18. #define SL_SPRG0 0x10 /* 4 sprg's */
  19. #define SL_DBAT0 0x20
  20. #define SL_IBAT0 0x28
  21. #define SL_DBAT1 0x30
  22. #define SL_IBAT1 0x38
  23. #define SL_DBAT2 0x40
  24. #define SL_IBAT2 0x48
  25. #define SL_DBAT3 0x50
  26. #define SL_IBAT3 0x58
  27. #define SL_DBAT4 0x60
  28. #define SL_IBAT4 0x68
  29. #define SL_DBAT5 0x70
  30. #define SL_IBAT5 0x78
  31. #define SL_DBAT6 0x80
  32. #define SL_IBAT6 0x88
  33. #define SL_DBAT7 0x90
  34. #define SL_IBAT7 0x98
  35. #define SL_TB 0xa0
  36. #define SL_R2 0xa8
  37. #define SL_CR 0xac
  38. #define SL_LR 0xb0
  39. #define SL_R12 0xb4 /* r12 to r31 */
  40. #define SL_SIZE (SL_R12 + 80)
  41. .section .data
  42. .align 5
  43. _GLOBAL(swsusp_save_area)
  44. .space SL_SIZE
  45. .section .text
  46. .align 5
  47. _GLOBAL(swsusp_arch_suspend)
  48. lis r11,swsusp_save_area@h
  49. ori r11,r11,swsusp_save_area@l
  50. mflr r0
  51. stw r0,SL_LR(r11)
  52. mfcr r0
  53. stw r0,SL_CR(r11)
  54. stw r1,SL_SP(r11)
  55. stw r2,SL_R2(r11)
  56. stmw r12,SL_R12(r11)
  57. /* Save MSR & SDR1 */
  58. mfmsr r4
  59. stw r4,SL_MSR(r11)
  60. mfsdr1 r4
  61. stw r4,SL_SDR1(r11)
  62. /* Get a stable timebase and save it */
  63. 1: mftbu r4
  64. stw r4,SL_TB(r11)
  65. mftb r5
  66. stw r5,SL_TB+4(r11)
  67. mftbu r3
  68. cmpw r3,r4
  69. bne 1b
  70. /* Save SPRGs */
  71. mfsprg r4,0
  72. stw r4,SL_SPRG0(r11)
  73. mfsprg r4,1
  74. stw r4,SL_SPRG0+4(r11)
  75. mfsprg r4,2
  76. stw r4,SL_SPRG0+8(r11)
  77. mfsprg r4,3
  78. stw r4,SL_SPRG0+12(r11)
  79. /* Save BATs */
  80. mfdbatu r4,0
  81. stw r4,SL_DBAT0(r11)
  82. mfdbatl r4,0
  83. stw r4,SL_DBAT0+4(r11)
  84. mfdbatu r4,1
  85. stw r4,SL_DBAT1(r11)
  86. mfdbatl r4,1
  87. stw r4,SL_DBAT1+4(r11)
  88. mfdbatu r4,2
  89. stw r4,SL_DBAT2(r11)
  90. mfdbatl r4,2
  91. stw r4,SL_DBAT2+4(r11)
  92. mfdbatu r4,3
  93. stw r4,SL_DBAT3(r11)
  94. mfdbatl r4,3
  95. stw r4,SL_DBAT3+4(r11)
  96. mfibatu r4,0
  97. stw r4,SL_IBAT0(r11)
  98. mfibatl r4,0
  99. stw r4,SL_IBAT0+4(r11)
  100. mfibatu r4,1
  101. stw r4,SL_IBAT1(r11)
  102. mfibatl r4,1
  103. stw r4,SL_IBAT1+4(r11)
  104. mfibatu r4,2
  105. stw r4,SL_IBAT2(r11)
  106. mfibatl r4,2
  107. stw r4,SL_IBAT2+4(r11)
  108. mfibatu r4,3
  109. stw r4,SL_IBAT3(r11)
  110. mfibatl r4,3
  111. stw r4,SL_IBAT3+4(r11)
  112. BEGIN_MMU_FTR_SECTION
  113. mfspr r4,SPRN_DBAT4U
  114. stw r4,SL_DBAT4(r11)
  115. mfspr r4,SPRN_DBAT4L
  116. stw r4,SL_DBAT4+4(r11)
  117. mfspr r4,SPRN_DBAT5U
  118. stw r4,SL_DBAT5(r11)
  119. mfspr r4,SPRN_DBAT5L
  120. stw r4,SL_DBAT5+4(r11)
  121. mfspr r4,SPRN_DBAT6U
  122. stw r4,SL_DBAT6(r11)
  123. mfspr r4,SPRN_DBAT6L
  124. stw r4,SL_DBAT6+4(r11)
  125. mfspr r4,SPRN_DBAT7U
  126. stw r4,SL_DBAT7(r11)
  127. mfspr r4,SPRN_DBAT7L
  128. stw r4,SL_DBAT7+4(r11)
  129. mfspr r4,SPRN_IBAT4U
  130. stw r4,SL_IBAT4(r11)
  131. mfspr r4,SPRN_IBAT4L
  132. stw r4,SL_IBAT4+4(r11)
  133. mfspr r4,SPRN_IBAT5U
  134. stw r4,SL_IBAT5(r11)
  135. mfspr r4,SPRN_IBAT5L
  136. stw r4,SL_IBAT5+4(r11)
  137. mfspr r4,SPRN_IBAT6U
  138. stw r4,SL_IBAT6(r11)
  139. mfspr r4,SPRN_IBAT6L
  140. stw r4,SL_IBAT6+4(r11)
  141. mfspr r4,SPRN_IBAT7U
  142. stw r4,SL_IBAT7(r11)
  143. mfspr r4,SPRN_IBAT7L
  144. stw r4,SL_IBAT7+4(r11)
  145. END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)
  146. #if 0
  147. /* Backup various CPU config stuffs */
  148. bl __save_cpu_setup
  149. #endif
  150. /* Call the low level suspend stuff (we should probably have made
  151. * a stackframe...
  152. */
  153. bl swsusp_save
  154. /* Restore LR from the save area */
  155. lis r11,swsusp_save_area@h
  156. ori r11,r11,swsusp_save_area@l
  157. lwz r0,SL_LR(r11)
  158. mtlr r0
  159. blr
  160. /* Resume code */
  161. _GLOBAL(swsusp_arch_resume)
  162. #ifdef CONFIG_ALTIVEC
  163. /* Stop pending alitvec streams and memory accesses */
  164. BEGIN_FTR_SECTION
  165. PPC_DSSALL
  166. END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
  167. #endif
  168. sync
  169. /* Disable MSR:DR to make sure we don't take a TLB or
  170. * hash miss during the copy, as our hash table will
  171. * for a while be unusable. For .text, we assume we are
  172. * covered by a BAT. This works only for non-G5 at this
  173. * point. G5 will need a better approach, possibly using
  174. * a small temporary hash table filled with large mappings,
  175. * disabling the MMU completely isn't a good option for
  176. * performance reasons.
  177. * (Note that 750's may have the same performance issue as
  178. * the G5 in this case, we should investigate using moving
  179. * BATs for these CPUs)
  180. */
  181. mfmsr r0
  182. sync
  183. rlwinm r0,r0,0,28,26 /* clear MSR_DR */
  184. mtmsr r0
  185. sync
  186. isync
  187. /* Load ptr the list of pages to copy in r3 */
  188. lis r11,(restore_pblist - KERNELBASE)@h
  189. ori r11,r11,restore_pblist@l
  190. lwz r10,0(r11)
  191. /* Copy the pages. This is a very basic implementation, to
  192. * be replaced by something more cache efficient */
  193. 1:
  194. tophys(r3,r10)
  195. li r0,256
  196. mtctr r0
  197. lwz r11,pbe_address(r3) /* source */
  198. tophys(r5,r11)
  199. lwz r10,pbe_orig_address(r3) /* destination */
  200. tophys(r6,r10)
  201. 2:
  202. lwz r8,0(r5)
  203. lwz r9,4(r5)
  204. lwz r10,8(r5)
  205. lwz r11,12(r5)
  206. addi r5,r5,16
  207. stw r8,0(r6)
  208. stw r9,4(r6)
  209. stw r10,8(r6)
  210. stw r11,12(r6)
  211. addi r6,r6,16
  212. bdnz 2b
  213. lwz r10,pbe_next(r3)
  214. cmpwi 0,r10,0
  215. bne 1b
  216. /* Do a very simple cache flush/inval of the L1 to ensure
  217. * coherency of the icache
  218. */
  219. lis r3,0x0002
  220. mtctr r3
  221. li r3, 0
  222. 1:
  223. lwz r0,0(r3)
  224. addi r3,r3,0x0020
  225. bdnz 1b
  226. isync
  227. sync
  228. /* Now flush those cache lines */
  229. lis r3,0x0002
  230. mtctr r3
  231. li r3, 0
  232. 1:
  233. dcbf 0,r3
  234. addi r3,r3,0x0020
  235. bdnz 1b
  236. sync
  237. /* Ok, we are now running with the kernel data of the old
  238. * kernel fully restored. We can get to the save area
  239. * easily now. As for the rest of the code, it assumes the
  240. * loader kernel and the booted one are exactly identical
  241. */
  242. lis r11,swsusp_save_area@h
  243. ori r11,r11,swsusp_save_area@l
  244. tophys(r11,r11)
  245. #if 0
  246. /* Restore various CPU config stuffs */
  247. bl __restore_cpu_setup
  248. #endif
  249. /* Restore the BATs, and SDR1. Then we can turn on the MMU.
  250. * This is a bit hairy as we are running out of those BATs,
  251. * but first, our code is probably in the icache, and we are
  252. * writing the same value to the BAT, so that should be fine,
  253. * though a better solution will have to be found long-term
  254. */
  255. lwz r4,SL_SDR1(r11)
  256. mtsdr1 r4
  257. lwz r4,SL_SPRG0(r11)
  258. mtsprg 0,r4
  259. lwz r4,SL_SPRG0+4(r11)
  260. mtsprg 1,r4
  261. lwz r4,SL_SPRG0+8(r11)
  262. mtsprg 2,r4
  263. lwz r4,SL_SPRG0+12(r11)
  264. mtsprg 3,r4
  265. #if 0
  266. lwz r4,SL_DBAT0(r11)
  267. mtdbatu 0,r4
  268. lwz r4,SL_DBAT0+4(r11)
  269. mtdbatl 0,r4
  270. lwz r4,SL_DBAT1(r11)
  271. mtdbatu 1,r4
  272. lwz r4,SL_DBAT1+4(r11)
  273. mtdbatl 1,r4
  274. lwz r4,SL_DBAT2(r11)
  275. mtdbatu 2,r4
  276. lwz r4,SL_DBAT2+4(r11)
  277. mtdbatl 2,r4
  278. lwz r4,SL_DBAT3(r11)
  279. mtdbatu 3,r4
  280. lwz r4,SL_DBAT3+4(r11)
  281. mtdbatl 3,r4
  282. lwz r4,SL_IBAT0(r11)
  283. mtibatu 0,r4
  284. lwz r4,SL_IBAT0+4(r11)
  285. mtibatl 0,r4
  286. lwz r4,SL_IBAT1(r11)
  287. mtibatu 1,r4
  288. lwz r4,SL_IBAT1+4(r11)
  289. mtibatl 1,r4
  290. lwz r4,SL_IBAT2(r11)
  291. mtibatu 2,r4
  292. lwz r4,SL_IBAT2+4(r11)
  293. mtibatl 2,r4
  294. lwz r4,SL_IBAT3(r11)
  295. mtibatu 3,r4
  296. lwz r4,SL_IBAT3+4(r11)
  297. mtibatl 3,r4
  298. BEGIN_MMU_FTR_SECTION
  299. lwz r4,SL_DBAT4(r11)
  300. mtspr SPRN_DBAT4U,r4
  301. lwz r4,SL_DBAT4+4(r11)
  302. mtspr SPRN_DBAT4L,r4
  303. lwz r4,SL_DBAT5(r11)
  304. mtspr SPRN_DBAT5U,r4
  305. lwz r4,SL_DBAT5+4(r11)
  306. mtspr SPRN_DBAT5L,r4
  307. lwz r4,SL_DBAT6(r11)
  308. mtspr SPRN_DBAT6U,r4
  309. lwz r4,SL_DBAT6+4(r11)
  310. mtspr SPRN_DBAT6L,r4
  311. lwz r4,SL_DBAT7(r11)
  312. mtspr SPRN_DBAT7U,r4
  313. lwz r4,SL_DBAT7+4(r11)
  314. mtspr SPRN_DBAT7L,r4
  315. lwz r4,SL_IBAT4(r11)
  316. mtspr SPRN_IBAT4U,r4
  317. lwz r4,SL_IBAT4+4(r11)
  318. mtspr SPRN_IBAT4L,r4
  319. lwz r4,SL_IBAT5(r11)
  320. mtspr SPRN_IBAT5U,r4
  321. lwz r4,SL_IBAT5+4(r11)
  322. mtspr SPRN_IBAT5L,r4
  323. lwz r4,SL_IBAT6(r11)
  324. mtspr SPRN_IBAT6U,r4
  325. lwz r4,SL_IBAT6+4(r11)
  326. mtspr SPRN_IBAT6L,r4
  327. lwz r4,SL_IBAT7(r11)
  328. mtspr SPRN_IBAT7U,r4
  329. lwz r4,SL_IBAT7+4(r11)
  330. mtspr SPRN_IBAT7L,r4
  331. END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)
  332. #endif
  333. /* Flush all TLBs */
  334. lis r4,0x1000
  335. 1: addic. r4,r4,-0x1000
  336. tlbie r4
  337. bgt 1b
  338. sync
  339. /* restore the MSR and turn on the MMU */
  340. lwz r3,SL_MSR(r11)
  341. bl turn_on_mmu
  342. tovirt(r11,r11)
  343. /* Restore TB */
  344. li r3,0
  345. mttbl r3
  346. lwz r3,SL_TB(r11)
  347. lwz r4,SL_TB+4(r11)
  348. mttbu r3
  349. mttbl r4
  350. /* Kick decrementer */
  351. li r0,1
  352. mtdec r0
  353. /* Restore the callee-saved registers and return */
  354. lwz r0,SL_CR(r11)
  355. mtcr r0
  356. lwz r2,SL_R2(r11)
  357. lmw r12,SL_R12(r11)
  358. lwz r1,SL_SP(r11)
  359. lwz r0,SL_LR(r11)
  360. mtlr r0
  361. // XXX Note: we don't really need to call swsusp_resume
  362. li r3,0
  363. blr
  364. _ASM_NOKPROBE_SYMBOL(swsusp_arch_resume)
  365. /* FIXME:This construct is actually not useful since we don't shut
  366. * down the instruction MMU, we could just flip back MSR-DR on.
  367. */
  368. turn_on_mmu:
  369. mflr r4
  370. mtsrr0 r4
  371. mtsrr1 r3
  372. sync
  373. isync
  374. rfi
  375. _ASM_NOKPROBE_SYMBOL(turn_on_mmu)