/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Based on swsusp_32.S, modified for FSL BookE by
 * Anton Vorontsov <[email protected]>
 * Copyright (c) 2009-2010 MontaVista Software, LLC.
 */
#include <linux/threads.h>
#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/thread_info.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/mmu.h>
  15. /*
  16. * Structure for storing CPU registers on the save area.
  17. */
  18. #define SL_SP 0
  19. #define SL_PC 4
  20. #define SL_MSR 8
  21. #define SL_TCR 0xc
  22. #define SL_SPRG0 0x10
  23. #define SL_SPRG1 0x14
  24. #define SL_SPRG2 0x18
  25. #define SL_SPRG3 0x1c
  26. #define SL_SPRG4 0x20
  27. #define SL_SPRG5 0x24
  28. #define SL_SPRG6 0x28
  29. #define SL_SPRG7 0x2c
  30. #define SL_TBU 0x30
  31. #define SL_TBL 0x34
  32. #define SL_R2 0x38
  33. #define SL_CR 0x3c
  34. #define SL_LR 0x40
  35. #define SL_R12 0x44 /* r12 to r31 */
  36. #define SL_SIZE (SL_R12 + 80)
  37. .section .data
  38. .align 5
  39. _GLOBAL(swsusp_save_area)
  40. .space SL_SIZE
  41. .section .text
  42. .align 5
  43. _GLOBAL(swsusp_arch_suspend)
  44. lis r11,swsusp_save_area@h
  45. ori r11,r11,swsusp_save_area@l
  46. mflr r0
  47. stw r0,SL_LR(r11)
  48. mfcr r0
  49. stw r0,SL_CR(r11)
  50. stw r1,SL_SP(r11)
  51. stw r2,SL_R2(r11)
  52. stmw r12,SL_R12(r11)
  53. /* Save MSR & TCR */
  54. mfmsr r4
  55. stw r4,SL_MSR(r11)
  56. mfspr r4,SPRN_TCR
  57. stw r4,SL_TCR(r11)
  58. /* Get a stable timebase and save it */
  59. 1: mfspr r4,SPRN_TBRU
  60. stw r4,SL_TBU(r11)
  61. mfspr r5,SPRN_TBRL
  62. stw r5,SL_TBL(r11)
  63. mfspr r3,SPRN_TBRU
  64. cmpw r3,r4
  65. bne 1b
  66. /* Save SPRGs */
  67. mfspr r4,SPRN_SPRG0
  68. stw r4,SL_SPRG0(r11)
  69. mfspr r4,SPRN_SPRG1
  70. stw r4,SL_SPRG1(r11)
  71. mfspr r4,SPRN_SPRG2
  72. stw r4,SL_SPRG2(r11)
  73. mfspr r4,SPRN_SPRG3
  74. stw r4,SL_SPRG3(r11)
  75. mfspr r4,SPRN_SPRG4
  76. stw r4,SL_SPRG4(r11)
  77. mfspr r4,SPRN_SPRG5
  78. stw r4,SL_SPRG5(r11)
  79. mfspr r4,SPRN_SPRG6
  80. stw r4,SL_SPRG6(r11)
  81. mfspr r4,SPRN_SPRG7
  82. stw r4,SL_SPRG7(r11)
  83. /* Call the low level suspend stuff (we should probably have made
  84. * a stackframe...
  85. */
  86. bl swsusp_save
  87. /* Restore LR from the save area */
  88. lis r11,swsusp_save_area@h
  89. ori r11,r11,swsusp_save_area@l
  90. lwz r0,SL_LR(r11)
  91. mtlr r0
  92. blr
  93. _GLOBAL(swsusp_arch_resume)
  94. sync
  95. /* Load ptr the list of pages to copy in r3 */
  96. lis r11,(restore_pblist)@h
  97. ori r11,r11,restore_pblist@l
  98. lwz r3,0(r11)
  99. /* Copy the pages. This is a very basic implementation, to
  100. * be replaced by something more cache efficient */
  101. 1:
  102. li r0,256
  103. mtctr r0
  104. lwz r5,pbe_address(r3) /* source */
  105. lwz r6,pbe_orig_address(r3) /* destination */
  106. 2:
  107. lwz r8,0(r5)
  108. lwz r9,4(r5)
  109. lwz r10,8(r5)
  110. lwz r11,12(r5)
  111. addi r5,r5,16
  112. stw r8,0(r6)
  113. stw r9,4(r6)
  114. stw r10,8(r6)
  115. stw r11,12(r6)
  116. addi r6,r6,16
  117. bdnz 2b
  118. lwz r3,pbe_next(r3)
  119. cmpwi 0,r3,0
  120. bne 1b
  121. bl flush_dcache_L1
  122. bl flush_instruction_cache
  123. lis r11,swsusp_save_area@h
  124. ori r11,r11,swsusp_save_area@l
  125. /*
  126. * Mappings from virtual addresses to physical addresses may be
  127. * different than they were prior to restoring hibernation state.
  128. * Invalidate the TLB so that the boot CPU is using the new
  129. * mappings.
  130. */
  131. bl _tlbil_all
  132. lwz r4,SL_SPRG0(r11)
  133. mtspr SPRN_SPRG0,r4
  134. lwz r4,SL_SPRG1(r11)
  135. mtspr SPRN_SPRG1,r4
  136. lwz r4,SL_SPRG2(r11)
  137. mtspr SPRN_SPRG2,r4
  138. lwz r4,SL_SPRG3(r11)
  139. mtspr SPRN_SPRG3,r4
  140. lwz r4,SL_SPRG4(r11)
  141. mtspr SPRN_SPRG4,r4
  142. lwz r4,SL_SPRG5(r11)
  143. mtspr SPRN_SPRG5,r4
  144. lwz r4,SL_SPRG6(r11)
  145. mtspr SPRN_SPRG6,r4
  146. lwz r4,SL_SPRG7(r11)
  147. mtspr SPRN_SPRG7,r4
  148. /* restore the MSR */
  149. lwz r3,SL_MSR(r11)
  150. mtmsr r3
  151. /* Restore TB */
  152. li r3,0
  153. mtspr SPRN_TBWL,r3
  154. lwz r3,SL_TBU(r11)
  155. lwz r4,SL_TBL(r11)
  156. mtspr SPRN_TBWU,r3
  157. mtspr SPRN_TBWL,r4
  158. /* Restore TCR and clear any pending bits in TSR. */
  159. lwz r4,SL_TCR(r11)
  160. mtspr SPRN_TCR,r4
  161. lis r4, (TSR_ENW | TSR_WIS | TSR_DIS | TSR_FIS)@h
  162. mtspr SPRN_TSR,r4
  163. /* Kick decrementer */
  164. li r0,1
  165. mtdec r0
  166. /* Restore the callee-saved registers and return */
  167. lwz r0,SL_CR(r11)
  168. mtcr r0
  169. lwz r2,SL_R2(r11)
  170. lmw r12,SL_R12(r11)
  171. lwz r1,SL_SP(r11)
  172. lwz r0,SL_LR(r11)
  173. mtlr r0
  174. li r3,0
  175. blr