spiterrs.S

/* SPDX-License-Identifier: GPL-2.0 */
	/* We need to carefully read the error status, ACK the errors,
	 * prevent recursive traps, and pass the information on to C
	 * code for logging.
	 *
	 * We pass the AFAR in as-is, and we encode the status
	 * information as described in asm-sparc64/sfafsr.h
	 */
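	/* Roughly (a sketch based on the SFSTAT_* shifts used below; the
	 * authoritative bit layout lives in asm-sparc64/sfafsr.h), the
	 * status word handed to the C code is built as:
	 *
	 *	status = AFSR
	 *	       | (UDB-high error bits << SFSTAT_UDBH_SHIFT)
	 *	       | (UDB-low  error bits << SFSTAT_UDBL_SHIFT)
	 *	       | (TL > 1 flag         << SFSTAT_TL_GT_ONE_SHIFT)
	 *	       | (trap type, %tt      << SFSTAT_TRAP_TYPE_SHIFT)
	 */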
	.type		__spitfire_access_error,#function
__spitfire_access_error:
	/* Disable ESTATE error reporting so that we do not take
	 * recursive traps and RED state the processor.
	 */
	stxa		%g0, [%g0] ASI_ESTATE_ERROR_EN
	membar		#Sync

	mov		UDBE_UE, %g1
	ldxa		[%g0] ASI_AFSR, %g4	! Get AFSR

	/* __spitfire_cee_trap branches here with AFSR in %g4 and
	 * UDBE_CE in %g1.  It only clears ESTATE_ERR_CE in the ESTATE
	 * Error Enable register.
	 */
__spitfire_cee_trap_continue:
	ldxa		[%g0] ASI_AFAR, %g5	! Get AFAR

	rdpr		%tt, %g3
	and		%g3, 0x1ff, %g3		! Paranoia
	sllx		%g3, SFSTAT_TRAP_TYPE_SHIFT, %g3
	or		%g4, %g3, %g4

	rdpr		%tl, %g3
	cmp		%g3, 1
	mov		1, %g3
	bleu		%xcc, 1f
	 sllx		%g3, SFSTAT_TL_GT_ONE_SHIFT, %g3

	or		%g4, %g3, %g4

	/* Read in the UDB error register state, clearing the sticky
	 * error bits as-needed.  We only clear them if the UE bit is
	 * set.  Likewise, __spitfire_cee_trap below will only do so
	 * if the CE bit is set.
	 *
	 * NOTE: UltraSparc-I/II have high and low UDB error
	 *       registers, corresponding to the two UDB units
	 *       present on those chips.  UltraSparc-IIi only
	 *       has a single UDB, called "SDB" in the manual.
	 *       For IIi the upper UDB register always reads
	 *       as zero so for our purposes things will just
	 *       work with the checks below.
	 */
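	/* Concretely, per the accesses below: the UDB-high error register
	 * is read at offset 0x0 and the UDB-low one at offset 0x18 of the
	 * UDB error-register ASIs, and only the bit selected in %g1
	 * (UDBE_UE here, UDBE_CE for the CEE path) is written back
	 * through ASI_UDB_ERROR_W to clear it.
	 */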
1:	ldxa		[%g0] ASI_UDBH_ERROR_R, %g3
	and		%g3, 0x3ff, %g7		! Paranoia
	sllx		%g7, SFSTAT_UDBH_SHIFT, %g7
	or		%g4, %g7, %g4

	andcc		%g3, %g1, %g3		! UDBE_UE or UDBE_CE
	be,pn		%xcc, 1f
	 nop
	stxa		%g3, [%g0] ASI_UDB_ERROR_W
	membar		#Sync

1:	mov		0x18, %g3
	ldxa		[%g3] ASI_UDBL_ERROR_R, %g3
	and		%g3, 0x3ff, %g7		! Paranoia
	sllx		%g7, SFSTAT_UDBL_SHIFT, %g7
	or		%g4, %g7, %g4

	andcc		%g3, %g1, %g3		! UDBE_UE or UDBE_CE
	be,pn		%xcc, 1f
	 nop
	mov		0x18, %g7
	stxa		%g3, [%g7] ASI_UDB_ERROR_W
	membar		#Sync

1:	/* Ok, now that we've latched the error state, clear the
	 * sticky bits in the AFSR.
	 */
	stxa		%g4, [%g0] ASI_AFSR
	membar		#Sync
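	/* Pick the kernel entry path: the wrpr below sits in the branch
	 * delay slot, so PIL is raised to PIL_NORMAL_MAX either way; we
	 * then enter via etrap_irq for a TL == 1 trap, or via etraptl1
	 * if the error was taken at TL > 1.
	 */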
	rdpr		%tl, %g2
	cmp		%g2, 1
	rdpr		%pil, %g2
	bleu,pt		%xcc, 1f
	 wrpr		%g0, PIL_NORMAL_MAX, %pil

	ba,pt		%xcc, etraptl1
	 rd		%pc, %g7

	ba,a,pt		%xcc, 2f
	 nop

1:	ba,pt		%xcc, etrap_irq
	 rd		%pc, %g7

2:
#ifdef CONFIG_TRACE_IRQFLAGS
	call		trace_hardirqs_off
	 nop
#endif
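	/* The C logging routine is called as
	 *	spitfire_access_error(regs, status, afar)
	 * with the pt_regs pointer in %o0; %l4 and %l5 are the trap-time
	 * %g4 (encoded status) and %g5 (AFAR) as preserved across etrap,
	 * the same convention used by every handler in this file.
	 */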
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_access_error
	 add		%sp, PTREGS_OFF, %o0
	ba,a,pt		%xcc, rtrap
	.size		__spitfire_access_error,.-__spitfire_access_error

	/* This is the trap handler entry point for ECC correctable
	 * errors.  They are corrected, but we listen for the trap so
	 * that the event can be logged.
	 *
	 * Disrupting errors are either:
	 * 1) single-bit ECC errors during UDB reads to system
	 *    memory
	 * 2) data parity errors during write-back events
	 *
	 * As far as I can make out from the manual, the CEE trap is
	 * only for correctable errors during memory read accesses by
	 * the front-end of the processor.
	 *
	 * The code below is only for trap level 1 CEE events, as it
	 * is the only situation where we can safely record and log.
	 * For trap level >1 we just clear the CE bit in the AFSR and
	 * return.
	 *
	 * This is just like __spitfire_access_error above, but it
	 * specifically handles correctable errors.  If an
	 * uncorrectable error is indicated in the AFSR we will branch
	 * directly above to __spitfire_access_error to handle it
	 * instead.  Uncorrectable therefore takes priority over
	 * correctable, and the error logging C code will notice this
	 * case by inspecting the trap type.
	 */
	.type		__spitfire_cee_trap,#function
__spitfire_cee_trap:
	ldxa		[%g0] ASI_AFSR, %g4	! Get AFSR
	mov		1, %g3
	sllx		%g3, SFAFSR_UE_SHIFT, %g3
	andcc		%g4, %g3, %g0		! Check for UE
	bne,pn		%xcc, __spitfire_access_error
	 nop

	/* Ok, in this case we only have a correctable error.
	 * Indicate we only wish to capture that state in register
	 * %g1, and we only disable CE error reporting unlike UE
	 * handling which disables all errors.
	 */
	ldxa		[%g0] ASI_ESTATE_ERROR_EN, %g3
	andn		%g3, ESTATE_ERR_CE, %g3
	stxa		%g3, [%g0] ASI_ESTATE_ERROR_EN
	membar		#Sync

	/* Preserve AFSR in %g4, indicate UDB state to capture in %g1 */
	ba,pt		%xcc, __spitfire_cee_trap_continue
	 mov		UDBE_CE, %g1
	.size		__spitfire_cee_trap,.-__spitfire_cee_trap

	.type		__spitfire_data_access_exception_tl1,#function
__spitfire_data_access_exception_tl1:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	mov		DMMU_SFAR, %g5
	ldxa		[%g3] ASI_DMMU, %g4	! Get SFSR
	ldxa		[%g5] ASI_DMMU, %g5	! Get SFAR
	stxa		%g0, [%g3] ASI_DMMU	! Clear SFSR.FaultValid bit
	membar		#Sync
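	/* Data access exceptions raised from inside a register window
	 * spill/fill handler (trap types 0x80-0xff) cannot be processed
	 * here; hand those off to the window fixup path via winfix_dax.
	 */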
	rdpr		%tt, %g3
	cmp		%g3, 0x80		! first win spill/fill trap
	blu,pn		%xcc, 1f
	 cmp		%g3, 0xff		! last win spill/fill trap
	bgu,pn		%xcc, 1f
	 nop
	ba,pt		%xcc, winfix_dax
	 rdpr		%tpc, %g3

1:	sethi		%hi(109f), %g7
	ba,pt		%xcc, etraptl1
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_data_access_exception_tl1
	 add		%sp, PTREGS_OFF, %o0
	ba,a,pt		%xcc, rtrap
	.size		__spitfire_data_access_exception_tl1,.-__spitfire_data_access_exception_tl1

	.type		__spitfire_data_access_exception,#function
__spitfire_data_access_exception:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	mov		DMMU_SFAR, %g5
	ldxa		[%g3] ASI_DMMU, %g4	! Get SFSR
	ldxa		[%g5] ASI_DMMU, %g5	! Get SFAR
	stxa		%g0, [%g3] ASI_DMMU	! Clear SFSR.FaultValid bit
	membar		#Sync
	sethi		%hi(109f), %g7
	ba,pt		%xcc, etrap
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_data_access_exception
	 add		%sp, PTREGS_OFF, %o0
	ba,a,pt		%xcc, rtrap
	.size		__spitfire_data_access_exception,.-__spitfire_data_access_exception

	.type		__spitfire_insn_access_exception_tl1,#function
__spitfire_insn_access_exception_tl1:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	ldxa		[%g3] ASI_IMMU, %g4	! Get SFSR
	rdpr		%tpc, %g5		! IMMU has no SFAR, use TPC
	stxa		%g0, [%g3] ASI_IMMU	! Clear FaultValid bit
	membar		#Sync
	sethi		%hi(109f), %g7
	ba,pt		%xcc, etraptl1
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_insn_access_exception_tl1
	 add		%sp, PTREGS_OFF, %o0
	ba,a,pt		%xcc, rtrap
	.size		__spitfire_insn_access_exception_tl1,.-__spitfire_insn_access_exception_tl1

	.type		__spitfire_insn_access_exception,#function
__spitfire_insn_access_exception:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	ldxa		[%g3] ASI_IMMU, %g4	! Get SFSR
	rdpr		%tpc, %g5		! IMMU has no SFAR, use TPC
	stxa		%g0, [%g3] ASI_IMMU	! Clear FaultValid bit
	membar		#Sync
	sethi		%hi(109f), %g7
	ba,pt		%xcc, etrap
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_insn_access_exception
	 add		%sp, PTREGS_OFF, %o0
	ba,a,pt		%xcc, rtrap
	.size		__spitfire_insn_access_exception,.-__spitfire_insn_access_exception