  1. /* SPDX-License-Identifier: GPL-2.0-only */
  2. /*
  3. * Copyright (C) 2013 - ARM Ltd
  4. * Author: Marc Zyngier <[email protected]>
  5. */
  6. #ifndef __ASM_ESR_H
  7. #define __ASM_ESR_H
  8. #include <asm/memory.h>
  9. #include <asm/sysreg.h>
  10. #define ESR_ELx_EC_UNKNOWN (0x00)
  11. #define ESR_ELx_EC_WFx (0x01)
  12. /* Unallocated EC: 0x02 */
  13. #define ESR_ELx_EC_CP15_32 (0x03)
  14. #define ESR_ELx_EC_CP15_64 (0x04)
  15. #define ESR_ELx_EC_CP14_MR (0x05)
  16. #define ESR_ELx_EC_CP14_LS (0x06)
  17. #define ESR_ELx_EC_FP_ASIMD (0x07)
  18. #define ESR_ELx_EC_CP10_ID (0x08) /* EL2 only */
  19. #define ESR_ELx_EC_PAC (0x09) /* EL2 and above */
  20. /* Unallocated EC: 0x0A - 0x0B */
  21. #define ESR_ELx_EC_CP14_64 (0x0C)
  22. #define ESR_ELx_EC_BTI (0x0D)
  23. #define ESR_ELx_EC_ILL (0x0E)
  24. /* Unallocated EC: 0x0F - 0x10 */
  25. #define ESR_ELx_EC_SVC32 (0x11)
  26. #define ESR_ELx_EC_HVC32 (0x12) /* EL2 only */
  27. #define ESR_ELx_EC_SMC32 (0x13) /* EL2 and above */
  28. /* Unallocated EC: 0x14 */
  29. #define ESR_ELx_EC_SVC64 (0x15)
  30. #define ESR_ELx_EC_HVC64 (0x16) /* EL2 and above */
  31. #define ESR_ELx_EC_SMC64 (0x17) /* EL2 and above */
  32. #define ESR_ELx_EC_SYS64 (0x18)
  33. #define ESR_ELx_EC_SVE (0x19)
  34. #define ESR_ELx_EC_ERET (0x1a) /* EL2 only */
  35. /* Unallocated EC: 0x1B */
  36. #define ESR_ELx_EC_FPAC (0x1C) /* EL1 and above */
  37. #define ESR_ELx_EC_SME (0x1D)
  38. /* Unallocated EC: 0x1E */
  39. #define ESR_ELx_EC_IMP_DEF (0x1f) /* EL3 only */
  40. #define ESR_ELx_EC_IABT_LOW (0x20)
  41. #define ESR_ELx_EC_IABT_CUR (0x21)
  42. #define ESR_ELx_EC_PC_ALIGN (0x22)
  43. /* Unallocated EC: 0x23 */
  44. #define ESR_ELx_EC_DABT_LOW (0x24)
  45. #define ESR_ELx_EC_DABT_CUR (0x25)
  46. #define ESR_ELx_EC_SP_ALIGN (0x26)
  47. /* Unallocated EC: 0x27 */
  48. #define ESR_ELx_EC_FP_EXC32 (0x28)
  49. /* Unallocated EC: 0x29 - 0x2B */
  50. #define ESR_ELx_EC_FP_EXC64 (0x2C)
  51. /* Unallocated EC: 0x2D - 0x2E */
  52. #define ESR_ELx_EC_SERROR (0x2F)
  53. #define ESR_ELx_EC_BREAKPT_LOW (0x30)
  54. #define ESR_ELx_EC_BREAKPT_CUR (0x31)
  55. #define ESR_ELx_EC_SOFTSTP_LOW (0x32)
  56. #define ESR_ELx_EC_SOFTSTP_CUR (0x33)
  57. #define ESR_ELx_EC_WATCHPT_LOW (0x34)
  58. #define ESR_ELx_EC_WATCHPT_CUR (0x35)
  59. /* Unallocated EC: 0x36 - 0x37 */
  60. #define ESR_ELx_EC_BKPT32 (0x38)
  61. /* Unallocated EC: 0x39 */
  62. #define ESR_ELx_EC_VECTOR32 (0x3A) /* EL2 only */
  63. /* Unallocated EC: 0x3B */
  64. #define ESR_ELx_EC_BRK64 (0x3C)
  65. /* Unallocated EC: 0x3D - 0x3F */
  66. #define ESR_ELx_EC_MAX (0x3F)
  67. #define ESR_ELx_EC_SHIFT (26)
  68. #define ESR_ELx_EC_WIDTH (6)
  69. #define ESR_ELx_EC_MASK (UL(0x3F) << ESR_ELx_EC_SHIFT)
  70. #define ESR_ELx_EC(esr) (((esr) & ESR_ELx_EC_MASK) >> ESR_ELx_EC_SHIFT)
  71. #define ESR_ELx_IL_SHIFT (25)
  72. #define ESR_ELx_IL (UL(1) << ESR_ELx_IL_SHIFT)
  73. #define ESR_ELx_ISS_MASK (ESR_ELx_IL - 1)
  74. #define ESR_ELx_ISS(esr) ((esr) & ESR_ELx_ISS_MASK)
  75. /* ISS field definitions shared by different classes */
  76. #define ESR_ELx_WNR_SHIFT (6)
  77. #define ESR_ELx_WNR (UL(1) << ESR_ELx_WNR_SHIFT)
  78. /* Asynchronous Error Type */
  79. #define ESR_ELx_IDS_SHIFT (24)
  80. #define ESR_ELx_IDS (UL(1) << ESR_ELx_IDS_SHIFT)
  81. #define ESR_ELx_AET_SHIFT (10)
  82. #define ESR_ELx_AET (UL(0x7) << ESR_ELx_AET_SHIFT)
  83. #define ESR_ELx_AET_UC (UL(0) << ESR_ELx_AET_SHIFT)
  84. #define ESR_ELx_AET_UEU (UL(1) << ESR_ELx_AET_SHIFT)
  85. #define ESR_ELx_AET_UEO (UL(2) << ESR_ELx_AET_SHIFT)
  86. #define ESR_ELx_AET_UER (UL(3) << ESR_ELx_AET_SHIFT)
  87. #define ESR_ELx_AET_CE (UL(6) << ESR_ELx_AET_SHIFT)
  88. /* Shared ISS field definitions for Data/Instruction aborts */
  89. #define ESR_ELx_SET_SHIFT (11)
  90. #define ESR_ELx_SET_MASK (UL(3) << ESR_ELx_SET_SHIFT)
  91. #define ESR_ELx_FnV_SHIFT (10)
  92. #define ESR_ELx_FnV (UL(1) << ESR_ELx_FnV_SHIFT)
  93. #define ESR_ELx_EA_SHIFT (9)
  94. #define ESR_ELx_EA (UL(1) << ESR_ELx_EA_SHIFT)
  95. #define ESR_ELx_S1PTW_SHIFT (7)
  96. #define ESR_ELx_S1PTW (UL(1) << ESR_ELx_S1PTW_SHIFT)
  97. /* Shared ISS fault status code(IFSC/DFSC) for Data/Instruction aborts */
  98. #define ESR_ELx_FSC (0x3F)
  99. #define ESR_ELx_FSC_TYPE (0x3C)
  100. #define ESR_ELx_FSC_LEVEL (0x03)
  101. #define ESR_ELx_FSC_EXTABT (0x10)
  102. #define ESR_ELx_FSC_MTE (0x11)
  103. #define ESR_ELx_FSC_SERROR (0x11)
  104. #define ESR_ELx_FSC_ACCESS (0x08)
  105. #define ESR_ELx_FSC_FAULT (0x04)
  106. #define ESR_ELx_FSC_PERM (0x0C)
  107. /* ISS field definitions for Data Aborts */
  108. #define ESR_ELx_ISV_SHIFT (24)
  109. #define ESR_ELx_ISV (UL(1) << ESR_ELx_ISV_SHIFT)
  110. #define ESR_ELx_SAS_SHIFT (22)
  111. #define ESR_ELx_SAS (UL(3) << ESR_ELx_SAS_SHIFT)
  112. #define ESR_ELx_SSE_SHIFT (21)
  113. #define ESR_ELx_SSE (UL(1) << ESR_ELx_SSE_SHIFT)
  114. #define ESR_ELx_SRT_SHIFT (16)
  115. #define ESR_ELx_SRT_MASK (UL(0x1F) << ESR_ELx_SRT_SHIFT)
  116. #define ESR_ELx_SF_SHIFT (15)
  117. #define ESR_ELx_SF (UL(1) << ESR_ELx_SF_SHIFT)
  118. #define ESR_ELx_AR_SHIFT (14)
  119. #define ESR_ELx_AR (UL(1) << ESR_ELx_AR_SHIFT)
  120. #define ESR_ELx_CM_SHIFT (8)
  121. #define ESR_ELx_CM (UL(1) << ESR_ELx_CM_SHIFT)
  122. /* ISS field definitions for exceptions taken in to Hyp */
  123. #define ESR_ELx_CV (UL(1) << 24)
  124. #define ESR_ELx_COND_SHIFT (20)
  125. #define ESR_ELx_COND_MASK (UL(0xF) << ESR_ELx_COND_SHIFT)
  126. #define ESR_ELx_WFx_ISS_RN (UL(0x1F) << 5)
  127. #define ESR_ELx_WFx_ISS_RV (UL(1) << 2)
  128. #define ESR_ELx_WFx_ISS_TI (UL(3) << 0)
  129. #define ESR_ELx_WFx_ISS_WFxT (UL(2) << 0)
  130. #define ESR_ELx_WFx_ISS_WFI (UL(0) << 0)
  131. #define ESR_ELx_WFx_ISS_WFE (UL(1) << 0)
  132. #define ESR_ELx_xVC_IMM_MASK ((UL(1) << 16) - 1)
  133. #define DISR_EL1_IDS (UL(1) << 24)
  134. /*
  135. * DISR_EL1 and ESR_ELx share the bottom 13 bits, but the RES0 bits may mean
  136. * different things in the future...
  137. */
  138. #define DISR_EL1_ESR_MASK (ESR_ELx_AET | ESR_ELx_EA | ESR_ELx_FSC)
  139. /* ESR value templates for specific events */
  140. #define ESR_ELx_WFx_MASK (ESR_ELx_EC_MASK | \
  141. (ESR_ELx_WFx_ISS_TI & ~ESR_ELx_WFx_ISS_WFxT))
  142. #define ESR_ELx_WFx_WFI_VAL ((ESR_ELx_EC_WFx << ESR_ELx_EC_SHIFT) | \
  143. ESR_ELx_WFx_ISS_WFI)
  144. /* BRK instruction trap from AArch64 state */
  145. #define ESR_ELx_BRK64_ISS_COMMENT_MASK 0xffff
  146. /* ISS field definitions for System instruction traps */
  147. #define ESR_ELx_SYS64_ISS_RES0_SHIFT 22
  148. #define ESR_ELx_SYS64_ISS_RES0_MASK (UL(0x7) << ESR_ELx_SYS64_ISS_RES0_SHIFT)
  149. #define ESR_ELx_SYS64_ISS_DIR_MASK 0x1
  150. #define ESR_ELx_SYS64_ISS_DIR_READ 0x1
  151. #define ESR_ELx_SYS64_ISS_DIR_WRITE 0x0
  152. #define ESR_ELx_SYS64_ISS_RT_SHIFT 5
  153. #define ESR_ELx_SYS64_ISS_RT_MASK (UL(0x1f) << ESR_ELx_SYS64_ISS_RT_SHIFT)
  154. #define ESR_ELx_SYS64_ISS_CRM_SHIFT 1
  155. #define ESR_ELx_SYS64_ISS_CRM_MASK (UL(0xf) << ESR_ELx_SYS64_ISS_CRM_SHIFT)
  156. #define ESR_ELx_SYS64_ISS_CRN_SHIFT 10
  157. #define ESR_ELx_SYS64_ISS_CRN_MASK (UL(0xf) << ESR_ELx_SYS64_ISS_CRN_SHIFT)
  158. #define ESR_ELx_SYS64_ISS_OP1_SHIFT 14
  159. #define ESR_ELx_SYS64_ISS_OP1_MASK (UL(0x7) << ESR_ELx_SYS64_ISS_OP1_SHIFT)
  160. #define ESR_ELx_SYS64_ISS_OP2_SHIFT 17
  161. #define ESR_ELx_SYS64_ISS_OP2_MASK (UL(0x7) << ESR_ELx_SYS64_ISS_OP2_SHIFT)
  162. #define ESR_ELx_SYS64_ISS_OP0_SHIFT 20
  163. #define ESR_ELx_SYS64_ISS_OP0_MASK (UL(0x3) << ESR_ELx_SYS64_ISS_OP0_SHIFT)
  164. #define ESR_ELx_SYS64_ISS_SYS_MASK (ESR_ELx_SYS64_ISS_OP0_MASK | \
  165. ESR_ELx_SYS64_ISS_OP1_MASK | \
  166. ESR_ELx_SYS64_ISS_OP2_MASK | \
  167. ESR_ELx_SYS64_ISS_CRN_MASK | \
  168. ESR_ELx_SYS64_ISS_CRM_MASK)
  169. #define ESR_ELx_SYS64_ISS_SYS_VAL(op0, op1, op2, crn, crm) \
  170. (((op0) << ESR_ELx_SYS64_ISS_OP0_SHIFT) | \
  171. ((op1) << ESR_ELx_SYS64_ISS_OP1_SHIFT) | \
  172. ((op2) << ESR_ELx_SYS64_ISS_OP2_SHIFT) | \
  173. ((crn) << ESR_ELx_SYS64_ISS_CRN_SHIFT) | \
  174. ((crm) << ESR_ELx_SYS64_ISS_CRM_SHIFT))
  175. #define ESR_ELx_SYS64_ISS_SYS_OP_MASK (ESR_ELx_SYS64_ISS_SYS_MASK | \
  176. ESR_ELx_SYS64_ISS_DIR_MASK)
  177. #define ESR_ELx_SYS64_ISS_RT(esr) \
  178. (((esr) & ESR_ELx_SYS64_ISS_RT_MASK) >> ESR_ELx_SYS64_ISS_RT_SHIFT)
  179. /*
  180. * User space cache operations have the following sysreg encoding
  181. * in System instructions.
  182. * op0=1, op1=3, op2=1, crn=7, crm={ 5, 10, 11, 12, 13, 14 }, WRITE (L=0)
  183. */
  184. #define ESR_ELx_SYS64_ISS_CRM_DC_CIVAC 14
  185. #define ESR_ELx_SYS64_ISS_CRM_DC_CVADP 13
  186. #define ESR_ELx_SYS64_ISS_CRM_DC_CVAP 12
  187. #define ESR_ELx_SYS64_ISS_CRM_DC_CVAU 11
  188. #define ESR_ELx_SYS64_ISS_CRM_DC_CVAC 10
  189. #define ESR_ELx_SYS64_ISS_CRM_IC_IVAU 5
  190. #define ESR_ELx_SYS64_ISS_EL0_CACHE_OP_MASK (ESR_ELx_SYS64_ISS_OP0_MASK | \
  191. ESR_ELx_SYS64_ISS_OP1_MASK | \
  192. ESR_ELx_SYS64_ISS_OP2_MASK | \
  193. ESR_ELx_SYS64_ISS_CRN_MASK | \
  194. ESR_ELx_SYS64_ISS_DIR_MASK)
  195. #define ESR_ELx_SYS64_ISS_EL0_CACHE_OP_VAL \
  196. (ESR_ELx_SYS64_ISS_SYS_VAL(1, 3, 1, 7, 0) | \
  197. ESR_ELx_SYS64_ISS_DIR_WRITE)
  198. /*
  199. * User space MRS operations which are supported for emulation
  200. * have the following sysreg encoding in System instructions.
  201. * op0 = 3, op1= 0, crn = 0, {crm = 0, 4-7}, READ (L = 1)
  202. */
  203. #define ESR_ELx_SYS64_ISS_SYS_MRS_OP_MASK (ESR_ELx_SYS64_ISS_OP0_MASK | \
  204. ESR_ELx_SYS64_ISS_OP1_MASK | \
  205. ESR_ELx_SYS64_ISS_CRN_MASK | \
  206. ESR_ELx_SYS64_ISS_DIR_MASK)
  207. #define ESR_ELx_SYS64_ISS_SYS_MRS_OP_VAL \
  208. (ESR_ELx_SYS64_ISS_SYS_VAL(3, 0, 0, 0, 0) | \
  209. ESR_ELx_SYS64_ISS_DIR_READ)
  210. #define ESR_ELx_SYS64_ISS_SYS_CTR ESR_ELx_SYS64_ISS_SYS_VAL(3, 3, 1, 0, 0)
  211. #define ESR_ELx_SYS64_ISS_SYS_CTR_READ (ESR_ELx_SYS64_ISS_SYS_CTR | \
  212. ESR_ELx_SYS64_ISS_DIR_READ)
  213. #define ESR_ELx_SYS64_ISS_SYS_CNTVCT (ESR_ELx_SYS64_ISS_SYS_VAL(3, 3, 2, 14, 0) | \
  214. ESR_ELx_SYS64_ISS_DIR_READ)
  215. #define ESR_ELx_SYS64_ISS_SYS_CNTVCTSS (ESR_ELx_SYS64_ISS_SYS_VAL(3, 3, 6, 14, 0) | \
  216. ESR_ELx_SYS64_ISS_DIR_READ)
  217. #define ESR_ELx_SYS64_ISS_SYS_CNTFRQ (ESR_ELx_SYS64_ISS_SYS_VAL(3, 3, 0, 14, 0) | \
  218. ESR_ELx_SYS64_ISS_DIR_READ)
  219. #define esr_sys64_to_sysreg(e) \
  220. sys_reg((((e) & ESR_ELx_SYS64_ISS_OP0_MASK) >> \
  221. ESR_ELx_SYS64_ISS_OP0_SHIFT), \
  222. (((e) & ESR_ELx_SYS64_ISS_OP1_MASK) >> \
  223. ESR_ELx_SYS64_ISS_OP1_SHIFT), \
  224. (((e) & ESR_ELx_SYS64_ISS_CRN_MASK) >> \
  225. ESR_ELx_SYS64_ISS_CRN_SHIFT), \
  226. (((e) & ESR_ELx_SYS64_ISS_CRM_MASK) >> \
  227. ESR_ELx_SYS64_ISS_CRM_SHIFT), \
  228. (((e) & ESR_ELx_SYS64_ISS_OP2_MASK) >> \
  229. ESR_ELx_SYS64_ISS_OP2_SHIFT))
  230. #define esr_cp15_to_sysreg(e) \
  231. sys_reg(3, \
  232. (((e) & ESR_ELx_SYS64_ISS_OP1_MASK) >> \
  233. ESR_ELx_SYS64_ISS_OP1_SHIFT), \
  234. (((e) & ESR_ELx_SYS64_ISS_CRN_MASK) >> \
  235. ESR_ELx_SYS64_ISS_CRN_SHIFT), \
  236. (((e) & ESR_ELx_SYS64_ISS_CRM_MASK) >> \
  237. ESR_ELx_SYS64_ISS_CRM_SHIFT), \
  238. (((e) & ESR_ELx_SYS64_ISS_OP2_MASK) >> \
  239. ESR_ELx_SYS64_ISS_OP2_SHIFT))
  240. /*
  241. * ISS field definitions for floating-point exception traps
  242. * (FP_EXC_32/FP_EXC_64).
  243. *
  244. * (The FPEXC_* constants are used instead for common bits.)
  245. */
  246. #define ESR_ELx_FP_EXC_TFV (UL(1) << 23)
  247. /*
  248. * ISS field definitions for CP15 accesses
  249. */
  250. #define ESR_ELx_CP15_32_ISS_DIR_MASK 0x1
  251. #define ESR_ELx_CP15_32_ISS_DIR_READ 0x1
  252. #define ESR_ELx_CP15_32_ISS_DIR_WRITE 0x0
  253. #define ESR_ELx_CP15_32_ISS_RT_SHIFT 5
  254. #define ESR_ELx_CP15_32_ISS_RT_MASK (UL(0x1f) << ESR_ELx_CP15_32_ISS_RT_SHIFT)
  255. #define ESR_ELx_CP15_32_ISS_CRM_SHIFT 1
  256. #define ESR_ELx_CP15_32_ISS_CRM_MASK (UL(0xf) << ESR_ELx_CP15_32_ISS_CRM_SHIFT)
  257. #define ESR_ELx_CP15_32_ISS_CRN_SHIFT 10
  258. #define ESR_ELx_CP15_32_ISS_CRN_MASK (UL(0xf) << ESR_ELx_CP15_32_ISS_CRN_SHIFT)
  259. #define ESR_ELx_CP15_32_ISS_OP1_SHIFT 14
  260. #define ESR_ELx_CP15_32_ISS_OP1_MASK (UL(0x7) << ESR_ELx_CP15_32_ISS_OP1_SHIFT)
  261. #define ESR_ELx_CP15_32_ISS_OP2_SHIFT 17
  262. #define ESR_ELx_CP15_32_ISS_OP2_MASK (UL(0x7) << ESR_ELx_CP15_32_ISS_OP2_SHIFT)
  263. #define ESR_ELx_CP15_32_ISS_SYS_MASK (ESR_ELx_CP15_32_ISS_OP1_MASK | \
  264. ESR_ELx_CP15_32_ISS_OP2_MASK | \
  265. ESR_ELx_CP15_32_ISS_CRN_MASK | \
  266. ESR_ELx_CP15_32_ISS_CRM_MASK | \
  267. ESR_ELx_CP15_32_ISS_DIR_MASK)
  268. #define ESR_ELx_CP15_32_ISS_SYS_VAL(op1, op2, crn, crm) \
  269. (((op1) << ESR_ELx_CP15_32_ISS_OP1_SHIFT) | \
  270. ((op2) << ESR_ELx_CP15_32_ISS_OP2_SHIFT) | \
  271. ((crn) << ESR_ELx_CP15_32_ISS_CRN_SHIFT) | \
  272. ((crm) << ESR_ELx_CP15_32_ISS_CRM_SHIFT))
  273. #define ESR_ELx_CP15_64_ISS_DIR_MASK 0x1
  274. #define ESR_ELx_CP15_64_ISS_DIR_READ 0x1
  275. #define ESR_ELx_CP15_64_ISS_DIR_WRITE 0x0
  276. #define ESR_ELx_CP15_64_ISS_RT_SHIFT 5
  277. #define ESR_ELx_CP15_64_ISS_RT_MASK (UL(0x1f) << ESR_ELx_CP15_64_ISS_RT_SHIFT)
  278. #define ESR_ELx_CP15_64_ISS_RT2_SHIFT 10
  279. #define ESR_ELx_CP15_64_ISS_RT2_MASK (UL(0x1f) << ESR_ELx_CP15_64_ISS_RT2_SHIFT)
  280. #define ESR_ELx_CP15_64_ISS_OP1_SHIFT 16
  281. #define ESR_ELx_CP15_64_ISS_OP1_MASK (UL(0xf) << ESR_ELx_CP15_64_ISS_OP1_SHIFT)
  282. #define ESR_ELx_CP15_64_ISS_CRM_SHIFT 1
  283. #define ESR_ELx_CP15_64_ISS_CRM_MASK (UL(0xf) << ESR_ELx_CP15_64_ISS_CRM_SHIFT)
  284. #define ESR_ELx_CP15_64_ISS_SYS_VAL(op1, crm) \
  285. (((op1) << ESR_ELx_CP15_64_ISS_OP1_SHIFT) | \
  286. ((crm) << ESR_ELx_CP15_64_ISS_CRM_SHIFT))
  287. #define ESR_ELx_CP15_64_ISS_SYS_MASK (ESR_ELx_CP15_64_ISS_OP1_MASK | \
  288. ESR_ELx_CP15_64_ISS_CRM_MASK | \
  289. ESR_ELx_CP15_64_ISS_DIR_MASK)
  290. #define ESR_ELx_CP15_64_ISS_SYS_CNTVCT (ESR_ELx_CP15_64_ISS_SYS_VAL(1, 14) | \
  291. ESR_ELx_CP15_64_ISS_DIR_READ)
  292. #define ESR_ELx_CP15_64_ISS_SYS_CNTVCTSS (ESR_ELx_CP15_64_ISS_SYS_VAL(9, 14) | \
  293. ESR_ELx_CP15_64_ISS_DIR_READ)
  294. #define ESR_ELx_CP15_32_ISS_SYS_CNTFRQ (ESR_ELx_CP15_32_ISS_SYS_VAL(0, 0, 14, 0) |\
  295. ESR_ELx_CP15_32_ISS_DIR_READ)
  296. /*
  297. * ISS values for SME traps
  298. */
  299. #define ESR_ELx_SME_ISS_SME_DISABLED 0
  300. #define ESR_ELx_SME_ISS_ILL 1
  301. #define ESR_ELx_SME_ISS_SM_DISABLED 2
  302. #define ESR_ELx_SME_ISS_ZA_DISABLED 3
  303. #ifndef __ASSEMBLY__
  304. #include <asm/types.h>
  305. static inline bool esr_is_data_abort(unsigned long esr)
  306. {
  307. const unsigned long ec = ESR_ELx_EC(esr);
  308. return ec == ESR_ELx_EC_DABT_LOW || ec == ESR_ELx_EC_DABT_CUR;
  309. }
  310. const char *esr_get_class_string(unsigned long esr);
  311. #endif /* __ASSEMBLY */
  312. #endif /* __ASM_ESR_H */