uaccess-asm.h

/* SPDX-License-Identifier: GPL-2.0-only */

#ifndef __ASM_UACCESS_ASM_H__
#define __ASM_UACCESS_ASM_H__

#include <asm/asm-offsets.h>
#include <asm/domain.h>
#include <asm/memory.h>
#include <asm/thread_info.h>
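
	/*
	 * csdb - emit a CSDB (Consumption of Speculative Data Barrier)
	 * instruction.  The opcode is emitted with .inst so that assemblers
	 * which do not know the mnemonic can still build the kernel.
	 */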
	.macro	csdb
#ifdef CONFIG_THUMB2_KERNEL
	.inst.w	0xf3af8014
#else
	.inst	0xe320f014
#endif
	.endm
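
	/*
	 * check_uaccess - verify that [\addr, \addr + \size) lies below
	 * \limit, branching to \bad on failure.  With CONFIG_CPU_SPECTRE,
	 * a failing \addr is also forced to NULL (followed by a csdb) so it
	 * cannot be dereferenced speculatively.  The check is compiled out
	 * when CONFIG_CPU_USE_DOMAINS=y.
	 */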
	.macro check_uaccess, addr:req, size:req, limit:req, tmp:req, bad:req
#ifndef CONFIG_CPU_USE_DOMAINS
	adds	\tmp, \addr, #\size - 1
	sbcscc	\tmp, \tmp, \limit
	bcs	\bad
#ifdef CONFIG_CPU_SPECTRE
	movcs	\addr, #0
	csdb
#endif
#endif
	.endm
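
	/*
	 * uaccess_mask_range_ptr - Spectre-safe pointer masking: leave
	 * \addr untouched if [\addr, \addr + \size) fits below \limit,
	 * otherwise replace it with NULL.  Conditional instructions plus a
	 * csdb are used instead of a branch so the result remains safe to
	 * use under speculation.
	 */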
	.macro uaccess_mask_range_ptr, addr:req, size:req, limit:req, tmp:req
#ifdef CONFIG_CPU_SPECTRE
	sub	\tmp, \limit, #1
	subs	\tmp, \tmp, \addr	@ tmp = limit - 1 - addr
	addhs	\tmp, \tmp, #1		@ if (tmp >= 0) {
	subshs	\tmp, \tmp, \size	@ tmp = limit - (addr + size) }
	movlo	\addr, #0		@ if (tmp < 0) addr = NULL
	csdb
#endif
	.endm

	.macro	uaccess_disable, tmp, isb=1
#ifdef CONFIG_CPU_SW_DOMAIN_PAN
	/*
	 * Whenever we re-enter userspace, the domains should always be
	 * set appropriately.
	 */
	mov	\tmp, #DACR_UACCESS_DISABLE
	mcr	p15, 0, \tmp, c3, c0, 0		@ Set domain register
	.if	\isb
	instr_sync
	.endif
#endif
	.endm

	.macro	uaccess_enable, tmp, isb=1
#ifdef CONFIG_CPU_SW_DOMAIN_PAN
	/*
	 * Whenever we re-enter userspace, the domains should always be
	 * set appropriately.
	 */
	mov	\tmp, #DACR_UACCESS_ENABLE
	mcr	p15, 0, \tmp, c3, c0, 0
	.if	\isb
	instr_sync
	.endif
#endif
	.endm
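
	/*
	 * Illustrative sketch (not taken from the kernel sources): a user
	 * access helper typically brackets the faulting instruction with
	 * the two macros above, along the lines of
	 *
	 *	uaccess_enable r3		@ open userspace access
	 *	USER(	ldrt	r2, [r0])	@ load that may fault
	 *	uaccess_disable r3		@ close userspace access
	 *
	 * The register choices here are hypothetical; the real users live
	 * in the arch/arm uaccess and exception handling code.
	 */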

#if defined(CONFIG_CPU_SW_DOMAIN_PAN) || defined(CONFIG_CPU_USE_DOMAINS)
#define DACR(x...)	x
#else
#define DACR(x...)
#endif
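
	/*
	 * DACR(x) emits x only when the Domain Access Control Register is
	 * actually being managed (SW PAN or CONFIG_CPU_USE_DOMAINS);
	 * otherwise the DACR save/restore below compiles away to nothing.
	 */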

	/*
	 * Save the address limit on entry to a privileged exception.
	 *
	 * If we are using the DACR for kernel access by the user accessors
	 * (CONFIG_CPU_USE_DOMAINS=y), always reset the DACR kernel domain
	 * back to client mode, whether or not \disable is set.
	 *
	 * If we are using SW PAN, set the DACR user domain to no access
	 * if \disable is set.
	 */
	.macro	uaccess_entry, tsk, tmp0, tmp1, tmp2, disable
 DACR(	mrc	p15, 0, \tmp0, c3, c0, 0)
 DACR(	str	\tmp0, [sp, #SVC_DACR])
	.if \disable && IS_ENABLED(CONFIG_CPU_SW_DOMAIN_PAN)
	/* kernel=client, user=no access */
	mov	\tmp2, #DACR_UACCESS_DISABLE
	mcr	p15, 0, \tmp2, c3, c0, 0
	instr_sync
	.elseif IS_ENABLED(CONFIG_CPU_USE_DOMAINS)
	/* kernel=client */
	bic	\tmp2, \tmp0, #domain_mask(DOMAIN_KERNEL)
	orr	\tmp2, \tmp2, #domain_val(DOMAIN_KERNEL, DOMAIN_CLIENT)
	mcr	p15, 0, \tmp2, c3, c0, 0
	instr_sync
	.endif
	.endm

	/* Restore the user access state previously saved by uaccess_entry */
	.macro	uaccess_exit, tsk, tmp0, tmp1
 DACR(	ldr	\tmp0, [sp, #SVC_DACR])
 DACR(	mcr	p15, 0, \tmp0, c3, c0, 0)
	.endm
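
	/*
	 * Illustrative sketch (not taken from the kernel sources): the
	 * SVC-mode exception entry/exit paths use these as a matched pair,
	 * roughly
	 *
	 *	uaccess_entry tsk, r0, r1, r2, 1	@ save DACR, kill user access
	 *	...					@ handle the exception
	 *	uaccess_exit  tsk, r0, r1		@ restore the saved DACR
	 *
	 * where "tsk" is the register conventionally holding the current
	 * thread_info pointer.
	 */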

#undef DACR

#endif /* __ASM_UACCESS_ASM_H__ */