futex.h

/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_FUTEX_H
#define __ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/errno.h>

#define FUTEX_MAX_LOOPS	128 /* What's the largest number you can think of? */
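
/*
 * __futex_atomic_op() wraps one user-space read-modify-write in an
 * LDXR/STLXR exclusive pair. STLXR writes 1 to its status register when
 * the store fails, so "sub %w4, %w4, %w0" decrements the loop counter by
 * one per failed attempt; after FUTEX_MAX_LOOPS failures the operation
 * bails out with -EAGAIN instead of livelocking under contention, leaving
 * any retry policy to the caller. The trailing "dmb ish" provides the
 * full ordering expected of futex operations.
 */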
#define __futex_atomic_op(insn, ret, oldval, uaddr, tmp, oparg)	\
do {									\
	unsigned int loops = FUTEX_MAX_LOOPS;				\
									\
	uaccess_enable_privileged();					\
	asm volatile(							\
"	prfm	pstl1strm, %2\n"					\
"1:	ldxr	%w1, %2\n"						\
	insn "\n"							\
"2:	stlxr	%w0, %w3, %2\n"						\
"	cbz	%w0, 3f\n"						\
"	sub	%w4, %w4, %w0\n"					\
"	cbnz	%w4, 1b\n"						\
"	mov	%w0, %w6\n"						\
"3:\n"									\
"	dmb	ish\n"							\
	_ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w0)				\
	_ASM_EXTABLE_UACCESS_ERR(2b, 3b, %w0)				\
	: "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp),	\
	  "+r" (loops)							\
	: "r" (oparg), "Ir" (-EAGAIN)					\
	: "memory");							\
	uaccess_disable_privileged();					\
} while (0)
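
/*
 * Operand map for the asm above, as a reading aid: %w0 = ret/STLXR
 * status, %w1 = oldval loaded from the futex word, %2 = the futex word
 * itself, %w3 = tmp holding the value computed by "insn", %w4 = loops,
 * %w5 = oparg, %w6 = the -EAGAIN immediate. For FUTEX_OP_ADD, for
 * example, "insn" is "add %w3, %w1, %w5", i.e. tmp = oldval + oparg.
 */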
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *_uaddr)
{
	int oldval = 0, ret, tmp;
	/* Sanitise the user pointer against speculative abuse before use. */
	u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);

	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%w3, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* ANDN is implemented as AND with the complemented operand. */
		__futex_atomic_op("and	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}
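
/*
 * The generic futex code reaches this helper via futex_atomic_op_inuser()
 * when servicing FUTEX_WAKE_OP; a -EAGAIN return is intended to be
 * retried by the core code rather than reported to user space.
 */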
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	unsigned int loops = FUTEX_MAX_LOOPS;
	u32 val, tmp;
	u32 __user *uaddr;

	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	uaddr = __uaccess_mask_ptr(_uaddr);
	uaccess_enable_privileged();
	asm volatile("// futex_atomic_cmpxchg_inatomic\n"
"	prfm	pstl1strm, %2\n"
"1:	ldxr	%w1, %2\n"
"	sub	%w3, %w1, %w5\n"
"	cbnz	%w3, 4f\n"
"2:	stlxr	%w3, %w6, %2\n"
"	cbz	%w3, 3f\n"
"	sub	%w4, %w4, %w3\n"
"	cbnz	%w4, 1b\n"
"	mov	%w0, %w7\n"
"3:\n"
"	dmb	ish\n"
"4:\n"
	_ASM_EXTABLE_UACCESS_ERR(1b, 4b, %w0)
	_ASM_EXTABLE_UACCESS_ERR(2b, 4b, %w0)
	: "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
	: "r" (oldval), "r" (newval), "Ir" (-EAGAIN)
	: "memory");
	uaccess_disable_privileged();

	if (!ret)
		*uval = val;

	return ret;
}
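
/*
 * In the asm above, a mismatch between the loaded value and oldval
 * branches straight to label 4, skipping both the store and the
 * "dmb ish" at label 3; ret stays 0 and the caller detects the failed
 * compare by finding *uval != oldval. Exhausting the loop budget yields
 * -EAGAIN via %w7, mirroring __futex_atomic_op().
 */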
#endif /* __ASM_FUTEX_H */