/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Atomic futex routines
 *
 * Based on the PowerPC implementation
 *
 * Copyright (C) 2013 TangoTec Ltd.
 *
 * Baruch Siach <[email protected]>
 */
  11. #ifndef _ASM_XTENSA_FUTEX_H
  12. #define _ASM_XTENSA_FUTEX_H
  13. #include <linux/futex.h>
  14. #include <linux/uaccess.h>
  15. #include <linux/errno.h>
  16. #define arch_futex_atomic_op_inuser arch_futex_atomic_op_inuser
  17. #define futex_atomic_cmpxchg_inatomic futex_atomic_cmpxchg_inatomic
  18. #include <asm-generic/futex.h>
#if XCHAL_HAVE_EXCLUSIVE
/*
 * __futex_atomic_op - atomic read-modify-write of the user word at
 * 'uaddr', exclusive load/store flavour (L32EX/S32EX).
 *
 * 'insn' must compute the new value in %[newval] from %[oldval] and
 * %[oparg].  The sequence loops: L32EX loads the current value,
 * 'insn' derives the new one, S32EX attempts the store and GETEX
 * reads the store result back into %[newval]; a zero result means
 * the exclusive store did not take effect and the whole sequence is
 * retried.  On success 'old' holds the previous value and 'ret' is 0.
 *
 * A fault on the load (1:) or the store (2:) is routed via the
 * __ex_table entries to the fixup at 5:, which sets 'ret' to -EFAULT
 * and resumes at 3:.  Note the fixup clobbers 'old' with the resume
 * address, so 'old' is only meaningful when 'ret' is 0.
 */
#define __futex_atomic_op(insn, ret, old, uaddr, arg)	\
	__asm__ __volatile(				\
	"1:	l32ex	%[oldval], %[addr]\n"		\
		insn "\n"				\
	"2:	s32ex	%[newval], %[addr]\n"		\
	"	getex	%[newval]\n"			\
	"	beqz	%[newval], 1b\n"		\
	"	movi	%[newval], 0\n"			\
	"3:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.align 4\n"				\
	"	.literal_position\n"			\
	"5:	movi	%[oldval], 3b\n"		\
	"	movi	%[newval], %[fault]\n"		\
	"	jx	%[oldval]\n"			\
	"	.previous\n"				\
	"	.section __ex_table,\"a\"\n"		\
	"	.long 1b, 5b, 2b, 5b\n"			\
	"	.previous\n"				\
	: [oldval] "=&r" (old), [newval] "=&r" (ret)	\
	: [addr] "r" (uaddr), [oparg] "r" (arg),	\
	  [fault] "I" (-EFAULT)				\
	: "memory")
#elif XCHAL_HAVE_S32C1I
/*
 * __futex_atomic_op - atomic read-modify-write of the user word at
 * 'uaddr', compare-and-swap flavour (S32C1I).
 *
 * 'insn' must compute the new value in %[newval] from %[oldval] and
 * %[oparg].  The just-loaded old value is placed in SCOMPARE1;
 * S32C1I stores %[newval] only if memory still matches SCOMPARE1 and
 * leaves the observed memory contents in %[newval], so the 'bne'
 * retries whenever another agent changed the word in between.  On
 * success 'old' holds the previous value and 'ret' is 0.
 *
 * Faults at 1: or 2: are handled exactly as in the exclusive variant
 * above: the fixup at 5: sets 'ret' to -EFAULT and resumes at 3:,
 * clobbering 'old' in the process.
 */
#define __futex_atomic_op(insn, ret, old, uaddr, arg)	\
	__asm__ __volatile(				\
	"1:	l32i	%[oldval], %[mem]\n"		\
		insn "\n"				\
	"	wsr	%[oldval], scompare1\n"		\
	"2:	s32c1i	%[newval], %[mem]\n"		\
	"	bne	%[newval], %[oldval], 1b\n"	\
	"	movi	%[newval], 0\n"			\
	"3:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.align 4\n"				\
	"	.literal_position\n"			\
	"5:	movi	%[oldval], 3b\n"		\
	"	movi	%[newval], %[fault]\n"		\
	"	jx	%[oldval]\n"			\
	"	.previous\n"				\
	"	.section __ex_table,\"a\"\n"		\
	"	.long 1b, 5b, 2b, 5b\n"			\
	"	.previous\n"				\
	: [oldval] "=&r" (old), [newval] "=&r" (ret),	\
	  [mem] "+m" (*(uaddr))				\
	: [oparg] "r" (arg), [fault] "I" (-EFAULT)	\
	: "memory")
#endif
/*
 * arch_futex_atomic_op_inuser - execute a futex operation on a user word.
 * @op:	   one of FUTEX_OP_{SET,ADD,OR,ANDN,XOR}
 * @oparg: operand of @op
 * @oval:  on success, receives the previous value of the futex word
 * @uaddr: user-space address of the futex word
 *
 * Returns 0 on success, -EFAULT if the user access faults (or fails
 * access_ok()), and -ENOSYS for an unrecognized @op.
 *
 * When the core has neither S32C1I nor the exclusive access option
 * this falls back to futex_atomic_op_inuser_local() provided by
 * asm-generic/futex.h (included above).
 */
static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
		u32 __user *uaddr)
{
#if XCHAL_HAVE_S32C1I || XCHAL_HAVE_EXCLUSIVE
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %[newval], %[oparg]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add %[newval], %[oldval], %[oparg]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %[newval], %[oldval], %[oparg]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* and-not is built from a plain AND of the complemented operand */
		__futex_atomic_op("and %[newval], %[oldval], %[oparg]",
				  ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %[newval], %[oldval], %[oparg]",
				  ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	/* __futex_atomic_op() leaves the pre-op value in oldval on success */
	if (!ret)
		*oval = oldval;

	return ret;
#else
	return futex_atomic_op_inuser_local(op, oparg, oval, uaddr);
#endif
}
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange on a user word.
 * @uval:   kernel address that receives the value read from @uaddr
 * @uaddr:  user-space address of the futex word
 * @oldval: expected current value
 * @newval: value to store if *@uaddr equals @oldval
 *
 * Returns 0 on success regardless of whether the exchange happened
 * (the caller compares *@uval against @oldval to decide), and
 * -EFAULT if the user access faults or fails access_ok().
 *
 * Without S32C1I or exclusive access instructions this falls back to
 * futex_atomic_cmpxchg_inatomic_local() from asm-generic/futex.h.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
#if XCHAL_HAVE_S32C1I || XCHAL_HAVE_EXCLUSIVE
	unsigned long tmp;
	int ret = 0;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__ (
	"	# futex_atomic_cmpxchg_inatomic\n"
#if XCHAL_HAVE_EXCLUSIVE
	/*
	 * Exclusive flavour: load the word, report the observed value
	 * through *uval, bail out to 2: if it differs from 'oldval',
	 * otherwise attempt the exclusive store; a zero GETEX result
	 * means the store did not take effect, so retry from 1:.
	 */
	"1:	l32ex	%[tmp], %[addr]\n"
	"	s32i	%[tmp], %[uval], 0\n"
	"	bne	%[tmp], %[oldval], 2f\n"
	"	mov	%[tmp], %[newval]\n"
	"3:	s32ex	%[tmp], %[addr]\n"
	"	getex	%[tmp]\n"
	"	beqz	%[tmp], 1b\n"
#elif XCHAL_HAVE_S32C1I
	/*
	 * S32C1I flavour: the instruction compares against SCOMPARE1,
	 * conditionally stores, and leaves the observed memory value
	 * in %[newval], which is then reported through *uval.  No
	 * retry loop is needed -- a mismatch simply means no exchange.
	 */
	"	wsr	%[oldval], scompare1\n"
	"1:	s32c1i	%[newval], %[addr], 0\n"
	"	s32i	%[newval], %[uval], 0\n"
#endif
	"2:\n"
	/* a faulting user access jumps to 4:, sets ret = -EFAULT, resumes at 2: */
	"	.section .fixup,\"ax\"\n"
	"	.align 4\n"
	"	.literal_position\n"
	"4:	movi	%[tmp], 2b\n"
	"	movi	%[ret], %[fault]\n"
	"	jx	%[tmp]\n"
	"	.previous\n"
	"	.section __ex_table,\"a\"\n"
	"	.long 1b, 4b\n"
#if XCHAL_HAVE_EXCLUSIVE
	"	.long 3b, 4b\n"
#endif
	"	.previous\n"
	: [ret] "+r" (ret), [newval] "+r" (newval), [tmp] "=&r" (tmp)
	: [addr] "r" (uaddr), [oldval] "r" (oldval), [uval] "r" (uval),
	  [fault] "I" (-EFAULT)
	: "memory");

	return ret;
#else
	return futex_atomic_cmpxchg_inatomic_local(uval, uaddr, oldval, newval);
#endif
}
  153. #endif /* _ASM_XTENSA_FUTEX_H */