/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 2006 Ralf Baechle (ralf@linux-mips.org)
 */
  8. #ifndef _ASM_FUTEX_H
  9. #define _ASM_FUTEX_H
  10. #ifdef __KERNEL__
  11. #include <linux/futex.h>
  12. #include <linux/uaccess.h>
  13. #include <asm/asm-eva.h>
  14. #include <asm/barrier.h>
  15. #include <asm/compiler.h>
  16. #include <asm/errno.h>
  17. #include <asm/sync.h>
  18. #define arch_futex_atomic_op_inuser arch_futex_atomic_op_inuser
  19. #define futex_atomic_cmpxchg_inatomic futex_atomic_cmpxchg_inatomic
  20. #include <asm-generic/futex.h>
  21. #define __futex_atomic_op(op, insn, ret, oldval, uaddr, oparg) \
  22. { \
  23. if (cpu_has_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) { \
  24. __asm__ __volatile__( \
  25. " .set push \n" \
  26. " .set noat \n" \
  27. " .set push \n" \
  28. " .set arch=r4000 \n" \
  29. "1: ll %1, %4 # __futex_atomic_op \n" \
  30. " .set pop \n" \
  31. " " insn " \n" \
  32. " .set arch=r4000 \n" \
  33. "2: sc $1, %2 \n" \
  34. " beqzl $1, 1b \n" \
  35. __stringify(__WEAK_LLSC_MB) " \n" \
  36. "3: \n" \
  37. " .insn \n" \
  38. " .set pop \n" \
  39. " .section .fixup,\"ax\" \n" \
  40. "4: li %0, %6 \n" \
  41. " j 3b \n" \
  42. " .previous \n" \
  43. " .section __ex_table,\"a\" \n" \
  44. " "__UA_ADDR "\t1b, 4b \n" \
  45. " "__UA_ADDR "\t2b, 4b \n" \
  46. " .previous \n" \
  47. : "=r" (ret), "=&r" (oldval), \
  48. "=" GCC_OFF_SMALL_ASM() (*uaddr) \
  49. : "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg), \
  50. "i" (-EFAULT) \
  51. : "memory"); \
  52. } else if (cpu_has_llsc) { \
  53. __asm__ __volatile__( \
  54. " .set push \n" \
  55. " .set noat \n" \
  56. " .set push \n" \
  57. " .set "MIPS_ISA_ARCH_LEVEL" \n" \
  58. " " __SYNC(full, loongson3_war) " \n" \
  59. "1: "user_ll("%1", "%4")" # __futex_atomic_op\n" \
  60. " .set pop \n" \
  61. " " insn " \n" \
  62. " .set "MIPS_ISA_ARCH_LEVEL" \n" \
  63. "2: "user_sc("$1", "%2")" \n" \
  64. " beqz $1, 1b \n" \
  65. __stringify(__WEAK_LLSC_MB) " \n" \
  66. "3: \n" \
  67. " .insn \n" \
  68. " .set pop \n" \
  69. " .section .fixup,\"ax\" \n" \
  70. "4: li %0, %6 \n" \
  71. " j 3b \n" \
  72. " .previous \n" \
  73. " .section __ex_table,\"a\" \n" \
  74. " "__UA_ADDR "\t1b, 4b \n" \
  75. " "__UA_ADDR "\t2b, 4b \n" \
  76. " .previous \n" \
  77. : "=r" (ret), "=&r" (oldval), \
  78. "=" GCC_OFF_SMALL_ASM() (*uaddr) \
  79. : "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg), \
  80. "i" (-EFAULT) \
  81. : "memory"); \
  82. } else { \
  83. /* fallback for non-SMP */ \
  84. ret = futex_atomic_op_inuser_local(op, oparg, oval, uaddr); \
  85. } \
  86. }
/*
 * arch_futex_atomic_op_inuser() - run a FUTEX_OP_* operation on a user word.
 * @op:    FUTEX_OP_SET/ADD/OR/ANDN/XOR opcode
 * @oparg: operand for the operation (inverted before use for ANDN)
 * @oval:  out parameter: previous value of *uaddr, written only on success
 * @uaddr: user-space address of the futex word
 *
 * Returns 0 on success, -EFAULT on an invalid or faulting user access,
 * -ENOSYS for an unrecognized op.
 *
 * Each case hands __futex_atomic_op() the MIPS instruction that computes
 * the new value into $1 from the loaded old value (%1) and oparg (%z5).
 */
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		/* *uaddr = oparg */
		__futex_atomic_op(op, "move $1, %z5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		/* *uaddr += oparg */
		__futex_atomic_op(op, "addu $1, %1, %z5",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		/* *uaddr |= oparg */
		__futex_atomic_op(op, "or $1, %1, %z5",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* *uaddr &= ~oparg: AND with the pre-inverted operand */
		__futex_atomic_op(op, "and $1, %1, %z5",
				  ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		/* *uaddr ^= oparg */
		__futex_atomic_op(op, "xor $1, %1, %z5",
				  ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	/*
	 * Publish the old value only when the operation succeeded.
	 * NOTE(review): the macro's non-LL/SC fallback writes *oval
	 * directly (it names 'oval' from this scope) while 'oldval'
	 * here stays 0 -- verify this publish step does not clobber
	 * the fallback's result on such configurations.
	 */
	if (!ret)
		*oval = oldval;

	return ret;
}
/*
 * futex_atomic_cmpxchg_inatomic() - compare-and-exchange on a user futex word.
 * @uval:   out parameter: value observed at *uaddr (valid only when 0 is
 *          returned; set whether or not the comparison matched)
 * @uaddr:  user-space address of the futex word
 * @oldval: expected current value
 * @newval: value stored only if *uaddr == oldval
 *
 * Returns 0 on success (caller compares *uval with oldval to learn
 * whether the exchange took place) or -EFAULT if a user access faults.
 * Labels 1 (ll) and 2 (sc) have __ex_table fixups jumping to label 4,
 * which sets ret = -EFAULT and resumes at label 3.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	if (cpu_has_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) {
		/* LL/SC loop with the R10000 workaround: branch-likely retry. */
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic \n"
		" .set push \n"
		" .set noat \n"
		" .set push \n"
		" .set arch=r4000 \n"
		"1: ll %1, %3 \n"
		/* Mismatch: bail out without storing, val holds what we saw. */
		" bne %1, %z4, 3f \n"
		" .set pop \n"
		" move $1, %z5 \n"
		" .set arch=r4000 \n"
		"2: sc $1, %2 \n"
		" beqzl $1, 1b \n"
		__stringify(__WEAK_LLSC_MB) " \n"
		"3: \n"
		" .insn \n"
		" .set pop \n"
		" .section .fixup,\"ax\" \n"
		"4: li %0, %6 \n"
		" j 3b \n"
		" .previous \n"
		" .section __ex_table,\"a\" \n"
		" "__UA_ADDR "\t1b, 4b \n"
		" "__UA_ADDR "\t2b, 4b \n"
		" .previous \n"
		: "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
		: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
		  "i" (-EFAULT)
		: "memory");
	} else if (cpu_has_llsc) {
		/*
		 * Plain LL/SC at the configured ISA level, EVA-aware user
		 * accessors; leading/trailing barriers per the Loongson3
		 * LL/SC workaround (__SYNC / __SYNC_ELSE).
		 */
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic \n"
		" .set push \n"
		" .set noat \n"
		" .set push \n"
		" .set "MIPS_ISA_ARCH_LEVEL" \n"
		" " __SYNC(full, loongson3_war) " \n"
		"1: "user_ll("%1", "%3")" \n"
		/* Mismatch: skip the store and fall through to the barrier. */
		" bne %1, %z4, 3f \n"
		" .set pop \n"
		" move $1, %z5 \n"
		" .set "MIPS_ISA_ARCH_LEVEL" \n"
		"2: "user_sc("$1", "%2")" \n"
		" beqz $1, 1b \n"
		"3: " __SYNC_ELSE(full, loongson3_war, __WEAK_LLSC_MB) "\n"
		" .insn \n"
		" .set pop \n"
		" .section .fixup,\"ax\" \n"
		"4: li %0, %6 \n"
		" j 3b \n"
		" .previous \n"
		" .section __ex_table,\"a\" \n"
		" "__UA_ADDR "\t1b, 4b \n"
		" "__UA_ADDR "\t2b, 4b \n"
		" .previous \n"
		: "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
		: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
		  "i" (-EFAULT)
		: "memory");
	} else {
		/* No LL/SC: generic non-SMP fallback handles *uval itself. */
		return futex_atomic_cmpxchg_inatomic_local(uval, uaddr, oldval, newval);
	}

	*uval = val;
	return ret;
}
  194. #endif
  195. #endif /* _ASM_FUTEX_H */