futex.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_CSKY_FUTEX_H
#define __ASM_CSKY_FUTEX_H

#ifndef CONFIG_SMP
#include <asm-generic/futex.h>
#else
#include <linux/atomic.h>
#include <linux/futex.h>
#include <linux/uaccess.h>
#include <linux/errno.h>
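
/*
 * __futex_atomic_op() wraps one futex operation in a
 * load-exclusive/store-exclusive (ldex.w/stex.w) retry loop: the old
 * value is loaded into %[ov], "insn" computes the new value into %[t],
 * and stex.w tries to publish it.  stex.w leaves 1 in %[t] on success
 * and 0 if the exclusive reservation was lost, in which case bez loops
 * back to retry.  A fault at label 1 or 2 is redirected to label 3 by
 * the __ex_table entries, which sets ret to -EFAULT.  The surrounding
 * pre/post fences give the operation full-barrier semantics.
 */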
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
{								\
	u32 tmp;						\
								\
	__atomic_pre_full_fence();				\
								\
	__asm__ __volatile__ (					\
	"1:	ldex.w	%[ov], %[u]			\n"	\
	"	" insn "				\n"	\
	"2:	stex.w	%[t], %[u]			\n"	\
	"	bez	%[t], 1b			\n"	\
	"	br	4f				\n"	\
	"3:	mov	%[r], %[e]			\n"	\
	"4:						\n"	\
	"	.section __ex_table,\"a\"		\n"	\
	"	.balign 4				\n"	\
	"	.long	1b, 3b				\n"	\
	"	.long	2b, 3b				\n"	\
	"	.previous				\n"	\
	: [r] "+r" (ret), [ov] "=&r" (oldval),			\
	  [u] "+m" (*uaddr), [t] "=&r" (tmp)			\
	: [op] "Jr" (oparg), [e] "Jr" (-EFAULT)			\
	: "memory");						\
								\
	__atomic_post_full_fence();				\
}
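
/*
 * arch_futex_atomic_op_inuser() atomically applies a FUTEX_OP_* to the
 * user word at uaddr and, on success, returns the replaced value via
 * *oval.  FUTEX_OP_SET stores oparg itself, so its insn moves %[op]
 * rather than the loaded value; FUTEX_OP_ANDN is an AND with the
 * complemented operand.
 */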
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret = 0;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %[t], %[op]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add %[t], %[ov], %[op]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or %[t], %[ov], %[op]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and %[t], %[ov], %[op]",
				  ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %[t], %[ov], %[op]",
				  ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}
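
/*
 * futex_atomic_cmpxchg_inatomic() atomically stores newval to the user
 * word at uaddr, but only if that word still holds oldval.  The value
 * actually found is returned via *uval, so the caller detects success
 * by comparing *uval against oldval.  cmpne/bt branch past the store
 * on a mismatch, leaving the word untouched.
 */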
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val, tmp;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__atomic_pre_full_fence();

	__asm__ __volatile__ (
	"1:	ldex.w	%[v], %[u]			\n"
	"	cmpne	%[v], %[ov]			\n"
	"	bt	4f				\n"
	"	mov	%[t], %[nv]			\n"
	"2:	stex.w	%[t], %[u]			\n"
	"	bez	%[t], 1b			\n"
	"	br	4f				\n"
	"3:	mov	%[r], %[e]			\n"
	"4:						\n"
	"	.section __ex_table,\"a\"		\n"
	"	.balign 4				\n"
	"	.long	1b, 3b				\n"
	"	.long	2b, 3b				\n"
	"	.previous				\n"
	: [r] "+r" (ret), [v] "=&r" (val), [u] "+m" (*uaddr),
	  [t] "=&r" (tmp)
	: [ov] "Jr" (oldval), [nv] "Jr" (newval), [e] "Jr" (-EFAULT)
	: "memory");

	__atomic_post_full_fence();

	*uval = val;
	return ret;
}

#endif /* CONFIG_SMP */
#endif /* __ASM_CSKY_FUTEX_H */