/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
  5. #ifndef _ASM_FUTEX_H
  6. #define _ASM_FUTEX_H
  7. #include <linux/futex.h>
  8. #include <linux/uaccess.h>
  9. #include <asm/barrier.h>
  10. #include <asm/errno.h>
  11. #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
  12. { \
  13. __asm__ __volatile__( \
  14. "1: ll.w %1, %4 # __futex_atomic_op\n" \
  15. " " insn " \n" \
  16. "2: sc.w $t0, %2 \n" \
  17. " beqz $t0, 1b \n" \
  18. "3: \n" \
  19. " .section .fixup,\"ax\" \n" \
  20. "4: li.w %0, %6 \n" \
  21. " b 3b \n" \
  22. " .previous \n" \
  23. " .section __ex_table,\"a\" \n" \
  24. " "__UA_ADDR "\t1b, 4b \n" \
  25. " "__UA_ADDR "\t2b, 4b \n" \
  26. " .previous \n" \
  27. : "=r" (ret), "=&r" (oldval), \
  28. "=ZC" (*uaddr) \
  29. : "0" (0), "ZC" (*uaddr), "Jr" (oparg), \
  30. "i" (-EFAULT) \
  31. : "memory", "t0"); \
  32. }
  33. static inline int
  34. arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
  35. {
  36. int oldval = 0, ret = 0;
  37. pagefault_disable();
  38. switch (op) {
  39. case FUTEX_OP_SET:
  40. __futex_atomic_op("move $t0, %z5", ret, oldval, uaddr, oparg);
  41. break;
  42. case FUTEX_OP_ADD:
  43. __futex_atomic_op("add.w $t0, %1, %z5", ret, oldval, uaddr, oparg);
  44. break;
  45. case FUTEX_OP_OR:
  46. __futex_atomic_op("or $t0, %1, %z5", ret, oldval, uaddr, oparg);
  47. break;
  48. case FUTEX_OP_ANDN:
  49. __futex_atomic_op("and $t0, %1, %z5", ret, oldval, uaddr, ~oparg);
  50. break;
  51. case FUTEX_OP_XOR:
  52. __futex_atomic_op("xor $t0, %1, %z5", ret, oldval, uaddr, oparg);
  53. break;
  54. default:
  55. ret = -ENOSYS;
  56. }
  57. pagefault_enable();
  58. if (!ret)
  59. *oval = oldval;
  60. return ret;
  61. }
  62. static inline int
  63. futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval, u32 newval)
  64. {
  65. int ret = 0;
  66. u32 val = 0;
  67. if (!access_ok(uaddr, sizeof(u32)))
  68. return -EFAULT;
  69. __asm__ __volatile__(
  70. "# futex_atomic_cmpxchg_inatomic \n"
  71. "1: ll.w %1, %3 \n"
  72. " bne %1, %z4, 3f \n"
  73. " move $t0, %z5 \n"
  74. "2: sc.w $t0, %2 \n"
  75. " beqz $t0, 1b \n"
  76. "3: \n"
  77. __WEAK_LLSC_MB
  78. " .section .fixup,\"ax\" \n"
  79. "4: li.d %0, %6 \n"
  80. " b 3b \n"
  81. " .previous \n"
  82. " .section __ex_table,\"a\" \n"
  83. " "__UA_ADDR "\t1b, 4b \n"
  84. " "__UA_ADDR "\t2b, 4b \n"
  85. " .previous \n"
  86. : "+r" (ret), "=&r" (val), "=ZC" (*uaddr)
  87. : "ZC" (*uaddr), "Jr" (oldval), "Jr" (newval),
  88. "i" (-EFAULT)
  89. : "memory", "t0");
  90. *uval = val;
  91. return ret;
  92. }
  93. #endif /* _ASM_FUTEX_H */