/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/atomic.h
 *
 * Copyright (C) 1996 Russell King.
 * Copyright (C) 2002 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 */
  9. #ifndef __ASM_ATOMIC_H
  10. #define __ASM_ATOMIC_H
  11. #include <linux/compiler.h>
  12. #include <linux/types.h>
  13. #include <asm/barrier.h>
  14. #include <asm/cmpxchg.h>
  15. #include <asm/lse.h>
  16. #define ATOMIC_OP(op) \
  17. static __always_inline void arch_##op(int i, atomic_t *v) \
  18. { \
  19. __lse_ll_sc_body(op, i, v); \
  20. }
  21. ATOMIC_OP(atomic_andnot)
  22. ATOMIC_OP(atomic_or)
  23. ATOMIC_OP(atomic_xor)
  24. ATOMIC_OP(atomic_add)
  25. ATOMIC_OP(atomic_and)
  26. ATOMIC_OP(atomic_sub)
  27. #undef ATOMIC_OP
  28. #define ATOMIC_FETCH_OP(name, op) \
  29. static __always_inline int arch_##op##name(int i, atomic_t *v) \
  30. { \
  31. return __lse_ll_sc_body(op##name, i, v); \
  32. }
  33. #define ATOMIC_FETCH_OPS(op) \
  34. ATOMIC_FETCH_OP(_relaxed, op) \
  35. ATOMIC_FETCH_OP(_acquire, op) \
  36. ATOMIC_FETCH_OP(_release, op) \
  37. ATOMIC_FETCH_OP( , op)
  38. ATOMIC_FETCH_OPS(atomic_fetch_andnot)
  39. ATOMIC_FETCH_OPS(atomic_fetch_or)
  40. ATOMIC_FETCH_OPS(atomic_fetch_xor)
  41. ATOMIC_FETCH_OPS(atomic_fetch_add)
  42. ATOMIC_FETCH_OPS(atomic_fetch_and)
  43. ATOMIC_FETCH_OPS(atomic_fetch_sub)
  44. ATOMIC_FETCH_OPS(atomic_add_return)
  45. ATOMIC_FETCH_OPS(atomic_sub_return)
  46. #undef ATOMIC_FETCH_OP
  47. #undef ATOMIC_FETCH_OPS
  48. #define ATOMIC64_OP(op) \
  49. static __always_inline void arch_##op(long i, atomic64_t *v) \
  50. { \
  51. __lse_ll_sc_body(op, i, v); \
  52. }
  53. ATOMIC64_OP(atomic64_andnot)
  54. ATOMIC64_OP(atomic64_or)
  55. ATOMIC64_OP(atomic64_xor)
  56. ATOMIC64_OP(atomic64_add)
  57. ATOMIC64_OP(atomic64_and)
  58. ATOMIC64_OP(atomic64_sub)
  59. #undef ATOMIC64_OP
  60. #define ATOMIC64_FETCH_OP(name, op) \
  61. static __always_inline long arch_##op##name(long i, atomic64_t *v) \
  62. { \
  63. return __lse_ll_sc_body(op##name, i, v); \
  64. }
  65. #define ATOMIC64_FETCH_OPS(op) \
  66. ATOMIC64_FETCH_OP(_relaxed, op) \
  67. ATOMIC64_FETCH_OP(_acquire, op) \
  68. ATOMIC64_FETCH_OP(_release, op) \
  69. ATOMIC64_FETCH_OP( , op)
  70. ATOMIC64_FETCH_OPS(atomic64_fetch_andnot)
  71. ATOMIC64_FETCH_OPS(atomic64_fetch_or)
  72. ATOMIC64_FETCH_OPS(atomic64_fetch_xor)
  73. ATOMIC64_FETCH_OPS(atomic64_fetch_add)
  74. ATOMIC64_FETCH_OPS(atomic64_fetch_and)
  75. ATOMIC64_FETCH_OPS(atomic64_fetch_sub)
  76. ATOMIC64_FETCH_OPS(atomic64_add_return)
  77. ATOMIC64_FETCH_OPS(atomic64_sub_return)
  78. #undef ATOMIC64_FETCH_OP
  79. #undef ATOMIC64_FETCH_OPS
  80. static __always_inline long arch_atomic64_dec_if_positive(atomic64_t *v)
  81. {
  82. return __lse_ll_sc_body(atomic64_dec_if_positive, v);
  83. }
  84. #define arch_atomic_read(v) __READ_ONCE((v)->counter)
  85. #define arch_atomic_set(v, i) __WRITE_ONCE(((v)->counter), (i))
  86. #define arch_atomic_add_return_relaxed arch_atomic_add_return_relaxed
  87. #define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
  88. #define arch_atomic_add_return_release arch_atomic_add_return_release
  89. #define arch_atomic_add_return arch_atomic_add_return
  90. #define arch_atomic_sub_return_relaxed arch_atomic_sub_return_relaxed
  91. #define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
  92. #define arch_atomic_sub_return_release arch_atomic_sub_return_release
  93. #define arch_atomic_sub_return arch_atomic_sub_return
  94. #define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add_relaxed
  95. #define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
  96. #define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
  97. #define arch_atomic_fetch_add arch_atomic_fetch_add
  98. #define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub_relaxed
  99. #define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
  100. #define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
  101. #define arch_atomic_fetch_sub arch_atomic_fetch_sub
  102. #define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and_relaxed
  103. #define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
  104. #define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
  105. #define arch_atomic_fetch_and arch_atomic_fetch_and
  106. #define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
  107. #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
  108. #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
  109. #define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
  110. #define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or_relaxed
  111. #define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
  112. #define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
  113. #define arch_atomic_fetch_or arch_atomic_fetch_or
  114. #define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor_relaxed
  115. #define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
  116. #define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
  117. #define arch_atomic_fetch_xor arch_atomic_fetch_xor
  118. #define arch_atomic_xchg_relaxed(v, new) \
  119. arch_xchg_relaxed(&((v)->counter), (new))
  120. #define arch_atomic_xchg_acquire(v, new) \
  121. arch_xchg_acquire(&((v)->counter), (new))
  122. #define arch_atomic_xchg_release(v, new) \
  123. arch_xchg_release(&((v)->counter), (new))
  124. #define arch_atomic_xchg(v, new) \
  125. arch_xchg(&((v)->counter), (new))
  126. #define arch_atomic_cmpxchg_relaxed(v, old, new) \
  127. arch_cmpxchg_relaxed(&((v)->counter), (old), (new))
  128. #define arch_atomic_cmpxchg_acquire(v, old, new) \
  129. arch_cmpxchg_acquire(&((v)->counter), (old), (new))
  130. #define arch_atomic_cmpxchg_release(v, old, new) \
  131. arch_cmpxchg_release(&((v)->counter), (old), (new))
  132. #define arch_atomic_cmpxchg(v, old, new) \
  133. arch_cmpxchg(&((v)->counter), (old), (new))
  134. #define arch_atomic_andnot arch_atomic_andnot
  135. /*
  136. * 64-bit arch_atomic operations.
  137. */
  138. #define ATOMIC64_INIT ATOMIC_INIT
  139. #define arch_atomic64_read arch_atomic_read
  140. #define arch_atomic64_set arch_atomic_set
  141. #define arch_atomic64_add_return_relaxed arch_atomic64_add_return_relaxed
  142. #define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
  143. #define arch_atomic64_add_return_release arch_atomic64_add_return_release
  144. #define arch_atomic64_add_return arch_atomic64_add_return
  145. #define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return_relaxed
  146. #define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
  147. #define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
  148. #define arch_atomic64_sub_return arch_atomic64_sub_return
  149. #define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add_relaxed
  150. #define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
  151. #define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
  152. #define arch_atomic64_fetch_add arch_atomic64_fetch_add
  153. #define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub_relaxed
  154. #define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
  155. #define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
  156. #define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
  157. #define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and_relaxed
  158. #define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
  159. #define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
  160. #define arch_atomic64_fetch_and arch_atomic64_fetch_and
  161. #define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
  162. #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
  163. #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
  164. #define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
  165. #define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or_relaxed
  166. #define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
  167. #define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
  168. #define arch_atomic64_fetch_or arch_atomic64_fetch_or
  169. #define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor_relaxed
  170. #define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
  171. #define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
  172. #define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
  173. #define arch_atomic64_xchg_relaxed arch_atomic_xchg_relaxed
  174. #define arch_atomic64_xchg_acquire arch_atomic_xchg_acquire
  175. #define arch_atomic64_xchg_release arch_atomic_xchg_release
  176. #define arch_atomic64_xchg arch_atomic_xchg
  177. #define arch_atomic64_cmpxchg_relaxed arch_atomic_cmpxchg_relaxed
  178. #define arch_atomic64_cmpxchg_acquire arch_atomic_cmpxchg_acquire
  179. #define arch_atomic64_cmpxchg_release arch_atomic_cmpxchg_release
  180. #define arch_atomic64_cmpxchg arch_atomic_cmpxchg
  181. #define arch_atomic64_andnot arch_atomic64_andnot
  182. #define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
  183. #endif /* __ASM_ATOMIC_H */