atomic64_64.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * arch_atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline s64 arch_atomic64_read(const atomic64_t *v)
{
	return __READ_ONCE((v)->counter);
}

/**
 * arch_atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	__WRITE_ONCE(v->counter, i);
}

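/*
 * Illustrative usage sketch (not part of the original header): a 64-bit
 * statistics counter initialised at compile time, reset, and read back.
 * Kernel code would normally go through the generic atomic64_*() wrappers
 * rather than call the arch_*() primitives directly.
 *
 *	static atomic64_t bytes_sent = ATOMIC64_INIT(0);
 *
 *	arch_atomic64_set(&bytes_sent, 0);
 *	pr_info("sent: %lld bytes\n", arch_atomic64_read(&bytes_sent));
 */
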
/**
 * arch_atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter) : "memory");
}

/**
 * arch_atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void arch_atomic64_sub(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter) : "memory");
}

/**
 * arch_atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i);
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test

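/*
 * Illustrative sketch (not part of the original header): returning @n
 * units of a quota and acting only when the last unit comes back;
 * wake_up_consumers() is a hypothetical callback.
 *
 *	if (arch_atomic64_sub_and_test(n, &quota))
 *		wake_up_consumers();
 */
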
/**
 * arch_atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void arch_atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter) : "memory");
}
#define arch_atomic64_inc arch_atomic64_inc

/**
 * arch_atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void arch_atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter) : "memory");
}
#define arch_atomic64_dec arch_atomic64_dec

/**
 * arch_atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool arch_atomic64_dec_and_test(atomic64_t *v)
{
	return GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, e);
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test

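/*
 * Illustrative sketch (not part of the original header): a minimal
 * reference-count drop built on arch_atomic64_dec_and_test();
 * struct obj and its refcnt field are hypothetical.
 *
 *	static void obj_put(struct obj *o)
 *	{
 *		if (arch_atomic64_dec_and_test(&o->refcnt))
 *			kfree(o);
 *	}
 */
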
/**
 * arch_atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool arch_atomic64_inc_and_test(atomic64_t *v)
{
	return GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, e);
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test

/**
 * arch_atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, s, "er", i);
}
#define arch_atomic64_add_negative arch_atomic64_add_negative

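/*
 * Illustrative sketch (not part of the original header): charging @cost
 * against a signed budget and reacting if it went negative; throttle()
 * is a hypothetical response.
 *
 *	if (arch_atomic64_add_negative(-cost, &budget))
 *		throttle();
 */
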
/**
 * arch_atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}
#define arch_atomic64_add_return arch_atomic64_add_return

static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return(-i, v);
}
#define arch_atomic64_sub_return arch_atomic64_sub_return

static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

static inline s64 arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	return xadd(&v->counter, -i);
}
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub

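/*
 * Illustrative note (not part of the original header): both helpers are
 * built on XADD; arch_atomic64_add_return() yields the value after the
 * addition, arch_atomic64_fetch_add() the value before it.
 *
 *	s64 new = arch_atomic64_add_return(5, &v);	// old value + 5
 *	s64 old = arch_atomic64_fetch_add(5, &v);	// value prior to the add
 */
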
static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	return arch_try_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg

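/*
 * Illustrative sketch (not part of the original header): the usual
 * try_cmpxchg() loop, here clamping a counter to @limit. On failure,
 * @old is refreshed with the current value, so the body only needs to
 * recompute @new; the fetch_and/or/xor helpers below use the same
 * pattern. @delta and @limit are hypothetical.
 *
 *	s64 old = arch_atomic64_read(v), new;
 *
 *	do {
 *		new = min_t(s64, old + delta, limit);
 *	} while (!arch_atomic64_try_cmpxchg(v, &old, new));
 */
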
static inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new)
{
	return arch_xchg(&v->counter, new);
}
#define arch_atomic64_xchg arch_atomic64_xchg

static inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "andq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}

static inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val & i));
	return val;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and

static inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "orq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}

static inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val | i));
	return val;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or

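/*
 * Illustrative sketch (not part of the original header): atomically
 * setting a flag bit and learning whether it was already set;
 * MY_FLAG_BUSY and @state are hypothetical.
 *
 *	if (arch_atomic64_fetch_or(MY_FLAG_BUSY, &state) & MY_FLAG_BUSY)
 *		return -EBUSY;
 */
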
static inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "xorq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}

static inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val ^ i));
	return val;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

#endif /* _ASM_X86_ATOMIC64_64_H */