atomic64_32.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_32_H
#define _ASM_X86_ATOMIC64_32_H

#include <linux/compiler.h>
#include <linux/types.h>
//#include <asm/cmpxchg.h>

/* A 64-bit atomic type */
typedef struct {
	s64 __aligned(8) counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }
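
/*
 * Illustrative usage sketch (not part of this header): generic kernel
 * code reaches these arch_ primitives through the plain atomic64_*
 * wrappers, e.g.:
 *
 *	static atomic64_t bytes_sent = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &bytes_sent);
 *	pr_debug("sent %lld bytes\n", atomic64_read(&bytes_sent));
 */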

#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
	ATOMIC64_EXPORT(atomic64_##sym)
#endif

#ifdef CONFIG_X86_CMPXCHG64
#define __alternative_atomic64(f, g, out, in...) \
	asm volatile("call %P[func]" \
		     : out : [func] "i" (atomic64_##g##_cx8), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
#else
#define __alternative_atomic64(f, g, out, in...) \
	alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
			 X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
	ATOMIC64_DECL_ONE(sym##_386)

ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif
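
/*
 * The 64-bit operations themselves live out of line in assembly.  When
 * the kernel is built for CMPXCHG8B-capable CPUs (CONFIG_X86_CMPXCHG64),
 * the *_cx8 variants are called directly; otherwise alternative_call()
 * patches in either the *_cx8 variant or a *_386 fallback at boot,
 * keyed on X86_FEATURE_CX8.
 */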

#define alternative_atomic64(f, out, in...) \
	__alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in)

ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);

#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT

/**
 * arch_atomic64_cmpxchg - cmpxchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @o: expected value
 * @n: new value
 *
 * Atomically sets @v to @n if it was equal to @o and returns
 * the old value.
 */
static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
{
	return arch_cmpxchg64(&v->counter, o, n);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

/**
 * arch_atomic64_xchg - xchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @n: value to assign
 *
 * Atomically xchgs the value of @v to @n and returns
 * the old value.
 */
static inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n)
{
	s64 o;
	unsigned high = (unsigned)(n >> 32);
	unsigned low = (unsigned)n;
	alternative_atomic64(xchg, "=&A" (o),
			     "S" (v), "b" (low), "c" (high)
			     : "memory");
	return o;
}
#define arch_atomic64_xchg arch_atomic64_xchg
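
/*
 * Register conventions for the out-of-line helpers: "A" names the
 * %edx:%eax pair carrying a 64-bit value, "S" is %esi, and "b"/"c"/"D"
 * are %ebx/%ecx/%edi.  Which register carries the atomic64_t pointer
 * varies per helper: %esi for most of them, %ecx for read and the
 * add/sub variants below.
 */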

/**
 * arch_atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: value to assign
 *
 * Atomically sets the value of @v to @i.
 */
static inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	unsigned high = (unsigned)(i >> 32);
	unsigned low = (unsigned)i;
	alternative_atomic64(set, /* no output */,
			     "S" (v), "b" (low), "c" (high)
			     : "eax", "edx", "memory");
}

/**
 * arch_atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v and returns it.
 */
static inline s64 arch_atomic64_read(const atomic64_t *v)
{
	s64 r;
	alternative_atomic64(read, "=&A" (r), "c" (v) : "memory");
	return r;
}

/**
 * arch_atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + *@v
 */
static inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(add_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
#define arch_atomic64_add_return arch_atomic64_add_return
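
/*
 * Note on the constraints above: "+A" (i) makes the %edx:%eax pair both
 * the input (the addend) and the output (the returned sum), so the
 * out-of-line helper leaves its result in place and the C code simply
 * returns i.  The pointer is passed, and may be clobbered, in %ecx
 * ("+c").
 */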

/*
 * Other variants with different arithmetic operators:
 */
static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(sub_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return

static inline s64 arch_atomic64_inc_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(inc_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return

static inline s64 arch_atomic64_dec_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(dec_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return

/**
 * arch_atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline s64 arch_atomic64_add(s64 i, atomic64_t *v)
{
	__alternative_atomic64(add, add_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

/**
 * arch_atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline s64 arch_atomic64_sub(s64 i, atomic64_t *v)
{
	__alternative_atomic64(sub, sub_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

/**
 * arch_atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void arch_atomic64_inc(atomic64_t *v)
{
	__alternative_atomic64(inc, inc_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_inc arch_atomic64_inc

/**
 * arch_atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void arch_atomic64_dec(atomic64_t *v)
{
	__alternative_atomic64(dec, dec_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_dec arch_atomic64_dec

/**
 * arch_atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was done, zero otherwise.
 */
static inline int arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	unsigned low = (unsigned)u;
	unsigned high = (unsigned)(u >> 32);
	alternative_atomic64(add_unless,
			     ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)),
			     "S" (v) : "memory");
	return (int)a;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless
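
/*
 * For add_unless, the limit @u is split across %ecx (low half) and %edi
 * (high half), while the addend @a travels in %edx:%eax via "+A"; the
 * helper reports back through that same pair, and the "non-zero if the
 * add was done" result documented above is what the (int)a cast
 * extracts.
 */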

static inline int arch_atomic64_inc_not_zero(atomic64_t *v)
{
	int r;
	alternative_atomic64(inc_not_zero, "=&a" (r),
			     "S" (v) : "ecx", "edx", "memory");
	return r;
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero

static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 r;
	alternative_atomic64(dec_if_positive, "=&A" (r),
			     "S" (v) : "ecx", "memory");
	return r;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive

#undef alternative_atomic64
#undef __alternative_atomic64
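
/*
 * No dedicated out-of-line helpers exist for the bitwise and fetch_*
 * operations below, so they are built as compare-and-swap loops on top
 * of arch_atomic64_cmpxchg(): start from a guess of 0 and keep retrying
 * with the freshly observed value until the cmpxchg succeeds.
 */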

static inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and

static inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or

static inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c + i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

#define arch_atomic64_fetch_sub(i, v)	arch_atomic64_fetch_add(-(i), (v))

#endif /* _ASM_X86_ATOMIC64_32_H */