/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle ([email protected])
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/asm.h>
#include <asm/compiler.h>
#include <asm/sync.h>
/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
	__compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");

#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" __SYNC(full, loongson3_war) "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __stringify(SC_BEQZ) "	$1, 1b		\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: __LLSC_CLOBBER);					\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})
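
/*
 * A C-level sketch of the LL/SC loop above (illustrative, not the
 * generated code; load_linked() and store_conditional() are hypothetical
 * helpers standing in for the ll/sc and lld/scd instructions):
 *
 *	do {
 *		ret = load_linked(m);
 *	} while (!store_conditional(m, val));
 *	return ret;
 *
 * The conditional store fails, and the loop retries, if another CPU wrote
 * *m between the linked load and the store.
 */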

extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
				  unsigned int size);

static __always_inline
unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
	case 2:
		return __xchg_small(ptr, x, size);

	case 4:
		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

	case 8:
		if (!IS_ENABLED(CONFIG_64BIT))
			return __xchg_called_with_bad_pointer();

		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

	default:
		return __xchg_called_with_bad_pointer();
	}
}

#define arch_xchg(ptr, x)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * In the Loongson3 workaround case __xchg_asm() already	\
	 * contains a completion barrier prior to the LL, so we don't	\
	 * need to emit an extra one here.				\
	 */								\
	if (__SYNC_loongson3_war == 0)					\
		smp_mb__before_llsc();					\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
									\
	smp_llsc_mb();							\
									\
	__res;								\
})
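
/*
 * Example usage (illustrative; 'pending_work' is a hypothetical variable):
 * atomically fetch the current value and replace it with zero, so that
 * exactly one CPU consumes each set of pending bits.
 *
 *	static unsigned long pending_work;
 *
 *	unsigned long work = arch_xchg(&pending_work, 0);
 */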

#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" __SYNC(full, loongson3_war) "		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __stringify(SC_BEQZ) "	$1, 1b		\n"	\
		"	.set	pop				\n"	\
		"2:	" __SYNC(full, loongson3_war) "		\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: __LLSC_CLOBBER);					\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})
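
/*
 * C-level sketch of the loop above (illustrative; load_linked() and
 * store_conditional() are the same hypothetical helpers as before):
 *
 *	do {
 *		ret = load_linked(m);
 *		if (ret != old)
 *			break;		<- the "bne ... 2f" early exit
 *	} while (!store_conditional(m, new));
 *	return ret;
 *
 * The caller learns whether the swap happened by comparing the return
 * value against 'old'.
 */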

extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
				     unsigned long new, unsigned int size);

static __always_inline
unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
			unsigned long new, unsigned int size)
{
	switch (size) {
	case 1:
	case 2:
		return __cmpxchg_small(ptr, old, new, size);

	case 4:
		return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
				     (u32)old, new);

	case 8:
		/* lld/scd are only available for MIPS64 */
		if (!IS_ENABLED(CONFIG_64BIT))
			return __cmpxchg_called_with_bad_pointer();

		return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
				     (u64)old, new);

	default:
		return __cmpxchg_called_with_bad_pointer();
	}
}

#define arch_cmpxchg_local(ptr, old, new)				\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr),					\
			  (unsigned long)(__typeof__(*(ptr)))(old),	\
			  (unsigned long)(__typeof__(*(ptr)))(new),	\
			  sizeof(*(ptr))))

#define arch_cmpxchg(ptr, old, new)					\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * In the Loongson3 workaround case __cmpxchg_asm() already	\
	 * contains a completion barrier prior to the LL, so we don't	\
	 * need to emit an extra one here.				\
	 */								\
	if (__SYNC_loongson3_war == 0)					\
		smp_mb__before_llsc();					\
									\
	__res = arch_cmpxchg_local((ptr), (old), (new));		\
									\
	/*								\
	 * In the Loongson3 workaround case __cmpxchg_asm() already	\
	 * contains a completion barrier after the SC, so we don't	\
	 * need to emit an extra one here.				\
	 */								\
	if (__SYNC_loongson3_war == 0)					\
		smp_llsc_mb();						\
									\
	__res;								\
})
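
/*
 * Example usage (illustrative; 'counter' is a hypothetical variable): the
 * classic compare-and-swap retry loop for a lock-free read-modify-write.
 *
 *	static unsigned int counter;
 *
 *	unsigned int old, new;
 *	do {
 *		old = READ_ONCE(counter);
 *		new = old + 1;
 *	} while (arch_cmpxchg(&counter, old, new) != old);
 */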

#ifdef CONFIG_64BIT
#define arch_cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_local((ptr), (o), (n));				\
})

#define arch_cmpxchg64(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg((ptr), (o), (n));					\
})
#else

# include <asm-generic/cmpxchg-local.h>
# define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))

# ifdef CONFIG_SMP

static inline unsigned long __cmpxchg64(volatile void *ptr,
					unsigned long long old,
					unsigned long long new)
{
	unsigned long long tmp, ret;
	unsigned long flags;

	/*
	 * The assembly below has to combine 32 bit values into a 64 bit
	 * register, and split 64 bit values from one register into two. If we
	 * were to take an interrupt in the middle of this we'd only save the
	 * least significant 32 bits of each register & probably clobber the
	 * most significant 32 bits of the 64 bit values we're using. In order
	 * to avoid this we must disable interrupts.
	 */
	local_irq_save(flags);

	asm volatile(
	"	.set	push				\n"
	"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"
	/* Load 64 bits from ptr */
	"	" __SYNC(full, loongson3_war) "		\n"
	"1:	lld	%L0, %3		# __cmpxchg64	\n"
	"	.set	pop				\n"
	/*
	 * Split the 64 bit value we loaded into the 2 registers that hold the
	 * ret variable.
	 */
	"	dsra	%M0, %L0, 32			\n"
	"	sll	%L0, %L0, 0			\n"
	/*
	 * Compare ret against old, breaking out of the loop if they don't
	 * match.
	 */
	"	bne	%M0, %M4, 2f			\n"
	"	bne	%L0, %L4, 2f			\n"
	/*
	 * Combine the 32 bit halves from the 2 registers that hold the new
	 * variable into a single 64 bit register.
	 */
# if MIPS_ISA_REV >= 2
	"	move	%L1, %L5			\n"
	"	dins	%L1, %M5, 32, 32		\n"
# else
	"	dsll	%L1, %L5, 32			\n"
	"	dsrl	%L1, %L1, 32			\n"
	"	.set	noat				\n"
	"	dsll	$at, %M5, 32			\n"
	"	or	%L1, %L1, $at			\n"
	"	.set	at				\n"
# endif
	"	.set	push				\n"
	"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"
	/* Attempt to store new at ptr */
	"	scd	%L1, %2				\n"
	/* If we failed, loop! */
	"\t" __stringify(SC_BEQZ) "	%L1, 1b		\n"
	"2:	" __SYNC(full, loongson3_war) "		\n"
	"	.set	pop				\n"
	: "=&r"(ret),
	  "=&r"(tmp),
	  "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
	: GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
	  "r" (old),
	  "r" (new)
	: "memory");
	local_irq_restore(flags);

	return ret;
}
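
/*
 * C-level sketch of the register-pair handling above (illustrative only):
 * on a 32-bit kernel a 64-bit value occupies two registers, which the
 * %Mn (most significant half) and %Ln (least significant half) operand
 * modifiers name individually.
 *
 *	u32 hi = val >> 32;			dsra	%M0, %L0, 32
 *	u32 lo = (u32)val;			sll	%L0, %L0, 0
 *	u64 v  = ((u64)hi << 32) | lo;		dins, or dsll/dsrl + or
 */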

# define arch_cmpxchg64(ptr, o, n) ({					\
	unsigned long long __old = (__typeof__(*(ptr)))(o);		\
	unsigned long long __new = (__typeof__(*(ptr)))(n);		\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * We can only use cmpxchg64 if we know that the CPU supports	\
	 * 64-bits, ie. lld & scd. Our call to __cmpxchg64_unsupported	\
	 * will cause a build error unless cpu_has_64bits is a		\
	 * compile-time constant 1.					\
	 */								\
	if (cpu_has_64bits && kernel_uses_llsc) {			\
		smp_mb__before_llsc();					\
		__res = __cmpxchg64((ptr), __old, __new);		\
		smp_llsc_mb();						\
	} else {							\
		__res = __cmpxchg64_unsupported();			\
	}								\
									\
	__res;								\
})

# else /* !CONFIG_SMP */
#  define arch_cmpxchg64(ptr, o, n) arch_cmpxchg64_local((ptr), (o), (n))
# endif /* !CONFIG_SMP */
#endif /* !CONFIG_64BIT */

#endif /* __ASM_CMPXCHG_H */