/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_CSKY_CMPXCHG_H
#define __ASM_CSKY_CMPXCHG_H

#ifdef CONFIG_SMP
#include <linux/bug.h>
#include <asm/barrier.h>
  7. #define __xchg_relaxed(new, ptr, size) \
  8. ({ \
  9. __typeof__(ptr) __ptr = (ptr); \
  10. __typeof__(new) __new = (new); \
  11. __typeof__(*(ptr)) __ret; \
  12. unsigned long tmp; \
  13. switch (size) { \
  14. case 2: { \
  15. u32 ret; \
  16. u32 shif = ((ulong)__ptr & 2) ? 16 : 0; \
  17. u32 mask = 0xffff << shif; \
  18. __ptr = (__typeof__(ptr))((ulong)__ptr & ~2); \
  19. __asm__ __volatile__ ( \
  20. "1: ldex.w %0, (%4)\n" \
  21. " and %1, %0, %2\n" \
  22. " or %1, %1, %3\n" \
  23. " stex.w %1, (%4)\n" \
  24. " bez %1, 1b\n" \
  25. : "=&r" (ret), "=&r" (tmp) \
  26. : "r" (~mask), \
  27. "r" ((u32)__new << shif), \
  28. "r" (__ptr) \
  29. : "memory"); \
  30. __ret = (__typeof__(*(ptr))) \
  31. ((ret & mask) >> shif); \
  32. break; \
  33. } \
  34. case 4: \
  35. asm volatile ( \
  36. "1: ldex.w %0, (%3) \n" \
  37. " mov %1, %2 \n" \
  38. " stex.w %1, (%3) \n" \
  39. " bez %1, 1b \n" \
  40. : "=&r" (__ret), "=&r" (tmp) \
  41. : "r" (__new), "r"(__ptr) \
  42. :); \
  43. break; \
  44. default: \
  45. BUILD_BUG(); \
  46. } \
  47. __ret; \
  48. })
  49. #define arch_xchg_relaxed(ptr, x) \
  50. (__xchg_relaxed((x), (ptr), sizeof(*(ptr))))
  51. #define __cmpxchg_relaxed(ptr, old, new, size) \
  52. ({ \
  53. __typeof__(ptr) __ptr = (ptr); \
  54. __typeof__(new) __new = (new); \
  55. __typeof__(new) __tmp; \
  56. __typeof__(old) __old = (old); \
  57. __typeof__(*(ptr)) __ret; \
  58. switch (size) { \
  59. case 4: \
  60. asm volatile ( \
  61. "1: ldex.w %0, (%3) \n" \
  62. " cmpne %0, %4 \n" \
  63. " bt 2f \n" \
  64. " mov %1, %2 \n" \
  65. " stex.w %1, (%3) \n" \
  66. " bez %1, 1b \n" \
  67. "2: \n" \
  68. : "=&r" (__ret), "=&r" (__tmp) \
  69. : "r" (__new), "r"(__ptr), "r"(__old) \
  70. :); \
  71. break; \
  72. default: \
  73. BUILD_BUG(); \
  74. } \
  75. __ret; \
  76. })
  77. #define arch_cmpxchg_relaxed(ptr, o, n) \
  78. (__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))
  79. #define __cmpxchg_acquire(ptr, old, new, size) \
  80. ({ \
  81. __typeof__(ptr) __ptr = (ptr); \
  82. __typeof__(new) __new = (new); \
  83. __typeof__(new) __tmp; \
  84. __typeof__(old) __old = (old); \
  85. __typeof__(*(ptr)) __ret; \
  86. switch (size) { \
  87. case 4: \
  88. asm volatile ( \
  89. "1: ldex.w %0, (%3) \n" \
  90. " cmpne %0, %4 \n" \
  91. " bt 2f \n" \
  92. " mov %1, %2 \n" \
  93. " stex.w %1, (%3) \n" \
  94. " bez %1, 1b \n" \
  95. ACQUIRE_FENCE \
  96. "2: \n" \
  97. : "=&r" (__ret), "=&r" (__tmp) \
  98. : "r" (__new), "r"(__ptr), "r"(__old) \
  99. :); \
  100. break; \
  101. default: \
  102. BUILD_BUG(); \
  103. } \
  104. __ret; \
  105. })
  106. #define arch_cmpxchg_acquire(ptr, o, n) \
  107. (__cmpxchg_acquire((ptr), (o), (n), sizeof(*(ptr))))
  108. #define __cmpxchg(ptr, old, new, size) \
  109. ({ \
  110. __typeof__(ptr) __ptr = (ptr); \
  111. __typeof__(new) __new = (new); \
  112. __typeof__(new) __tmp; \
  113. __typeof__(old) __old = (old); \
  114. __typeof__(*(ptr)) __ret; \
  115. switch (size) { \
  116. case 4: \
  117. asm volatile ( \
  118. RELEASE_FENCE \
  119. "1: ldex.w %0, (%3) \n" \
  120. " cmpne %0, %4 \n" \
  121. " bt 2f \n" \
  122. " mov %1, %2 \n" \
  123. " stex.w %1, (%3) \n" \
  124. " bez %1, 1b \n" \
  125. FULL_FENCE \
  126. "2: \n" \
  127. : "=&r" (__ret), "=&r" (__tmp) \
  128. : "r" (__new), "r"(__ptr), "r"(__old) \
  129. :); \
  130. break; \
  131. default: \
  132. BUILD_BUG(); \
  133. } \
  134. __ret; \
  135. })
  136. #define arch_cmpxchg(ptr, o, n) \
  137. (__cmpxchg((ptr), (o), (n), sizeof(*(ptr))))
  138. #define arch_cmpxchg_local(ptr, o, n) \
  139. (__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))
#else
#include <asm-generic/cmpxchg.h>
#endif

#endif /* __ASM_CSKY_CMPXCHG_H */