cmpxchg.h

/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <[email protected]>,
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/mmdebug.h>
#include <linux/types.h>
#include <linux/bug.h>
void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long __xchg(unsigned long x,
					    unsigned long address, int size)
{
	unsigned long old;
	int shift;

	switch (size) {
	case 1:
		shift = (3 ^ (address & 3)) << 3;
		address ^= address & 3;
		asm volatile(
			" l %0,%1\n"
			"0: lr 0,%0\n"
			" nr 0,%3\n"
			" or 0,%2\n"
			" cs %0,0,%1\n"
			" jl 0b\n"
			: "=&d" (old), "+Q" (*(int *) address)
			: "d" ((x & 0xff) << shift), "d" (~(0xff << shift))
			: "memory", "cc", "0");
		return old >> shift;
	case 2:
		shift = (2 ^ (address & 2)) << 3;
		address ^= address & 2;
		asm volatile(
			" l %0,%1\n"
			"0: lr 0,%0\n"
			" nr 0,%3\n"
			" or 0,%2\n"
			" cs %0,0,%1\n"
			" jl 0b\n"
			: "=&d" (old), "+Q" (*(int *) address)
			: "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift))
			: "memory", "cc", "0");
		return old >> shift;
	case 4:
		asm volatile(
			" l %0,%1\n"
			"0: cs %0,%2,%1\n"
			" jl 0b\n"
			: "=&d" (old), "+Q" (*(int *) address)
			: "d" (x)
			: "memory", "cc");
		return old;
	case 8:
		asm volatile(
			" lg %0,%1\n"
			"0: csg %0,%2,%1\n"
			" jl 0b\n"
			: "=&d" (old), "+QS" (*(long *) address)
			: "d" (x)
			: "memory", "cc");
		return old;
	}
	__xchg_called_with_bad_pointer();
	return x;
}
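/*
 * Added explanatory note (not part of the original header): the 1- and 2-byte
 * cases above emulate a byte/halfword exchange with the word-sized CS
 * instruction, since CS only operates on aligned 4-byte words. For example,
 * for a byte at address 0x1002: shift = (3 ^ (0x1002 & 3)) << 3 = 8, and
 * "address ^= address & 3" rounds the address down to the containing word at
 * 0x1000 (s390 is big-endian, so offset 2 lives in bits 15..8 of that word).
 * The loop masks the target byte out of the word, ORs the new byte in at the
 * computed position, and repeats the CS until no other CPU has modified the
 * word in between.
 */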
#define arch_xchg(ptr, x)						\
({									\
	__typeof__(*(ptr)) __ret;					\
									\
	__ret = (__typeof__(*(ptr)))					\
		__xchg((unsigned long)(x), (unsigned long)(ptr),	\
		       sizeof(*(ptr)));					\
	__ret;								\
})
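/*
 * Illustrative sketch (not part of the original header): arch_xchg()
 * atomically stores a new value and returns the previous contents of the
 * location. The function and variable names below are hypothetical.
 */
static __always_inline unsigned char __sketch_set_flag(unsigned char *flag)
{
	/* Uses the 1-byte case of __xchg(); returns the old flag value. */
	return arch_xchg(flag, 1);
}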
void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long __cmpxchg(unsigned long address,
					       unsigned long old,
					       unsigned long new, int size)
{
	unsigned long prev, tmp;
	int shift;

	switch (size) {
	case 1:
		shift = (3 ^ (address & 3)) << 3;
		address ^= address & 3;
		asm volatile(
			" l %0,%2\n"
			"0: nr %0,%5\n"
			" lr %1,%0\n"
			" or %0,%3\n"
			" or %1,%4\n"
			" cs %0,%1,%2\n"
			" jnl 1f\n"
			" xr %1,%0\n"
			" nr %1,%5\n"
			" jnz 0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) address)
			: "d" ((old & 0xff) << shift),
			  "d" ((new & 0xff) << shift),
			  "d" (~(0xff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 2:
		shift = (2 ^ (address & 2)) << 3;
		address ^= address & 2;
		asm volatile(
			" l %0,%2\n"
			"0: nr %0,%5\n"
			" lr %1,%0\n"
			" or %0,%3\n"
			" or %1,%4\n"
			" cs %0,%1,%2\n"
			" jnl 1f\n"
			" xr %1,%0\n"
			" nr %1,%5\n"
			" jnz 0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) address)
			: "d" ((old & 0xffff) << shift),
			  "d" ((new & 0xffff) << shift),
			  "d" (~(0xffff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 4:
		asm volatile(
			" cs %0,%3,%1\n"
			: "=&d" (prev), "+Q" (*(int *) address)
			: "0" (old), "d" (new)
			: "memory", "cc");
		return prev;
	case 8:
		asm volatile(
			" csg %0,%3,%1\n"
			: "=&d" (prev), "+QS" (*(long *) address)
			: "0" (old), "d" (new)
			: "memory", "cc");
		return prev;
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}
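/*
 * Added explanatory note (not part of the original header): in the 1- and
 * 2-byte cases above, a failed CS can mean two different things. The xr/nr
 * sequence after "jnl 1f" checks which bits actually differ: if only the
 * other bytes sharing the word changed (a concurrent update to a neighbouring
 * byte), the loop retries with the freshly loaded word; if the target byte
 * itself no longer equals the expected old value, the code falls through and
 * returns the current contents, which is the normal cmpxchg failure result.
 */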
#define arch_cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) __ret;					\
									\
	__ret = (__typeof__(*(ptr)))					\
		__cmpxchg((unsigned long)(ptr), (unsigned long)(o),	\
			  (unsigned long)(n), sizeof(*(ptr)));		\
	__ret;								\
})
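/*
 * Illustrative sketch (not part of the original header): the typical
 * compare-and-swap retry loop built on arch_cmpxchg(). The function and
 * variable names are hypothetical.
 */
static __always_inline unsigned int __sketch_add_unless_zero(unsigned int *counter,
							     unsigned int add)
{
	unsigned int old, prev;

	/*
	 * Re-read and retry until the swap succeeds or the counter is found
	 * to be zero. (A real caller would typically read the initial value
	 * with READ_ONCE().)
	 */
	old = *counter;
	while (old != 0) {
		prev = arch_cmpxchg(counter, old, old + add);
		if (prev == old)
			break;
		old = prev;
	}
	return old;
}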
#define arch_cmpxchg64		arch_cmpxchg
#define arch_cmpxchg_local	arch_cmpxchg
#define arch_cmpxchg64_local	arch_cmpxchg

#define system_has_cmpxchg_double()	1
static __always_inline int __cmpxchg_double(unsigned long p1, unsigned long p2,
					    unsigned long o1, unsigned long o2,
					    unsigned long n1, unsigned long n2)
{
	union register_pair old = { .even = o1, .odd = o2, };
	union register_pair new = { .even = n1, .odd = n2, };
	int cc;

	asm volatile(
		" cdsg %[old],%[new],%[ptr]\n"
		" ipm %[cc]\n"
		" srl %[cc],28\n"
		: [cc] "=&d" (cc), [old] "+&d" (old.pair)
		: [new] "d" (new.pair),
		  [ptr] "QS" (*(unsigned long *)p1), "Q" (*(unsigned long *)p2)
		: "memory", "cc");
	return !cc;
}
#define arch_cmpxchg_double(p1, p2, o1, o2, n1, n2)			\
({									\
	typeof(p1) __p1 = (p1);						\
	typeof(p2) __p2 = (p2);						\
									\
	BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));			\
	BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));			\
	VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\
	__cmpxchg_double((unsigned long)__p1, (unsigned long)__p2,	\
			 (unsigned long)(o1), (unsigned long)(o2),	\
			 (unsigned long)(n1), (unsigned long)(n2));	\
})
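/*
 * Illustrative sketch (not part of the original header): arch_cmpxchg_double()
 * updates two adjacent longs as one atomic unit, commonly used to pair a
 * pointer with a generation counter and avoid ABA problems. The structure and
 * function names are hypothetical; the two words must be contiguous, and the
 * CDSG instruction additionally requires the pair to be quadword (16-byte)
 * aligned.
 */
struct __sketch_tagged_ptr {
	unsigned long ptr;
	unsigned long tag;
} __attribute__((aligned(16)));

static __always_inline int __sketch_try_update(struct __sketch_tagged_ptr *tp,
					       unsigned long old_ptr,
					       unsigned long old_tag,
					       unsigned long new_ptr)
{
	/* Returns 1 if both words still matched and were replaced, 0 otherwise. */
	return arch_cmpxchg_double(&tp->ptr, &tp->tag,
				   old_ptr, old_tag,
				   new_ptr, old_tag + 1);
}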
#endif /* __ASM_CMPXCHG_H */