atomic_ops.h

/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level functions for atomic operations
 *
 * Copyright IBM Corp. 1999, 2016
 */

#ifndef __ARCH_S390_ATOMIC_OPS__
#define __ARCH_S390_ATOMIC_OPS__

static inline int __atomic_read(const atomic_t *v)
{
	int c;

	asm volatile(
		"	l	%0,%1\n"
		: "=d" (c) : "R" (v->counter));
	return c;
}

static inline void __atomic_set(atomic_t *v, int i)
{
	asm volatile(
		"	st	%1,%0\n"
		: "=R" (v->counter) : "d" (i));
}

static inline s64 __atomic64_read(const atomic64_t *v)
{
	s64 c;

	asm volatile(
		"	lg	%0,%1\n"
		: "=d" (c) : "RT" (v->counter));
	return c;
}

static inline void __atomic64_set(atomic64_t *v, s64 i)
{
	asm volatile(
		"	stg	%1,%0\n"
		: "=RT" (v->counter) : "d" (i));
}
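
/*
 * These read/set helpers compile to a single load/store; the
 * architecture guarantees that naturally aligned word and doubleword
 * accesses are atomic, so no interlocked instruction is needed.
 *
 * Usage sketch (illustrative only, not part of this header):
 *
 *	atomic_t cnt = ATOMIC_INIT(0);
 *
 *	__atomic_set(&cnt, 42);
 *	WARN_ON(__atomic_read(&cnt) != 42);
 */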

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
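
/*
 * The z196 interlocked-access facility provides load-and-{add, and, or,
 * exclusive or} instructions (laa, lan, lao, lax, plus their 64-bit "g"
 * forms) that apply the operation to memory atomically and return the
 * old value, so no compare-and-swap loop is needed on these machines.
 */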

#define __ATOMIC_OP(op_name, op_type, op_string, op_barrier)	\
static inline op_type op_name(op_type val, op_type *ptr)	\
{								\
	op_type old;						\
								\
	asm volatile(						\
		op_string "	%[old],%[val],%[ptr]\n"		\
		op_barrier					\
		: [old] "=d" (old), [ptr] "+QS" (*ptr)		\
		: [val] "d" (val) : "cc", "memory");		\
	return old;						\
}

#define __ATOMIC_OPS(op_name, op_type, op_string)		\
	__ATOMIC_OP(op_name, op_type, op_string, "\n")		\
	__ATOMIC_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")
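
/*
 * Each __ATOMIC_OPS() invocation generates two flavors: a plain one,
 * and a _barrier one that appends "bcr 14,0", which acts as a full
 * memory barrier on machines with the fast-BCR-serialization facility
 * (available from z196 onwards, so always present on this branch).
 */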

__ATOMIC_OPS(__atomic_add, int, "laa")
__ATOMIC_OPS(__atomic_and, int, "lan")
__ATOMIC_OPS(__atomic_or, int, "lao")
__ATOMIC_OPS(__atomic_xor, int, "lax")

__ATOMIC_OPS(__atomic64_add, long, "laag")
__ATOMIC_OPS(__atomic64_and, long, "lang")
__ATOMIC_OPS(__atomic64_or, long, "laog")
__ATOMIC_OPS(__atomic64_xor, long, "laxg")
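
/*
 * For example, __ATOMIC_OPS(__atomic_add, int, "laa") above defines
 * __atomic_add() and __atomic_add_barrier(), both returning the value
 * the memory location held *before* the operation. A caller sketch:
 *
 *	int old, counter = 0;
 *
 *	old = __atomic_add(1, &counter);	// counter == 1, old == 0
 */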

#undef __ATOMIC_OPS
#undef __ATOMIC_OP

#define __ATOMIC_CONST_OP(op_name, op_type, op_string, op_barrier)	\
static __always_inline void op_name(op_type val, op_type *ptr)		\
{									\
	asm volatile(							\
		op_string "	%[ptr],%[val]\n"			\
		op_barrier						\
		: [ptr] "+QS" (*ptr) : [val] "i" (val) : "cc", "memory");\
}

#define __ATOMIC_CONST_OPS(op_name, op_type, op_string)		\
	__ATOMIC_CONST_OP(op_name, op_type, op_string, "\n")	\
	__ATOMIC_CONST_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")

__ATOMIC_CONST_OPS(__atomic_add_const, int, "asi")
__ATOMIC_CONST_OPS(__atomic64_add_const, long, "agsi")

#undef __ATOMIC_CONST_OPS
#undef __ATOMIC_CONST_OP
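
/*
 * The _const variants use asi/agsi, whose second operand is a signed
 * 8-bit immediate: the "i" constraint requires a compile-time constant,
 * and the value must fit in -128..127. Unlike the ops above they do not
 * return the old value, hence the void signature. Sketch:
 *
 *	__atomic_add_const(1, &v->counter);	// v is an atomic_t *
 */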

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */
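
/*
 * Machines older than z196 lack the load-and-op instructions, so the
 * same interfaces are emulated below with a compare-and-swap retry
 * loop. cs/csg is itself serializing, which is why the _barrier
 * variants need no extra barrier instruction on this branch.
 */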

#define __ATOMIC_OP(op_name, op_string)					\
static inline int op_name(int val, int *ptr)				\
{									\
	int old, new;							\
									\
	asm volatile(							\
		"0:	lr	%[new],%[old]\n"			\
		op_string "	%[new],%[val]\n"			\
		"	cs	%[old],%[new],%[ptr]\n"			\
		"	jl	0b"					\
		: [old] "=d" (old), [new] "=&d" (new), [ptr] "+Q" (*ptr)\
		: [val] "d" (val), "0" (*ptr) : "cc", "memory");	\
	return old;							\
}
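
/*
 * How the loop above works: the "0" (*ptr) input preloads [old] with
 * the current memory contents; each iteration derives [new] from [old],
 * and cs stores [new] only if memory still holds [old]. On a mismatch,
 * cs reloads [old] from memory and sets the condition code, and
 * "jl 0b" retries with the fresh value.
 */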

#define __ATOMIC_OPS(op_name, op_string)				\
	__ATOMIC_OP(op_name, op_string)					\
	__ATOMIC_OP(op_name##_barrier, op_string)

__ATOMIC_OPS(__atomic_add, "ar")
__ATOMIC_OPS(__atomic_and, "nr")
__ATOMIC_OPS(__atomic_or, "or")
__ATOMIC_OPS(__atomic_xor, "xr")

#undef __ATOMIC_OPS

#define __ATOMIC64_OP(op_name, op_string)				\
static inline long op_name(long val, long *ptr)				\
{									\
	long old, new;							\
									\
	asm volatile(							\
		"0:	lgr	%[new],%[old]\n"			\
		op_string "	%[new],%[val]\n"			\
		"	csg	%[old],%[new],%[ptr]\n"			\
		"	jl	0b"					\
		: [old] "=d" (old), [new] "=&d" (new), [ptr] "+QS" (*ptr)\
		: [val] "d" (val), "0" (*ptr) : "cc", "memory");	\
	return old;							\
}

#define __ATOMIC64_OPS(op_name, op_string)				\
	__ATOMIC64_OP(op_name, op_string)				\
	__ATOMIC64_OP(op_name##_barrier, op_string)

__ATOMIC64_OPS(__atomic64_add, "agr")
__ATOMIC64_OPS(__atomic64_and, "ngr")
__ATOMIC64_OPS(__atomic64_or, "ogr")
__ATOMIC64_OPS(__atomic64_xor, "xgr")

#undef __ATOMIC64_OPS

#define __atomic_add_const(val, ptr)		__atomic_add(val, ptr)
#define __atomic_add_const_barrier(val, ptr)	__atomic_add(val, ptr)
#define __atomic64_add_const(val, ptr)		__atomic64_add(val, ptr)
#define __atomic64_add_const_barrier(val, ptr)	__atomic64_add(val, ptr)
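
/*
 * Without asi/agsi, adding a small immediate is no cheaper than adding
 * a register, so the _const variants simply alias the generic add
 * helpers here.
 */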

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

static inline int __atomic_cmpxchg(int *ptr, int old, int new)
{
	asm volatile(
		"	cs	%[old],%[new],%[ptr]"
		: [old] "+d" (old), [ptr] "+Q" (*ptr)
		: [new] "d" (new)
		: "cc", "memory");
	return old;
}
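
/*
 * cs compares [old] with *ptr and stores [new] only on a match; either
 * way, [old] ends up holding the value actually found in memory, which
 * lets the caller decide whether the exchange took place.
 */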

static inline bool __atomic_cmpxchg_bool(int *ptr, int old, int new)
{
	int old_expected = old;

	asm volatile(
		"	cs	%[old],%[new],%[ptr]"
		: [old] "+d" (old), [ptr] "+Q" (*ptr)
		: [new] "d" (new)
		: "cc", "memory");
	return old == old_expected;
}
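
/*
 * A typical retry loop built on the _bool variant (a sketch, assuming
 * an atomic_t *v supplied by the caller):
 *
 *	int old, new;
 *
 *	do {
 *		old = __atomic_read(v);
 *		new = old | 1;
 *	} while (!__atomic_cmpxchg_bool(&v->counter, old, new));
 */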

static inline long __atomic64_cmpxchg(long *ptr, long old, long new)
{
	asm volatile(
		"	csg	%[old],%[new],%[ptr]"
		: [old] "+d" (old), [ptr] "+QS" (*ptr)
		: [new] "d" (new)
		: "cc", "memory");
	return old;
}

static inline bool __atomic64_cmpxchg_bool(long *ptr, long old, long new)
{
	long old_expected = old;

	asm volatile(
		"	csg	%[old],%[new],%[ptr]"
		: [old] "+d" (old), [ptr] "+QS" (*ptr)
		: [new] "d" (new)
		: "cc", "memory");
	return old == old_expected;
}

#endif /* __ARCH_S390_ATOMIC_OPS__ */