atomic.h

/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright IBM Corp. 1999, 2016
 * Author(s): Martin Schwidefsky <[email protected]>,
 *	      Denis Joseph Barrow,
 *	      Arnd Bergmann,
 */

#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/atomic_ops.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>
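
/*
 * 32-bit (atomic_t) operations. Each inline helper maps directly onto one
 * of the __atomic_* primitives from <asm/atomic_ops.h>, and each one is
 * then exposed via a same-named #define so the generic atomic code knows
 * the architecture provides it.
 */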
static inline int arch_atomic_read(const atomic_t *v)
{
	return __atomic_read(v);
}
#define arch_atomic_read arch_atomic_read

static inline void arch_atomic_set(atomic_t *v, int i)
{
	__atomic_set(v, i);
}
#define arch_atomic_set arch_atomic_set
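
/*
 * __atomic_add_barrier() returns the value the counter held before the
 * addition, so fetch_add can return it unchanged, while add_return adds
 * i once more to produce the new value.
 */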
static inline int arch_atomic_add_return(int i, atomic_t *v)
{
	return __atomic_add_barrier(i, &v->counter) + i;
}
#define arch_atomic_add_return arch_atomic_add_return

static inline int arch_atomic_fetch_add(int i, atomic_t *v)
{
	return __atomic_add_barrier(i, &v->counter);
}
#define arch_atomic_fetch_add arch_atomic_fetch_add

static inline void arch_atomic_add(int i, atomic_t *v)
{
	__atomic_add(i, &v->counter);
}
#define arch_atomic_add arch_atomic_add

#define arch_atomic_sub(_i, _v)		arch_atomic_add(-(int)(_i), _v)
#define arch_atomic_sub_return(_i, _v)	arch_atomic_add_return(-(int)(_i), _v)
#define arch_atomic_fetch_sub(_i, _v)	arch_atomic_fetch_add(-(int)(_i), _v)

#define ATOMIC_OPS(op)							\
static inline void arch_atomic_##op(int i, atomic_t *v)		\
{									\
	__atomic_##op(i, &v->counter);					\
}									\
static inline int arch_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	return __atomic_##op##_barrier(i, &v->counter);			\
}
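
/*
 * For example, ATOMIC_OPS(and) expands to:
 *
 *	static inline void arch_atomic_and(int i, atomic_t *v)
 *	{
 *		__atomic_and(i, &v->counter);
 *	}
 *	static inline int arch_atomic_fetch_and(int i, atomic_t *v)
 *	{
 *		return __atomic_and_barrier(i, &v->counter);
 *	}
 */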
ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS

#define arch_atomic_and			arch_atomic_and
#define arch_atomic_or			arch_atomic_or
#define arch_atomic_xor			arch_atomic_xor
#define arch_atomic_fetch_and		arch_atomic_fetch_and
#define arch_atomic_fetch_or		arch_atomic_fetch_or
#define arch_atomic_fetch_xor		arch_atomic_fetch_xor
#define arch_atomic_xchg(v, new)	(arch_xchg(&((v)->counter), new))

static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return __atomic_cmpxchg(&v->counter, old, new);
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
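
/*
 * 64-bit (atomic64_t) operations. These mirror the 32-bit helpers above;
 * the (long *) casts of &v->counter are safe because long is 64 bits wide
 * on s390.
 */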
#define ATOMIC64_INIT(i)  { (i) }

static inline s64 arch_atomic64_read(const atomic64_t *v)
{
	return __atomic64_read(v);
}
#define arch_atomic64_read arch_atomic64_read

static inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	__atomic64_set(v, i);
}
#define arch_atomic64_set arch_atomic64_set

static inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	return __atomic64_add_barrier(i, (long *)&v->counter) + i;
}
#define arch_atomic64_add_return arch_atomic64_add_return

static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	return __atomic64_add_barrier(i, (long *)&v->counter);
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

static inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
	__atomic64_add(i, (long *)&v->counter);
}
#define arch_atomic64_add arch_atomic64_add

#define arch_atomic64_xchg(v, new)	(arch_xchg(&((v)->counter), new))

static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	return __atomic64_cmpxchg((long *)&v->counter, old, new);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

#define ATOMIC64_OPS(op)						\
static inline void arch_atomic64_##op(s64 i, atomic64_t *v)		\
{									\
	__atomic64_##op(i, (long *)&v->counter);			\
}									\
static inline long arch_atomic64_fetch_##op(s64 i, atomic64_t *v)	\
{									\
	return __atomic64_##op##_barrier(i, (long *)&v->counter);	\
}

ATOMIC64_OPS(and)
ATOMIC64_OPS(or)
ATOMIC64_OPS(xor)

#undef ATOMIC64_OPS

#define arch_atomic64_and		arch_atomic64_and
#define arch_atomic64_or		arch_atomic64_or
#define arch_atomic64_xor		arch_atomic64_xor
#define arch_atomic64_fetch_and		arch_atomic64_fetch_and
#define arch_atomic64_fetch_or		arch_atomic64_fetch_or
#define arch_atomic64_fetch_xor		arch_atomic64_fetch_xor

#define arch_atomic64_sub_return(_i, _v) arch_atomic64_add_return(-(s64)(_i), _v)
#define arch_atomic64_fetch_sub(_i, _v)	 arch_atomic64_fetch_add(-(s64)(_i), _v)
#define arch_atomic64_sub(_i, _v)	 arch_atomic64_add(-(s64)(_i), _v)

#endif /* __ARCH_S390_ATOMIC__ */