/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Generic C implementation of atomic counter operations. Do not include in
 * machine independent code.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#ifdef CONFIG_SMP
  14. /* we can build all atomic primitives from cmpxchg */
  15. #define ATOMIC_OP(op, c_op) \
  16. static inline void generic_atomic_##op(int i, atomic_t *v) \
  17. { \
  18. int c, old; \
  19. \
  20. c = v->counter; \
  21. while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c) \
  22. c = old; \
  23. }
  24. #define ATOMIC_OP_RETURN(op, c_op) \
  25. static inline int generic_atomic_##op##_return(int i, atomic_t *v) \
  26. { \
  27. int c, old; \
  28. \
  29. c = v->counter; \
  30. while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c) \
  31. c = old; \
  32. \
  33. return c c_op i; \
  34. }
  35. #define ATOMIC_FETCH_OP(op, c_op) \
  36. static inline int generic_atomic_fetch_##op(int i, atomic_t *v) \
  37. { \
  38. int c, old; \
  39. \
  40. c = v->counter; \
  41. while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c) \
  42. c = old; \
  43. \
  44. return c; \
  45. }
#else

#include <linux/irqflags.h>
  48. #define ATOMIC_OP(op, c_op) \
  49. static inline void generic_atomic_##op(int i, atomic_t *v) \
  50. { \
  51. unsigned long flags; \
  52. \
  53. raw_local_irq_save(flags); \
  54. v->counter = v->counter c_op i; \
  55. raw_local_irq_restore(flags); \
  56. }
  57. #define ATOMIC_OP_RETURN(op, c_op) \
  58. static inline int generic_atomic_##op##_return(int i, atomic_t *v) \
  59. { \
  60. unsigned long flags; \
  61. int ret; \
  62. \
  63. raw_local_irq_save(flags); \
  64. ret = (v->counter = v->counter c_op i); \
  65. raw_local_irq_restore(flags); \
  66. \
  67. return ret; \
  68. }
  69. #define ATOMIC_FETCH_OP(op, c_op) \
  70. static inline int generic_atomic_fetch_##op(int i, atomic_t *v) \
  71. { \
  72. unsigned long flags; \
  73. int ret; \
  74. \
  75. raw_local_irq_save(flags); \
  76. ret = v->counter; \
  77. v->counter = v->counter c_op i; \
  78. raw_local_irq_restore(flags); \
  79. \
  80. return ret; \
  81. }
#endif /* CONFIG_SMP */
  83. ATOMIC_OP_RETURN(add, +)
  84. ATOMIC_OP_RETURN(sub, -)
  85. ATOMIC_FETCH_OP(add, +)
  86. ATOMIC_FETCH_OP(sub, -)
  87. ATOMIC_FETCH_OP(and, &)
  88. ATOMIC_FETCH_OP(or, |)
  89. ATOMIC_FETCH_OP(xor, ^)
  90. ATOMIC_OP(add, +)
  91. ATOMIC_OP(sub, -)
  92. ATOMIC_OP(and, &)
  93. ATOMIC_OP(or, |)
  94. ATOMIC_OP(xor, ^)
  95. #undef ATOMIC_FETCH_OP
  96. #undef ATOMIC_OP_RETURN
  97. #undef ATOMIC_OP
  98. #define arch_atomic_add_return generic_atomic_add_return
  99. #define arch_atomic_sub_return generic_atomic_sub_return
  100. #define arch_atomic_fetch_add generic_atomic_fetch_add
  101. #define arch_atomic_fetch_sub generic_atomic_fetch_sub
  102. #define arch_atomic_fetch_and generic_atomic_fetch_and
  103. #define arch_atomic_fetch_or generic_atomic_fetch_or
  104. #define arch_atomic_fetch_xor generic_atomic_fetch_xor
  105. #define arch_atomic_add generic_atomic_add
  106. #define arch_atomic_sub generic_atomic_sub
  107. #define arch_atomic_and generic_atomic_and
  108. #define arch_atomic_or generic_atomic_or
  109. #define arch_atomic_xor generic_atomic_xor
  110. #define arch_atomic_read(v) READ_ONCE((v)->counter)
  111. #define arch_atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
  112. #define arch_atomic_xchg(ptr, v) (arch_xchg(&(ptr)->counter, (v)))
  113. #define arch_atomic_cmpxchg(v, old, new) (arch_cmpxchg(&((v)->counter), (old), (new)))
#endif /* __ASM_GENERIC_ATOMIC_H */