cmpxchg.h

/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 */

#ifndef __ASM_ARC_CMPXCHG_H
#define __ASM_ARC_CMPXCHG_H

#include <linux/build_bug.h>
#include <linux/types.h>

#include <asm/barrier.h>
#include <asm/smp.h>

#ifdef CONFIG_ARC_HAS_LLSC
/*
 * if (*ptr == @old)
 *	*ptr = @new
 */
#define __cmpxchg(ptr, old, new)				\
({								\
	__typeof__(*(ptr)) _prev;				\
								\
	__asm__ __volatile__(					\
	"1:	llock  %0, [%1]	\n"				\
	"	brne   %0, %2, 2f	\n"			\
	"	scond  %3, [%1]	\n"				\
	"	bnz     1b	\n"				\
	"2:		\n"					\
	: "=&r"(_prev)	/* Early clobber prevents reg reuse */	\
	: "r"(ptr),	/* Not "m": llock only supports reg */	\
	  "ir"(old),						\
	  "r"(new)	/* Not "ir": scond can't take LIMM */	\
	: "cc",							\
	  "memory");	/* gcc knows memory is clobbered */	\
								\
	_prev;							\
})
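
/*
 * The llock/scond pair above is ARC's load-locked/store-conditional:
 * scond succeeds only if nothing else wrote [%1] since the llock
 * (failure leaves the Z flag clear, so the bnz retries from the top),
 * while brne bails out early once the compare with @old already fails.
 */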
#define arch_cmpxchg_relaxed(ptr, old, new)			\
({								\
	__typeof__(ptr) _p_ = (ptr);				\
	__typeof__(*(ptr)) _o_ = (old);				\
	__typeof__(*(ptr)) _n_ = (new);				\
	__typeof__(*(ptr)) _prev_;				\
								\
	switch(sizeof(*(_p_))) {				\
	case 4:							\
		_prev_ = __cmpxchg(_p_, _o_, _n_);		\
		break;						\
	default:						\
		BUILD_BUG();					\
	}							\
	_prev_;							\
})
#else

#define arch_cmpxchg(ptr, old, new)				\
({								\
	volatile __typeof__(ptr) _p_ = (ptr);			\
	__typeof__(*(ptr)) _o_ = (old);				\
	__typeof__(*(ptr)) _n_ = (new);				\
	__typeof__(*(ptr)) _prev_;				\
	unsigned long __flags;					\
								\
	BUILD_BUG_ON(sizeof(_p_) != 4);				\
								\
	/*							\
	 * spin lock/unlock provide the needed smp_mb() before/after	\
	 */							\
	atomic_ops_lock(__flags);				\
	_prev_ = *_p_;						\
	if (_prev_ == _o_)					\
		*_p_ = _n_;					\
	atomic_ops_unlock(__flags);				\
	_prev_;							\
})

#endif
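
/*
 * Usage sketch (hypothetical caller, not part of this header): the
 * classic compare-and-swap retry loop, here a lock-free increment via
 * the generic cmpxchg() wrapper. @counter is an assumed example
 * variable.
 *
 *	u32 old, prev;
 *
 *	do {
 *		old = READ_ONCE(counter);
 *		prev = cmpxchg(&counter, old, old + 1);
 *	} while (prev != old);	// lost the race: retry with fresh value
 */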

/*
 * xchg
 */

#ifdef CONFIG_ARC_HAS_LLSC
#define __xchg(ptr, val)					\
({								\
	__asm__ __volatile__(					\
	"	ex  %0, [%1]	\n"	/* set new value */	\
	: "+r"(val)						\
	: "r"(ptr)						\
	: "memory");						\
	val;	/* get old value: "+r" left it in @val */	\
})
#define arch_xchg_relaxed(ptr, val)				\
({								\
	__typeof__(ptr) _p_ = (ptr);				\
	__typeof__(*(ptr)) _val_ = (val);			\
								\
	switch(sizeof(*(_p_))) {				\
	case 4:							\
		_val_ = __xchg(_p_, _val_);			\
		break;						\
	default:						\
		BUILD_BUG();					\
	}							\
	_val_;							\
})
#else /* !CONFIG_ARC_HAS_LLSC */

/*
 * The EX instruction is baseline and present in !LLSC too. But in this
 * regime it still needs to use the @atomic_ops_lock spinlock to allow
 * interop with cmpxchg(), which uses the spinlock in !LLSC
 * (llist.h uses xchg and cmpxchg on the same data).
 */
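
/*
 * Illustration of the hazard (assumed scenario, not from this file):
 * without the lock, CPU A could be inside the lock-based cmpxchg()
 * above, between its load of *ptr and its conditional store, when CPU
 * B's bare EX swaps the same word; A's store would then silently
 * overwrite B's value. Taking @atomic_ops_lock around EX as well
 * serializes the two paths.
 */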
#define arch_xchg(ptr, val)					\
({								\
	__typeof__(ptr) _p_ = (ptr);				\
	__typeof__(*(ptr)) _val_ = (val);			\
								\
	unsigned long __flags;					\
								\
	atomic_ops_lock(__flags);				\
								\
	__asm__ __volatile__(					\
	"	ex  %0, [%1]	\n"				\
	: "+r"(_val_)						\
	: "r"(_p_)						\
	: "memory");						\
								\
	atomic_ops_unlock(__flags);				\
	_val_;							\
})

#endif
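
/*
 * Usage sketch (hypothetical caller, not part of this header):
 * atomically publish a new value and act on the one it replaced, via
 * the generic xchg() wrapper. @flag and first_time_setup() are assumed
 * examples.
 *
 *	u32 was_set = xchg(&flag, 1);
 *	if (!was_set)
 *		first_time_setup();	// we won the race to set @flag
 */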
#endif /* __ASM_ARC_CMPXCHG_H */