barrier.h

/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H
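
/*
 * "dbar 0" is the LoongArch full completion barrier: every memory
 * access before it must complete before any access after it is
 * issued.  All of the barrier flavours below therefore expand to
 * this single instruction.
 */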
#define __sync()	__asm__ __volatile__("dbar 0" : : : "memory")

#define fast_wmb()	__sync()
#define fast_rmb()	__sync()
#define fast_mb()	__sync()
#define fast_iob()	__sync()
#define wbflush()	__sync()

#define wmb()		fast_wmb()
#define rmb()		fast_rmb()
#define mb()		fast_mb()
#define iob()		fast_iob()

#define __smp_mb()	__asm__ __volatile__("dbar 0" : : : "memory")
#define __smp_rmb()	__asm__ __volatile__("dbar 0" : : : "memory")
#define __smp_wmb()	__asm__ __volatile__("dbar 0" : : : "memory")
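
/*
 * __WEAK_LLSC_MB is spliced into inline-asm LL/SC sequences elsewhere
 * in the port (the cmpxchg and futex code, for instance) to order the
 * loop against surrounding accesses; UP builds need no instruction.
 */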
#ifdef CONFIG_SMP
#define __WEAK_LLSC_MB		"	dbar 0	\n"
#else
#define __WEAK_LLSC_MB		"		\n"
#endif
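
/*
 * The port's atomic ops are built on the fully ordered AM*_db
 * instructions, so no extra fence is needed around them: a compiler
 * barrier is sufficient.
 */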
#define __smp_mb__before_atomic()	barrier()
#define __smp_mb__after_atomic()	barrier()

/**
 * array_index_mask_nospec() - generate a ~0 mask when index < size, 0 otherwise
 * @index: array element index
 * @size: number of elements in array
 *
 * Returns:
 *     0 - (@index < @size)
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long index,
						    unsigned long size)
{
	unsigned long mask;

	__asm__ __volatile__(
		"sltu	%0, %1, %2\n\t"		/* mask = (index < size) ? 1 : 0 */
#if (__SIZEOF_LONG__ == 4)
		"sub.w	%0, $zero, %0\n\t"	/* mask = 0 - mask, i.e. 0 or ~0UL */
#elif (__SIZEOF_LONG__ == 8)
		"sub.d	%0, $zero, %0\n\t"
#endif
		: "=r" (mask)
		: "r" (index), "r" (size)
		:);

	return mask;
}
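
/*
 * Illustrative use (a sketch, not part of this header): clamp a
 * user-controlled index so that a mis-speculated bounds check reads
 * element 0 instead of attacker-chosen memory:
 *
 *	if (idx < ARRAY_SIZE(arr)) {
 *		idx &= array_index_mask_nospec(idx, ARRAY_SIZE(arr));
 *		val = arr[idx];
 *	}
 *
 * Generic code normally uses the array_index_nospec() wrapper from
 * <linux/nospec.h>, which is built on this primitive.
 */

/*
 * Load-acquire: for 8- and 16-bit accesses, a plain load followed by a
 * full barrier; for 32- and 64-bit accesses, a single "amor_db" (atomic
 * OR with the constant 0 held in __tmp) both returns the loaded value
 * and, via its _db suffix, provides the required ordering.
 */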
#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	unsigned long __tmp = 0;					\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		*(__u8 *)__u.__c = *(volatile __u8 *)p;			\
		__smp_mb();						\
		break;							\
	case 2:								\
		*(__u16 *)__u.__c = *(volatile __u16 *)p;		\
		__smp_mb();						\
		break;							\
	case 4:								\
		__asm__ __volatile__(					\
		"amor_db.w %[val], %[tmp], %[mem]	\n"		\
		: [val] "=&r" (*(__u32 *)__u.__c)			\
		: [mem] "ZB" (*(u32 *) p), [tmp] "r" (__tmp)		\
		: "memory");						\
		break;							\
	case 8:								\
		__asm__ __volatile__(					\
		"amor_db.d %[val], %[tmp], %[mem]	\n"		\
		: [val] "=&r" (*(__u64 *)__u.__c)			\
		: [mem] "ZB" (*(u64 *) p), [tmp] "r" (__tmp)		\
		: "memory");						\
		break;							\
	}								\
	(typeof(*p))__u.__val;						\
})
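
/*
 * Store-release: the mirror image. For 32- and 64-bit stores a single
 * "amswap_db" writes the new value with barrier semantics; the old
 * value it swaps out is discarded into __tmp.
 *
 * Typical acquire/release pairing (illustrative sketch; "data" and
 * "ready" are made-up variables):
 *
 *	writer:
 *		data = 42;
 *		smp_store_release(&ready, 1);
 *	reader:
 *		while (!smp_load_acquire(&ready))
 *			cpu_relax();
 *		... reads of data now observe 42 ...
 */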
#define __smp_store_release(p, v)					\
do {									\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	unsigned long __tmp;						\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		__smp_mb();						\
		*(volatile __u8 *)p = *(__u8 *)__u.__c;			\
		break;							\
	case 2:								\
		__smp_mb();						\
		*(volatile __u16 *)p = *(__u16 *)__u.__c;		\
		break;							\
	case 4:								\
		__asm__ __volatile__(					\
		"amswap_db.w %[tmp], %[val], %[mem]	\n"		\
		: [mem] "+ZB" (*(u32 *)p), [tmp] "=&r" (__tmp)		\
		: [val] "r" (*(__u32 *)__u.__c)				\
		: );							\
		break;							\
	case 8:								\
		__asm__ __volatile__(					\
		"amswap_db.d %[tmp], %[val], %[mem]	\n"		\
		: [mem] "+ZB" (*(u64 *)p), [tmp] "=&r" (__tmp)		\
		: [val] "r" (*(__u64 *)__u.__c)				\
		: );							\
		break;							\
	}								\
} while (0)
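
/*
 * Store + full barrier.  Note that unlike the two macros above, this
 * one takes the variable itself rather than a pointer to it, hence
 * typeof(p) and &p below.
 */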
#define __smp_store_mb(p, v)						\
do {									\
	union { typeof(p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(p)) (v) };			\
	unsigned long __tmp;						\
	switch (sizeof(p)) {						\
	case 1:								\
		*(volatile __u8 *)&p = *(__u8 *)__u.__c;		\
		__smp_mb();						\
		break;							\
	case 2:								\
		*(volatile __u16 *)&p = *(__u16 *)__u.__c;		\
		__smp_mb();						\
		break;							\
	case 4:								\
		__asm__ __volatile__(					\
		"amswap_db.w %[tmp], %[val], %[mem]	\n"		\
		: [mem] "+ZB" (*(u32 *)&p), [tmp] "=&r" (__tmp)		\
		: [val] "r" (*(__u32 *)__u.__c)				\
		: );							\
		break;							\
	case 8:								\
		__asm__ __volatile__(					\
		"amswap_db.d %[tmp], %[val], %[mem]	\n"		\
		: [mem] "+ZB" (*(u64 *)&p), [tmp] "=&r" (__tmp)		\
		: [val] "r" (*(__u64 *)__u.__c)				\
		: );							\
		break;							\
	}								\
} while (0)
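
/*
 * asm-generic/barrier.h turns the __smp_* definitions above into the
 * generic smp_*() interfaces and supplies defaults for anything not
 * defined by this file.
 */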
#include <asm-generic/barrier.h>

#endif /* __ASM_BARRIER_H */