barrier.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#include <asm/alternative.h>

#ifndef __ASSEMBLY__

/* The synchronize caches instruction executes as a nop on systems in
   which all memory references are performed in order. */
#define synchronize_caches() asm volatile("sync" \
	ALTERNATIVE(ALT_COND_NO_SMP, INSN_NOP) \
	: : : "memory")

#if defined(CONFIG_SMP)
#define mb()		do { synchronize_caches(); } while (0)
#define rmb()		mb()
#define wmb()		mb()
#define dma_rmb()	mb()
#define dma_wmb()	mb()
#else
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif

#define __smp_mb()	mb()
#define __smp_rmb()	mb()
#define __smp_wmb()	mb()

/*
 * Release store: write *p with a single store instruction matched to
 * sizeof(*p), so that earlier memory accesses cannot be reordered past
 * the store.  The 8-byte case is only emitted on 64-bit kernels.
 */
#define __smp_store_release(p, v) \
do { \
	typeof(p) __p = (p); \
	union { typeof(*p) __val; char __c[1]; } __u = \
		{ .__val = (__force typeof(*p)) (v) }; \
	compiletime_assert_atomic_type(*p); \
	switch (sizeof(*p)) { \
	case 1: \
		asm volatile("stb,ma %0,0(%1)" \
				: : "r"(*(__u8 *)__u.__c), "r"(__p) \
				: "memory"); \
		break; \
	case 2: \
		asm volatile("sth,ma %0,0(%1)" \
				: : "r"(*(__u16 *)__u.__c), "r"(__p) \
				: "memory"); \
		break; \
	case 4: \
		asm volatile("stw,ma %0,0(%1)" \
				: : "r"(*(__u32 *)__u.__c), "r"(__p) \
				: "memory"); \
		break; \
	case 8: \
		if (IS_ENABLED(CONFIG_64BIT)) \
			asm volatile("std,ma %0,0(%1)" \
				: : "r"(*(__u64 *)__u.__c), "r"(__p) \
				: "memory"); \
		break; \
	} \
} while (0)

/*
 * Acquire load: read *p with a single load instruction matched to
 * sizeof(*p), so that later memory accesses cannot be reordered before
 * the load.  The 8-byte case is only emitted on 64-bit kernels.
 */
#define __smp_load_acquire(p) \
({ \
	union { typeof(*p) __val; char __c[1]; } __u; \
	typeof(p) __p = (p); \
	compiletime_assert_atomic_type(*p); \
	switch (sizeof(*p)) { \
	case 1: \
		asm volatile("ldb,ma 0(%1),%0" \
				: "=r"(*(__u8 *)__u.__c) : "r"(__p) \
				: "memory"); \
		break; \
	case 2: \
		asm volatile("ldh,ma 0(%1),%0" \
				: "=r"(*(__u16 *)__u.__c) : "r"(__p) \
				: "memory"); \
		break; \
	case 4: \
		asm volatile("ldw,ma 0(%1),%0" \
				: "=r"(*(__u32 *)__u.__c) : "r"(__p) \
				: "memory"); \
		break; \
	case 8: \
		if (IS_ENABLED(CONFIG_64BIT)) \
			asm volatile("ldd,ma 0(%1),%0" \
				: "=r"(*(__u64 *)__u.__c) : "r"(__p) \
				: "memory"); \
		break; \
	} \
	__u.__val; \
})

#include <asm-generic/barrier.h>

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_BARRIER_H */
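
For context, here is a minimal sketch of how these primitives are typically consumed. Kernel code does not call __smp_store_release()/__smp_load_acquire() directly; it uses the generic smp_store_release()/smp_load_acquire() wrappers that asm-generic/barrier.h (included above) builds on top of them. The example_* names below are made up for illustration and are not part of barrier.h.

/* Illustrative only -- not part of barrier.h. */
#include <asm/barrier.h>

static int example_data;
static int example_ready;

/*
 * Producer: publish example_data first, then set the flag with release
 * semantics so the data store cannot be observed after the flag store.
 */
static void example_publish(int value)
{
	example_data = value;
	smp_store_release(&example_ready, 1);
}

/*
 * Consumer: read the flag with acquire semantics; once it is seen as set,
 * the following read of example_data is ordered after it and sees the
 * published value.
 */
static int example_consume(int *value)
{
	if (!smp_load_acquire(&example_ready))
		return 0;
	*value = example_data;
	return 1;
}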