Merge tag 'for_linus' of git://git.kernel.org/pub/scm/linux/kernel/git/mst/vhost
Pull virtio barrier rework+fixes from Michael Tsirkin:
 "This adds a new kind of barrier, and reworks virtio and xen to use it.

  Plus some fixes here and there"

* tag 'for_linus' of git://git.kernel.org/pub/scm/linux/kernel/git/mst/vhost: (44 commits)
  checkpatch: add virt barriers
  checkpatch: check for __smp outside barrier.h
  checkpatch.pl: add missing memory barriers
  virtio: make find_vqs() checkpatch.pl-friendly
  virtio_balloon: fix race between migration and ballooning
  virtio_balloon: fix race by fill and leak
  s390: more efficient smp barriers
  s390: use generic memory barriers
  xen/events: use virt_xxx barriers
  xen/io: use virt_xxx barriers
  xenbus: use virt_xxx barriers
  virtio_ring: use virt_store_mb
  sh: move xchg_cmpxchg to a header by itself
  sh: support 1 and 2 byte xchg
  virtio_ring: update weak barriers to use virt_xxx
  Revert "virtio_ring: Update weak barriers to use dma_wmb/rmb"
  asm-generic: implement virt_xxx memory barriers
  x86: define __smp_xxx
  xtensa: define __smp_xxx
  tile: define __smp_xxx
  ...
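For context, the heart of the series is the new layering in include/asm-generic/barrier.h: each architecture supplies __smp_xxx primitives that are always real barriers, smp_xxx maps to them only on CONFIG_SMP kernels, and the new virt_xxx barriers always map to __smp_xxx, because a virtio or xen guest must order its accesses against the hypervisor even when the guest kernel itself is built UP. A condensed sketch of that layering (abridged, not the verbatim header):

    /* Abridged sketch of the layering introduced by this series
     * (see include/asm-generic/barrier.h for the real thing).
     */
    #ifndef __smp_mb
    #define __smp_mb()	mb()		/* arch may override with a cheaper barrier */
    #endif

    #ifdef CONFIG_SMP
    #define smp_mb()	__smp_mb()	/* real barrier on SMP kernels */
    #else
    #define smp_mb()	barrier()	/* compiler barrier only on UP */
    #endif

    /* virt_xxx: for a guest talking to a hypervisor (virtio, xen).
     * Always a real barrier, even on !CONFIG_SMP, since the other
     * side of the ring is effectively another CPU.
     */
    #define virt_mb()			__smp_mb()
    #define virt_rmb()			__smp_rmb()
    #define virt_wmb()			__smp_wmb()
    #define virt_store_mb(var, value)	__smp_store_mb(var, value)

The sparc portion of the merge is below.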
--- a/arch/sparc/include/asm/barrier_32.h
+++ b/arch/sparc/include/asm/barrier_32.h
@@ -1,7 +1,6 @@
 #ifndef __SPARC_BARRIER_H
 #define __SPARC_BARRIER_H
 
-#include <asm/processor.h> /* for nop() */
 #include <asm-generic/barrier.h>
 
 #endif /* !(__SPARC_BARRIER_H) */
--- a/arch/sparc/include/asm/barrier_64.h
+++ b/arch/sparc/include/asm/barrier_64.h
@@ -37,33 +37,14 @@ do { __asm__ __volatile__("ba,pt %%xcc, 1f\n\t" \
 #define rmb()	__asm__ __volatile__("":::"memory")
 #define wmb()	__asm__ __volatile__("":::"memory")
 
-#define dma_rmb()	rmb()
-#define dma_wmb()	wmb()
-
-#define smp_store_mb(__var, __value) \
-	do { WRITE_ONCE(__var, __value); membar_safe("#StoreLoad"); } while(0)
-
-#ifdef CONFIG_SMP
-#define smp_mb()	mb()
-#define smp_rmb()	rmb()
-#define smp_wmb()	wmb()
-#else
-#define smp_mb()	__asm__ __volatile__("":::"memory")
-#define smp_rmb()	__asm__ __volatile__("":::"memory")
-#define smp_wmb()	__asm__ __volatile__("":::"memory")
-#endif
-
-#define read_barrier_depends()		do { } while (0)
-#define smp_read_barrier_depends()	do { } while (0)
-
-#define smp_store_release(p, v)					\
+#define __smp_store_release(p, v)				\
 do {								\
 	compiletime_assert_atomic_type(*p);			\
 	barrier();						\
 	WRITE_ONCE(*p, v);					\
 } while (0)
 
-#define smp_load_acquire(p)					\
+#define __smp_load_acquire(p)					\
 ({								\
 	typeof(*p) ___p1 = READ_ONCE(*p);			\
 	compiletime_assert_atomic_type(*p);			\
@@ -71,7 +52,9 @@ do { \
 	___p1;							\
 })
 
-#define smp_mb__before_atomic()	barrier()
-#define smp_mb__after_atomic()	barrier()
+#define __smp_mb__before_atomic()	barrier()
+#define __smp_mb__after_atomic()	barrier()
 
+#include <asm-generic/barrier.h>
+
 #endif /* !(__SPARC64_BARRIER_H) */
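On sparc64, then, only the acquire/release and before/after-atomic hooks stay arch-specific, under their new __smp_xxx names; everything deleted above is recovered from the asm-generic fallbacks, layered on the mb()/rmb()/wmb() definitions the file keeps. Roughly what those fallbacks expand to here (illustrative, assuming the asm-generic defaults; not literal kernel source):

    /* dma_rmb()/dma_wmb() default to the plain rmb()/wmb() kept above */
    #define dma_rmb()	rmb()
    #define dma_wmb()	wmb()

    /* __smp_mb() defaults to mb(), i.e. membar_safe("#StoreLoad") here,
     * so smp_mb() on an SMP build is unchanged; UP builds get barrier().
     */
    #define __smp_mb()	mb()

    /* smp_store_mb() is rebuilt from the generic definition, which on SMP
     * is equivalent to the membar_safe()-based macro that was removed.
     */
    #define __smp_store_mb(var, value) \
    	do { WRITE_ONCE(var, value); __smp_mb(); } while (0)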
--- a/arch/sparc/include/asm/processor.h
+++ b/arch/sparc/include/asm/processor.h
@@ -5,7 +5,4 @@
 #else
 #include <asm/processor_32.h>
 #endif
-
-#define nop() __asm__ __volatile__ ("nop")
-
 #endif
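The sparc-private nop() can be dropped because asm-generic/barrier.h, now pulled in by both barrier_32.h and barrier_64.h, provides an equivalent fallback when the architecture does not define its own. Roughly:

    /* Generic fallback shape in include/asm-generic/barrier.h */
    #ifndef nop
    #define nop()	asm volatile ("nop")
    #endif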