Commit fa083e28 authored by Michael S. Tsirkin

mips: reuse asm-generic/barrier.h

On mips, dma_rmb, dma_wmb, smp_store_mb, read_barrier_depends,
smp_read_barrier_depends, smp_store_release and smp_load_acquire match
the asm-generic variants exactly. Drop the local definitions and pull in
asm-generic/barrier.h instead.

This is in preparation for refactoring this code area.
Signed-off-by: Michael S. Tsirkin <mst@redhat.com>
Acked-by: Arnd Bergmann <arnd@arndb.de>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
parent abe114d9
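
For context, the generic fallbacks that these MIPS macros duplicated look roughly like the sketch below. It is reconstructed from the asm-generic/barrier.h of the same period (abbreviated, not part of this patch), so treat it as illustrative rather than authoritative: each fallback is built from smp_mb(), WRITE_ONCE() and READ_ONCE() just as the MIPS copies were, and the dma_* fallbacks reduce to rmb()/wmb(), i.e. fast_rmb()/fast_wmb() on MIPS.

/* Sketch of the asm-generic/barrier.h fallbacks this patch starts relying on.
 * The guarded ones only take effect when the architecture has not already
 * defined the macro; MIPS no longer provides its own after this patch. */

#ifndef dma_rmb
#define dma_rmb()	rmb()			/* fast_rmb() on MIPS */
#endif

#ifndef dma_wmb
#define dma_wmb()	wmb()			/* fast_wmb() on MIPS */
#endif

#ifndef smp_store_mb
#define smp_store_mb(var, value) \
	do { WRITE_ONCE(var, value); smp_mb(); } while (0)
#endif

/* Release/acquire: identical to the definitions removed from the MIPS header. */
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	___p1;								\
})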
arch/mips/include/asm/barrier.h
@@ -10,9 +10,6 @@
 
 #include <asm/addrspace.h>
 
-#define read_barrier_depends()		do { } while(0)
-#define smp_read_barrier_depends()	do { } while(0)
-
 #ifdef CONFIG_CPU_HAS_SYNC
 #define __sync()				\
 	__asm__ __volatile__(			\
@@ -87,8 +84,6 @@
 
 #define wmb()		fast_wmb()
 #define rmb()		fast_rmb()
-#define dma_wmb()	fast_wmb()
-#define dma_rmb()	fast_rmb()
 
 #if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
 # ifdef CONFIG_CPU_CAVIUM_OCTEON
@@ -112,9 +107,6 @@
 #define __WEAK_LLSC_MB		"		\n"
 #endif
 
-#define smp_store_mb(var, value) \
-	do { WRITE_ONCE(var, value); smp_mb(); } while (0)
-
 #define smp_llsc_mb()	__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
 
 #ifdef CONFIG_CPU_CAVIUM_OCTEON
@@ -129,22 +121,9 @@
 #define nudge_writes() mb()
 #endif
 
-#define smp_store_release(p, v)						\
-do {									\
-	compiletime_assert_atomic_type(*p);				\
-	smp_mb();							\
-	WRITE_ONCE(*p, v);						\
-} while (0)
-
-#define smp_load_acquire(p)						\
-({									\
-	typeof(*p) ___p1 = READ_ONCE(*p);				\
-	compiletime_assert_atomic_type(*p);				\
-	smp_mb();							\
-	___p1;								\
-})
-
 #define smp_mb__before_atomic()	smp_mb__before_llsc()
 #define smp_mb__after_atomic()	smp_llsc_mb()
 
+#include <asm-generic/barrier.h>
+
 #endif /* __ASM_BARRIER_H */
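
A note on why the new include sits at the end of the file: asm-generic/barrier.h wraps most of its fallbacks in #ifndef guards, so it only defines what the architecture has left undefined. A minimal sketch of the pattern (simplified excerpts, not the verbatim headers):

/* arch header: MIPS-specific primitives are defined first... */
#define wmb()			fast_wmb()
#define rmb()			fast_rmb()
#define smp_mb__before_atomic()	smp_mb__before_llsc()
#define smp_mb__after_atomic()	smp_llsc_mb()

/* ...then the generic header is pulled in last: */
#include <asm-generic/barrier.h>

/* asm-generic/barrier.h (sketch): guarded fallbacks are skipped when the
 * architecture already provided a definition above. */
#ifndef wmb
#define wmb()		mb()		/* skipped: MIPS defined wmb() as fast_wmb() */
#endif
#ifndef dma_wmb
#define dma_wmb()	wmb()		/* taken: expands to fast_wmb() on MIPS */
#endif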