Commit d038c0e8 authored by Peter Zijlstra's avatar Peter Zijlstra Committed by Ingo Molnar

arch,frv: Convert smp_mb__*()

Because:

arch/frv/include/asm/smp.h:#error SMP not supported

smp_mb() is barrier() and we can use the default implementation that
uses smp_mb().
Signed-off-by: Peter Zijlstra <peterz@infradead.org>
Acked-by: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Link: http://lkml.kernel.org/n/tip-n296g51yzdu5ru1vp7mccxmf@git.kernel.org
Cc: David Howells <dhowells@redhat.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent 17b40213
@@ -17,6 +17,7 @@
 #include <linux/types.h>
 #include <asm/spr-regs.h>
 #include <asm/cmpxchg.h>
+#include <asm/barrier.h>
 #ifdef CONFIG_SMP
 #error not SMP safe
@@ -29,12 +30,6 @@
  * We do not have SMP systems, so we don't have to deal with that.
  */
-/* Atomic operations are already serializing */
-#define smp_mb__before_atomic_dec()	barrier()
-#define smp_mb__after_atomic_dec()	barrier()
-#define smp_mb__before_atomic_inc()	barrier()
-#define smp_mb__after_atomic_inc()	barrier()
 #define ATOMIC_INIT(i)		{ (i) }
 #define atomic_read(v)		(*(volatile int *)&(v)->counter)
 #define atomic_set(v, i)	(((v)->counter) = (i))
......
@@ -25,12 +25,6 @@
 #include <asm-generic/bitops/ffz.h>
-/*
- * clear_bit() doesn't provide any barrier for the compiler.
- */
-#define smp_mb__before_clear_bit()	barrier()
-#define smp_mb__after_clear_bit()	barrier()
 #ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
 static inline
 unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment