Commit 1de7da37 authored by Peter Zijlstra, committed by Ingo Molnar

arch: Move smp_mb__{before,after}_atomic_{inc,dec}.h into asm/atomic.h

Move the barrier functions that depend on the atomic implementation
into the atomic implementation itself.
Reviewed-by: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Signed-off-by: Peter Zijlstra <peterz@infradead.org>
Acked-by: Vineet Gupta <vgupta@synopsys.com> [for arch/arc bits]
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Link: http://lkml.kernel.org/r/20131213150640.786183683@infradead.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent 2e4f5382
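
For context, the macros being moved (smp_mb__{before,after}_atomic_{inc,dec}()) are used by callers that need to order ordinary memory accesses around a plain atomic_inc()/atomic_dec(), which returns no value and therefore implies no ordering by itself. A minimal usage sketch, not part of this commit (the data_ready flag and pending counter are hypothetical):

	#include <linux/atomic.h>

	static int data_ready;                    /* hypothetical flag */
	static atomic_t pending = ATOMIC_INIT(0); /* hypothetical counter */

	static void publish(void)
	{
		data_ready = 1;
		/* Order the store to data_ready before the counter bump. */
		smp_mb__before_atomic_inc();
		atomic_inc(&pending);
	}

On the architectures touched below the macros simply expand to barrier(); the commit only relocates their definitions into asm/atomic.h, next to the atomics they order.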
@@ -190,6 +190,11 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 #endif /* !CONFIG_ARC_HAS_LLSC */
 
+#define smp_mb__before_atomic_dec()	barrier()
+#define smp_mb__after_atomic_dec()	barrier()
+#define smp_mb__before_atomic_inc()	barrier()
+#define smp_mb__after_atomic_inc()	barrier()
+
 /**
  * __atomic_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
...
@@ -30,11 +30,6 @@
 #define smp_wmb()	barrier()
 #endif
 
-#define smp_mb__before_atomic_dec()	barrier()
-#define smp_mb__after_atomic_dec()	barrier()
-#define smp_mb__before_atomic_inc()	barrier()
-#define smp_mb__after_atomic_inc()	barrier()
-
 #define smp_read_barrier_depends()	do { } while (0)
 
 #endif
...
@@ -160,8 +160,12 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 #define atomic_sub_and_test(i, v) (atomic_sub_return(i, (v)) == 0)
 #define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
 #define atomic_inc_return(v) (atomic_add_return(1, v))
 #define atomic_dec_return(v) (atomic_sub_return(1, v))
 
+#define smp_mb__before_atomic_dec()	barrier()
+#define smp_mb__after_atomic_dec()	barrier()
+#define smp_mb__before_atomic_inc()	barrier()
+#define smp_mb__after_atomic_inc()	barrier()
+
 #endif
@@ -29,10 +29,6 @@
 #define smp_read_barrier_depends()	barrier()
 #define smp_wmb()			barrier()
 #define smp_mb()			barrier()
-#define smp_mb__before_atomic_dec()	barrier()
-#define smp_mb__after_atomic_dec()	barrier()
-#define smp_mb__before_atomic_inc()	barrier()
-#define smp_mb__after_atomic_inc()	barrier()
 
 /* Set a value and use a memory barrier. Used by the scheduler somewhere. */
 #define set_mb(var, value) \
...