Commit 1993dbc7 authored by Martin Schwidefsky

s390/bitops: use atomic primitives for bitops

Replace the bitops-specific atomic update code with the functions
from atomic_ops.h. This saves a few lines of non-trivial code.
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
parent 126b30c3
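The atomic_ops.h helpers that the diff below switches to (__atomic64_or, __atomic64_and, __atomic64_xor and their _barrier variants) atomically apply the operation to a 64-bit word and return the word's previous value. As a rough illustration of that contract only, here is a portable C sketch using the GCC __atomic builtins; the sketch_* names are invented for this example, and the real s390 helpers are implemented with laog/lang/laxg or a compare-and-swap loop, not with these builtins:

#include <stdio.h>

/*
 * Hypothetical stand-ins for the atomic_ops.h helpers used in this commit:
 * atomically OR a mask into *ptr and return the value the word held before
 * the update.  The real s390 helpers use laog/lang/laxg (z196 and newer) or
 * a csg loop; the GCC builtins below only model the semantics.
 */
static inline unsigned long sketch_atomic64_or(unsigned long mask,
					       unsigned long *ptr)
{
	return __atomic_fetch_or(ptr, mask, __ATOMIC_RELAXED);
}

static inline unsigned long sketch_atomic64_or_barrier(unsigned long mask,
							unsigned long *ptr)
{
	/* the _barrier variant is fully ordered, as test_and_set_bit() needs */
	return __atomic_fetch_or(ptr, mask, __ATOMIC_SEQ_CST);
}

int main(void)
{
	unsigned long word = 0;
	unsigned long nr = 5;
	unsigned long mask = 1UL << (nr & (64 - 1));
	unsigned long old = sketch_atomic64_or_barrier(mask, &word);

	printf("bit was set before: %d, word now: %#lx\n",
	       (int)((old & mask) != 0), word);	/* prints 0 and 0x20 */
	return 0;
}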
@@ -42,57 +42,9 @@
 #include <linux/typecheck.h>
 #include <linux/compiler.h>
+#include <asm/atomic_ops.h>
 #include <asm/barrier.h>
 
-#define __BITOPS_NO_BARRIER	"\n"
-
-#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
-
-#define __BITOPS_OR		"laog"
-#define __BITOPS_AND		"lang"
-#define __BITOPS_XOR		"laxg"
-#define __BITOPS_BARRIER	"bcr 14,0\n"
-
-#define __BITOPS_LOOP(__addr, __val, __op_string, __barrier) \
-({ \
-	unsigned long __old; \
- \
-	typecheck(unsigned long *, (__addr)); \
-	asm volatile( \
-		__op_string " %0,%2,%1\n" \
-		__barrier \
-		: "=d" (__old), "+Q" (*(__addr)) \
-		: "d" (__val) \
-		: "cc", "memory"); \
-	__old; \
-})
-
-#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */
-
-#define __BITOPS_OR		"ogr"
-#define __BITOPS_AND		"ngr"
-#define __BITOPS_XOR		"xgr"
-#define __BITOPS_BARRIER	"\n"
-
-#define __BITOPS_LOOP(__addr, __val, __op_string, __barrier) \
-({ \
-	unsigned long __old, __new; \
- \
-	typecheck(unsigned long *, (__addr)); \
-	asm volatile( \
-		" lg %0,%2\n" \
-		"0: lgr %1,%0\n" \
-		__op_string " %1,%3\n" \
-		" csg %0,%1,%2\n" \
-		" jl 0b" \
-		: "=&d" (__old), "=&d" (__new), "+Q" (*(__addr)) \
-		: "d" (__val) \
-		: "cc", "memory"); \
-	__old; \
-})
-
-#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
-
 #define __BITOPS_WORDS(bits)	(((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)
 
 static inline unsigned long *
@@ -128,7 +80,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *ptr)
 	}
 #endif
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
-	__BITOPS_LOOP(addr, mask, __BITOPS_OR, __BITOPS_NO_BARRIER);
+	__atomic64_or(mask, addr);
 }
 
 static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr)
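All of these bitops share the same word/mask arithmetic: bit nr selects word nr / BITS_PER_LONG of the bitmap (what the __bitops_word() helper, not visible in these hunks, computes) and bit nr % BITS_PER_LONG inside that word. A small stand-alone illustration of that arithmetic, assuming BITS_PER_LONG is 64 as on s390x:

#include <stdio.h>

#define BITS_PER_LONG 64

int main(void)
{
	unsigned long bitmap[2] = { 0, 0 };
	unsigned long nr = 70;

	/* bit 70 lives in word 70 / 64 = 1, at position 70 % 64 = 6 */
	unsigned long *addr = &bitmap[nr / BITS_PER_LONG];
	unsigned long mask = 1UL << (nr & (BITS_PER_LONG - 1));

	*addr |= mask;				/* non-atomic, purely for illustration */
	printf("word 1 = %#lx\n", bitmap[1]);	/* prints 0x40 */
	return 0;
}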
@@ -149,7 +101,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr)
 	}
 #endif
 	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
-	__BITOPS_LOOP(addr, mask, __BITOPS_AND, __BITOPS_NO_BARRIER);
+	__atomic64_and(mask, addr);
 }
 
 static inline void change_bit(unsigned long nr, volatile unsigned long *ptr)
@@ -170,7 +122,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *ptr)
 	}
 #endif
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
-	__BITOPS_LOOP(addr, mask, __BITOPS_XOR, __BITOPS_NO_BARRIER);
+	__atomic64_xor(mask, addr);
 }
 
 static inline int
@@ -180,7 +132,7 @@ test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
 	unsigned long old, mask;
 
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
-	old = __BITOPS_LOOP(addr, mask, __BITOPS_OR, __BITOPS_BARRIER);
+	old = __atomic64_or_barrier(mask, addr);
 	return (old & mask) != 0;
 }
@@ -191,7 +143,7 @@ test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
 	unsigned long old, mask;
 
 	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
-	old = __BITOPS_LOOP(addr, mask, __BITOPS_AND, __BITOPS_BARRIER);
+	old = __atomic64_and_barrier(mask, addr);
 	return (old & ~mask) != 0;
 }
@@ -202,7 +154,7 @@ test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
 	unsigned long old, mask;
 
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
-	old = __BITOPS_LOOP(addr, mask, __BITOPS_XOR, __BITOPS_BARRIER);
+	old = __atomic64_xor_barrier(mask, addr);
 	return (old & mask) != 0;
 }