Commit 6b8ecb84 authored by Yury Norov

bitops: move find_bit_*_le functions from le.h to find.h

It's convenient to have all find_bit declarations in one place.
Signed-off-by: Yury Norov <yury.norov@gmail.com>
Tested-by: Wolfram Sang <wsa+renesas@sang-engineering.com>
parent b7ec62d7
@@ -190,4 +190,73 @@ extern unsigned long find_next_clump8(unsigned long *clump,
#define find_first_clump8(clump, bits, size) \
        find_next_clump8((clump), (bits), (size), 0)
#if defined(__LITTLE_ENDIAN)

static inline unsigned long find_next_zero_bit_le(const void *addr,
                unsigned long size, unsigned long offset)
{
        return find_next_zero_bit(addr, size, offset);
}

static inline unsigned long find_next_bit_le(const void *addr,
                unsigned long size, unsigned long offset)
{
        return find_next_bit(addr, size, offset);
}

static inline unsigned long find_first_zero_bit_le(const void *addr,
                unsigned long size)
{
        return find_first_zero_bit(addr, size);
}

#elif defined(__BIG_ENDIAN)

#ifndef find_next_zero_bit_le
static inline
unsigned long find_next_zero_bit_le(const void *addr, unsigned
                long size, unsigned long offset)
{
        if (small_const_nbits(size)) {
                unsigned long val = *(const unsigned long *)addr;

                if (unlikely(offset >= size))
                        return size;

                val = swab(val) | ~GENMASK(size - 1, offset);
                return val == ~0UL ? size : ffz(val);
        }

        return _find_next_bit(addr, NULL, size, offset, ~0UL, 1);
}
#endif

#ifndef find_next_bit_le
static inline
unsigned long find_next_bit_le(const void *addr, unsigned
                long size, unsigned long offset)
{
        if (small_const_nbits(size)) {
                unsigned long val = *(const unsigned long *)addr;

                if (unlikely(offset >= size))
                        return size;

                val = swab(val) & GENMASK(size - 1, offset);
                return val ? __ffs(val) : size;
        }

        return _find_next_bit(addr, NULL, size, offset, 0UL, 1);
}
#endif

#ifndef find_first_zero_bit_le
#define find_first_zero_bit_le(addr, size) \
        find_next_zero_bit_le((addr), (size), 0)
#endif

#else
#error "Please fix <asm/byteorder.h>"
#endif

#endif /*_ASM_GENERIC_BITOPS_FIND_H_ */
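
Note (not part of the commit): on big-endian, the single-word fast path above byte-swaps the loaded word with swab() so that little-endian bit numbering lines up with the CPU's native bit numbering, masks off everything outside [offset, size) with GENMASK(), and then takes ffz()/__ffs(). Below is a minimal standalone sketch of that fast path; swab_ul() and lowest_set() are hypothetical stand-ins for the kernel's swab() and __ffs(), and it assumes GCC/Clang builtins and a 64-bit unsigned long.

/* Userspace sketch of the big-endian single-word fast path of
 * find_next_bit_le(); helper names are local stand-ins, not kernel API. */
#include <stdio.h>

#define BITS_PER_LONG  (8 * sizeof(unsigned long))

/* GENMASK(h, l): contiguous mask with bits h..l set */
#define GENMASK(h, l) \
        (((~0UL) >> (BITS_PER_LONG - 1 - (h))) & ((~0UL) << (l)))

static unsigned long swab_ul(unsigned long x)    /* stand-in for swab() */
{
        return __builtin_bswap64(x);             /* assumes 64-bit long */
}

static unsigned long lowest_set(unsigned long x) /* stand-in for __ffs() */
{
        return (unsigned long)__builtin_ctzl(x); /* x must be non-zero */
}

static unsigned long next_bit_le_word(unsigned long word,
                                      unsigned long size, unsigned long offset)
{
        unsigned long val;

        if (offset >= size)
                return size;

        /* Byte-swap so LE bit n of the bitmap becomes bit n of @val,
         * then keep only bits in [offset, size). */
        val = swab_ul(word) & GENMASK(size - 1, offset);
        return val ? lowest_set(val) : size;
}

int main(void)
{
        /* A bitmap with only LE bit 9 set has byte 1 == 0x02; a big-endian
         * CPU loading those 8 bytes as one long sees 0x0002000000000000. */
        unsigned long word = 0x0002000000000000UL;

        printf("%lu\n", next_bit_le_word(word, 64, 0));   /* prints 9 */
        printf("%lu\n", next_bit_le_word(word, 64, 10));  /* prints 64: none left */
        return 0;
}
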
@@ -2,83 +2,19 @@
#ifndef _ASM_GENERIC_BITOPS_LE_H_
#define _ASM_GENERIC_BITOPS_LE_H_

#include <asm-generic/bitops/find.h>
#include <asm/types.h>
#include <asm/byteorder.h>
#include <linux/swab.h>

#if defined(__LITTLE_ENDIAN)

#define BITOP_LE_SWIZZLE        0

static inline unsigned long find_next_zero_bit_le(const void *addr,
                unsigned long size, unsigned long offset)
{
        return find_next_zero_bit(addr, size, offset);
}

static inline unsigned long find_next_bit_le(const void *addr,
                unsigned long size, unsigned long offset)
{
        return find_next_bit(addr, size, offset);
}

static inline unsigned long find_first_zero_bit_le(const void *addr,
                unsigned long size)
{
        return find_first_zero_bit(addr, size);
}

#elif defined(__BIG_ENDIAN)

#define BITOP_LE_SWIZZLE        ((BITS_PER_LONG-1) & ~0x7)

#ifndef find_next_zero_bit_le
static inline
unsigned long find_next_zero_bit_le(const void *addr, unsigned
                long size, unsigned long offset)
{
        if (small_const_nbits(size)) {
                unsigned long val = *(const unsigned long *)addr;

                if (unlikely(offset >= size))
                        return size;

                val = swab(val) | ~GENMASK(size - 1, offset);
                return val == ~0UL ? size : ffz(val);
        }

        return _find_next_bit(addr, NULL, size, offset, ~0UL, 1);
}
#endif

#ifndef find_next_bit_le
static inline
unsigned long find_next_bit_le(const void *addr, unsigned
                long size, unsigned long offset)
{
        if (small_const_nbits(size)) {
                unsigned long val = *(const unsigned long *)addr;

                if (unlikely(offset >= size))
                        return size;

                val = swab(val) & GENMASK(size - 1, offset);
                return val ? __ffs(val) : size;
        }

        return _find_next_bit(addr, NULL, size, offset, 0UL, 1);
}
#endif

#ifndef find_first_zero_bit_le
#define find_first_zero_bit_le(addr, size) \
        find_next_zero_bit_le((addr), (size), 0)
#endif

#else
#error "Please fix <asm/byteorder.h>"
#endif

static inline int test_bit_le(int nr, const void *addr)
{
......
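
For reference (not from this commit): the bit numbering all of the *_le helpers implement is byte-granular little-endian, i.e. bit n lives in bit (n % 8) of byte (n / 8) of the bitmap, regardless of the CPU's long endianness; the XOR constant BITOP_LE_SWIZZLE exists to remap that numbering onto the native bit index on big-endian. A hypothetical byte-wise scan like the one below can serve as a slow reference against which the word-wise fast paths can be checked; ref_find_next_bit_le() is an illustrative name, not a kernel function.

/* Hypothetical byte-wise reference for find_next_bit_le() semantics:
 * LE bit n is bit (n % 8) of byte (n / 8) of the bitmap. */
static unsigned long ref_find_next_bit_le(const unsigned char *addr,
                                          unsigned long size,
                                          unsigned long offset)
{
        unsigned long bit;

        for (bit = offset; bit < size; bit++)
                if (addr[bit / 8] & (1u << (bit % 8)))
                        return bit;

        return size;    /* no set bit at or after @offset */
}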