Commit a2e66b86 authored by Paul Burton

MIPS: bitops: Allow immediates in test_and_{set,clear,change}_bit

The logical operations or & xor used in the test_and_set_bit_lock(),
test_and_clear_bit() & test_and_change_bit() functions currently force
the value 1<<bit to be placed in a register. If the bit is compile-time
constant & fits within the immediate field of an or/xor instruction (ie.
16 bits) then we can make use of the ori/xori instruction variants &
avoid the use of an extra register. Add the extra "i" constraints in
order to allow use of these immediate encodings.
Signed-off-by: Paul Burton <paul.burton@mips.com>
Cc: linux-mips@vger.kernel.org
Cc: Huacai Chen <chenhc@lemote.com>
Cc: Jiaxun Yang <jiaxun.yang@flygoat.com>
Cc: linux-kernel@vger.kernel.org
parent 6bbe043b
...@@ -261,7 +261,7 @@ static inline int test_and_set_bit_lock(unsigned long nr, ...@@ -261,7 +261,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
" and %2, %0, %3 \n" " and %2, %0, %3 \n"
" .set pop \n" " .set pop \n"
: "=&r" (temp), "+m" (*m), "=&r" (res) : "=&r" (temp), "+m" (*m), "=&r" (res)
: "r" (1UL << bit) : "ir" (1UL << bit)
: __LLSC_CLOBBER); : __LLSC_CLOBBER);
} else { } else {
loongson_llsc_mb(); loongson_llsc_mb();
...@@ -274,7 +274,7 @@ static inline int test_and_set_bit_lock(unsigned long nr, ...@@ -274,7 +274,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
" " __SC "%2, %1 \n" " " __SC "%2, %1 \n"
" .set pop \n" " .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit) : "ir" (1UL << bit)
: __LLSC_CLOBBER); : __LLSC_CLOBBER);
} while (unlikely(!res)); } while (unlikely(!res));
...@@ -332,7 +332,7 @@ static inline int test_and_clear_bit(unsigned long nr, ...@@ -332,7 +332,7 @@ static inline int test_and_clear_bit(unsigned long nr,
" and %2, %0, %3 \n" " and %2, %0, %3 \n"
" .set pop \n" " .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit) : "ir" (1UL << bit)
: __LLSC_CLOBBER); : __LLSC_CLOBBER);
} else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) { } else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
loongson_llsc_mb(); loongson_llsc_mb();
...@@ -358,7 +358,7 @@ static inline int test_and_clear_bit(unsigned long nr, ...@@ -358,7 +358,7 @@ static inline int test_and_clear_bit(unsigned long nr,
" " __SC "%2, %1 \n" " " __SC "%2, %1 \n"
" .set pop \n" " .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit) : "ir" (1UL << bit)
: __LLSC_CLOBBER); : __LLSC_CLOBBER);
} while (unlikely(!res)); } while (unlikely(!res));
...@@ -400,7 +400,7 @@ static inline int test_and_change_bit(unsigned long nr, ...@@ -400,7 +400,7 @@ static inline int test_and_change_bit(unsigned long nr,
" and %2, %0, %3 \n" " and %2, %0, %3 \n"
" .set pop \n" " .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit) : "ir" (1UL << bit)
: __LLSC_CLOBBER); : __LLSC_CLOBBER);
} else { } else {
loongson_llsc_mb(); loongson_llsc_mb();
...@@ -413,7 +413,7 @@ static inline int test_and_change_bit(unsigned long nr, ...@@ -413,7 +413,7 @@ static inline int test_and_change_bit(unsigned long nr,
" " __SC "\t%2, %1 \n" " " __SC "\t%2, %1 \n"
" .set pop \n" " .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit) : "ir" (1UL << bit)
: __LLSC_CLOBBER); : __LLSC_CLOBBER);
} while (unlikely(!res)); } while (unlikely(!res));
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment