Commit 87a927ef authored by Markos Chandras

MIPS: asm: bitops: Update ISA constraints for MIPS R6 support

MIPS R6 changed the opcodes for LL/SC instructions so we need to set
the correct ISA level.

Cc: Matthew Fortune <Matthew.Fortune@imgtec.com>
Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>
parent 0038df22
...@@ -81,7 +81,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -81,7 +81,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m) : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
: "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m)); : "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
#ifdef CONFIG_CPU_MIPSR2 #if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
} else if (kernel_uses_llsc && __builtin_constant_p(bit)) { } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
do { do {
__asm__ __volatile__( __asm__ __volatile__(
...@@ -91,11 +91,11 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -91,11 +91,11 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (bit), "r" (~0)); : "ir" (bit), "r" (~0));
} while (unlikely(!temp)); } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 */ #endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
} else if (kernel_uses_llsc) { } else if (kernel_uses_llsc) {
do { do {
__asm__ __volatile__( __asm__ __volatile__(
" .set arch=r4000 \n" " .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 # set_bit \n" " " __LL "%0, %1 # set_bit \n"
" or %0, %2 \n" " or %0, %2 \n"
" " __SC "%0, %1 \n" " " __SC "%0, %1 \n"
...@@ -133,7 +133,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -133,7 +133,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
" .set mips0 \n" " .set mips0 \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (~(1UL << bit))); : "ir" (~(1UL << bit)));
#ifdef CONFIG_CPU_MIPSR2 #if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
} else if (kernel_uses_llsc && __builtin_constant_p(bit)) { } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
do { do {
__asm__ __volatile__( __asm__ __volatile__(
...@@ -143,11 +143,11 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -143,11 +143,11 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (bit)); : "ir" (bit));
} while (unlikely(!temp)); } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 */ #endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
} else if (kernel_uses_llsc) { } else if (kernel_uses_llsc) {
do { do {
__asm__ __volatile__( __asm__ __volatile__(
" .set arch=r4000 \n" " .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 # clear_bit \n" " " __LL "%0, %1 # clear_bit \n"
" and %0, %2 \n" " and %0, %2 \n"
" " __SC "%0, %1 \n" " " __SC "%0, %1 \n"
...@@ -205,7 +205,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr) ...@@ -205,7 +205,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
do { do {
__asm__ __volatile__( __asm__ __volatile__(
" .set arch=r4000 \n" " .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 # change_bit \n" " " __LL "%0, %1 # change_bit \n"
" xor %0, %2 \n" " xor %0, %2 \n"
" " __SC "%0, %1 \n" " " __SC "%0, %1 \n"
...@@ -254,7 +254,7 @@ static inline int test_and_set_bit(unsigned long nr, ...@@ -254,7 +254,7 @@ static inline int test_and_set_bit(unsigned long nr,
do { do {
__asm__ __volatile__( __asm__ __volatile__(
" .set arch=r4000 \n" " .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 # test_and_set_bit \n" " " __LL "%0, %1 # test_and_set_bit \n"
" or %2, %0, %3 \n" " or %2, %0, %3 \n"
" " __SC "%2, %1 \n" " " __SC "%2, %1 \n"
...@@ -308,7 +308,7 @@ static inline int test_and_set_bit_lock(unsigned long nr, ...@@ -308,7 +308,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
do { do {
__asm__ __volatile__( __asm__ __volatile__(
" .set arch=r4000 \n" " .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 # test_and_set_bit \n" " " __LL "%0, %1 # test_and_set_bit \n"
" or %2, %0, %3 \n" " or %2, %0, %3 \n"
" " __SC "%2, %1 \n" " " __SC "%2, %1 \n"
...@@ -358,7 +358,7 @@ static inline int test_and_clear_bit(unsigned long nr, ...@@ -358,7 +358,7 @@ static inline int test_and_clear_bit(unsigned long nr,
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit) : "r" (1UL << bit)
: "memory"); : "memory");
#ifdef CONFIG_CPU_MIPSR2 #if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
} else if (kernel_uses_llsc && __builtin_constant_p(nr)) { } else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp; unsigned long temp;
...@@ -380,7 +380,7 @@ static inline int test_and_clear_bit(unsigned long nr, ...@@ -380,7 +380,7 @@ static inline int test_and_clear_bit(unsigned long nr,
do { do {
__asm__ __volatile__( __asm__ __volatile__(
" .set arch=r4000 \n" " .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 # test_and_clear_bit \n" " " __LL "%0, %1 # test_and_clear_bit \n"
" or %2, %0, %3 \n" " or %2, %0, %3 \n"
" xor %2, %3 \n" " xor %2, %3 \n"
...@@ -437,7 +437,7 @@ static inline int test_and_change_bit(unsigned long nr, ...@@ -437,7 +437,7 @@ static inline int test_and_change_bit(unsigned long nr,
do { do {
__asm__ __volatile__( __asm__ __volatile__(
" .set arch=r4000 \n" " .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 # test_and_change_bit \n" " " __LL "%0, %1 # test_and_change_bit \n"
" xor %2, %0, %3 \n" " xor %2, %0, %3 \n"
" " __SC "\t%2, %1 \n" " " __SC "\t%2, %1 \n"
...@@ -485,7 +485,7 @@ static inline unsigned long __fls(unsigned long word) ...@@ -485,7 +485,7 @@ static inline unsigned long __fls(unsigned long word)
__builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) { __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
__asm__( __asm__(
" .set push \n" " .set push \n"
" .set mips32 \n" " .set "MIPS_ISA_LEVEL" \n"
" clz %0, %1 \n" " clz %0, %1 \n"
" .set pop \n" " .set pop \n"
: "=r" (num) : "=r" (num)
...@@ -498,7 +498,7 @@ static inline unsigned long __fls(unsigned long word) ...@@ -498,7 +498,7 @@ static inline unsigned long __fls(unsigned long word)
__builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) { __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
__asm__( __asm__(
" .set push \n" " .set push \n"
" .set mips64 \n" " .set "MIPS_ISA_LEVEL" \n"
" dclz %0, %1 \n" " dclz %0, %1 \n"
" .set pop \n" " .set pop \n"
: "=r" (num) : "=r" (num)
...@@ -562,7 +562,7 @@ static inline int fls(int x) ...@@ -562,7 +562,7 @@ static inline int fls(int x)
if (__builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) { if (__builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
__asm__( __asm__(
" .set push \n" " .set push \n"
" .set mips32 \n" " .set "MIPS_ISA_LEVEL" \n"
" clz %0, %1 \n" " clz %0, %1 \n"
" .set pop \n" " .set pop \n"
: "=r" (x) : "=r" (x)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment