Commit 378ed6f0 authored by Paul Burton

MIPS: Avoid using .set mips0 to restore ISA

We currently have two commonly used methods for switching ISA within
assembly code and then restoring the original ISA.

  1) Using a pair of .set push & .set pop directives. For example:

     .set	push
     .set	mips32r2
     <some_insn>
     .set	pop

  2) Using .set mips0 to restore the ISA originally specified on the
     command line. For example:

     .set	mips32r2
     <some_insn>
     .set	mips0

Unfortunately method 2 does not work with nanoMIPS toolchains, where the
assembler rejects the .set mips0 directive like so:

     Error: cannot change ISA from nanoMIPS to mips0

In preparation for supporting nanoMIPS builds, switch all instances of
method 2 in generic non-platform-specific code to use push & pop as in
method 1 instead. The .set push & .set pop approach is arguably cleaner
anyway, and if nothing else it's good to use one method consistently.
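
For illustration, here is a minimal sketch of the converted pattern as it
might appear in a typical ll/sc inline-asm helper; the helper name and
operands are hypothetical rather than taken from the diff below:

     static inline void example_atomic_or(unsigned long *p, unsigned long mask)
     {
             unsigned long tmp;

             __asm__ __volatile__(
             "       .set    push            \n"     /* save assembler state */
             "       .set    mips32r2        \n"     /* switch ISA for ll/sc */
             "1:     ll      %0, %1          \n"     /* load-linked *p */
             "       or      %0, %2          \n"     /* set the requested bits */
             "       sc      %0, %1          \n"     /* store-conditional back */
             "       beqz    %0, 1b          \n"     /* retry if sc failed */
             "       .set    pop             \n"     /* restore, instead of .set mips0 */
             : "=&r" (tmp), "+m" (*p)
             : "r" (mask));
     }
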
Signed-off-by: Paul Burton <paul.burton@mips.com>
Patchwork: https://patchwork.linux-mips.org/patch/21037/
Cc: linux-mips@linux-mips.org
parent 183b40f9
......@@ -59,12 +59,13 @@ static __inline__ void atomic_##op(int i, atomic_t * v) \
int temp; \
\
__asm__ __volatile__( \
" .set push \n" \
" .set "MIPS_ISA_LEVEL" \n" \
"1: ll %0, %1 # atomic_" #op " \n" \
" " #asm_op " %0, %2 \n" \
" sc %0, %1 \n" \
"\t" __scbeqz " %0, 1b \n" \
" .set mips0 \n" \
" .set pop \n" \
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i)); \
} else { \
......@@ -85,13 +86,14 @@ static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \
int temp; \
\
__asm__ __volatile__( \
" .set push \n" \
" .set "MIPS_ISA_LEVEL" \n" \
"1: ll %1, %2 # atomic_" #op "_return \n" \
" " #asm_op " %0, %1, %3 \n" \
" sc %0, %2 \n" \
"\t" __scbeqz " %0, 1b \n" \
" " #asm_op " %0, %1, %3 \n" \
" .set mips0 \n" \
" .set pop \n" \
: "=&r" (result), "=&r" (temp), \
"+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i)); \
......@@ -117,12 +119,13 @@ static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v) \
int temp; \
\
__asm__ __volatile__( \
" .set push \n" \
" .set "MIPS_ISA_LEVEL" \n" \
"1: ll %1, %2 # atomic_fetch_" #op " \n" \
" " #asm_op " %0, %1, %3 \n" \
" sc %0, %2 \n" \
"\t" __scbeqz " %0, 1b \n" \
" .set mips0 \n" \
" .set pop \n" \
" move %0, %1 \n" \
: "=&r" (result), "=&r" (temp), \
"+" GCC_OFF_SMALL_ASM() (v->counter) \
......@@ -188,17 +191,19 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
int temp;
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_LEVEL" \n"
"1: ll %1, %2 # atomic_sub_if_positive\n"
" .set mips0 \n"
" .set pop \n"
" subu %0, %1, %3 \n"
" move %1, %0 \n"
" bltz %0, 1f \n"
" .set push \n"
" .set "MIPS_ISA_LEVEL" \n"
" sc %1, %2 \n"
"\t" __scbeqz " %1, 1b \n"
"1: \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (result), "=&r" (temp),
"+" GCC_OFF_SMALL_ASM() (v->counter)
: "Ir" (i));
......@@ -252,12 +257,13 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v) \
long temp; \
\
__asm__ __volatile__( \
" .set push \n" \
" .set "MIPS_ISA_LEVEL" \n" \
"1: lld %0, %1 # atomic64_" #op " \n" \
" " #asm_op " %0, %2 \n" \
" scd %0, %1 \n" \
"\t" __scbeqz " %0, 1b \n" \
" .set mips0 \n" \
" .set pop \n" \
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i)); \
} else { \
......@@ -278,13 +284,14 @@ static __inline__ long atomic64_##op##_return_relaxed(long i, atomic64_t * v) \
long temp; \
\
__asm__ __volatile__( \
" .set push \n" \
" .set "MIPS_ISA_LEVEL" \n" \
"1: lld %1, %2 # atomic64_" #op "_return\n" \
" " #asm_op " %0, %1, %3 \n" \
" scd %0, %2 \n" \
"\t" __scbeqz " %0, 1b \n" \
" " #asm_op " %0, %1, %3 \n" \
" .set mips0 \n" \
" .set pop \n" \
: "=&r" (result), "=&r" (temp), \
"+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i)); \
......@@ -310,13 +317,14 @@ static __inline__ long atomic64_fetch_##op##_relaxed(long i, atomic64_t * v) \
long temp; \
\
__asm__ __volatile__( \
" .set push \n" \
" .set "MIPS_ISA_LEVEL" \n" \
"1: lld %1, %2 # atomic64_fetch_" #op "\n" \
" " #asm_op " %0, %1, %3 \n" \
" scd %0, %2 \n" \
"\t" __scbeqz " %0, 1b \n" \
" move %0, %1 \n" \
" .set mips0 \n" \
" .set pop \n" \
: "=&r" (result), "=&r" (temp), \
"+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i)); \
......@@ -382,6 +390,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
long temp;
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_LEVEL" \n"
"1: lld %1, %2 # atomic64_sub_if_positive\n"
" dsubu %0, %1, %3 \n"
......@@ -390,7 +399,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
" scd %1, %2 \n"
"\t" __scbeqz " %1, 1b \n"
"1: \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (result), "=&r" (temp),
"+" GCC_OFF_SMALL_ASM() (v->counter)
: "Ir" (i));
......
......@@ -58,12 +58,13 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
if (kernel_uses_llsc && R10000_LLSC_WAR) {
__asm__ __volatile__(
" .set push \n"
" .set arch=r4000 \n"
"1: " __LL "%0, %1 # set_bit \n"
" or %0, %2 \n"
" " __SC "%0, %1 \n"
" beqzl %0, 1b \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
: "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
......@@ -80,11 +81,12 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
} else if (kernel_uses_llsc) {
do {
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 # set_bit \n"
" or %0, %2 \n"
" " __SC "%0, %1 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (1UL << bit));
} while (unlikely(!temp));
......@@ -110,12 +112,13 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
if (kernel_uses_llsc && R10000_LLSC_WAR) {
__asm__ __volatile__(
" .set push \n"
" .set arch=r4000 \n"
"1: " __LL "%0, %1 # clear_bit \n"
" and %0, %2 \n"
" " __SC "%0, %1 \n"
" beqzl %0, 1b \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (~(1UL << bit)));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
......@@ -132,11 +135,12 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
} else if (kernel_uses_llsc) {
do {
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 # clear_bit \n"
" and %0, %2 \n"
" " __SC "%0, %1 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (~(1UL << bit)));
} while (unlikely(!temp));
......@@ -176,12 +180,13 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
unsigned long temp;
__asm__ __volatile__(
" .set push \n"
" .set arch=r4000 \n"
"1: " __LL "%0, %1 # change_bit \n"
" xor %0, %2 \n"
" " __SC "%0, %1 \n"
" beqzl %0, 1b \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (1UL << bit));
} else if (kernel_uses_llsc) {
......@@ -190,11 +195,12 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
do {
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 # change_bit \n"
" xor %0, %2 \n"
" " __SC "%0, %1 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
: "ir" (1UL << bit));
} while (unlikely(!temp));
......@@ -223,13 +229,14 @@ static inline int test_and_set_bit(unsigned long nr,
unsigned long temp;
__asm__ __volatile__(
" .set push \n"
" .set arch=r4000 \n"
"1: " __LL "%0, %1 # test_and_set_bit \n"
" or %2, %0, %3 \n"
" " __SC "%2, %1 \n"
" beqzl %2, 1b \n"
" and %2, %0, %3 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit)
: "memory");
......@@ -239,11 +246,12 @@ static inline int test_and_set_bit(unsigned long nr,
do {
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 # test_and_set_bit \n"
" or %2, %0, %3 \n"
" " __SC "%2, %1 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit)
: "memory");
......@@ -277,13 +285,14 @@ static inline int test_and_set_bit_lock(unsigned long nr,
unsigned long temp;
__asm__ __volatile__(
" .set push \n"
" .set arch=r4000 \n"
"1: " __LL "%0, %1 # test_and_set_bit \n"
" or %2, %0, %3 \n"
" " __SC "%2, %1 \n"
" beqzl %2, 1b \n"
" and %2, %0, %3 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+m" (*m), "=&r" (res)
: "r" (1UL << bit)
: "memory");
......@@ -293,11 +302,12 @@ static inline int test_and_set_bit_lock(unsigned long nr,
do {
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 # test_and_set_bit \n"
" or %2, %0, %3 \n"
" " __SC "%2, %1 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit)
: "memory");
......@@ -332,6 +342,7 @@ static inline int test_and_clear_bit(unsigned long nr,
unsigned long temp;
__asm__ __volatile__(
" .set push \n"
" .set arch=r4000 \n"
"1: " __LL "%0, %1 # test_and_clear_bit \n"
" or %2, %0, %3 \n"
......@@ -339,7 +350,7 @@ static inline int test_and_clear_bit(unsigned long nr,
" " __SC "%2, %1 \n"
" beqzl %2, 1b \n"
" and %2, %0, %3 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit)
: "memory");
......@@ -365,12 +376,13 @@ static inline int test_and_clear_bit(unsigned long nr,
do {
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 # test_and_clear_bit \n"
" or %2, %0, %3 \n"
" xor %2, %3 \n"
" " __SC "%2, %1 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit)
: "memory");
......@@ -406,13 +418,14 @@ static inline int test_and_change_bit(unsigned long nr,
unsigned long temp;
__asm__ __volatile__(
" .set push \n"
" .set arch=r4000 \n"
"1: " __LL "%0, %1 # test_and_change_bit \n"
" xor %2, %0, %3 \n"
" " __SC "%2, %1 \n"
" beqzl %2, 1b \n"
" and %2, %0, %3 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit)
: "memory");
......@@ -422,11 +435,12 @@ static inline int test_and_change_bit(unsigned long nr,
do {
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 # test_and_change_bit \n"
" xor %2, %0, %3 \n"
" " __SC "\t%2, %1 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
: "r" (1UL << bit)
: "memory");
......
......@@ -47,9 +47,10 @@ extern unsigned long __xchg_called_with_bad_pointer(void)
__asm__ __volatile__( \
" .set push \n" \
" .set noat \n" \
" .set push \n" \
" .set " MIPS_ISA_ARCH_LEVEL " \n" \
"1: " ld " %0, %2 # __xchg_asm \n" \
" .set mips0 \n" \
" .set pop \n" \
" move $1, %z3 \n" \
" .set " MIPS_ISA_ARCH_LEVEL " \n" \
" " st " $1, %1 \n" \
......@@ -117,10 +118,11 @@ static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
__asm__ __volatile__( \
" .set push \n" \
" .set noat \n" \
" .set push \n" \
" .set "MIPS_ISA_ARCH_LEVEL" \n" \
"1: " ld " %0, %2 # __cmpxchg_asm \n" \
" bne %0, %z3, 2f \n" \
" .set mips0 \n" \
" .set pop \n" \
" move $1, %z4 \n" \
" .set "MIPS_ISA_ARCH_LEVEL" \n" \
" " st " $1, %1 \n" \
......
......@@ -21,12 +21,13 @@ static inline void edac_atomic_scrub(void *va, u32 size)
*/
__asm__ __volatile__ (
" .set push \n"
" .set mips2 \n"
"1: ll %0, %1 # edac_atomic_scrub \n"
" addu %0, $0 \n"
" sc %0, %1 \n"
" beqz %0, 1b \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*virt_addr)
: GCC_OFF_SMALL_ASM() (*virt_addr));
......
......@@ -24,9 +24,10 @@
__asm__ __volatile__( \
" .set push \n" \
" .set noat \n" \
" .set push \n" \
" .set arch=r4000 \n" \
"1: ll %1, %4 # __futex_atomic_op \n" \
" .set mips0 \n" \
" .set pop \n" \
" " insn " \n" \
" .set arch=r4000 \n" \
"2: sc $1, %2 \n" \
......@@ -35,7 +36,6 @@
"3: \n" \
" .insn \n" \
" .set pop \n" \
" .set mips0 \n" \
" .section .fixup,\"ax\" \n" \
"4: li %0, %6 \n" \
" j 3b \n" \
......@@ -53,9 +53,10 @@
__asm__ __volatile__( \
" .set push \n" \
" .set noat \n" \
" .set push \n" \
" .set "MIPS_ISA_ARCH_LEVEL" \n" \
"1: "user_ll("%1", "%4")" # __futex_atomic_op\n" \
" .set mips0 \n" \
" .set pop \n" \
" " insn " \n" \
" .set "MIPS_ISA_ARCH_LEVEL" \n" \
"2: "user_sc("$1", "%2")" \n" \
......@@ -64,7 +65,6 @@
"3: \n" \
" .insn \n" \
" .set pop \n" \
" .set mips0 \n" \
" .section .fixup,\"ax\" \n" \
"4: li %0, %6 \n" \
" j 3b \n" \
......@@ -137,10 +137,11 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
"# futex_atomic_cmpxchg_inatomic \n"
" .set push \n"
" .set noat \n"
" .set push \n"
" .set arch=r4000 \n"
"1: ll %1, %3 \n"
" bne %1, %z4, 3f \n"
" .set mips0 \n"
" .set pop \n"
" move $1, %z5 \n"
" .set arch=r4000 \n"
"2: sc $1, %2 \n"
......@@ -166,10 +167,11 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
"# futex_atomic_cmpxchg_inatomic \n"
" .set push \n"
" .set noat \n"
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
"1: "user_ll("%1", "%3")" \n"
" bne %1, %z4, 3f \n"
" .set mips0 \n"
" .set pop \n"
" move $1, %z5 \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
"2: "user_sc("$1", "%2")" \n"
......
......@@ -66,10 +66,11 @@ do { \
unsigned long tmp; \
\
__asm__ __volatile__( \
" .set push \n" \
" .set "MIPS_ISA_LEVEL" \n" \
" dla %0, 1f \n" \
" jr.hb %0 \n" \
" .set mips0 \n" \
" .set pop \n" \
"1: \n" \
: "=r" (tmp)); \
} while (0)
......@@ -141,10 +142,11 @@ do { \
unsigned long tmp; \
\
__asm__ __volatile__( \
" .set push \n" \
" .set mips64r2 \n" \
" dla %0, 1f \n" \
" jr.hb %0 \n" \
" .set mips0 \n" \
" .set pop \n" \
"1: \n" \
: "=r" (tmp)); \
} while (0)
......
......@@ -354,13 +354,14 @@ static inline void pfx##write##bwlq(type val, \
if (irq) \
local_irq_save(__flags); \
__asm__ __volatile__( \
".set arch=r4000" "\t\t# __writeq""\n\t" \
".set push" "\t\t# __writeq""\n\t" \
".set arch=r4000" "\n\t" \
"dsll32 %L0, %L0, 0" "\n\t" \
"dsrl32 %L0, %L0, 0" "\n\t" \
"dsll32 %M0, %M0, 0" "\n\t" \
"or %L0, %L0, %M0" "\n\t" \
"sd %L0, %2" "\n\t" \
".set mips0" "\n" \
".set pop" "\n" \
: "=r" (__tmp) \
: "0" (__val), "m" (*__mem)); \
if (irq) \
......@@ -387,11 +388,12 @@ static inline type pfx##read##bwlq(const volatile void __iomem *mem) \
if (irq) \
local_irq_save(__flags); \
__asm__ __volatile__( \
".set arch=r4000" "\t\t# __readq" "\n\t" \
".set push" "\t\t# __readq" "\n\t" \
".set arch=r4000" "\n\t" \
"ld %L0, %1" "\n\t" \
"dsra32 %M0, %L0, 0" "\n\t" \
"sll %L0, %L0, 0" "\n\t" \
".set mips0" "\n" \
".set pop" "\n" \
: "=r" (__val) \
: "m" (*__mem)); \
if (irq) \
......
......@@ -411,11 +411,12 @@ static inline void _kvm_atomic_set_c0_guest_reg(unsigned long *reg,
unsigned long temp;
do {
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 \n"
" or %0, %2 \n"
" " __SC "%0, %1 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+m" (*reg)
: "r" (val));
} while (unlikely(!temp));
......@@ -427,11 +428,12 @@ static inline void _kvm_atomic_clear_c0_guest_reg(unsigned long *reg,
unsigned long temp;
do {
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 \n"
" and %0, %2 \n"
" " __SC "%0, %1 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+m" (*reg)
: "r" (~val));
} while (unlikely(!temp));
......@@ -444,12 +446,13 @@ static inline void _kvm_atomic_change_c0_guest_reg(unsigned long *reg,
unsigned long temp;
do {
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
" " __LL "%0, %1 \n"
" and %0, %2 \n"
" or %0, %3 \n"
" " __SC "%0, %1 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (temp), "+m" (*reg)
: "r" (~change), "r" (val & change));
} while (unlikely(!temp));
......
......@@ -35,13 +35,14 @@ static __inline__ long local_add_return(long i, local_t * l)
unsigned long temp;
__asm__ __volatile__(
" .set push \n"
" .set arch=r4000 \n"
"1:" __LL "%1, %2 # local_add_return \n"
" addu %0, %1, %3 \n"
__SC "%0, %2 \n"
" beqzl %0, 1b \n"
" addu %0, %1, %3 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
: "Ir" (i), "m" (l->a.counter)
: "memory");
......@@ -49,13 +50,14 @@ static __inline__ long local_add_return(long i, local_t * l)
unsigned long temp;
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
"1:" __LL "%1, %2 # local_add_return \n"
" addu %0, %1, %3 \n"
__SC "%0, %2 \n"
" beqz %0, 1b \n"
" addu %0, %1, %3 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
: "Ir" (i), "m" (l->a.counter)
: "memory");
......@@ -80,13 +82,14 @@ static __inline__ long local_sub_return(long i, local_t * l)
unsigned long temp;
__asm__ __volatile__(
" .set push \n"
" .set arch=r4000 \n"
"1:" __LL "%1, %2 # local_sub_return \n"
" subu %0, %1, %3 \n"
__SC "%0, %2 \n"
" beqzl %0, 1b \n"
" subu %0, %1, %3 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
: "Ir" (i), "m" (l->a.counter)
: "memory");
......@@ -94,13 +97,14 @@ static __inline__ long local_sub_return(long i, local_t * l)
unsigned long temp;
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
"1:" __LL "%1, %2 # local_sub_return \n"
" subu %0, %1, %3 \n"
__SC "%0, %2 \n"
" beqz %0, 1b \n"
" subu %0, %1, %3 \n"
" .set mips0 \n"
" .set pop \n"
: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
: "Ir" (i), "m" (l->a.counter)
: "memory");
......
......@@ -255,12 +255,12 @@ static inline unsigned int dmt(void)
static inline void __raw_emt(void)
{
__asm__ __volatile__(
" .set push \n"
" .set noreorder \n"
" .set mips32r2 \n"
" .word 0x41600be1 # emt \n"
" ehb \n"
" .set mips0 \n"
" .set reorder");
" .set pop");
}
/* enable multi-threaded execution if previous suggested it should be.
......@@ -277,9 +277,10 @@ static inline void emt(int previous)
static inline void ehb(void)
{
__asm__ __volatile__(
" .set push \n"
" .set mips32r2 \n"
" ehb \n"
" .set mips0 \n");
" .set pop \n");
}
#define mftc0(rt,sel) \
......
......@@ -1345,9 +1345,10 @@ do { \
: "=r" (__res)); \
else \
__asm__ vol( \
".set\tpush\n\t" \
".set\tmips32\n\t" \
"mfc0\t%0, " #source ", " #sel "\n\t" \
".set\tmips0\n\t" \
".set\tpop\n\t" \
: "=r" (__res)); \
__res; \
})
......@@ -1358,15 +1359,17 @@ do { \
__res = __read_64bit_c0_split(source, sel, vol); \
else if (sel == 0) \
__asm__ vol( \
".set\tpush\n\t" \
".set\tmips3\n\t" \
"dmfc0\t%0, " #source "\n\t" \
".set\tmips0" \
".set\tpop" \
: "=r" (__res)); \
else \
__asm__ vol( \
".set\tpush\n\t" \
".set\tmips64\n\t" \
"dmfc0\t%0, " #source ", " #sel "\n\t" \
".set\tmips0" \
".set\tpop" \
: "=r" (__res)); \
__res; \
})
......@@ -1391,9 +1394,10 @@ do { \
: : "Jr" ((unsigned int)(value))); \
else \
__asm__ __volatile__( \
".set\tpush\n\t" \
".set\tmips32\n\t" \
"mtc0\t%z0, " #register ", " #sel "\n\t" \
".set\tmips0" \
".set\tpop" \
: : "Jr" ((unsigned int)(value))); \
} while (0)
......@@ -1403,15 +1407,17 @@ do { \
__write_64bit_c0_split(register, sel, value); \
else if (sel == 0) \
__asm__ __volatile__( \
".set\tpush\n\t" \
".set\tmips3\n\t" \
"dmtc0\t%z0, " #register "\n\t" \
".set\tmips0" \
".set\tpop" \
: : "Jr" (value)); \
else \
__asm__ __volatile__( \
".set\tpush\n\t" \
".set\tmips64\n\t" \
"dmtc0\t%z0, " #register ", " #sel "\n\t" \
".set\tmips0" \
".set\tpop" \
: : "Jr" (value)); \
} while (0)
......@@ -1463,19 +1469,21 @@ do { \
local_irq_save(__flags); \
if (sel == 0) \
__asm__ vol( \
".set\tpush\n\t" \
".set\tmips64\n\t" \
"dmfc0\t%L0, " #source "\n\t" \
"dsra\t%M0, %L0, 32\n\t" \
"sll\t%L0, %L0, 0\n\t" \
".set\tmips0" \
".set\tpop" \
: "=r" (__val)); \
else \
__asm__ vol( \
".set\tpush\n\t" \
".set\tmips64\n\t" \
"dmfc0\t%L0, " #source ", " #sel "\n\t" \
"dsra\t%M0, %L0, 32\n\t" \
"sll\t%L0, %L0, 0\n\t" \
".set\tmips0" \
".set\tpop" \
: "=r" (__val)); \
local_irq_restore(__flags); \
\
......@@ -1498,23 +1506,25 @@ do { \
: "+r" (__tmp)); \
else if (sel == 0) \
__asm__ __volatile__( \
".set\tpush\n\t" \
".set\tmips64\n\t" \
"dsll\t%L0, %L0, 32\n\t" \
"dsrl\t%L0, %L0, 32\n\t" \
"dsll\t%M0, %M0, 32\n\t" \
"or\t%L0, %L0, %M0\n\t" \
"dmtc0\t%L0, " #source "\n\t" \
".set\tmips0" \
".set\tpop" \
: "+r" (__tmp)); \
else \
__asm__ __volatile__( \
".set\tpush\n\t" \
".set\tmips64\n\t" \
"dsll\t%L0, %L0, 32\n\t" \
"dsrl\t%L0, %L0, 32\n\t" \
"dsll\t%M0, %M0, 32\n\t" \
"or\t%L0, %L0, %M0\n\t" \
"dmtc0\t%L0, " #source ", " #sel "\n\t" \
".set\tmips0" \
".set\tpop" \
: "+r" (__tmp)); \
local_irq_restore(__flags); \
} while (0)
......
......@@ -214,8 +214,8 @@ static inline void set_pte(pte_t *ptep, pte_t pteval)
if (kernel_uses_llsc && R10000_LLSC_WAR) {
__asm__ __volatile__ (
" .set arch=r4000 \n"
" .set push \n"
" .set arch=r4000 \n"
" .set noreorder \n"
"1:" __LL "%[tmp], %[buddy] \n"
" bnez %[tmp], 2f \n"
......@@ -225,13 +225,12 @@ static inline void set_pte(pte_t *ptep, pte_t pteval)
" nop \n"
"2: \n"
" .set pop \n"
" .set mips0 \n"
: [buddy] "+m" (buddy->pte), [tmp] "=&r" (tmp)
: [global] "r" (page_global));
} else if (kernel_uses_llsc) {
__asm__ __volatile__ (
" .set "MIPS_ISA_ARCH_LEVEL" \n"
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
" .set noreorder \n"
"1:" __LL "%[tmp], %[buddy] \n"
" bnez %[tmp], 2f \n"
......@@ -241,7 +240,6 @@ static inline void set_pte(pte_t *ptep, pte_t pteval)
" nop \n"
"2: \n"
" .set pop \n"
" .set mips0 \n"
: [buddy] "+m" (buddy->pte), [tmp] "=&r" (tmp)
: [global] "r" (page_global));
}
......
......@@ -427,9 +427,10 @@
#ifdef CONFIG_CPU_MIPSR6
eretnc
#else
.set push
.set arch=r4000
eret
.set mips0
.set pop
#endif
.endm
......
......@@ -652,9 +652,10 @@ isrdhwr:
ori k1, _THREAD_MASK
xori k1, _THREAD_MASK
LONG_L v1, TI_TP_VALUE(k1)
.set push
.set arch=r4000
eret
.set mips0
.set pop
#endif
.set pop
END(handle_ri_rdhwr)
......
......@@ -101,6 +101,7 @@ static void __cpuidle au1k_wait(void)
unsigned long c0status = read_c0_status() | 1; /* irqs on */
__asm__(
" .set push \n"
" .set arch=r4000 \n"
" cache 0x14, 0(%0) \n"
" cache 0x14, 32(%0) \n"
......@@ -111,7 +112,7 @@ static void __cpuidle au1k_wait(void)
" nop \n"
" nop \n"
" nop \n"
" .set mips0 \n"
" .set pop \n"
: : "r" (au1k_wait), "r" (c0status));
}
......
......@@ -106,6 +106,7 @@ static inline int mips_atomic_set(unsigned long addr, unsigned long new)
if (cpu_has_llsc && R10000_LLSC_WAR) {
__asm__ __volatile__ (
" .set push \n"
" .set arch=r4000 \n"
" li %[err], 0 \n"
"1: ll %[old], (%[addr]) \n"
......@@ -122,7 +123,7 @@ static inline int mips_atomic_set(unsigned long addr, unsigned long new)
" "STR(PTR)" 1b, 4b \n"
" "STR(PTR)" 2b, 4b \n"
" .previous \n"
" .set mips0 \n"
" .set pop \n"
: [old] "=&r" (old),
[err] "=&r" (err),
[tmp] "=&r" (tmp)
......@@ -132,6 +133,7 @@ static inline int mips_atomic_set(unsigned long addr, unsigned long new)
: "memory");
} else if (cpu_has_llsc) {
__asm__ __volatile__ (
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
" li %[err], 0 \n"
"1: \n"
......@@ -150,7 +152,7 @@ static inline int mips_atomic_set(unsigned long addr, unsigned long new)
" "STR(PTR)" 1b, 5b \n"
" "STR(PTR)" 2b, 5b \n"
" .previous \n"
" .set mips0 \n"
" .set pop \n"
: [old] "=&r" (old),
[err] "=&r" (err),
[tmp] "=&r" (tmp)
......