Commit 2a24d80f authored by Nick Desaulniers and committed by Michael Ellerman

powerpc/asm: Remove UPD_CONSTR after GCC 4.9 removal

UPD_CONSTR was previously a preprocessor define working around an old
GCC 4.9 inline asm bug with "m<>" constraints. Now that GCC 4.9 is no
longer a supported compiler, use the "m<>" constraint directly.

Fixes: 6563139d ("powerpc: remove GCC version check for UPD_CONSTR")
Suggested-by: Nathan Chancellor <nathan@kernel.org>
Suggested-by: Christophe Leroy <christophe.leroy@csgroup.eu>
Suggested-by: Michael Ellerman <mpe@ellerman.id.au>
Signed-off-by: Nick Desaulniers <ndesaulniers@google.com>
Reviewed-by: Nathan Chancellor <nathan@kernel.org>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/20210914161712.2463458-1-ndesaulniers@google.com
parent 7eff9bc0
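Before the hunks below, a quick note on the constraint itself. The snippet that follows is a minimal standalone sketch, not part of this commit, and it only compiles with a powerpc-targeting GCC; the helper name example_load is made up for illustration. It mirrors the arch_atomic_read() pattern changed here: the "m<>" constraint tells GCC the memory operand may use pre-increment/pre-decrement (update-form) addressing, and the %U1/%X1 operand modifiers then emit the "u" (update) or "x" (indexed) suffix on the load mnemonic so the instruction matches whichever address form GCC picked.

/* Illustrative sketch only, not kernel code. */
static inline int example_load(const int *p)
{
	int val;

	/* "m<>" permits update-form addresses; %U1/%X1 append the matching suffix. */
	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(val) : "m<>"(*p));
	return val;
}

With GCC 4.9 out of the picture, the diff simply spells "m<>" at each use site instead of going through the UPD_CONSTR macro.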
@@ -12,6 +12,4 @@
 # define ASM_CONST(x) __ASM_CONST(x)
 #endif
-#define UPD_CONSTR "<>"
 #endif /* _ASM_POWERPC_ASM_CONST_H */
@@ -27,14 +27,14 @@ static __inline__ int arch_atomic_read(const atomic_t *v)
 {
 	int t;
-	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"UPD_CONSTR(v->counter));
+	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m<>"(v->counter));
 	return t;
 }
 static __inline__ void arch_atomic_set(atomic_t *v, int i)
 {
-	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"UPD_CONSTR(v->counter) : "r"(i));
+	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m<>"(v->counter) : "r"(i));
 }
 #define ATOMIC_OP(op, asm_op) \
@@ -320,14 +320,14 @@ static __inline__ s64 arch_atomic64_read(const atomic64_t *v)
 {
 	s64 t;
-	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"UPD_CONSTR(v->counter));
+	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m<>"(v->counter));
 	return t;
 }
 static __inline__ void arch_atomic64_set(atomic64_t *v, s64 i)
 {
-	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"UPD_CONSTR(v->counter) : "r"(i));
+	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m<>"(v->counter) : "r"(i));
 }
 #define ATOMIC64_OP(op, asm_op) \
@@ -122,7 +122,7 @@ static inline u##size name(const volatile u##size __iomem *addr) \
 { \
 	u##size ret; \
 	__asm__ __volatile__("sync;"#insn"%U1%X1 %0,%1;twi 0,%0,0;isync"\
-		: "=r" (ret) : "m"UPD_CONSTR (*addr) : "memory"); \
+		: "=r" (ret) : "m<>" (*addr) : "memory"); \
 	return ret; \
 }
@@ -130,7 +130,7 @@ static inline u##size name(const volatile u##size __iomem *addr) \
 static inline void name(volatile u##size __iomem *addr, u##size val) \
 { \
 	__asm__ __volatile__("sync;"#insn"%U0%X0 %1,%0" \
-		: "=m"UPD_CONSTR (*addr) : "r" (val) : "memory"); \
+		: "=m<>" (*addr) : "r" (val) : "memory"); \
 	mmiowb_set_pending(); \
 }
@@ -86,7 +86,7 @@ __pu_failed: \
 		"1: " op "%U1%X1 %0,%1 # put_user\n" \
 		EX_TABLE(1b, %l2) \
 		: \
-		: "r" (x), "m"UPD_CONSTR (*addr) \
+		: "r" (x), "m<>" (*addr) \
 		: \
 		: label)
@@ -143,7 +143,7 @@ do { \
 		"1: "op"%U1%X1 %0, %1 # get_user\n" \
 		EX_TABLE(1b, %l2) \
 		: "=r" (x) \
-		: "m"UPD_CONSTR (*addr) \
+		: "m<>" (*addr) \
 		: \
 		: label)
@@ -200,7 +200,7 @@ __gus_failed: \
 		".previous\n" \
 		EX_TABLE(1b, 3b) \
 		: "=r" (err), "=r" (x) \
-		: "m"UPD_CONSTR (*addr), "i" (-EFAULT), "0" (err))
+		: "m<>" (*addr), "i" (-EFAULT), "0" (err))
 #ifdef __powerpc64__
 #define __get_user_asm2(x, addr, err) \
@@ -1094,7 +1094,7 @@ static inline u64 sp_to_dp(u32 fprs)
 	preempt_disable();
 	enable_kernel_fp();
-	asm ("lfs%U1%X1 0,%1; stfd%U0%X0 0,%0" : "=m"UPD_CONSTR (fprd) : "m"UPD_CONSTR (fprs)
+	asm ("lfs%U1%X1 0,%1; stfd%U0%X0 0,%0" : "=m<>" (fprd) : "m<>" (fprs)
 	     : "fr0");
 	preempt_enable();
 	return fprd;
@@ -1106,7 +1106,7 @@ static inline u32 dp_to_sp(u64 fprd)
 	preempt_disable();
 	enable_kernel_fp();
-	asm ("lfd%U1%X1 0,%1; stfs%U0%X0 0,%0" : "=m"UPD_CONSTR (fprs) : "m"UPD_CONSTR (fprd)
+	asm ("lfd%U1%X1 0,%1; stfs%U0%X0 0,%0" : "=m<>" (fprs) : "m<>" (fprd)
 	     : "fr0");
 	preempt_enable();
 	return fprs;