Commit 0f6563a3 authored by Will Deacon

Merge branch 'for-next/asm' into for-next/core

* for-next/asm:
  arm64: uaccess: remove unnecessary earlyclobber
  arm64: uaccess: permit put_{user,kernel} to use zero register
  arm64: uaccess: permit __smp_store_release() to use zero register
  arm64: atomics: lse: improve cmpxchg implementation
parents 67eacd61 17242086
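
The uaccess and barrier changes below share one idea: the value operand's constraint goes from "r" to "rZ", so GCC may feed a constant zero straight through as the zero register instead of first materialising it in a general-purpose register. A minimal, standalone sketch of that mechanism (store_u32 and clear_flag are illustrative names, not kernel code; only the constraint/modifier usage mirrors the patches):

#include <stdint.h>

/* Store v to *p; "rZ" accepts a general-purpose register or the constant zero. */
static inline void store_u32(uint32_t *p, uint32_t v)
{
	/* With the %w modifier, a constant-zero operand is printed as wzr. */
	asm volatile("str %w1, %0" : "=Q" (*p) : "rZ" (v));
}

void clear_flag(uint32_t *flag)
{
	/* With optimisation this can become a single "str wzr, [x0]";
	 * a plain "r" constraint would need a "mov w1, #0" first. */
	store_u32(flag, 0);
}
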
@@ -251,22 +251,15 @@ __lse__cmpxchg_case_##name##sz(volatile void *ptr,	\
 					      u##sz old,		\
 					      u##sz new)		\
 {									\
-	register unsigned long x0 asm ("x0") = (unsigned long)ptr;	\
-	register u##sz x1 asm ("x1") = old;				\
-	register u##sz x2 asm ("x2") = new;				\
-	unsigned long tmp;						\
-									\
 	asm volatile(							\
 	__LSE_PREAMBLE							\
-	"	mov	%" #w "[tmp], %" #w "[old]\n"			\
-	"	cas" #mb #sfx "\t%" #w "[tmp], %" #w "[new], %[v]\n"	\
-	"	mov	%" #w "[ret], %" #w "[tmp]"			\
-	: [ret] "+r" (x0), [v] "+Q" (*(u##sz *)ptr),			\
-	  [tmp] "=&r" (tmp)						\
-	: [old] "r" (x1), [new] "r" (x2)				\
+	"	cas" #mb #sfx "	%" #w "[old], %" #w "[new], %[v]\n"	\
+	: [v] "+Q" (*(u##sz *)ptr),					\
+	  [old] "+r" (old)						\
+	: [new] "rZ" (new)						\
 	: cl);								\
 									\
-	return x0;							\
+	return old;							\
 }
 
 __CMPXCHG_CASE(w, b, , 8, )
...
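
The cmpxchg hunk above drops the fixed x0/x1/x2 register variables and the scratch temporary: CAS updates the register that held "old" with the value observed in memory, so that same variable can be returned and both surrounding mov instructions go away, while "rZ" additionally lets a constant-zero "new" use the zero register. A rough standalone sketch of what the new 32-bit case amounts to (cmpxchg32_sketch is an illustrative name; it assumes an LSE-capable build, e.g. -march=armv8.1-a, and collapses the kernel's ordering/clobber variants into one fully-clobbered form):

#include <stdint.h>

/* Returns the value found at *ptr; equal to oldval if the swap happened. */
static inline uint32_t cmpxchg32_sketch(volatile uint32_t *ptr,
					uint32_t oldval, uint32_t newval)
{
	asm volatile("cas %w[old], %w[new], %[v]"
		     : [v] "+Q" (*ptr), [old] "+r" (oldval)	/* old is read and overwritten */
		     : [new] "rZ" (newval)
		     : "memory");
	return oldval;
}
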
@@ -131,25 +131,25 @@ do {									\
 	case 1:								\
 		asm volatile ("stlrb %w1, %0"				\
 				: "=Q" (*__p)				\
-				: "r" (*(__u8 *)__u.__c)		\
+				: "rZ" (*(__u8 *)__u.__c)		\
 				: "memory");				\
 		break;							\
 	case 2:								\
 		asm volatile ("stlrh %w1, %0"				\
 				: "=Q" (*__p)				\
-				: "r" (*(__u16 *)__u.__c)		\
+				: "rZ" (*(__u16 *)__u.__c)		\
 				: "memory");				\
 		break;							\
 	case 4:								\
 		asm volatile ("stlr %w1, %0"				\
 				: "=Q" (*__p)				\
-				: "r" (*(__u32 *)__u.__c)		\
+				: "rZ" (*(__u32 *)__u.__c)		\
 				: "memory");				\
 		break;							\
 	case 8:								\
-		asm volatile ("stlr %1, %0"				\
+		asm volatile ("stlr %x1, %0"				\
 				: "=Q" (*__p)				\
-				: "r" (*(__u64 *)__u.__c)		\
+				: "rZ" (*(__u64 *)__u.__c)		\
 				: "memory");				\
 		break;							\
 	}								\
...
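
Besides switching the value constraints to "rZ", the case-8 template above also gains an explicit operand modifier ("stlr %x1, %0"). The reason is not stated in the hunk itself, but presumably an unmodified %1 would render a constant-zero operand as an immediate rather than a register name, which STLR cannot accept, whereas %x1 (like %w1 in the narrower cases) prints it as xzr. A hedged standalone sketch (store_release_u64 is an illustrative name, not kernel code):

#include <stdint.h>

static inline void store_release_u64(uint64_t *p, uint64_t v)
{
	/* %x1 so that a constant zero under "rZ" is emitted as xzr. */
	asm volatile("stlr %x1, %0"
		     : "=Q" (*p)
		     : "rZ" (v)
		     : "memory");
}
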
@@ -237,7 +237,7 @@ static inline void __user *__uaccess_mask_ptr(const void __user *ptr)
 	"1:	" load "	" reg "1, [%2]\n"			\
 	"2:\n"								\
 	_ASM_EXTABLE_##type##ACCESS_ERR_ZERO(1b, 2b, %w0, %w1)		\
-	: "+r" (err), "=&r" (x)						\
+	: "+r" (err), "=r" (x)						\
 	: "r" (addr))
 
 #define __raw_get_mem(ldr, x, ptr, err, type)				\
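
The get-side change above only drops the earlyclobber on the destination operand ("=&r" becomes "=r"). An earlyclobber tells the compiler the output may be written before all inputs have been consumed, so it must not share a register with any input; a single load instruction reads the address operand before writing its destination register, so the weaker "=r" is sufficient and lets the compiler reuse the address register for the result. An illustrative sketch of the difference (load_u64 is not kernel code; the kernel version additionally wires up the exception-table fixup):

#include <stdint.h>

static inline uint64_t load_u64(const uint64_t *addr)
{
	uint64_t val;

	/* "=r" (not "=&r"): val may be allocated to the same register as addr. */
	asm("ldr %0, [%1]" : "=r" (val) : "r" (addr));
	return val;
}
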
@@ -327,7 +327,7 @@ do {									\
 	"2:\n"								\
 	_ASM_EXTABLE_##type##ACCESS_ERR(1b, 2b, %w0)			\
 	: "+r" (err)							\
-	: "r" (x), "r" (addr))
+	: "rZ" (x), "r" (addr))
 
 #define __raw_put_mem(str, x, ptr, err, type)				\
 do {									\
...