Commit a41e0d75 authored by Al Viro

x86: don't wank with magical size in __copy_in_user()

... especially since copy_in_user() doesn't.
Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
parent 3f763453
--- a/arch/x86/include/asm/uaccess_64.h
+++ b/arch/x86/include/asm/uaccess_64.h
@@ -185,62 +185,8 @@ int __copy_to_user(void __user *dst, const void *src, unsigned size)
 static __always_inline __must_check
 int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
 {
-	int ret = 0;
-
-	might_fault();
-	if (!__builtin_constant_p(size))
-		return copy_user_generic((__force void *)dst,
-					 (__force void *)src, size);
-	switch (size) {
-	case 1: {
-		u8 tmp;
-		__uaccess_begin();
-		__get_user_asm(tmp, (u8 __user *)src,
-			       ret, "b", "b", "=q", 1);
-		if (likely(!ret))
-			__put_user_asm(tmp, (u8 __user *)dst,
-				       ret, "b", "b", "iq", 1);
-		__uaccess_end();
-		return ret;
-	}
-	case 2: {
-		u16 tmp;
-		__uaccess_begin();
-		__get_user_asm(tmp, (u16 __user *)src,
-			       ret, "w", "w", "=r", 2);
-		if (likely(!ret))
-			__put_user_asm(tmp, (u16 __user *)dst,
-				       ret, "w", "w", "ir", 2);
-		__uaccess_end();
-		return ret;
-	}
-	case 4: {
-		u32 tmp;
-		__uaccess_begin();
-		__get_user_asm(tmp, (u32 __user *)src,
-			       ret, "l", "k", "=r", 4);
-		if (likely(!ret))
-			__put_user_asm(tmp, (u32 __user *)dst,
-				       ret, "l", "k", "ir", 4);
-		__uaccess_end();
-		return ret;
-	}
-	case 8: {
-		u64 tmp;
-		__uaccess_begin();
-		__get_user_asm(tmp, (u64 __user *)src,
-			       ret, "q", "", "=r", 8);
-		if (likely(!ret))
-			__put_user_asm(tmp, (u64 __user *)dst,
-				       ret, "q", "", "er", 8);
-		__uaccess_end();
-		return ret;
-	}
-	default:
-		return copy_user_generic((__force void *)dst,
-					 (__force void *)src, size);
-	}
+	return copy_user_generic((__force void *)dst,
+				 (__force void *)src, size);
 }
 
 static __must_check __always_inline int
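[Editor's note] For context, the copy_user_generic() that __copy_in_user() now calls unconditionally already dispatches to the fastest available implementation at boot time via the alternatives mechanism, so dropping the constant-size switch costs little. A from-memory sketch of that same-era helper, paraphrased rather than quoted from this commit:

/*
 * Sketch of the same-era copy_user_generic() from
 * arch/x86/include/asm/uaccess_64.h (paraphrased from memory).
 * Returns the number of bytes left uncopied; 0 on success.
 */
static __always_inline __must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len)
{
	unsigned ret;

	/*
	 * Boot-time alternatives patching picks one of three asm
	 * routines: the unrolled copy, "rep movs" string copy
	 * (X86_FEATURE_REP_GOOD), or "rep movsb" (X86_FEATURE_ERMS).
	 */
	alternative_call_2(copy_user_generic_unrolled,
			   copy_user_generic_string, X86_FEATURE_REP_GOOD,
			   copy_user_enhanced_fast_string, X86_FEATURE_ERMS,
			   ASM_OUTPUT2("=a" (ret), "=D" (to), "=S" (from),
				       "=d" (len)),
			   "1" (to), "2" (from), "3" (len)
			   : "memory", "rax", "r8", "r9", "r10", "r11");
	return ret;
}

The removed switch duplicated per-size __get_user_asm()/__put_user_asm() pairs that the checked wrapper removed below never used, hence the commit message's "especially since copy_in_user() doesn't".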
--- a/arch/x86/lib/usercopy_64.c
+++ b/arch/x86/lib/usercopy_64.c
@@ -54,15 +54,6 @@ unsigned long clear_user(void __user *to, unsigned long n)
 }
 EXPORT_SYMBOL(clear_user);
 
-unsigned long copy_in_user(void __user *to, const void __user *from, unsigned len)
-{
-	if (access_ok(VERIFY_WRITE, to, len) && access_ok(VERIFY_READ, from, len)) {
-		return copy_user_generic((__force void *)to, (__force void *)from, len);
-	}
-	return len;
-}
-EXPORT_SYMBOL(copy_in_user);
-
 /*
  * Try to copy last bytes and clear the rest if needed.
  * Since protection fault in copy_from/to_user is not a normal situation,
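[Editor's note] With the exported x86 copy_in_user() gone, the access_ok()-checked wrapper is provided by generic code instead (in mainline it ends up as an inline in include/linux/uaccess.h). A minimal sketch of that layering, assuming it delegates to the arch __copy_in_user() kept above; mainline's eventual version calls raw_copy_in_user() instead:

/*
 * Illustrative sketch of a generic, checked copy_in_user() layered on
 * the arch helper -- an assumption for illustration, not this commit's
 * code; the mainline version ended up using raw_copy_in_user().
 */
static __always_inline unsigned long __must_check
copy_in_user(void __user *to, const void __user *from, unsigned long n)
{
	might_fault();
	if (access_ok(VERIFY_WRITE, to, n) &&
	    access_ok(VERIFY_READ, from, n))
		n = __copy_in_user(to, from, n);
	return n;	/* bytes left uncopied; 0 on success */
}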