Commit 16bcddbe, authored by James Hogan and committed by Jiri Slaby

metag/usercopy: Add early abort to copy_to_user

commit fb8ea062 upstream.

When copying to userland on Meta, if any faults are encountered
immediately abort the copy instead of continuing on and repeatedly
faulting, and worse potentially copying further bytes successfully to
subsequent valid pages.

Fixes: 373cd784 ("metag: Memory handling")
Reported-by: Al Viro <viro@zeniv.linux.org.uk>
Signed-off-by: James Hogan <james.hogan@imgtec.com>
Cc: linux-metag@vger.kernel.org
Signed-off-by: Jiri Slaby <jslaby@suse.cz>
parent e7ef4a6c
@@ -538,23 +538,31 @@ unsigned long __copy_user(void __user *pdst, const void *psrc,
 	if ((unsigned long) src & 1) {
 		__asm_copy_to_user_1(dst, src, retn);
 		n--;
+		if (retn)
+			return retn + n;
 	}
 	if ((unsigned long) dst & 1) {
 		/* Worst case - byte copy */
 		while (n > 0) {
 			__asm_copy_to_user_1(dst, src, retn);
 			n--;
+			if (retn)
+				return retn + n;
 		}
 	}
 	if (((unsigned long) src & 2) && n >= 2) {
 		__asm_copy_to_user_2(dst, src, retn);
 		n -= 2;
+		if (retn)
+			return retn + n;
 	}
 	if ((unsigned long) dst & 2) {
 		/* Second worst case - word copy */
 		while (n >= 2) {
 			__asm_copy_to_user_2(dst, src, retn);
 			n -= 2;
+			if (retn)
+				return retn + n;
 		}
 	}
@@ -569,6 +577,8 @@ unsigned long __copy_user(void __user *pdst, const void *psrc,
 		while (n >= 8) {
 			__asm_copy_to_user_8x64(dst, src, retn);
 			n -= 8;
+			if (retn)
+				return retn + n;
 		}
 	}
 	if (n >= RAPF_MIN_BUF_SIZE) {
@@ -581,6 +591,8 @@ unsigned long __copy_user(void __user *pdst, const void *psrc,
 		while (n >= 8) {
 			__asm_copy_to_user_8x64(dst, src, retn);
 			n -= 8;
+			if (retn)
+				return retn + n;
 		}
 	}
 #endif
@@ -588,11 +600,15 @@ unsigned long __copy_user(void __user *pdst, const void *psrc,
 	while (n >= 16) {
 		__asm_copy_to_user_16(dst, src, retn);
 		n -= 16;
+		if (retn)
+			return retn + n;
 	}
 	while (n >= 4) {
 		__asm_copy_to_user_4(dst, src, retn);
 		n -= 4;
+		if (retn)
+			return retn + n;
 	}
 	switch (n) {
@@ -609,6 +625,10 @@ unsigned long __copy_user(void __user *pdst, const void *psrc,
 		break;
 	}
+	/*
+	 * If we get here, retn correctly reflects the number of failing
+	 * bytes.
+	 */
 	return retn;
 }
 EXPORT_SYMBOL(__copy_user);
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment