Commit 640ce3d1 authored by Gordon Jin, committed by Linus Torvalds

[PATCH] x86_64: correct copy_user_generic return value when exception happens

Fix a bug where arch/x86_64/lib/copy_user:copy_user_generic returns a wrong
value when an exception happens.

In the case where the address is not 8-byte aligned (i.e. the code falls into
.Lbad_alignment), if an exception happens in .Ls11, %rdx holds a wrong byte
count, so copy_user_generic returns a wrong number of uncopied bytes.  The
patch also fixes a bug where the wrong number of destination bytes was zeroed
in that situation (in .Lzero_rest).
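
To make the contract concrete, here is a minimal userspace C sketch (an
illustration only, not kernel code; sketch_copy and fault_after are
hypothetical names) of what the exception path must preserve: the routine
returns the number of bytes it did not copy, and the uncopied tail of the
destination is zeroed.

#include <stdio.h>
#include <string.h>
#include <stddef.h>

/* Copy up to 'count' bytes; pretend a fault occurs once 'fault_after'
 * bytes have been copied (fault_after >= count means no fault). */
static size_t sketch_copy(void *dst, const void *src, size_t count,
                          size_t fault_after)
{
    size_t copied = count < fault_after ? count : fault_after;

    memcpy(dst, src, copied);
    /* Like .Lzero_rest in copy_user: zero whatever was not copied. */
    memset((char *)dst + copied, 0, count - copied);
    /* The return value must be the uncopied byte count. */
    return count - copied;
}

int main(void)
{
    char src[16] = "abcdefghijklmno";
    char dst[16];
    size_t left = sketch_copy(dst, src, sizeof(dst), 5);

    printf("uncopied = %zu, dst[4] = %c, dst[5] = %d\n",
           left, dst[4], (int)dst[5]);
    return 0;
}

With a simulated fault after 5 of 16 bytes the sketch reports 11 uncopied
bytes and a zeroed tail; the %rdx mishandling described above is what broke
exactly this accounting in the misaligned case.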
Signed-off-by: Yanmin Zhang <yanmin.zhang@intel.com>
Signed-off-by: Nanhai Zou <nanhai.zou@intel.com>
Signed-off-by: Gordon Jin <gordon.jin@intel.com>
Signed-off-by: Suresh Siddha <suresh.b.siddha@intel.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
parent c47f0fce
@@ -73,7 +73,7 @@ bad_to_user:
  * rdx count
  *
  * Output:
- * eax uncopied bytes or 0 if successfull.
+ * eax uncopied bytes or 0 if successful.
  */
 	.globl copy_user_generic
 	.p2align 4
@@ -179,9 +179,9 @@ copy_user_generic:
 	movl $8,%r9d
 	subl %ecx,%r9d
 	movl %r9d,%ecx
-	subq %r9,%rdx
-	jz   .Lsmall_align
-	js   .Lsmall_align
+	cmpq %r9,%rdx
+	jz   .Lhandle_7
+	js   .Lhandle_7
 .Lalign_1:
 .Ls11:	movb (%rsi),%bl
 .Ld11:	movb %bl,(%rdi)
@@ -189,10 +189,8 @@ copy_user_generic:
 	incq %rdi
 	decl %ecx
 	jnz .Lalign_1
+	subq %r9,%rdx
 	jmp .Lafter_bad_alignment
-.Lsmall_align:
-	addq %r9,%rdx
-	jmp .Lhandle_7
 #endif
 
 	/* table sorted by exception address */
@@ -219,8 +217,8 @@ copy_user_generic:
 	.quad .Ls10,.Le_byte
 	.quad .Ld10,.Le_byte
 #ifdef FIX_ALIGNMENT
-	.quad .Ls11,.Le_byte
-	.quad .Ld11,.Le_byte
+	.quad .Ls11,.Lzero_rest
+	.quad .Ld11,.Lzero_rest
 #endif
 	.quad .Le5,.Le_zero
 	.previous
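
Rendered as a rough C model of the fixed alignment prologue (illustration
only; align_copy_fixed, copy_byte_may_fault and fault_at are hypothetical
names, with 'remaining' standing in for %rdx and 'align' for %r9): the new
code only compares the count against the alignment byte count before the byte
loop and subtracts after the loop completes, so the count is exact if a fault
interrupts the loop.

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

static size_t fault_at = (size_t)-1;        /* no simulated fault by default */

/* Hypothetical stand-in for the .Ls11/.Ld11 instructions: copies one
 * byte unless told to "fault" at a given index. */
static bool copy_byte_may_fault(char *dst, const char *src, size_t idx)
{
    if (idx == fault_at)
        return false;                       /* simulate the exception */
    *dst = *src;
    return true;
}

/* Returns the number of bytes NOT copied, i.e. the value the exception
 * path must see; 'remaining' models %rdx, 'align' models %r9. */
static size_t align_copy_fixed(char *dst, const char *src,
                               size_t remaining, size_t align)
{
    if (remaining <= align)                 /* cmpq %r9,%rdx; jz/js .Lhandle_7 */
        return remaining;                   /* short copy, byte-tail path */

    for (size_t i = 0; i < align; i++)      /* .Lalign_1 byte loop */
        if (!copy_byte_may_fault(dst + i, src + i, i))
            return remaining - i;           /* fault: count is still exact */

    return remaining - align;               /* subq %r9,%rdx, now after the loop */
}

int main(void)
{
    char src[32] = "source bytes for illustration..";
    char dst[32] = {0};

    fault_at = 2;                           /* fault on the third alignment byte */
    printf("uncopied = %zu (expected 30)\n",
           align_copy_fixed(dst, src, sizeof(dst), 5));
    return 0;
}

Before the patch, the equivalent of remaining -= align ran ahead of the loop,
so a fault at byte i reported remaining - align instead of remaining - i.  The
fixup-table change in the last hunk, pointing .Ls11/.Ld11 at .Lzero_rest
instead of .Le_byte, is the companion fix for zeroing the right number of
destination bytes.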