Commit 75045f77 authored by Jann Horn, committed by Thomas Gleixner

x86/extable: Introduce _ASM_EXTABLE_UA for uaccess fixups

Currently, most fixups for attempting to access userspace memory are
handled using _ASM_EXTABLE, which is also used for various other types of
fixups (e.g. safe MSR access, IRET failures, and a bunch of other things).
In order to make it possible to add special safety checks to uaccess fixups
(in particular, checking whether the fault address is actually in
userspace), introduce a new exception table handler ex_handler_uaccess()
and wire it up to all the user access fixups (excluding ones that
already use _ASM_EXTABLE_EX).
Signed-off-by: Jann Horn <jannh@google.com>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Tested-by: Kees Cook <keescook@chromium.org>
Cc: Andy Lutomirski <luto@kernel.org>
Cc: kernel-hardening@lists.openwall.com
Cc: dvyukov@google.com
Cc: Masami Hiramatsu <mhiramat@kernel.org>
Cc: "Naveen N. Rao" <naveen.n.rao@linux.vnet.ibm.com>
Cc: Anil S Keshavamurthy <anil.s.keshavamurthy@intel.com>
Cc: "David S. Miller" <davem@davemloft.net>
Cc: Alexander Viro <viro@zeniv.linux.org.uk>
Cc: linux-fsdevel@vger.kernel.org
Cc: Borislav Petkov <bp@alien8.de>
Link: https://lkml.kernel.org/r/20180828201421.157735-5-jannh@google.com
parent e3e4d501
...@@ -130,6 +130,9 @@ ...@@ -130,6 +130,9 @@
# define _ASM_EXTABLE(from, to) \ # define _ASM_EXTABLE(from, to) \
_ASM_EXTABLE_HANDLE(from, to, ex_handler_default) _ASM_EXTABLE_HANDLE(from, to, ex_handler_default)
# define _ASM_EXTABLE_UA(from, to) \
_ASM_EXTABLE_HANDLE(from, to, ex_handler_uaccess)
# define _ASM_EXTABLE_FAULT(from, to) \ # define _ASM_EXTABLE_FAULT(from, to) \
_ASM_EXTABLE_HANDLE(from, to, ex_handler_fault) _ASM_EXTABLE_HANDLE(from, to, ex_handler_fault)
...@@ -165,8 +168,8 @@ ...@@ -165,8 +168,8 @@
jmp copy_user_handle_tail jmp copy_user_handle_tail
.previous .previous
_ASM_EXTABLE(100b,103b) _ASM_EXTABLE_UA(100b, 103b)
_ASM_EXTABLE(101b,103b) _ASM_EXTABLE_UA(101b, 103b)
.endm .endm
#else #else
...@@ -182,6 +185,9 @@ ...@@ -182,6 +185,9 @@
# define _ASM_EXTABLE(from, to) \ # define _ASM_EXTABLE(from, to) \
_ASM_EXTABLE_HANDLE(from, to, ex_handler_default) _ASM_EXTABLE_HANDLE(from, to, ex_handler_default)
# define _ASM_EXTABLE_UA(from, to) \
_ASM_EXTABLE_HANDLE(from, to, ex_handler_uaccess)
# define _ASM_EXTABLE_FAULT(from, to) \ # define _ASM_EXTABLE_FAULT(from, to) \
_ASM_EXTABLE_HANDLE(from, to, ex_handler_fault) _ASM_EXTABLE_HANDLE(from, to, ex_handler_fault)
......
...@@ -226,7 +226,7 @@ static inline void copy_fxregs_to_kernel(struct fpu *fpu) ...@@ -226,7 +226,7 @@ static inline void copy_fxregs_to_kernel(struct fpu *fpu)
"3: movl $-2,%[err]\n\t" \ "3: movl $-2,%[err]\n\t" \
"jmp 2b\n\t" \ "jmp 2b\n\t" \
".popsection\n\t" \ ".popsection\n\t" \
_ASM_EXTABLE(1b, 3b) \ _ASM_EXTABLE_UA(1b, 3b) \
: [err] "=r" (err) \ : [err] "=r" (err) \
: "D" (st), "m" (*st), "a" (lmask), "d" (hmask) \ : "D" (st), "m" (*st), "a" (lmask), "d" (hmask) \
: "memory") : "memory")
......
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
"3:\tmov\t%3, %1\n" \ "3:\tmov\t%3, %1\n" \
"\tjmp\t2b\n" \ "\tjmp\t2b\n" \
"\t.previous\n" \ "\t.previous\n" \
_ASM_EXTABLE(1b, 3b) \ _ASM_EXTABLE_UA(1b, 3b) \
: "=r" (oldval), "=r" (ret), "+m" (*uaddr) \ : "=r" (oldval), "=r" (ret), "+m" (*uaddr) \
: "i" (-EFAULT), "0" (oparg), "1" (0)) : "i" (-EFAULT), "0" (oparg), "1" (0))
...@@ -36,8 +36,8 @@ ...@@ -36,8 +36,8 @@
"4:\tmov\t%5, %1\n" \ "4:\tmov\t%5, %1\n" \
"\tjmp\t3b\n" \ "\tjmp\t3b\n" \
"\t.previous\n" \ "\t.previous\n" \
_ASM_EXTABLE(1b, 4b) \ _ASM_EXTABLE_UA(1b, 4b) \
_ASM_EXTABLE(2b, 4b) \ _ASM_EXTABLE_UA(2b, 4b) \
: "=&a" (oldval), "=&r" (ret), \ : "=&a" (oldval), "=&r" (ret), \
"+m" (*uaddr), "=&r" (tem) \ "+m" (*uaddr), "=&r" (tem) \
: "r" (oparg), "i" (-EFAULT), "1" (0)) : "r" (oparg), "i" (-EFAULT), "1" (0))
......
...@@ -198,8 +198,8 @@ __typeof__(__builtin_choose_expr(sizeof(x) > sizeof(0UL), 0ULL, 0UL)) ...@@ -198,8 +198,8 @@ __typeof__(__builtin_choose_expr(sizeof(x) > sizeof(0UL), 0ULL, 0UL))
"4: movl %3,%0\n" \ "4: movl %3,%0\n" \
" jmp 3b\n" \ " jmp 3b\n" \
".previous\n" \ ".previous\n" \
_ASM_EXTABLE(1b, 4b) \ _ASM_EXTABLE_UA(1b, 4b) \
_ASM_EXTABLE(2b, 4b) \ _ASM_EXTABLE_UA(2b, 4b) \
: "=r" (err) \ : "=r" (err) \
: "A" (x), "r" (addr), "i" (errret), "0" (err)) : "A" (x), "r" (addr), "i" (errret), "0" (err))
...@@ -340,8 +340,8 @@ do { \ ...@@ -340,8 +340,8 @@ do { \
" xorl %%edx,%%edx\n" \ " xorl %%edx,%%edx\n" \
" jmp 3b\n" \ " jmp 3b\n" \
".previous\n" \ ".previous\n" \
_ASM_EXTABLE(1b, 4b) \ _ASM_EXTABLE_UA(1b, 4b) \
_ASM_EXTABLE(2b, 4b) \ _ASM_EXTABLE_UA(2b, 4b) \
: "=r" (retval), "=&A"(x) \ : "=r" (retval), "=&A"(x) \
: "m" (__m(__ptr)), "m" __m(((u32 __user *)(__ptr)) + 1), \ : "m" (__m(__ptr)), "m" __m(((u32 __user *)(__ptr)) + 1), \
"i" (errret), "0" (retval)); \ "i" (errret), "0" (retval)); \
...@@ -386,7 +386,7 @@ do { \ ...@@ -386,7 +386,7 @@ do { \
" xor"itype" %"rtype"1,%"rtype"1\n" \ " xor"itype" %"rtype"1,%"rtype"1\n" \
" jmp 2b\n" \ " jmp 2b\n" \
".previous\n" \ ".previous\n" \
_ASM_EXTABLE(1b, 3b) \ _ASM_EXTABLE_UA(1b, 3b) \
: "=r" (err), ltype(x) \ : "=r" (err), ltype(x) \
: "m" (__m(addr)), "i" (errret), "0" (err)) : "m" (__m(addr)), "i" (errret), "0" (err))
...@@ -398,7 +398,7 @@ do { \ ...@@ -398,7 +398,7 @@ do { \
"3: mov %3,%0\n" \ "3: mov %3,%0\n" \
" jmp 2b\n" \ " jmp 2b\n" \
".previous\n" \ ".previous\n" \
_ASM_EXTABLE(1b, 3b) \ _ASM_EXTABLE_UA(1b, 3b) \
: "=r" (err), ltype(x) \ : "=r" (err), ltype(x) \
: "m" (__m(addr)), "i" (errret), "0" (err)) : "m" (__m(addr)), "i" (errret), "0" (err))
...@@ -474,7 +474,7 @@ struct __large_struct { unsigned long buf[100]; }; ...@@ -474,7 +474,7 @@ struct __large_struct { unsigned long buf[100]; };
"3: mov %3,%0\n" \ "3: mov %3,%0\n" \
" jmp 2b\n" \ " jmp 2b\n" \
".previous\n" \ ".previous\n" \
_ASM_EXTABLE(1b, 3b) \ _ASM_EXTABLE_UA(1b, 3b) \
: "=r"(err) \ : "=r"(err) \
: ltype(x), "m" (__m(addr)), "i" (errret), "0" (err)) : ltype(x), "m" (__m(addr)), "i" (errret), "0" (err))
...@@ -602,7 +602,7 @@ extern void __cmpxchg_wrong_size(void) ...@@ -602,7 +602,7 @@ extern void __cmpxchg_wrong_size(void)
"3:\tmov %3, %0\n" \ "3:\tmov %3, %0\n" \
"\tjmp 2b\n" \ "\tjmp 2b\n" \
"\t.previous\n" \ "\t.previous\n" \
_ASM_EXTABLE(1b, 3b) \ _ASM_EXTABLE_UA(1b, 3b) \
: "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \ : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \
: "i" (-EFAULT), "q" (__new), "1" (__old) \ : "i" (-EFAULT), "q" (__new), "1" (__old) \
: "memory" \ : "memory" \
...@@ -618,7 +618,7 @@ extern void __cmpxchg_wrong_size(void) ...@@ -618,7 +618,7 @@ extern void __cmpxchg_wrong_size(void)
"3:\tmov %3, %0\n" \ "3:\tmov %3, %0\n" \
"\tjmp 2b\n" \ "\tjmp 2b\n" \
"\t.previous\n" \ "\t.previous\n" \
_ASM_EXTABLE(1b, 3b) \ _ASM_EXTABLE_UA(1b, 3b) \
: "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \ : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \
: "i" (-EFAULT), "r" (__new), "1" (__old) \ : "i" (-EFAULT), "r" (__new), "1" (__old) \
: "memory" \ : "memory" \
...@@ -634,7 +634,7 @@ extern void __cmpxchg_wrong_size(void) ...@@ -634,7 +634,7 @@ extern void __cmpxchg_wrong_size(void)
"3:\tmov %3, %0\n" \ "3:\tmov %3, %0\n" \
"\tjmp 2b\n" \ "\tjmp 2b\n" \
"\t.previous\n" \ "\t.previous\n" \
_ASM_EXTABLE(1b, 3b) \ _ASM_EXTABLE_UA(1b, 3b) \
: "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \ : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \
: "i" (-EFAULT), "r" (__new), "1" (__old) \ : "i" (-EFAULT), "r" (__new), "1" (__old) \
: "memory" \ : "memory" \
...@@ -653,7 +653,7 @@ extern void __cmpxchg_wrong_size(void) ...@@ -653,7 +653,7 @@ extern void __cmpxchg_wrong_size(void)
"3:\tmov %3, %0\n" \ "3:\tmov %3, %0\n" \
"\tjmp 2b\n" \ "\tjmp 2b\n" \
"\t.previous\n" \ "\t.previous\n" \
_ASM_EXTABLE(1b, 3b) \ _ASM_EXTABLE_UA(1b, 3b) \
: "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \ : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \
: "i" (-EFAULT), "r" (__new), "1" (__old) \ : "i" (-EFAULT), "r" (__new), "1" (__old) \
: "memory" \ : "memory" \
......
...@@ -273,11 +273,11 @@ unsigned int csum_partial_copy_generic (const char *src, char *dst, ...@@ -273,11 +273,11 @@ unsigned int csum_partial_copy_generic (const char *src, char *dst,
#define SRC(y...) \ #define SRC(y...) \
9999: y; \ 9999: y; \
_ASM_EXTABLE(9999b, 6001f) _ASM_EXTABLE_UA(9999b, 6001f)
#define DST(y...) \ #define DST(y...) \
9999: y; \ 9999: y; \
_ASM_EXTABLE(9999b, 6002f) _ASM_EXTABLE_UA(9999b, 6002f)
#ifndef CONFIG_X86_USE_PPRO_CHECKSUM #ifndef CONFIG_X86_USE_PPRO_CHECKSUM
......
...@@ -92,26 +92,26 @@ ENTRY(copy_user_generic_unrolled) ...@@ -92,26 +92,26 @@ ENTRY(copy_user_generic_unrolled)
60: jmp copy_user_handle_tail /* ecx is zerorest also */ 60: jmp copy_user_handle_tail /* ecx is zerorest also */
.previous .previous
_ASM_EXTABLE(1b,30b) _ASM_EXTABLE_UA(1b, 30b)
_ASM_EXTABLE(2b,30b) _ASM_EXTABLE_UA(2b, 30b)
_ASM_EXTABLE(3b,30b) _ASM_EXTABLE_UA(3b, 30b)
_ASM_EXTABLE(4b,30b) _ASM_EXTABLE_UA(4b, 30b)
_ASM_EXTABLE(5b,30b) _ASM_EXTABLE_UA(5b, 30b)
_ASM_EXTABLE(6b,30b) _ASM_EXTABLE_UA(6b, 30b)
_ASM_EXTABLE(7b,30b) _ASM_EXTABLE_UA(7b, 30b)
_ASM_EXTABLE(8b,30b) _ASM_EXTABLE_UA(8b, 30b)
_ASM_EXTABLE(9b,30b) _ASM_EXTABLE_UA(9b, 30b)
_ASM_EXTABLE(10b,30b) _ASM_EXTABLE_UA(10b, 30b)
_ASM_EXTABLE(11b,30b) _ASM_EXTABLE_UA(11b, 30b)
_ASM_EXTABLE(12b,30b) _ASM_EXTABLE_UA(12b, 30b)
_ASM_EXTABLE(13b,30b) _ASM_EXTABLE_UA(13b, 30b)
_ASM_EXTABLE(14b,30b) _ASM_EXTABLE_UA(14b, 30b)
_ASM_EXTABLE(15b,30b) _ASM_EXTABLE_UA(15b, 30b)
_ASM_EXTABLE(16b,30b) _ASM_EXTABLE_UA(16b, 30b)
_ASM_EXTABLE(18b,40b) _ASM_EXTABLE_UA(18b, 40b)
_ASM_EXTABLE(19b,40b) _ASM_EXTABLE_UA(19b, 40b)
_ASM_EXTABLE(21b,50b) _ASM_EXTABLE_UA(21b, 50b)
_ASM_EXTABLE(22b,50b) _ASM_EXTABLE_UA(22b, 50b)
ENDPROC(copy_user_generic_unrolled) ENDPROC(copy_user_generic_unrolled)
EXPORT_SYMBOL(copy_user_generic_unrolled) EXPORT_SYMBOL(copy_user_generic_unrolled)
...@@ -156,8 +156,8 @@ ENTRY(copy_user_generic_string) ...@@ -156,8 +156,8 @@ ENTRY(copy_user_generic_string)
jmp copy_user_handle_tail jmp copy_user_handle_tail
.previous .previous
_ASM_EXTABLE(1b,11b) _ASM_EXTABLE_UA(1b, 11b)
_ASM_EXTABLE(3b,12b) _ASM_EXTABLE_UA(3b, 12b)
ENDPROC(copy_user_generic_string) ENDPROC(copy_user_generic_string)
EXPORT_SYMBOL(copy_user_generic_string) EXPORT_SYMBOL(copy_user_generic_string)
...@@ -189,7 +189,7 @@ ENTRY(copy_user_enhanced_fast_string) ...@@ -189,7 +189,7 @@ ENTRY(copy_user_enhanced_fast_string)
jmp copy_user_handle_tail jmp copy_user_handle_tail
.previous .previous
_ASM_EXTABLE(1b,12b) _ASM_EXTABLE_UA(1b, 12b)
ENDPROC(copy_user_enhanced_fast_string) ENDPROC(copy_user_enhanced_fast_string)
EXPORT_SYMBOL(copy_user_enhanced_fast_string) EXPORT_SYMBOL(copy_user_enhanced_fast_string)
...@@ -319,27 +319,27 @@ ENTRY(__copy_user_nocache) ...@@ -319,27 +319,27 @@ ENTRY(__copy_user_nocache)
jmp copy_user_handle_tail jmp copy_user_handle_tail
.previous .previous
_ASM_EXTABLE(1b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(1b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(2b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(2b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(3b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(3b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(4b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(4b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(5b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(5b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(6b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(6b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(7b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(7b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(8b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(8b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(9b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(9b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(10b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(10b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(11b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(11b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(12b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(12b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(13b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(13b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(14b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(14b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(15b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(15b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(16b,.L_fixup_4x8b_copy) _ASM_EXTABLE_UA(16b, .L_fixup_4x8b_copy)
_ASM_EXTABLE(20b,.L_fixup_8b_copy) _ASM_EXTABLE_UA(20b, .L_fixup_8b_copy)
_ASM_EXTABLE(21b,.L_fixup_8b_copy) _ASM_EXTABLE_UA(21b, .L_fixup_8b_copy)
_ASM_EXTABLE(30b,.L_fixup_4b_copy) _ASM_EXTABLE_UA(30b, .L_fixup_4b_copy)
_ASM_EXTABLE(31b,.L_fixup_4b_copy) _ASM_EXTABLE_UA(31b, .L_fixup_4b_copy)
_ASM_EXTABLE(40b,.L_fixup_1b_copy) _ASM_EXTABLE_UA(40b, .L_fixup_1b_copy)
_ASM_EXTABLE(41b,.L_fixup_1b_copy) _ASM_EXTABLE_UA(41b, .L_fixup_1b_copy)
ENDPROC(__copy_user_nocache) ENDPROC(__copy_user_nocache)
EXPORT_SYMBOL(__copy_user_nocache) EXPORT_SYMBOL(__copy_user_nocache)
...@@ -31,14 +31,18 @@ ...@@ -31,14 +31,18 @@
.macro source .macro source
10: 10:
_ASM_EXTABLE(10b, .Lbad_source) _ASM_EXTABLE_UA(10b, .Lbad_source)
.endm .endm
.macro dest .macro dest
20: 20:
_ASM_EXTABLE(20b, .Lbad_dest) _ASM_EXTABLE_UA(20b, .Lbad_dest)
.endm .endm
/*
* No _ASM_EXTABLE_UA; this is used for intentional prefetch on a
* potentially unmapped kernel address.
*/
.macro ignore L=.Lignore .macro ignore L=.Lignore
30: 30:
_ASM_EXTABLE(30b, \L) _ASM_EXTABLE(30b, \L)
......
...@@ -132,12 +132,12 @@ bad_get_user_8: ...@@ -132,12 +132,12 @@ bad_get_user_8:
END(bad_get_user_8) END(bad_get_user_8)
#endif #endif
_ASM_EXTABLE(1b,bad_get_user) _ASM_EXTABLE_UA(1b, bad_get_user)
_ASM_EXTABLE(2b,bad_get_user) _ASM_EXTABLE_UA(2b, bad_get_user)
_ASM_EXTABLE(3b,bad_get_user) _ASM_EXTABLE_UA(3b, bad_get_user)
#ifdef CONFIG_X86_64 #ifdef CONFIG_X86_64
_ASM_EXTABLE(4b,bad_get_user) _ASM_EXTABLE_UA(4b, bad_get_user)
#else #else
_ASM_EXTABLE(4b,bad_get_user_8) _ASM_EXTABLE_UA(4b, bad_get_user_8)
_ASM_EXTABLE(5b,bad_get_user_8) _ASM_EXTABLE_UA(5b, bad_get_user_8)
#endif #endif
...@@ -94,10 +94,10 @@ bad_put_user: ...@@ -94,10 +94,10 @@ bad_put_user:
EXIT EXIT
END(bad_put_user) END(bad_put_user)
_ASM_EXTABLE(1b,bad_put_user) _ASM_EXTABLE_UA(1b, bad_put_user)
_ASM_EXTABLE(2b,bad_put_user) _ASM_EXTABLE_UA(2b, bad_put_user)
_ASM_EXTABLE(3b,bad_put_user) _ASM_EXTABLE_UA(3b, bad_put_user)
_ASM_EXTABLE(4b,bad_put_user) _ASM_EXTABLE_UA(4b, bad_put_user)
#ifdef CONFIG_X86_32 #ifdef CONFIG_X86_32
_ASM_EXTABLE(5b,bad_put_user) _ASM_EXTABLE_UA(5b, bad_put_user)
#endif #endif
...@@ -47,8 +47,8 @@ do { \ ...@@ -47,8 +47,8 @@ do { \
"3: lea 0(%2,%0,4),%0\n" \ "3: lea 0(%2,%0,4),%0\n" \
" jmp 2b\n" \ " jmp 2b\n" \
".previous\n" \ ".previous\n" \
_ASM_EXTABLE(0b,3b) \ _ASM_EXTABLE_UA(0b, 3b) \
_ASM_EXTABLE(1b,2b) \ _ASM_EXTABLE_UA(1b, 2b) \
: "=&c"(size), "=&D" (__d0) \ : "=&c"(size), "=&D" (__d0) \
: "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0)); \ : "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0)); \
} while (0) } while (0)
...@@ -153,44 +153,44 @@ __copy_user_intel(void __user *to, const void *from, unsigned long size) ...@@ -153,44 +153,44 @@ __copy_user_intel(void __user *to, const void *from, unsigned long size)
"101: lea 0(%%eax,%0,4),%0\n" "101: lea 0(%%eax,%0,4),%0\n"
" jmp 100b\n" " jmp 100b\n"
".previous\n" ".previous\n"
_ASM_EXTABLE(1b,100b) _ASM_EXTABLE_UA(1b, 100b)
_ASM_EXTABLE(2b,100b) _ASM_EXTABLE_UA(2b, 100b)
_ASM_EXTABLE(3b,100b) _ASM_EXTABLE_UA(3b, 100b)
_ASM_EXTABLE(4b,100b) _ASM_EXTABLE_UA(4b, 100b)
_ASM_EXTABLE(5b,100b) _ASM_EXTABLE_UA(5b, 100b)
_ASM_EXTABLE(6b,100b) _ASM_EXTABLE_UA(6b, 100b)
_ASM_EXTABLE(7b,100b) _ASM_EXTABLE_UA(7b, 100b)
_ASM_EXTABLE(8b,100b) _ASM_EXTABLE_UA(8b, 100b)
_ASM_EXTABLE(9b,100b) _ASM_EXTABLE_UA(9b, 100b)
_ASM_EXTABLE(10b,100b) _ASM_EXTABLE_UA(10b, 100b)
_ASM_EXTABLE(11b,100b) _ASM_EXTABLE_UA(11b, 100b)
_ASM_EXTABLE(12b,100b) _ASM_EXTABLE_UA(12b, 100b)
_ASM_EXTABLE(13b,100b) _ASM_EXTABLE_UA(13b, 100b)
_ASM_EXTABLE(14b,100b) _ASM_EXTABLE_UA(14b, 100b)
_ASM_EXTABLE(15b,100b) _ASM_EXTABLE_UA(15b, 100b)
_ASM_EXTABLE(16b,100b) _ASM_EXTABLE_UA(16b, 100b)
_ASM_EXTABLE(17b,100b) _ASM_EXTABLE_UA(17b, 100b)
_ASM_EXTABLE(18b,100b) _ASM_EXTABLE_UA(18b, 100b)
_ASM_EXTABLE(19b,100b) _ASM_EXTABLE_UA(19b, 100b)
_ASM_EXTABLE(20b,100b) _ASM_EXTABLE_UA(20b, 100b)
_ASM_EXTABLE(21b,100b) _ASM_EXTABLE_UA(21b, 100b)
_ASM_EXTABLE(22b,100b) _ASM_EXTABLE_UA(22b, 100b)
_ASM_EXTABLE(23b,100b) _ASM_EXTABLE_UA(23b, 100b)
_ASM_EXTABLE(24b,100b) _ASM_EXTABLE_UA(24b, 100b)
_ASM_EXTABLE(25b,100b) _ASM_EXTABLE_UA(25b, 100b)
_ASM_EXTABLE(26b,100b) _ASM_EXTABLE_UA(26b, 100b)
_ASM_EXTABLE(27b,100b) _ASM_EXTABLE_UA(27b, 100b)
_ASM_EXTABLE(28b,100b) _ASM_EXTABLE_UA(28b, 100b)
_ASM_EXTABLE(29b,100b) _ASM_EXTABLE_UA(29b, 100b)
_ASM_EXTABLE(30b,100b) _ASM_EXTABLE_UA(30b, 100b)
_ASM_EXTABLE(31b,100b) _ASM_EXTABLE_UA(31b, 100b)
_ASM_EXTABLE(32b,100b) _ASM_EXTABLE_UA(32b, 100b)
_ASM_EXTABLE(33b,100b) _ASM_EXTABLE_UA(33b, 100b)
_ASM_EXTABLE(34b,100b) _ASM_EXTABLE_UA(34b, 100b)
_ASM_EXTABLE(35b,100b) _ASM_EXTABLE_UA(35b, 100b)
_ASM_EXTABLE(36b,100b) _ASM_EXTABLE_UA(36b, 100b)
_ASM_EXTABLE(37b,100b) _ASM_EXTABLE_UA(37b, 100b)
_ASM_EXTABLE(99b,101b) _ASM_EXTABLE_UA(99b, 101b)
: "=&c"(size), "=&D" (d0), "=&S" (d1) : "=&c"(size), "=&D" (d0), "=&S" (d1)
: "1"(to), "2"(from), "0"(size) : "1"(to), "2"(from), "0"(size)
: "eax", "edx", "memory"); : "eax", "edx", "memory");
...@@ -259,26 +259,26 @@ static unsigned long __copy_user_intel_nocache(void *to, ...@@ -259,26 +259,26 @@ static unsigned long __copy_user_intel_nocache(void *to,
"9: lea 0(%%eax,%0,4),%0\n" "9: lea 0(%%eax,%0,4),%0\n"
"16: jmp 8b\n" "16: jmp 8b\n"
".previous\n" ".previous\n"
_ASM_EXTABLE(0b,16b) _ASM_EXTABLE_UA(0b, 16b)
_ASM_EXTABLE(1b,16b) _ASM_EXTABLE_UA(1b, 16b)
_ASM_EXTABLE(2b,16b) _ASM_EXTABLE_UA(2b, 16b)
_ASM_EXTABLE(21b,16b) _ASM_EXTABLE_UA(21b, 16b)
_ASM_EXTABLE(3b,16b) _ASM_EXTABLE_UA(3b, 16b)
_ASM_EXTABLE(31b,16b) _ASM_EXTABLE_UA(31b, 16b)
_ASM_EXTABLE(4b,16b) _ASM_EXTABLE_UA(4b, 16b)
_ASM_EXTABLE(41b,16b) _ASM_EXTABLE_UA(41b, 16b)
_ASM_EXTABLE(10b,16b) _ASM_EXTABLE_UA(10b, 16b)
_ASM_EXTABLE(51b,16b) _ASM_EXTABLE_UA(51b, 16b)
_ASM_EXTABLE(11b,16b) _ASM_EXTABLE_UA(11b, 16b)
_ASM_EXTABLE(61b,16b) _ASM_EXTABLE_UA(61b, 16b)
_ASM_EXTABLE(12b,16b) _ASM_EXTABLE_UA(12b, 16b)
_ASM_EXTABLE(71b,16b) _ASM_EXTABLE_UA(71b, 16b)
_ASM_EXTABLE(13b,16b) _ASM_EXTABLE_UA(13b, 16b)
_ASM_EXTABLE(81b,16b) _ASM_EXTABLE_UA(81b, 16b)
_ASM_EXTABLE(14b,16b) _ASM_EXTABLE_UA(14b, 16b)
_ASM_EXTABLE(91b,16b) _ASM_EXTABLE_UA(91b, 16b)
_ASM_EXTABLE(6b,9b) _ASM_EXTABLE_UA(6b, 9b)
_ASM_EXTABLE(7b,16b) _ASM_EXTABLE_UA(7b, 16b)
: "=&c"(size), "=&D" (d0), "=&S" (d1) : "=&c"(size), "=&D" (d0), "=&S" (d1)
: "1"(to), "2"(from), "0"(size) : "1"(to), "2"(from), "0"(size)
: "eax", "edx", "memory"); : "eax", "edx", "memory");
...@@ -321,9 +321,9 @@ do { \ ...@@ -321,9 +321,9 @@ do { \
"3: lea 0(%3,%0,4),%0\n" \ "3: lea 0(%3,%0,4),%0\n" \
" jmp 2b\n" \ " jmp 2b\n" \
".previous\n" \ ".previous\n" \
_ASM_EXTABLE(4b,5b) \ _ASM_EXTABLE_UA(4b, 5b) \
_ASM_EXTABLE(0b,3b) \ _ASM_EXTABLE_UA(0b, 3b) \
_ASM_EXTABLE(1b,2b) \ _ASM_EXTABLE_UA(1b, 2b) \
: "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2) \ : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2) \
: "3"(size), "0"(size), "1"(to), "2"(from) \ : "3"(size), "0"(size), "1"(to), "2"(from) \
: "memory"); \ : "memory"); \
......
...@@ -37,8 +37,8 @@ unsigned long __clear_user(void __user *addr, unsigned long size) ...@@ -37,8 +37,8 @@ unsigned long __clear_user(void __user *addr, unsigned long size)
"3: lea 0(%[size1],%[size8],8),%[size8]\n" "3: lea 0(%[size1],%[size8],8),%[size8]\n"
" jmp 2b\n" " jmp 2b\n"
".previous\n" ".previous\n"
_ASM_EXTABLE(0b,3b) _ASM_EXTABLE_UA(0b, 3b)
_ASM_EXTABLE(1b,2b) _ASM_EXTABLE_UA(1b, 2b)
: [size8] "=&c"(size), [dst] "=&D" (__d0) : [size8] "=&c"(size), [dst] "=&D" (__d0)
: [size1] "r"(size & 7), "[size8]" (size / 8), "[dst]"(addr)); : [size1] "r"(size & 7), "[size8]" (size / 8), "[dst]"(addr));
clac(); clac();
......
...@@ -108,6 +108,14 @@ __visible bool ex_handler_fprestore(const struct exception_table_entry *fixup, ...@@ -108,6 +108,14 @@ __visible bool ex_handler_fprestore(const struct exception_table_entry *fixup,
} }
EXPORT_SYMBOL_GPL(ex_handler_fprestore); EXPORT_SYMBOL_GPL(ex_handler_fprestore);
/*
 * Exception fixup handler for user-space accesses (_ASM_EXTABLE_UA):
 * redirect execution to the fixup address recorded in the extable entry
 * and report the fault as handled.
 *
 * NOTE(review): per the commit message, this handler exists so that
 * uaccess-specific safety checks (e.g. verifying the faulting address is
 * actually in userspace) can be added later; as written it is behaviorally
 * identical to ex_handler_default. trapnr is currently unused.
 */
__visible bool ex_handler_uaccess(const struct exception_table_entry *fixup,
				  struct pt_regs *regs, int trapnr)
{
	/* Resume at the fixup landing pad instead of re-executing the faulting insn. */
	regs->ip = ex_fixup_addr(fixup);
	return true;
}
EXPORT_SYMBOL(ex_handler_uaccess);
__visible bool ex_handler_ext(const struct exception_table_entry *fixup, __visible bool ex_handler_ext(const struct exception_table_entry *fixup,
struct pt_regs *regs, int trapnr) struct pt_regs *regs, int trapnr)
{ {
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment