Commit 7cefa5a0 authored by Al Viro

m68k: get rid of zeroing

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
parent 68acfdcb
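In short, the patch stops clearing the destination inside the m68k fault fixups: the raw copy primitives now only report how many bytes were left uncopied, and a new copy_from_user() wrapper (added in the last hunk of the header file below) zeroes that tail once with memset(). The following is a minimal user-space model of that caller-visible contract, not kernel code; raw_copy_model() is a made-up stand-in for __copy_from_user_inatomic(), and the sizes are arbitrary:

#include <stdio.h>
#include <string.h>

/* Made-up raw copy: copies what it can, returns the number of bytes NOT copied. */
static unsigned long raw_copy_model(void *to, const void *from,
				    unsigned long n, unsigned long fail_after)
{
	unsigned long done = n < fail_after ? n : fail_after;
	memcpy(to, from, done);
	return n - done;
}

int main(void)
{
	char src[8] = "ABCDEFG";
	char dst[8];

	memset(dst, 0x55, sizeof(dst));		/* pretend stack garbage */
	unsigned long n = sizeof(dst);
	unsigned long res = raw_copy_model(dst, src, n, 3);	/* "fault" after 3 bytes */

	if (res)				/* mirrors the new copy_from_user() wrapper */
		memset(dst + (n - res), 0, res);	/* zero only the uncopied tail */

	printf("copied=%lu zeroed=%lu first=%c last=%d\n",
	       n - res, res, dst[0], dst[7]);
	return 0;
}

Run as-is, this prints copied=3 zeroed=5, with the last byte reading 0 rather than garbage: copy what you can, zero the rest in one place.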
@@ -179,39 +179,55 @@ asm volatile ("\n" \
 unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
 unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);
 
-#define __constant_copy_from_user_asm(res, to, from, tmp, n, s1, s2, s3)\
+#define __suffix0
+#define __suffix1 b
+#define __suffix2 w
+#define __suffix4 l
+
+#define ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
 	asm volatile ("\n"						\
 		"1:	"MOVES"."#s1"	(%2)+,%3\n"			\
 		"	move."#s1"	%3,(%1)+\n"			\
+		"	.ifnc	\""#s2"\",\"\"\n"			\
 		"2:	"MOVES"."#s2"	(%2)+,%3\n"			\
 		"	move."#s2"	%3,(%1)+\n"			\
 		"	.ifnc	\""#s3"\",\"\"\n"			\
 		"3:	"MOVES"."#s3"	(%2)+,%3\n"			\
 		"	move."#s3"	%3,(%1)+\n"			\
 		"	.endif\n"					\
+		"	.endif\n"					\
 		"4:\n"							\
 		"	.section __ex_table,\"a\"\n"			\
 		"	.align	4\n"					\
 		"	.long	1b,10f\n"				\
+		"	.ifnc	\""#s2"\",\"\"\n"			\
 		"	.long	2b,20f\n"				\
 		"	.ifnc	\""#s3"\",\"\"\n"			\
 		"	.long	3b,30f\n"				\
 		"	.endif\n"					\
+		"	.endif\n"					\
 		"	.previous\n"					\
 		"\n"							\
 		"	.section .fixup,\"ax\"\n"			\
 		"	.even\n"					\
-		"10:	clr."#s1"	(%1)+\n"			\
-		"20:	clr."#s2"	(%1)+\n"			\
+		"10:	addq.l #"#n1",%0\n"				\
+		"	.ifnc	\""#s2"\",\"\"\n"			\
+		"20:	addq.l #"#n2",%0\n"				\
 		"	.ifnc	\""#s3"\",\"\"\n"			\
-		"30:	clr."#s3"	(%1)+\n"			\
+		"30:	addq.l #"#n3",%0\n"				\
 		"	.endif\n"					\
-		"	moveq.l	#"#n",%0\n"				\
+		"	.endif\n"					\
 		"	jra	4b\n"					\
 		"	.previous\n"					\
 		: "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp)	\
 		: : "memory")
 
+#define ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
+	____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)
+
+#define __constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3)	\
+	___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3,	\
+					__suffix##n1, __suffix##n2, __suffix##n3)
+
 static __always_inline unsigned long
 __constant_copy_from_user(void *to, const void __user *from, unsigned long n)
 {
@@ -219,37 +235,37 @@ __constant_copy_from_user(void *to, const void __user *from, unsigned long n)
 	switch (n) {
 	case 1:
-		__get_user_asm(res, *(u8 *)to, (u8 __user *)from, u8, b, d, 1);
+		__constant_copy_from_user_asm(res, to, from, tmp, 1, 0, 0);
 		break;
 	case 2:
-		__get_user_asm(res, *(u16 *)to, (u16 __user *)from, u16, w, r, 2);
+		__constant_copy_from_user_asm(res, to, from, tmp, 2, 0, 0);
 		break;
 	case 3:
-		__constant_copy_from_user_asm(res, to, from, tmp, 3, w, b,);
+		__constant_copy_from_user_asm(res, to, from, tmp, 2, 1, 0);
 		break;
 	case 4:
-		__get_user_asm(res, *(u32 *)to, (u32 __user *)from, u32, l, r, 4);
+		__constant_copy_from_user_asm(res, to, from, tmp, 4, 0, 0);
 		break;
 	case 5:
-		__constant_copy_from_user_asm(res, to, from, tmp, 5, l, b,);
+		__constant_copy_from_user_asm(res, to, from, tmp, 4, 1, 0);
 		break;
 	case 6:
-		__constant_copy_from_user_asm(res, to, from, tmp, 6, l, w,);
+		__constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 0);
 		break;
 	case 7:
-		__constant_copy_from_user_asm(res, to, from, tmp, 7, l, w, b);
+		__constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 1);
 		break;
 	case 8:
-		__constant_copy_from_user_asm(res, to, from, tmp, 8, l, l,);
+		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 0);
 		break;
 	case 9:
-		__constant_copy_from_user_asm(res, to, from, tmp, 9, l, l, b);
+		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 1);
 		break;
 	case 10:
-		__constant_copy_from_user_asm(res, to, from, tmp, 10, l, l, w);
+		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 2);
 		break;
 	case 12:
-		__constant_copy_from_user_asm(res, to, from, tmp, 12, l, l, l);
+		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 4);
 		break;
 	default:
 		/* we limit the inlined version to 3 moves */
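One detail worth spelling out from the two hunks above: the new fixup entry points fall through into each other, so a fault at the first, second or third move adds n1+n2+n3, n2+n3 or n3 to the result respectively, which is exactly the number of bytes left uncopied. A small stand-alone sketch of that arithmetic for the 7-byte case (sizes 4, 2, 1); residual() and faulting_move are illustrative names, not kernel identifiers:

#include <stdio.h>

/* Models the fall-through fixup of ____constant_copy_from_user_asm for
 * n = 7, i.e. __constant_copy_from_user_asm(..., 4, 2, 1). */
static unsigned long residual(int faulting_move)
{
	const unsigned long n1 = 4, n2 = 2, n3 = 1;
	unsigned long res = 0;

	switch (faulting_move) {
	case 1: res += n1;	/* label 10: addq.l #4,%0 ... falls through */
	case 2: res += n2;	/* label 20: addq.l #2,%0 ... falls through */
	case 3: res += n3;	/* label 30: addq.l #1,%0 */
	}
	return res;
}

int main(void)
{
	for (int m = 1; m <= 3; m++)
		printf("fault at move %d -> %lu bytes uncopied\n", m, residual(m));
	return 0;	/* prints 7, 3 and 1 */
}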
@@ -353,7 +369,14 @@ __constant_copy_to_user(void __user *to, const void *from, unsigned long n)
 #define __copy_to_user_inatomic __copy_to_user
 #define __copy_from_user_inatomic __copy_from_user
 
-#define copy_from_user(to, from, n)	__copy_from_user(to, from, n)
+static inline unsigned long
+copy_from_user(void *to, const void __user *from, unsigned long n)
+{
+	unsigned long res = __copy_from_user_inatomic(to, from, n);
+	if (unlikely(res))
+		memset(to + (n - res), 0, res);
+	return res;
+}
+
 #define copy_to_user(to, from, n)	__copy_to_user(to, from, n)
 
 #define user_addr_max()	\
...
@@ -30,19 +30,13 @@ unsigned long __generic_copy_from_user(void *to, const void __user *from,
 		"6:\n"
 		"	.section .fixup,\"ax\"\n"
 		"	.even\n"
-		"10:	move.l	%0,%3\n"
-		"7:	clr.l	(%2)+\n"
-		"	subq.l	#1,%3\n"
-		"	jne	7b\n"
-		"	lsl.l	#2,%0\n"
+		"10:	lsl.l	#2,%0\n"
 		"	btst	#1,%5\n"
 		"	jeq	8f\n"
-		"30:	clr.w	(%2)+\n"
-		"	addq.l	#2,%0\n"
+		"30:	addq.l	#2,%0\n"
 		"8:	btst	#0,%5\n"
 		"	jeq	6b\n"
-		"50:	clr.b	(%2)+\n"
-		"	addq.l	#1,%0\n"
+		"50:	addq.l	#1,%0\n"
 		"	jra	6b\n"
 		"	.previous\n"
 		"\n"
...
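The generic path above applies the same idea: instead of clearing the rest of the destination, the fixup entered after a fault in the longword loop converts the remaining longword count to bytes (lsl.l #2) and adds 2 and/or 1 for the word/byte tail encoded in the low bits of n. A rough model of that computation; bytes_left(), longs_left and the sample values are illustrative only, not kernel code:

#include <stdio.h>

/* Models the fixup at label 10 of __generic_copy_from_user: longs_left is
 * the number of longwords the loop had not yet copied, n the original count. */
static unsigned long bytes_left(unsigned long longs_left, unsigned long n)
{
	unsigned long res = longs_left << 2;	/* lsl.l #2,%0 (longwords -> bytes) */
	if (n & 2)				/* btst #1,%5 */
		res += 2;			/* 30: addq.l #2,%0 */
	if (n & 1)				/* btst #0,%5 */
		res += 1;			/* 50: addq.l #1,%0 */
	return res;
}

int main(void)
{
	/* e.g. an 11-byte copy that faulted with one longword still to go:
	 * 4 (longword) + 2 (word tail) + 1 (byte tail) = 7 bytes uncopied */
	printf("%lu\n", bytes_left(1, 11));
	return 0;
}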