Commit 70d65cd5 authored by Al Viro

ppc: propagate the calling conventions change down to csum_partial_copy_generic()

... and get rid of the pointless fallback in the wrappers.  On error it used
to zero the unwritten area and calculate the csum of the entire thing.  Not
wanting to do it in the assembler part had been very reasonable; doing that in
the first place, OTOH...  In case of an error the caller discards the data
we'd copied, along with whatever checksum it might've had.
Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
parent daf52375
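
The shape of the change, sketched in plain C before the diff itself: the seed and the two error out-parameters disappear from the signature, and a return value of 0 becomes the fault indication.  This is a self-contained illustrative sketch -- the names and the trivial byte-sum "checksum" are stand-ins, not the kernel routines; only the calling convention is the point.

	#include <stdint.h>
	#include <string.h>

	typedef uint32_t wsum_t;	/* stand-in for __wsum */

	/* old convention: caller threads a seed plus two error pointers */
	static wsum_t old_copy_generic(const void *src, void *dst, int len,
				       wsum_t sum, int *src_err, int *dst_err)
	{
		(void)src_err; (void)dst_err;	/* no faults in this model */
		memcpy(dst, src, len);
		for (int i = 0; i < len; i++)
			sum += ((const uint8_t *)src)[i];
		return sum;
	}

	/* new convention: three arguments, seed fixed inside the routine */
	static wsum_t new_copy_generic(const void *src, void *dst, int len)
	{
		return old_copy_generic(src, dst, len, ~0U, 0, 0);
	}

	int main(void)
	{
		char in[8] = "payload", out[8];

		/* old: seed + two error out-parameters at every call site */
		int err = 0;
		wsum_t c1 = old_copy_generic(in, out, 8, ~0U, &err, 0);
		if (err) { /* wrapper had to zero / recompute */ }

		/* new: 0 itself means "fault, discard data and checksum" */
		wsum_t c2 = new_copy_generic(in, out, 8);
		if (!c2) { /* fault path */ }

		return c1 == c2 ? 0 : 1;
	}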
--- a/arch/powerpc/include/asm/checksum.h
+++ b/arch/powerpc/include/asm/checksum.h
@@ -18,9 +18,7 @@
  * Like csum_partial, this must be called with even lengths,
  * except for the last fragment.
  */
-extern __wsum csum_partial_copy_generic(const void *src, void *dst,
-					int len, __wsum sum,
-					int *src_err, int *dst_err);
+extern __wsum csum_partial_copy_generic(const void *src, void *dst, int len);
 
 #define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
 extern __wsum csum_and_copy_from_user(const void __user *src, void *dst,
@@ -31,7 +29,7 @@ extern __wsum csum_and_copy_to_user(const void *src, void __user *dst,
 
 #define _HAVE_ARCH_CSUM_AND_COPY
 #define csum_partial_copy_nocheck(src, dst, len)	\
-	csum_partial_copy_generic((src), (dst), (len), 0, NULL, NULL)
+	csum_partial_copy_generic((src), (dst), (len))
 
 /*
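
What makes a plain 0 return usable as the fault marker: the sum is now always seeded with ~0 (all ones), and a 32-bit end-around-carry accumulation that starts from a nonzero value can never fold down to zero.  A standalone model of that arithmetic, assuming nothing beyond hosted C (csum_add32 is an illustrative stand-in, not the kernel's csum_partial):

	#include <assert.h>
	#include <stdint.h>

	/* Model of the 32-bit one's-complement accumulation behind
	 * csum_partial(): add, then fold the carry back in (end-around
	 * carry).  Illustrative model, not the kernel implementation. */
	static uint32_t csum_add32(uint32_t sum, uint32_t addend)
	{
		uint64_t t = (uint64_t)sum + addend;

		return (uint32_t)(t + (t >> 32));
	}

	int main(void)
	{
		uint32_t words[] = { 0, 1, 0xffffffff, 0xdeadbeef, 0x00010000 };
		uint32_t sum = 0xffffffff;	/* the fixed ~0 seed */

		for (unsigned i = 0; i < sizeof(words) / sizeof(words[0]); i++) {
			sum = csum_add32(sum, words[i]);
			/* with end-around carry a sum can reach 0 only if the
			 * seed and every addend were 0, so the ~0 seed keeps
			 * the result nonzero and leaves 0 free to mean "fault" */
			assert(sum != 0);
		}
		return 0;
	}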
--- a/arch/powerpc/lib/checksum_32.S
+++ b/arch/powerpc/lib/checksum_32.S
@@ -78,12 +78,10 @@ EXPORT_SYMBOL(__csum_partial)
 /*
  * Computes the checksum of a memory block at src, length len,
- * and adds in "sum" (32-bit), while copying the block to dst.
- * If an access exception occurs on src or dst, it stores -EFAULT
- * to *src_err or *dst_err respectively, and (for an error on
- * src) zeroes the rest of dst.
+ * and adds in 0xffffffff, while copying the block to dst.
+ * If an access exception occurs it returns zero.
  *
- * csum_partial_copy_generic(src, dst, len, sum, src_err, dst_err)
+ * csum_partial_copy_generic(src, dst, len)
  */
 #define CSUM_COPY_16_BYTES_WITHEX(n)	\
 8 ## n ## 0:			\
@@ -108,14 +106,14 @@ EXPORT_SYMBOL(__csum_partial)
 	adde	r12,r12,r10
 
 #define CSUM_COPY_16_BYTES_EXCODE(n)		\
-	EX_TABLE(8 ## n ## 0b, src_error);	\
-	EX_TABLE(8 ## n ## 1b, src_error);	\
-	EX_TABLE(8 ## n ## 2b, src_error);	\
-	EX_TABLE(8 ## n ## 3b, src_error);	\
-	EX_TABLE(8 ## n ## 4b, dst_error);	\
-	EX_TABLE(8 ## n ## 5b, dst_error);	\
-	EX_TABLE(8 ## n ## 6b, dst_error);	\
-	EX_TABLE(8 ## n ## 7b, dst_error);
+	EX_TABLE(8 ## n ## 0b, fault);	\
+	EX_TABLE(8 ## n ## 1b, fault);	\
+	EX_TABLE(8 ## n ## 2b, fault);	\
+	EX_TABLE(8 ## n ## 3b, fault);	\
+	EX_TABLE(8 ## n ## 4b, fault);	\
+	EX_TABLE(8 ## n ## 5b, fault);	\
+	EX_TABLE(8 ## n ## 6b, fault);	\
+	EX_TABLE(8 ## n ## 7b, fault);
 
 	.text
 	.stabs	"arch/powerpc/lib/",N_SO,0,0,0f
@@ -127,11 +125,8 @@ LG_CACHELINE_BYTES = L1_CACHE_SHIFT
 CACHELINE_MASK = (L1_CACHE_BYTES-1)
 
 _GLOBAL(csum_partial_copy_generic)
-	stwu	r1,-16(r1)
-	stw	r7,12(r1)
-	stw	r8,8(r1)
-	addic	r12,r6,0
+	li	r12,-1
+	addic	r0,r0,0		/* clear carry */
 	addi	r6,r4,-4
 	neg	r0,r4
 	addi	r4,r3,-4
@@ -246,34 +241,19 @@ _GLOBAL(csum_partial_copy_generic)
 	rlwinm	r3,r3,8,0,31	/* odd destination address: rotate one byte */
 	blr
 
-/* read fault */
-src_error:
-	lwz	r7,12(r1)
-	addi	r1,r1,16
-	cmpwi	cr0,r7,0
-	beqlr
-	li	r0,-EFAULT
-	stw	r0,0(r7)
-	blr
-/* write fault */
-dst_error:
-	lwz	r8,8(r1)
-	addi	r1,r1,16
-	cmpwi	cr0,r8,0
-	beqlr
-	li	r0,-EFAULT
-	stw	r0,0(r8)
+fault:
+	li	r3,0
 	blr
 
-	EX_TABLE(70b, src_error);
-	EX_TABLE(71b, dst_error);
-	EX_TABLE(72b, src_error);
-	EX_TABLE(73b, dst_error);
-	EX_TABLE(54b, dst_error);
+	EX_TABLE(70b, fault);
+	EX_TABLE(71b, fault);
+	EX_TABLE(72b, fault);
+	EX_TABLE(73b, fault);
+	EX_TABLE(54b, fault);
 
 /*
  * this stuff handles faults in the cacheline loop and branches to either
- * src_error (if in read part) or dst_error (if in write part)
+ * fault (if in read part) or fault (if in write part)
  */
 	CSUM_COPY_16_BYTES_EXCODE(0)
 #if L1_CACHE_BYTES >= 32
@@ -290,12 +270,12 @@ dst_error:
 #endif
 #endif
 
-	EX_TABLE(30b, src_error);
-	EX_TABLE(31b, dst_error);
-	EX_TABLE(40b, src_error);
-	EX_TABLE(41b, dst_error);
-	EX_TABLE(50b, src_error);
-	EX_TABLE(51b, dst_error);
+	EX_TABLE(30b, fault);
+	EX_TABLE(31b, fault);
+	EX_TABLE(40b, fault);
+	EX_TABLE(41b, fault);
+	EX_TABLE(50b, fault);
+	EX_TABLE(51b, fault);
 
 EXPORT_SYMBOL(csum_partial_copy_generic)
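
On the 32-bit side this is why the prologue no longer needs a stack frame: r7/r8 (the old error pointers) are gone, and every exception-table entry funnels into the single fault stub, which just loads 0 into r3 and returns.  A userspace model of that control flow, using setjmp/longjmp as a stand-in for the kernel's exception-table fixup machinery (all names here are hypothetical):

	#include <setjmp.h>
	#include <stdint.h>
	#include <stdio.h>

	static jmp_buf fault_fixup;	/* stands in for the EX_TABLE target */

	static uint32_t add_fold(uint32_t sum, uint32_t w)
	{
		uint64_t t = (uint64_t)sum + w;

		return (uint32_t)(t + (t >> 32));	/* end-around carry */
	}

	static uint32_t model_copy_generic(const uint32_t *src, uint32_t *dst,
					   int nwords, int fault_at)
	{
		uint32_t sum = 0xffffffff;	/* seed fixed inside the routine */

		if (setjmp(fault_fixup))	/* the 'fault' stub: li r3,0; blr */
			return 0;

		for (int i = 0; i < nwords; i++) {
			if (i == fault_at)	/* stands in for an access fault */
				longjmp(fault_fixup, 1);
			dst[i] = src[i];		/* copy... */
			sum = add_fold(sum, src[i]);	/* ...and checksum, one pass */
		}
		return sum;
	}

	int main(void)
	{
		uint32_t src[4] = { 1, 2, 3, 4 }, dst[4];

		printf("clean run : %#x\n", model_copy_generic(src, dst, 4, -1));
		printf("faulted   : %#x\n", model_copy_generic(src, dst, 4, 2));
		return 0;
	}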
--- a/arch/powerpc/lib/checksum_64.S
+++ b/arch/powerpc/lib/checksum_64.S
@@ -182,34 +182,33 @@ EXPORT_SYMBOL(__csum_partial)
 	.macro srcnr
 100:
-	EX_TABLE(100b,.Lsrc_error_nr)
+	EX_TABLE(100b,.Lerror_nr)
 	.endm
 
 	.macro source
 150:
-	EX_TABLE(150b,.Lsrc_error)
+	EX_TABLE(150b,.Lerror)
 	.endm
 
 	.macro dstnr
 200:
-	EX_TABLE(200b,.Ldest_error_nr)
+	EX_TABLE(200b,.Lerror_nr)
 	.endm
 
 	.macro dest
 250:
-	EX_TABLE(250b,.Ldest_error)
+	EX_TABLE(250b,.Lerror)
 	.endm
 
 /*
  * Computes the checksum of a memory block at src, length len,
- * and adds in "sum" (32-bit), while copying the block to dst.
- * If an access exception occurs on src or dst, it stores -EFAULT
- * to *src_err or *dst_err respectively. The caller must take any action
- * required in this case (zeroing memory, recalculating partial checksum etc).
+ * and adds in 0xffffffff (32-bit), while copying the block to dst.
+ * If an access exception occurs, it returns 0.
  *
- * csum_partial_copy_generic(r3=src, r4=dst, r5=len, r6=sum, r7=src_err, r8=dst_err)
+ * csum_partial_copy_generic(r3=src, r4=dst, r5=len)
  */
 _GLOBAL(csum_partial_copy_generic)
+	li	r6,-1
 	addic	r0,r6,0			/* clear carry */
 
 	srdi.	r6,r5,3			/* less than 8 bytes? */
@@ -401,29 +400,15 @@ dstnr;	stb	r6,0(r4)
 	srdi	r3,r3,32
 	blr
 
-.Lsrc_error:
+.Lerror:
 	ld	r14,STK_REG(R14)(r1)
 	ld	r15,STK_REG(R15)(r1)
 	ld	r16,STK_REG(R16)(r1)
 	addi	r1,r1,STACKFRAMESIZE
-.Lsrc_error_nr:
-	cmpdi	0,r7,0
-	beqlr
-	li	r6,-EFAULT
-	stw	r6,0(r7)
-	blr
-
-.Ldest_error:
-	ld	r14,STK_REG(R14)(r1)
-	ld	r15,STK_REG(R15)(r1)
-	ld	r16,STK_REG(R16)(r1)
-	addi	r1,r1,STACKFRAMESIZE
-.Ldest_error_nr:
-	cmpdi	0,r8,0
-	beqlr
-	li	r6,-EFAULT
-	stw	r6,0(r8)
+.Lerror_nr:
+	li	r3,0
 	blr
 
 EXPORT_SYMBOL(csum_partial_copy_generic)
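
The 64-bit routine could merge its two error paths because, with no error pointers left to distinguish, a source fault and a destination fault now look identical.  It still accumulates into a 64-bit register with carry and only folds down to 32 bits on the way out (the srdi r3,r3,32 visible above is the tail of that fold).  A minimal C model of such a 64-to-32 fold, illustrative rather than the kernel code:

	#include <assert.h>
	#include <stdint.h>

	/* fold a 64-bit one's-complement accumulator down to 32 bits */
	static uint32_t fold64(uint64_t sum)
	{
		/* add high and low halves; at most one carry can pop out */
		sum = (sum & 0xffffffffULL) + (sum >> 32);
		sum = (sum & 0xffffffffULL) + (sum >> 32);
		return (uint32_t)sum;
	}

	int main(void)
	{
		/* halves 0x00000002 + 0xfffffffd sum to 0xffffffff */
		assert(fold64(0x00000002fffffffdULL) == 0xffffffff);
		/* 0xffffffff + 0xffffffff = 0x1fffffffe; folding the
		 * carry back in gives 0xfffffffe + 1 = 0xffffffff */
		assert(fold64(0xffffffffffffffffULL) == 0xffffffff);
		return 0;
	}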
--- a/arch/powerpc/lib/checksum_wrappers.c
+++ b/arch/powerpc/lib/checksum_wrappers.c
@@ -14,8 +14,7 @@
 __wsum csum_and_copy_from_user(const void __user *src, void *dst,
 			       int len)
 {
-	unsigned int csum;
-	int err = 0;
+	__wsum csum;
 
 	might_sleep();
@@ -24,27 +23,16 @@ __wsum csum_and_copy_from_user(const void __user *src, void *dst,
 
 	allow_read_from_user(src, len);
 
-	csum = csum_partial_copy_generic((void __force *)src, dst,
-					 len, ~0U, &err, NULL);
-
-	if (unlikely(err)) {
-		int missing = __copy_from_user(dst, src, len);
-
-		if (missing)
-			csum = 0;
-		else
-			csum = csum_partial(dst, len, ~0U);
-	}
+	csum = csum_partial_copy_generic((void __force *)src, dst, len);
 
 	prevent_read_from_user(src, len);
-	return (__force __wsum)csum;
+	return csum;
 }
 EXPORT_SYMBOL(csum_and_copy_from_user);
 
 __wsum csum_and_copy_to_user(const void *src, void __user *dst, int len)
 {
-	unsigned int csum;
-	int err = 0;
+	__wsum csum;
 
 	might_sleep();
 	if (unlikely(!access_ok(dst, len)))
@@ -52,17 +40,9 @@ __wsum csum_and_copy_to_user(const void *src, void __user *dst, int len)
 
 	allow_write_to_user(dst, len);
 
-	csum = csum_partial_copy_generic(src, (void __force *)dst,
-					 len, ~0U, NULL, &err);
-
-	if (unlikely(err)) {
-		csum = csum_partial(src, len, ~0U);
-		if (copy_to_user(dst, src, len))
-			csum = 0;
-	}
+	csum = csum_partial_copy_generic(src, (void __force *)dst, len);
 
 	prevent_write_to_user(dst, len);
-	return (__force __wsum)csum;
+	return csum;
 }
 EXPORT_SYMBOL(csum_and_copy_to_user);
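
With the fallback gone, a consumer of the wrappers is expected to treat 0 as "fault, discard the buffer".  A hedged sketch of such a caller -- the _model function and copy_and_checksum are hypothetical stand-ins, not kernel APIs; only the zero-means-fault contract is taken from the commit:

	#include <errno.h>
	#include <stdint.h>
	#include <stdio.h>
	#include <string.h>

	typedef uint32_t wsum_t;	/* stand-in for __wsum */

	/* hypothetical stand-in for csum_and_copy_from_user(): copies and
	 * returns a never-zero "checksum", or 0 to simulate a fault when
	 * src is NULL */
	static wsum_t csum_and_copy_from_user_model(const void *src, void *dst,
						    int len)
	{
		wsum_t sum = 0xffffffff;	/* the fixed ~0 seed */

		if (!src)
			return 0;		/* fault indication */
		memcpy(dst, src, len);
		for (int i = 0; i < len; i++)
			sum -= ((const uint8_t *)src)[i];   /* toy accumulation */
		return sum ? sum : 0xffffffff;	/* model the never-zero property */
	}

	/* e.g. how a receive path consumes the new convention */
	static int copy_and_checksum(const void *usrc, void *dst, int len,
				     wsum_t *csum_out)
	{
		wsum_t csum = csum_and_copy_from_user_model(usrc, dst, len);

		if (!csum)		/* fault: partial copy is discarded */
			return -EFAULT;
		*csum_out = csum;
		return 0;
	}

	int main(void)
	{
		char buf[4];
		wsum_t csum;

		printf("ok: %d\n", copy_and_checksum("abc", buf, 4, &csum));
		printf("fault: %d\n", copy_and_checksum(NULL, buf, 4, &csum));
		return 0;
	}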