Commit 31af2f36 authored by Al Viro

sparc: switch to RAW_COPY_USER

... and drop zeroing in sparc32.
Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
parent 1333eb78
......@@ -45,6 +45,7 @@ config SPARC
select HAVE_ARCH_HARDENED_USERCOPY
select PROVE_LOCKING_SMALL if PROVE_LOCKING
select ARCH_WANT_RELAX_ORDER
select ARCH_HAS_RAW_COPY_USER
config SPARC32
def_bool !64BIT
......
......@@ -235,39 +235,18 @@ int __get_user_bad(void);
unsigned long __copy_user(void __user *to, const void __user *from, unsigned long size);
static inline unsigned long copy_to_user(void __user *to, const void *from, unsigned long n)
static inline unsigned long raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
if (n && __access_ok((unsigned long) to, n)) {
check_object_size(from, n, true);
return __copy_user(to, (__force void __user *) from, n);
} else
return n;
}
static inline unsigned long __copy_to_user(void __user *to, const void *from, unsigned long n)
{
check_object_size(from, n, true);
return __copy_user(to, (__force void __user *) from, n);
}
static inline unsigned long copy_from_user(void *to, const void __user *from, unsigned long n)
{
if (n && __access_ok((unsigned long) from, n)) {
check_object_size(to, n, false);
return __copy_user((__force void __user *) to, from, n);
} else {
memset(to, 0, n);
return n;
}
}
static inline unsigned long __copy_from_user(void *to, const void __user *from, unsigned long n)
static inline unsigned long raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
return __copy_user((__force void __user *) to, from, n);
}
#define __copy_to_user_inatomic __copy_to_user
#define __copy_from_user_inatomic __copy_from_user
#define INLINE_COPY_FROM_USER
#define INLINE_COPY_TO_USER
static inline unsigned long __clear_user(void __user *addr, unsigned long size)
{
......
......@@ -176,39 +176,19 @@ __asm__ __volatile__( \
int __get_user_bad(void);
unsigned long __must_check ___copy_from_user(void *to,
unsigned long __must_check raw_copy_from_user(void *to,
const void __user *from,
unsigned long size);
static inline unsigned long __must_check
copy_from_user(void *to, const void __user *from, unsigned long size)
{
check_object_size(to, size, false);
return ___copy_from_user(to, from, size);
}
#define __copy_from_user copy_from_user
unsigned long __must_check ___copy_to_user(void __user *to,
unsigned long __must_check raw_copy_to_user(void __user *to,
const void *from,
unsigned long size);
static inline unsigned long __must_check
copy_to_user(void __user *to, const void *from, unsigned long size)
{
check_object_size(from, size, true);
#define INLINE_COPY_FROM_USER
#define INLINE_COPY_TO_USER
return ___copy_to_user(to, from, size);
}
#define __copy_to_user copy_to_user
unsigned long __must_check ___copy_in_user(void __user *to,
unsigned long __must_check raw_copy_in_user(void __user *to,
const void __user *from,
unsigned long size);
static inline unsigned long __must_check
copy_in_user(void __user *to, void __user *from, unsigned long size)
{
return ___copy_in_user(to, from, size);
}
#define __copy_in_user copy_in_user
unsigned long __must_check __clear_user(void __user *, unsigned long);
......@@ -217,9 +197,6 @@ unsigned long __must_check __clear_user(void __user *, unsigned long);
__must_check long strlen_user(const char __user *str);
__must_check long strnlen_user(const char __user *str, long n);
#define __copy_to_user_inatomic __copy_to_user
#define __copy_from_user_inatomic __copy_from_user
struct pt_regs;
unsigned long compute_effective_address(struct pt_regs *,
unsigned int insn,
......
......@@ -23,7 +23,7 @@
#define PREAMBLE \
rd %asi, %g1; \
cmp %g1, ASI_AIUS; \
bne,pn %icc, ___copy_in_user; \
bne,pn %icc, raw_copy_in_user; \
nop
#endif
......
......@@ -27,7 +27,7 @@
#define PREAMBLE \
rd %asi, %g1; \
cmp %g1, ASI_AIUS; \
bne,pn %icc, ___copy_in_user; \
bne,pn %icc, raw_copy_in_user; \
nop
#endif
......
......@@ -26,8 +26,8 @@
.type generic_patch_copyops,#function
generic_patch_copyops:
GEN_DO_PATCH(memcpy, GENmemcpy)
GEN_DO_PATCH(___copy_from_user, GENcopy_from_user)
GEN_DO_PATCH(___copy_to_user, GENcopy_to_user)
GEN_DO_PATCH(raw_copy_from_user, GENcopy_from_user)
GEN_DO_PATCH(raw_copy_to_user, GENcopy_to_user)
retl
nop
.size generic_patch_copyops,.-generic_patch_copyops
......@@ -36,7 +36,7 @@
#define PREAMBLE \
rd %asi, %g1; \
cmp %g1, ASI_AIUS; \
bne,pn %icc, ___copy_in_user; \
bne,pn %icc, raw_copy_in_user; \
nop
#endif
......
......@@ -45,7 +45,7 @@
#define PREAMBLE \
rd %asi, %g1; \
cmp %g1, ASI_AIUS; \
bne,pn %icc, ___copy_in_user; \
bne,pn %icc, raw_copy_in_user; \
nop
#endif
......
......@@ -26,8 +26,8 @@
.type niagara2_patch_copyops,#function
niagara2_patch_copyops:
NG_DO_PATCH(memcpy, NG2memcpy)
NG_DO_PATCH(___copy_from_user, NG2copy_from_user)
NG_DO_PATCH(___copy_to_user, NG2copy_to_user)
NG_DO_PATCH(raw_copy_from_user, NG2copy_from_user)
NG_DO_PATCH(raw_copy_to_user, NG2copy_to_user)
retl
nop
.size niagara2_patch_copyops,.-niagara2_patch_copyops
......@@ -31,7 +31,7 @@
#define PREAMBLE \
rd %asi, %g1; \
cmp %g1, ASI_AIUS; \
bne,pn %icc, ___copy_in_user; \
bne,pn %icc, raw_copy_in_user; \
nop
#endif
......
......@@ -40,7 +40,7 @@
#define PREAMBLE \
rd %asi, %g1; \
cmp %g1, ASI_AIUS; \
bne,pn %icc, ___copy_in_user; \
bne,pn %icc, raw_copy_in_user; \
nop
#endif
......
......@@ -26,8 +26,8 @@
.type niagara4_patch_copyops,#function
niagara4_patch_copyops:
NG_DO_PATCH(memcpy, NG4memcpy)
NG_DO_PATCH(___copy_from_user, NG4copy_from_user)
NG_DO_PATCH(___copy_to_user, NG4copy_to_user)
NG_DO_PATCH(raw_copy_from_user, NG4copy_from_user)
NG_DO_PATCH(raw_copy_to_user, NG4copy_to_user)
retl
nop
.size niagara4_patch_copyops,.-niagara4_patch_copyops
......
......@@ -25,7 +25,7 @@
#define PREAMBLE \
rd %asi, %g1; \
cmp %g1, ASI_AIUS; \
bne,pn %icc, ___copy_in_user; \
bne,pn %icc, raw_copy_in_user; \
nop
#endif
......
......@@ -28,7 +28,7 @@
#define PREAMBLE \
rd %asi, %g1; \
cmp %g1, ASI_AIUS; \
bne,pn %icc, ___copy_in_user; \
bne,pn %icc, raw_copy_in_user; \
nop
#endif
......
......@@ -26,8 +26,8 @@
.type niagara_patch_copyops,#function
niagara_patch_copyops:
NG_DO_PATCH(memcpy, NGmemcpy)
NG_DO_PATCH(___copy_from_user, NGcopy_from_user)
NG_DO_PATCH(___copy_to_user, NGcopy_to_user)
NG_DO_PATCH(raw_copy_from_user, NGcopy_from_user)
NG_DO_PATCH(raw_copy_to_user, NGcopy_to_user)
retl
nop
.size niagara_patch_copyops,.-niagara_patch_copyops
......@@ -19,7 +19,7 @@
.text; \
.align 4;
#define FUNC_NAME ___copy_from_user
#define FUNC_NAME raw_copy_from_user
#define LOAD(type,addr,dest) type##a [addr] %asi, dest
#define LOAD_BLK(addr,dest) ldda [addr] ASI_BLK_AIUS, dest
#define EX_RETVAL(x) 0
......@@ -31,7 +31,7 @@
#define PREAMBLE \
rd %asi, %g1; \
cmp %g1, ASI_AIUS; \
bne,pn %icc, ___copy_in_user; \
bne,pn %icc, raw_copy_in_user; \
nop; \
#include "U1memcpy.S"
......@@ -19,7 +19,7 @@
.text; \
.align 4;
#define FUNC_NAME ___copy_to_user
#define FUNC_NAME raw_copy_to_user
#define STORE(type,src,addr) type##a src, [addr] ASI_AIUS
#define STORE_BLK(src,addr) stda src, [addr] ASI_BLK_AIUS
#define EX_RETVAL(x) 0
......@@ -31,7 +31,7 @@
#define PREAMBLE \
rd %asi, %g1; \
cmp %g1, ASI_AIUS; \
bne,pn %icc, ___copy_in_user; \
bne,pn %icc, raw_copy_in_user; \
nop; \
#include "U1memcpy.S"
......@@ -31,7 +31,7 @@
#define PREAMBLE \
rd %asi, %g1; \
cmp %g1, ASI_AIUS; \
bne,pn %icc, ___copy_in_user; \
bne,pn %icc, raw_copy_in_user; \
nop; \
#include "U3memcpy.S"
......@@ -26,8 +26,8 @@
.type cheetah_patch_copyops,#function
cheetah_patch_copyops:
ULTRA3_DO_PATCH(memcpy, U3memcpy)
ULTRA3_DO_PATCH(___copy_from_user, U3copy_from_user)
ULTRA3_DO_PATCH(___copy_to_user, U3copy_to_user)
ULTRA3_DO_PATCH(raw_copy_from_user, U3copy_from_user)
ULTRA3_DO_PATCH(raw_copy_to_user, U3copy_to_user)
retl
nop
.size cheetah_patch_copyops,.-cheetah_patch_copyops
......@@ -44,7 +44,7 @@ __retl_o2_plus_1:
* to copy register windows around during thread cloning.
*/
ENTRY(___copy_in_user) /* %o0=dst, %o1=src, %o2=len */
ENTRY(raw_copy_in_user) /* %o0=dst, %o1=src, %o2=len */
cmp %o2, 0
be,pn %XCC, 85f
or %o0, %o1, %o3
......@@ -105,5 +105,5 @@ ENTRY(___copy_in_user) /* %o0=dst, %o1=src, %o2=len */
add %o0, 1, %o0
retl
clr %o0
ENDPROC(___copy_in_user)
EXPORT_SYMBOL(___copy_in_user)
ENDPROC(raw_copy_in_user)
EXPORT_SYMBOL(raw_copy_in_user)
......@@ -364,21 +364,7 @@ short_aligned_end:
97:
mov %o2, %g3
fixupretl:
sethi %hi(PAGE_OFFSET), %g1
cmp %o0, %g1
blu 1f
cmp %o1, %g1
bgeu 1f
ld [%g6 + TI_PREEMPT], %g1
cmp %g1, 0
bne 1f
nop
save %sp, -64, %sp
mov %i0, %o0
call __bzero
mov %g3, %o1
restore
1: retl
retl
mov %g3, %o0
/* exception routine sets %g2 to (broken_insn - first_insn)>>2 */
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment