Commit bd79f947 authored by Christoph Hellwig, committed by Arnd Bergmann

asm-generic: provide entirely generic nommu uaccess

Move the code that implements uaccess using memcpy or direct loads and
stores into asm-generic/uaccess.h, and make it selectable via a Kconfig
option.
Signed-off-by: Christoph Hellwig <hch@lst.de>
Signed-off-by: Arnd Bergmann <arnd@arndb.de>
parent c67fdc1f
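
The whole patch rests on one observation: on nommu systems the kernel and user space share a single flat address space, so a "user copy" needs no fault handling and degenerates to an ordinary memory copy, with small constant sizes collapsing to a single load or store. Below is a stand-alone user-space sketch of that technique; demo_copy_from_user is a hypothetical name for illustration, not a kernel API.

/*
 * Stand-alone sketch of the technique this patch centralizes: with a
 * flat address space a user copy is just memcpy, and for sizes the
 * compiler can prove constant (__builtin_constant_p) it collapses to
 * a single fixed-width load. demo_copy_from_user is a hypothetical
 * stand-in, not the kernel interface.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static inline unsigned long
demo_copy_from_user(void *to, const void *from, unsigned long n)
{
	if (__builtin_constant_p(n)) {
		switch (n) {
		case 1:
			*(uint8_t *)to = *(const uint8_t *)from;
			return 0;
		case 2:
			*(uint16_t *)to = *(const uint16_t *)from;
			return 0;
		case 4:
			*(uint32_t *)to = *(const uint32_t *)from;
			return 0;
		}
	}
	memcpy(to, from, n);
	return 0;	/* convention: bytes left uncopied, 0 == success */
}

int main(void)
{
	uint32_t src = 0xdeadbeef, dst = 0;

	/* sizeof(dst) is a compile-time constant: takes the case 4 path */
	demo_copy_from_user(&dst, &src, sizeof(dst));
	printf("copied 0x%08x\n", (unsigned)dst);
	return 0;
}
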
arch/h8300/Kconfig
@@ -23,6 +23,7 @@ config H8300
 	select HAVE_ARCH_KGDB
 	select HAVE_ARCH_HASH
 	select CPU_NO_EFFICIENT_FFS
+	select UACCESS_MEMCPY
 
 config CPU_BIG_ENDIAN
 	def_bool y
arch/h8300/include/asm/Kbuild
@@ -46,6 +46,7 @@ generic-y += timex.h
 generic-y += tlbflush.h
 generic-y += topology.h
 generic-y += trace_clock.h
+generic-y += uaccess.h
 generic-y += unaligned.h
 generic-y += vga.h
 generic-y += word-at-a-time.h
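
The generic-y entry makes Kbuild generate a one-line wrapper header, arch/h8300/include/generated/asm/uaccess.h, containing only #include <asm-generic/uaccess.h>; that wrapper stands in for the arch-specific header deleted next.
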
arch/h8300/include/asm/uaccess.h (deleted file)
-/* SPDX-License-Identifier: GPL-2.0 */
-#ifndef _ASM_UACCESS_H
-#define _ASM_UACCESS_H
-
-#include <linux/string.h>
-
-static inline __must_check unsigned long
-raw_copy_from_user(void *to, const void __user * from, unsigned long n)
-{
-	if (__builtin_constant_p(n)) {
-		switch(n) {
-		case 1:
-			*(u8 *)to = *(u8 __force *)from;
-			return 0;
-		case 2:
-			*(u16 *)to = *(u16 __force *)from;
-			return 0;
-		case 4:
-			*(u32 *)to = *(u32 __force *)from;
-			return 0;
-		}
-	}
-
-	memcpy(to, (const void __force *)from, n);
-	return 0;
-}
-
-static inline __must_check unsigned long
-raw_copy_to_user(void __user *to, const void *from, unsigned long n)
-{
-	if (__builtin_constant_p(n)) {
-		switch(n) {
-		case 1:
-			*(u8 __force *)to = *(u8 *)from;
-			return 0;
-		case 2:
-			*(u16 __force *)to = *(u16 *)from;
-			return 0;
-		case 4:
-			*(u32 __force *)to = *(u32 *)from;
-			return 0;
-		default:
-			break;
-		}
-	}
-
-	memcpy((void __force *)to, from, n);
-	return 0;
-}
-
-#define INLINE_COPY_FROM_USER
-#define INLINE_COPY_TO_USER
-
-#include <asm-generic/uaccess.h>
-
-#endif
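
A note on the annotations in the code just removed (and re-added below): __user and __force only mean something to the sparse static checker; to a normal compiler they expand to nothing. A simplified model of how such annotations are typically defined, patterned on include/linux/compiler_types.h (the real definitions carry a few more attributes):

/*
 * Simplified model of the sparse annotations used above. Under
 * sparse (__CHECKER__), __user places a pointer in a separate
 * address space and forbids dereferencing it; __force marks a cast
 * as deliberate, which is why the memcpy fallbacks can legitimately
 * strip the annotation on nommu, where user and kernel pointers
 * really do address the same memory.
 */
#ifdef __CHECKER__
# define __user		__attribute__((noderef, address_space(1)))
# define __force	__attribute__((force))
#else
# define __user
# define __force
#endif
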
include/asm-generic/uaccess.h
@@ -9,6 +9,54 @@
  */
 #include <linux/string.h>
 
+#ifdef CONFIG_UACCESS_MEMCPY
+static inline __must_check unsigned long
+raw_copy_from_user(void *to, const void __user * from, unsigned long n)
+{
+	if (__builtin_constant_p(n)) {
+		switch(n) {
+		case 1:
+			*(u8 *)to = *(u8 __force *)from;
+			return 0;
+		case 2:
+			*(u16 *)to = *(u16 __force *)from;
+			return 0;
+		case 4:
+			*(u32 *)to = *(u32 __force *)from;
+			return 0;
+		}
+	}
+
+	memcpy(to, (const void __force *)from, n);
+	return 0;
+}
+
+static inline __must_check unsigned long
+raw_copy_to_user(void __user *to, const void *from, unsigned long n)
+{
+	if (__builtin_constant_p(n)) {
+		switch(n) {
+		case 1:
+			*(u8 __force *)to = *(u8 *)from;
+			return 0;
+		case 2:
+			*(u16 __force *)to = *(u16 *)from;
+			return 0;
+		case 4:
+			*(u32 __force *)to = *(u32 *)from;
+			return 0;
+		default:
+			break;
+		}
+	}
+
+	memcpy((void __force *)to, from, n);
+	return 0;
+}
+
+#define INLINE_COPY_FROM_USER
+#define INLINE_COPY_TO_USER
+#endif /* CONFIG_UACCESS_MEMCPY */
+
 #define MAKE_MM_SEG(s)	((mm_segment_t) { (s) })
 
 #ifndef KERNEL_DS
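
The INLINE_COPY_FROM_USER / INLINE_COPY_TO_USER defines tell linux/uaccess.h to build the copy_from_user()/copy_to_user() wrappers as static inlines instead of using the out-of-line versions in lib/usercopy.c, which pays off here because the raw copy is trivially small. A compilable stand-alone sketch of that layering, heavily simplified (the real wrapper also performs access_ok() and might_fault() checks and zeroes any uncopied tail); all sketch_* names are hypothetical:

/*
 * Stand-alone sketch of how the INLINE_COPY_FROM_USER knob is
 * consumed, modeled loosely on include/linux/uaccess.h. With the
 * knob defined, every call site inlines down to the raw copy --
 * here plain memcpy, matching the nommu case above.
 */
#include <string.h>

static inline unsigned long
sketch_raw_copy_from_user(void *to, const void *from, unsigned long n)
{
	memcpy(to, from, n);	/* flat address space: a plain copy */
	return 0;		/* bytes not copied; 0 means success */
}

#define INLINE_COPY_FROM_USER

#ifdef INLINE_COPY_FROM_USER
/* inline wrapper: each call site collapses to the raw copy */
static inline unsigned long
sketch_copy_from_user(void *to, const void *from, unsigned long n)
{
	return sketch_raw_copy_from_user(to, from, n);
}
#else
/* otherwise the wrapper lives out of line (cf. lib/usercopy.c) */
unsigned long sketch_copy_from_user(void *to, const void *from,
				    unsigned long n);
#endif
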
arch/Kconfig
@@ -591,6 +591,10 @@ config ARCH_NO_SG_CHAIN
 config ARCH_HAS_PMEM_API
 	bool
 
+# use memcpy to implement user copies for nommu architectures
+config UACCESS_MEMCPY
+	bool
+
 config ARCH_HAS_UACCESS_FLUSHCACHE
 	bool