Commit 1fc1ee17 authored by Matthew Wilcox's avatar Matthew Wilcox Committed by Linus Torvalds

[PATCH] parisc: use fixups for exception support

Revamp exceptions support to use fixup sections
Committed-by: Randolph Chung <tausq@parisc-linux.org>
parent 85b34e48
......@@ -38,6 +38,7 @@
#include <asm/ptrace.h>
#include <asm/processor.h>
#include <asm/pdc.h>
#include <asm/uaccess.h>
/*
 * Emit a "->SYM value" marker line into the generated assembly; the build
 * scans these to produce assembler-visible offset constants (the usual
 * asm-offsets mechanism).  "i" forces `val` to be a compile-time constant.
 */
#define DEFINE(sym, val) \
asm volatile("\n->" #sym " %0 " #val : : "i" (val))
......@@ -291,5 +292,8 @@ int main(void)
DEFINE(ASM_PT_INITIAL, PT_INITIAL);
DEFINE(ASM_PAGE_SIZE, PAGE_SIZE);
BLANK();
DEFINE(EXCDATA_IP, offsetof(struct exception_data, fault_ip));
DEFINE(EXCDATA_SPACE, offsetof(struct exception_data, fault_space));
DEFINE(EXCDATA_ADDR, offsetof(struct exception_data, fault_addr));
return 0;
}
......@@ -29,6 +29,19 @@
#else
.level 1.1
#endif
/*
 * fixup_branch lbl -- branch to a label in the .fixup section.
 * 32-bit: a plain branch suffices.
 * 64-bit: build the full address of \lbl in %r1 and branch indirectly,
 * presumably because the .fixup section may lie beyond the reach of a
 * direct branch -- note %r1 is clobbered.
 */
#ifndef __LP64__
.macro fixup_branch,lbl
b \lbl
.endm
#else
.macro fixup_branch,lbl
/* load left/right address halves of \lbl, then branch via %r1 */
ldil L%\lbl, %r1
ldo R%\lbl(%r1), %r1
bv,n %r0(%r1)
.endm
#endif
.text
.import syscall_exit,code
......
......@@ -23,6 +23,7 @@
#include <linux/config.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <asm/uaccess.h>
/* #define DEBUG_UNALIGNED 1 */
......@@ -38,6 +39,11 @@
#define RFMT "%08lx"
#endif
/*
 * FIXUP_BRANCH(lbl) -- inline-asm text that branches to label `lbl` by
 * building its full address in %r1 (ldil/ldo) and branching indirectly.
 * %r1 is clobbered; callers must account for that in their clobber list.
 * The long form is used so the branch reaches the .fixup section even on
 * 64-bit kernels.
 */
#define FIXUP_BRANCH(lbl) \
"\tldil L%%" #lbl ", %%r1\n" \
"\tldo R%%" #lbl "(%%r1), %%r1\n" \
"\tbv,n %%r0(%%r1)\n"
/* 1111 1100 0000 0000 0001 0011 1100 0000 */
#define OPCODE1(a,b,c) ((a)<<26|(b)<<12|(c)<<6)
#define OPCODE2(a,b) ((a)<<26|(b)<<1)
......@@ -134,15 +140,19 @@ static int emulate_ldh(struct pt_regs *regs, int toreg)
"1: ldbs 0(%%sr1,%3), %%r20\n"
"2: ldbs 1(%%sr1,%3), %0\n"
" depw %%r20, 23, 24, %0\n"
" cmpclr,= %%r0, %%r0, %1\n"
"3: ldo -2(%%r0), %1\n"
" .section __ex_table,\"a\"\n"
" copy %%r0, %1\n"
"3: \n"
" .section .fixup,\"ax\"\n"
"4: ldi -2, %1\n"
FIXUP_BRANCH(3b)
" .previous\n"
" .section __ex_table,\"aw\"\n"
#ifdef __LP64__
" .dword 1b,(3b-1b)\n"
" .dword 2b,(3b-2b)\n"
" .dword 1b,4b\n"
" .dword 2b,4b\n"
#else
" .word 1b,(3b-1b)\n"
" .word 2b,(3b-2b)\n"
" .word 1b,4b\n"
" .word 2b,4b\n"
#endif
" .previous\n"
: "=r" (val), "=r" (ret)
......@@ -175,15 +185,19 @@ static int emulate_ldw(struct pt_regs *regs, int toreg, int flop)
" subi 32,%%r19,%%r19\n"
" mtctl %%r19,11\n"
" vshd %0,%%r20,%0\n"
" cmpclr,= %%r0, %%r0, %1\n"
"3: ldo -2(%%r0), %1\n"
" .section __ex_table,\"a\"\n"
" copy %%r0, %1\n"
"3: \n"
" .section .fixup,\"ax\"\n"
"4: ldi -2, %1\n"
FIXUP_BRANCH(3b)
" .previous\n"
" .section __ex_table,\"aw\"\n"
#ifdef __LP64__
" .dword 1b,(3b-1b)\n"
" .dword 2b,(3b-2b)\n"
" .dword 1b,4b\n"
" .dword 2b,4b\n"
#else
" .word 1b,(3b-1b)\n"
" .word 2b,(3b-2b)\n"
" .word 1b,4b\n"
" .word 2b,4b\n"
#endif
" .previous\n"
: "=r" (val), "=r" (ret)
......@@ -222,15 +236,19 @@ static int emulate_ldd(struct pt_regs *regs, int toreg, int flop)
" subi 64,%%r19,%%r19\n"
" mtsar %%r19\n"
" shrpd %0,%%r20,%%sar,%0\n"
" cmpclr,= %%r0, %%r0, %1\n"
"3: ldo -2(%%r0), %1\n"
" .section __ex_table,\"a\"\n"
" copy %%r0, %1\n"
"3: \n"
" .section .fixup,\"ax\"\n"
"4: ldi -2, %1\n"
FIXUP_BRANCH(3b)
" .previous\n"
" .section __ex_table,\"aw\"\n"
#ifdef __LP64__
" .dword 1b,(3b-1b)\n"
" .dword 2b,(3b-2b)\n"
" .dword 1b,4b\n"
" .dword 2b,4b\n"
#else
" .word 1b,(3b-1b)\n"
" .word 2b,(3b-2b)\n"
" .word 1b,4b\n"
" .word 2b,4b\n"
#endif
" .previous\n"
: "=r" (val), "=r" (ret)
......@@ -250,17 +268,21 @@ static int emulate_ldd(struct pt_regs *regs, int toreg, int flop)
" mtsar %%r19\n"
" vshd %0,%1,%0\n"
" vshd %1,%%r20,%1\n"
" cmpclr,= %%r0, %%r0, %2\n"
"4: ldo -2(%%r0), %2\n"
" .section __ex_table,\"a\"\n"
" copy %%r0, %2\n"
"4: \n"
" .section .fixup,\"ax\"\n"
"5: ldi -2, %2\n"
FIXUP_BRANCH(4b)
" .previous\n"
" .section __ex_table,\"aw\"\n"
#ifdef __LP64__
" .dword 1b,(4b-1b)\n"
" .dword 2b,(4b-2b)\n"
" .dword 3b,(4b-3b)\n"
" .dword 1b,5b\n"
" .dword 2b,5b\n"
" .dword 3b,5b\n"
#else
" .word 1b,(4b-1b)\n"
" .word 2b,(4b-2b)\n"
" .word 3b,(4b-3b)\n"
" .word 1b,5b\n"
" .word 2b,5b\n"
" .word 3b,5b\n"
#endif
" .previous\n"
: "=r" (valh), "=r" (vall), "=r" (ret)
......@@ -296,15 +318,19 @@ static int emulate_sth(struct pt_regs *regs, int frreg)
" extrw,u %1, 23, 8, %%r19\n"
"1: stb %1, 1(%%sr1, %2)\n"
"2: stb %%r19, 0(%%sr1, %2)\n"
" cmpclr,= %%r0, %%r0, %0\n"
"3: ldo -2(%%r0), %0\n"
" .section __ex_table,\"a\"\n"
" copy %%r0, %0\n"
"3: \n"
" .section .fixup,\"ax\"\n"
"4: ldi -2, %0\n"
FIXUP_BRANCH(3b)
" .previous\n"
" .section __ex_table,\"aw\"\n"
#ifdef __LP64__
" .dword 1b,(3b-1b)\n"
" .dword 2b,(3b-2b)\n"
" .dword 1b,4b\n"
" .dword 2b,4b\n"
#else
" .word 1b,(3b-1b)\n"
" .word 2b,(3b-2b)\n"
" .word 1b,4b\n"
" .word 2b,4b\n"
#endif
" .previous\n"
: "=r" (ret)
......@@ -346,15 +372,19 @@ static int emulate_stw(struct pt_regs *regs, int frreg, int flop)
" or %%r1, %%r21, %%r21\n"
" stw %%r20,0(%%sr1,%2)\n"
" stw %%r21,4(%%sr1,%2)\n"
" cmpclr,= %%r0, %%r0, %0\n"
"3: ldo -2(%%r0), %0\n"
" .section __ex_table,\"a\"\n"
" copy %%r0, %0\n"
"3: \n"
" .section .fixup,\"ax\"\n"
"4: ldi -2, %0\n"
FIXUP_BRANCH(3b)
" .previous\n"
" .section __ex_table,\"aw\"\n"
#ifdef __LP64__
" .dword 1b,(3b-1b)\n"
" .dword 2b,(3b-2b)\n"
" .dword 1b,4b\n"
" .dword 2b,4b\n"
#else
" .word 1b,(3b-1b)\n"
" .word 2b,(3b-2b)\n"
" .word 1b,4b\n"
" .word 2b,4b\n"
#endif
" .previous\n"
: "=r" (ret)
......@@ -399,19 +429,23 @@ static int emulate_std(struct pt_regs *regs, int frreg, int flop)
" or %%r1, %%r21, %%r21\n"
"3: std %%r20,0(%%sr1,%2)\n"
"4: std %%r21,8(%%sr1,%2)\n"
" cmpclr,= %%r0, %%r0, %0\n"
"5: ldo -2(%%r0), %0\n"
" .section __ex_table,\"a\"\n"
" copy %%r0, %0\n"
"5: \n"
" .section .fixup,\"ax\"\n"
"6: ldi -2, %0\n"
FIXUP_BRANCH(5b)
" .previous\n"
" .section __ex_table,\"aw\"\n"
#ifdef __LP64__
" .dword 1b,(5b-1b)\n"
" .dword 2b,(5b-2b)\n"
" .dword 3b,(5b-3b)\n"
" .dword 4b,(5b-4b)\n"
" .dword 1b,6b\n"
" .dword 2b,6b\n"
" .dword 3b,6b\n"
" .dword 4b,6b\n"
#else
" .word 1b,(5b-1b)\n"
" .word 2b,(5b-2b)\n"
" .word 3b,(5b-3b)\n"
" .word 4b,(5b-4b)\n"
" .word 1b,6b\n"
" .word 2b,6b\n"
" .word 3b,6b\n"
" .word 4b,6b\n"
#endif
" .previous\n"
: "=r" (ret)
......@@ -438,21 +472,25 @@ static int emulate_std(struct pt_regs *regs, int frreg, int flop)
"3: stw %1,0(%%sr1,%1)\n"
"4: stw %%r1,4(%%sr1,%3)\n"
"5: stw %2,8(%%sr1,%3)\n"
" cmpclr,= %%r0, %%r0, %0\n"
"6: ldo -2(%%r0), %0\n"
" .section __ex_table,\"a\"\n"
" copy %%r0, %0\n"
"6: \n"
" .section .fixup,\"ax\"\n"
"7: ldi -2, %0\n"
FIXUP_BRANCH(6b)
" .previous\n"
" .section __ex_table,\"aw\"\n"
#ifdef __LP64__
" .dword 1b,(6b-1b)\n"
" .dword 2b,(6b-2b)\n"
" .dword 3b,(6b-3b)\n"
" .dword 4b,(6b-4b)\n"
" .dword 5b,(6b-5b)\n"
" .dword 1b,7b\n"
" .dword 2b,7b\n"
" .dword 3b,7b\n"
" .dword 4b,7b\n"
" .dword 5b,7b\n"
#else
" .word 1b,(6b-1b)\n"
" .word 2b,(6b-2b)\n"
" .word 3b,(6b-3b)\n"
" .word 4b,(6b-4b)\n"
" .word 5b,(6b-5b)\n"
" .word 1b,7b\n"
" .word 2b,7b\n"
" .word 3b,7b\n"
" .word 4b,7b\n"
" .word 5b,7b\n"
#endif
" .previous\n"
: "=r" (ret)
......
/*
* Linux/PA-RISC Project (http://www.parisc-linux.org/)
*
* Copyright (C) 2004 Randolph Chung <tausq@debian.org>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* Fixup routines for kernel exception handling.
*/
#include <linux/config.h>
#include <asm/offsets.h>
#include <asm/assembly.h>
#include <asm/errno.h>
/*
 * get_fault_ip t1 t2 -- load into \t1 the faulting instruction address
 * that do_page_fault() saved in the per-cpu `exception_data` structure
 * (EXCDATA_IP == offsetof(struct exception_data, fault_ip)).
 * \t2 is a scratch register.  On SMP the per-cpu base must first be
 * looked up via __per_cpu_offset[smp_processor_id()].
 */
#ifdef CONFIG_SMP
.macro get_fault_ip t1 t2
/* t1 = base of the __per_cpu_offset[] table */
addil LT%__per_cpu_offset,%r27
LDREG RT%__per_cpu_offset(%r1),\t1
/* t2 = smp_processor_id() */
mfctl 30,\t2
ldw TI_CPU(\t2),\t2
#ifdef __LP64__
extrd,u \t2,63,32,\t2
#endif
/* t2 = &__per_cpu_offset[smp_processor_id()]; */
LDREG,s \t2(\t1),\t2
addil LT%per_cpu__exception_data,%r27
LDREG RT%per_cpu__exception_data(%r1),\t1
/* t1 = &__get_cpu_var(exception_data) */
add,l \t1,\t2,\t1
/* t1 = t1->fault_ip */
LDREG EXCDATA_IP(\t1), \t1
.endm
#else
.macro get_fault_ip t1 t2
/* t1 = &__get_cpu_var(exception_data) */
addil LT%per_cpu__exception_data,%r27
LDREG RT%per_cpu__exception_data(%r1),\t2
/* t1 = t2->fault_ip */
LDREG EXCDATA_IP(\t2), \t1
.endm
#endif
.text
.section .fixup, "ax"
/* get_user() fixups: resume N instructions past the faulting load,
 * returning -EFAULT in %r8 and a zeroed value in %r9 */
.export fixup_get_user_skip_1
fixup_get_user_skip_1:
get_fault_ip %r1,%r8
ldo 4(%r1), %r1	/* skip 1 instruction (4 bytes) past the fault */
ldi -EFAULT, %r8
bv %r0(%r1)
copy %r0, %r9	/* branch delay slot: clear the value register */
.export fixup_get_user_skip_2
fixup_get_user_skip_2:
get_fault_ip %r1,%r8
ldo 8(%r1), %r1	/* skip 2 instructions (8 bytes) past the fault */
ldi -EFAULT, %r8
bv %r0(%r1)
copy %r0, %r9	/* branch delay slot: clear the value register */
/* put_user() fixups: resume past the faulting store, -EFAULT in %r8;
 * %r9 is left untouched (no value register for a store) */
.export fixup_put_user_skip_1
fixup_put_user_skip_1:
get_fault_ip %r1,%r8
ldo 4(%r1), %r1	/* skip 1 instruction (4 bytes) past the fault */
bv %r0(%r1)
ldi -EFAULT, %r8	/* branch delay slot */
.export fixup_put_user_skip_2
fixup_put_user_skip_2:
get_fault_ip %r1,%r8
ldo 8(%r1), %r1	/* skip 2 instructions (8 bytes) past the fault */
bv %r0(%r1)
ldi -EFAULT, %r8	/* branch delay slot */
......@@ -201,16 +201,18 @@ $lsfu_exit:
nop
.exit
3: b $lsfu_exit
.section .fixup,"ax"
3: fixup_branch $lsfu_exit
ldi -EFAULT,%r28
.previous
.section __ex_table,"a"
.section __ex_table,"aw"
#ifdef __LP64__
.dword 1b,(3b-1b)
.dword 2b,(3b-2b)
.dword 1b,3b
.dword 2b,3b
#else
.word 1b,(3b-1b)
.word 2b,(3b-2b)
.word 1b,3b
.word 2b,3b
#endif
.previous
......@@ -239,14 +241,16 @@ $lclu_done:
copy %r25,%r28
.exit
2: b $lclu_done
.section .fixup,"ax"
2: fixup_branch $lclu_done
ldo 1(%r25),%r25
.previous
.section __ex_table,"a"
.section __ex_table,"aw"
#ifdef __LP64__
.dword 1b,(2b-1b)
.dword 1b,2b
#else
.word 1b,(2b-1b)
.word 1b,2b
#endif
.previous
......@@ -282,16 +286,18 @@ $lslen_nzero:
b $lslen_done
ldo 1(%r26),%r26 /* special case for N == 0 */
3: b $lslen_done
.section .fixup,"ax"
3: fixup_branch $lslen_done
copy %r24,%r26 /* reset r26 so 0 is returned on fault */
.previous
.section __ex_table,"a"
.section __ex_table,"aw"
#ifdef __LP64__
.dword 1b,(3b-1b)
.dword 2b,(3b-2b)
.dword 1b,3b
.dword 2b,3b
#else
.word 1b,(3b-1b)
.word 2b,(3b-2b)
.word 1b,3b
.word 2b,3b
#endif
.previous
......
......@@ -36,6 +36,9 @@
#define BITSSET 0x1c0 /* for identifying LDCW */
DEFINE_PER_CPU(struct exception_data, exception_data);
/*
* parisc_acctyp(unsigned int inst) --
* Given a PA-RISC memory access instruction, determine if the
......@@ -230,17 +233,17 @@ void do_page_fault(struct pt_regs *regs, unsigned long code,
no_context:
if (!user_mode(regs)) {
fix = search_exception_tables(regs->iaoq[0]);
if (fix) {
struct exception_data *d;
if (fix->skip & 1)
regs->gr[8] = -EFAULT;
if (fix->skip & 2)
regs->gr[9] = 0;
d = &__get_cpu_var(exception_data);
d->fault_ip = regs->iaoq[0];
d->fault_space = regs->isr;
d->fault_addr = regs->ior;
regs->iaoq[0] += ((fix->skip) & ~3);
regs->iaoq[0] = ((fix->fixup) & ~3);
/*
* NOTE: In some cases the faulting instruction
......
......@@ -55,16 +55,22 @@ extern int __put_user_bad(void);
/*
 * The exception table contains two values: the first is the address
 * of an instruction that is allowed to fault, and the second is the
 * address of the fixup routine that the page-fault handler branches
 * to when that instruction faults.
 */
/*
 * One exception-table entry: `insn` is the address of an instruction
 * that is allowed to fault; `fixup` is the address of the fixup routine
 * the page-fault handler transfers control to when it does.
 *
 * NOTE(review): the merged diff residue showed both the old `skip`
 * member and the new `fixup` member; only `fixup` belongs in the new
 * layout -- every __ex_table entry emitted in this patch is a two-word
 * (insn, fixup) pair.
 */
struct exception_table_entry {
	unsigned long insn;	/* address of insn that is allowed to fault */
	long fixup;		/* address of the fixup routine */
};
/*
* The page fault handler stores, in a per-cpu area, the following information
* if a fixup routine is available.
*/
/*
 * Per-cpu record of the most recent kernel-mode fault, filled in by
 * do_page_fault() before redirecting execution to a fixup routine, so
 * the routine can compute the resume address.
 */
struct exception_data {
unsigned long fault_ip; /* regs->iaoq[0] at fault time */
unsigned long fault_space; /* regs->isr at fault time */
unsigned long fault_addr; /* regs->ior at fault time */
};
#define __get_user(x,ptr) \
......@@ -98,48 +104,44 @@ struct exception_table_entry {
#ifdef __LP64__
/*
 * Load one item from kernel space (64-bit kernel).  On a fault the
 * exception table redirects to fixup_get_user_skip_1, which returns
 * -EFAULT in %r8 and zero in %r9 (presumably bound to __gu_err/__gu_val
 * by the caller -- not visible here).  %r1 is clobbered by the fixup.
 *
 * The merged diff residue duplicated the __ex_table emission (old
 * skip-encoded entry plus new fixup entry) and the constraint tail;
 * only the new fixup-routine form is kept.
 */
#define __get_kernel_asm(ldx,ptr) \
	__asm__("\n1:\t" ldx "\t0(%2),%0\n" \
		"\t.section __ex_table,\"aw\"\n" \
		"\t.dword\t1b,fixup_get_user_skip_1\n" \
		"\t.previous" \
		: "=r"(__gu_val), "=r"(__gu_err) \
		: "r"(ptr), "1"(__gu_err) \
		: "r1");
/*
 * Load one item from user space via %sr3 (64-bit kernel).  On a fault
 * fixup_get_user_skip_1 returns -EFAULT in %r8 and zero in %r9
 * (presumably bound to __gu_err/__gu_val by the caller).  %r1 is
 * clobbered by the fixup.
 *
 * Merged diff residue (duplicate __ex_table emission and constraint
 * tail) removed; only the new fixup-routine form is kept.
 */
#define __get_user_asm(ldx,ptr) \
	__asm__("\n1:\t" ldx "\t0(%%sr3,%2),%0\n" \
		"\t.section __ex_table,\"aw\"\n" \
		"\t.dword\t1b,fixup_get_user_skip_1\n" \
		"\t.previous" \
		: "=r"(__gu_val), "=r"(__gu_err) \
		: "r"(ptr), "1"(__gu_err) \
		: "r1");
#else
/*
 * Load one item from kernel space (32-bit kernel; .word entries).  On a
 * fault fixup_get_user_skip_1 returns -EFAULT in %r8 and zero in %r9
 * (presumably bound to __gu_err/__gu_val by the caller).  %r1 is
 * clobbered by the fixup.
 *
 * Merged diff residue (duplicate __ex_table emission and constraint
 * tail) removed; only the new fixup-routine form is kept.
 */
#define __get_kernel_asm(ldx,ptr) \
	__asm__("\n1:\t" ldx "\t0(%2),%0\n" \
		"\t.section __ex_table,\"aw\"\n" \
		"\t.word\t1b,fixup_get_user_skip_1\n" \
		"\t.previous" \
		: "=r"(__gu_val), "=r"(__gu_err) \
		: "r"(ptr), "1"(__gu_err) \
		: "r1");
/*
 * Load one item from user space via %sr3 (32-bit kernel; .word
 * entries).  On a fault fixup_get_user_skip_1 returns -EFAULT in %r8
 * and zero in %r9 (presumably bound to __gu_err/__gu_val by the
 * caller).  %r1 is clobbered by the fixup.
 *
 * Merged diff residue (duplicate __ex_table emission) removed; only
 * the new fixup-routine form is kept.
 */
#define __get_user_asm(ldx,ptr) \
	__asm__("\n1:\t" ldx "\t0(%%sr3,%2),%0\n" \
		"\t.section __ex_table,\"aw\"\n" \
		"\t.word\t1b,fixup_get_user_skip_1\n" \
		"\t.previous" \
		: "=r"(__gu_val), "=r"(__gu_err) \
		: "r"(ptr), "1"(__gu_err) \
		: "r1");
#endif /* !__LP64__ */
#define __put_user(x,ptr) \
({ \
register long __pu_err __asm__ ("r8") = 0; \
register long __pu_err __asm__ ("r8") = 0; \
\
if (segment_eq(get_fs(),KERNEL_DS)) { \
switch (sizeof(*(ptr))) { \
......@@ -173,82 +175,73 @@ struct exception_table_entry {
/*
 * Store one item to kernel space (64-bit kernel).  On a fault
 * fixup_put_user_skip_1 returns -EFAULT in %r8 (bound to __pu_err by
 * the caller).  %r1 clobber added for consistency with the sibling
 * macros -- the fixup path uses %r1 (NOTE(review): the dump did not
 * show a new constraint tail for this one; verify against the real
 * commit).
 *
 * Merged diff residue (duplicate __ex_table emission) removed; only
 * the new fixup-routine form is kept.
 */
#define __put_kernel_asm(stx,x,ptr) \
	__asm__ __volatile__ ( \
		"\n1:\t" stx "\t%2,0(%1)\n" \
		"\t.section __ex_table,\"aw\"\n" \
		"\t.dword\t1b,fixup_put_user_skip_1\n" \
		"\t.previous" \
		: "=r"(__pu_err) \
		: "r"(ptr), "r"(x), "0"(__pu_err) \
		: "r1")
/*
 * Store one item to user space via %sr3 (64-bit kernel).  On a fault
 * fixup_put_user_skip_1 returns -EFAULT in %r8 (bound to __pu_err by
 * the caller); the fixup path clobbers %r1.
 *
 * Merged diff residue (duplicate __ex_table emission and constraint
 * tail) removed; only the new fixup-routine form is kept.
 */
#define __put_user_asm(stx,x,ptr) \
	__asm__ __volatile__ ( \
		"\n1:\t" stx "\t%2,0(%%sr3,%1)\n" \
		"\t.section __ex_table,\"aw\"\n" \
		"\t.dword\t1b,fixup_put_user_skip_1\n" \
		"\t.previous" \
		: "=r"(__pu_err) \
		: "r"(ptr), "r"(x), "0"(__pu_err) \
		: "r1")
#else
/*
 * Store one item to kernel space (32-bit kernel; .word entries).  On a
 * fault fixup_put_user_skip_1 returns -EFAULT in %r8 (bound to
 * __pu_err by the caller); the fixup path clobbers %r1.
 *
 * Merged diff residue (duplicate __ex_table emission) removed; only
 * the new fixup-routine form is kept.
 */
#define __put_kernel_asm(stx,x,ptr) \
	__asm__ __volatile__ ( \
		"\n1:\t" stx "\t%2,0(%1)\n" \
		"\t.section __ex_table,\"aw\"\n" \
		"\t.word\t1b,fixup_put_user_skip_1\n" \
		"\t.previous" \
		: "=r"(__pu_err) \
		: "r"(ptr), "r"(x), "0"(__pu_err) \
		: "r1")
/*
 * Store one item to user space via %sr3 (32-bit kernel; .word entries).
 * On a fault fixup_put_user_skip_1 returns -EFAULT in %r8 (bound to
 * __pu_err by the caller); the fixup path clobbers %r1.
 *
 * Merged diff residue removed: the duplicate __ex_table emission, the
 * dangling old/new constraint tails, and the superseded
 * `static inline __put_kernel_asm64/__put_user_asm64` function
 * definitions (this span) -- those are replaced by the macro versions
 * defined immediately below.
 */
#define __put_user_asm(stx,x,ptr) \
	__asm__ __volatile__ ( \
		"\n1:\t" stx "\t%2,0(%%sr3,%1)\n" \
		"\t.section __ex_table,\"aw\"\n" \
		"\t.word\t1b,fixup_put_user_skip_1\n" \
		"\t.previous" \
		: "=r"(__pu_err) \
		: "r"(ptr), "r"(x), "0"(__pu_err) \
		: "r1")
/*
 * Store a 64-bit value to kernel space as two 32-bit stores (32-bit
 * kernel).  A fault on the first store skips both instructions
 * (fixup_put_user_skip_2); a fault on the second skips only itself
 * (fixup_put_user_skip_1).  Either way -EFAULT lands in %r8, bound to
 * __pu_err by the caller; the fixup path clobbers %r1.
 */
#define __put_kernel_asm64(__val,ptr) do { \
u64 __val64 = (u64)(__val); \
u32 hi = (__val64) >> 32; \
u32 lo = (__val64) & 0xffffffff; \
__asm__ __volatile__ ( \
"\n1:\tstw %2,0(%1)\n" \
"\n2:\tstw %3,4(%1)\n" \
"\t.section __ex_table,\"aw\"\n" \
"\t.word\t1b,fixup_put_user_skip_2\n" \
"\t.word\t2b,fixup_put_user_skip_1\n" \
"\t.previous" \
: "=r"(__pu_err) \
: "r"(ptr), "r"(hi), "r"(lo), "0"(__pu_err) \
: "r1"); \
} while (0)
/*
 * Store a 64-bit value to user space (via %sr3) as two 32-bit stores
 * on a 32-bit kernel.  A fault on the first store skips both
 * instructions; a fault on the second skips only itself; either way
 * -EFAULT lands in %r8, bound to __pu_err by the caller.  The fixup
 * path clobbers %r1.
 *
 * Bug fix: the exception entries referenced the *get_user* fixups
 * (fixup_get_user_skip_2/_1), which also zero %r9 -- wrong for a
 * store, which has no value register to clear.  Use the put_user
 * fixups, matching __put_kernel_asm64 above.
 */
#define __put_user_asm64(__val,ptr) do { \
u64 __val64 = (u64)(__val); \
u32 hi = (__val64) >> 32; \
u32 lo = (__val64) & 0xffffffff; \
__asm__ __volatile__ ( \
"\n1:\tstw %2,0(%%sr3,%1)\n" \
"\n2:\tstw %3,4(%%sr3,%1)\n" \
"\t.section __ex_table,\"aw\"\n" \
"\t.word\t1b,fixup_put_user_skip_2\n" \
"\t.word\t2b,fixup_put_user_skip_1\n" \
"\t.previous" \
: "=r"(__pu_err) \
: "r"(ptr), "r"(hi), "r"(lo), "0"(__pu_err) \
: "r1"); \
} while (0)
#endif /* !__LP64__ */
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment