Commit 0b1a47c2 authored by Martin Schwidefsky's avatar Martin Schwidefsky Committed by Linus Torvalds

[PATCH] s390: inline assemblies.

Optimize s390 inline assemblies.
parent d5cb012f
......@@ -513,10 +513,9 @@ int __cpu_up(unsigned int cpu)
cpu_lowcore->kernel_stack = (unsigned long)
idle->thread_info + (THREAD_SIZE);
__ctl_store(cpu_lowcore->cregs_save_area[0], 0, 15);
__asm__ __volatile__("la 1,%0\n\t"
"stam 0,15,0(1)"
: "=m" (cpu_lowcore->access_regs_save_area[0])
: : "1", "memory");
__asm__ __volatile__("stam 0,15,0(%0)"
: : "a" (&cpu_lowcore->access_regs_save_area)
: "memory");
eieio();
signal_processor(cpu,sigp_restart);
......
......@@ -25,12 +25,9 @@ extern struct task_struct *resume(void *, void *);
#ifdef __s390x__
#define __FLAG_SHIFT 56
extern void __misaligned_u16(void);
extern void __misaligned_u32(void);
extern void __misaligned_u64(void);
#else /* __s390x__ */
#else /* ! __s390x__ */
#define __FLAG_SHIFT 24
#endif /* __s390x__ */
#endif /* ! __s390x__ */
static inline void save_fp_regs(s390_fp_regs *fpregs)
{
......@@ -301,56 +298,52 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
#define __ctl_load(array, low, high) ({ \
__asm__ __volatile__ ( \
" la 1,%0\n" \
" bras 2,0f\n" \
" lctlg 0,0,0(1)\n" \
"0: ex %1,0(2)" \
: : "m" (array), "a" (((low)<<4)+(high)) : "1", "2" ); \
" bras 1,0f\n" \
" lctlg 0,0,0(%0)\n" \
"0: ex %1,0(1)" \
: : "a" (&array), "a" (((low)<<4)+(high)) : "1" ); \
})
#define __ctl_store(array, low, high) ({ \
__asm__ __volatile__ ( \
" la 1,%0\n" \
" bras 2,0f\n" \
" stctg 0,0,0(1)\n" \
"0: ex %1,0(2)" \
: "=m" (array) : "a" (((low)<<4)+(high)): "1", "2" ); \
" bras 1,0f\n" \
" stctg 0,0,0(%1)\n" \
"0: ex %2,0(1)" \
: "=m" (array) : "a" (&array), "a" (((low)<<4)+(high)) : "1" ); \
})
#define __ctl_set_bit(cr, bit) ({ \
__u8 __dummy[24]; \
__asm__ __volatile__ ( \
" la 1,%0\n" /* align to 8 byte */ \
" aghi 1,7\n" \
" nill 1,0xfff8\n" \
" bras 2,0f\n" /* skip indirect insns */ \
" stctg 0,0,0(1)\n" \
" lctlg 0,0,0(1)\n" \
"0: ex %1,0(2)\n" /* execute stctl */ \
" lg 0,0(1)\n" \
" ogr 0,%2\n" /* set the bit */ \
" stg 0,0(1)\n" \
"1: ex %1,6(2)" /* execute lctl */ \
: "=m" (__dummy) : "a" (cr*17), "a" (1L<<(bit)) \
: "cc", "0", "1", "2"); \
" bras 1,0f\n" /* skip indirect insns */ \
" stctg 0,0,0(%1)\n" \
" lctlg 0,0,0(%1)\n" \
"0: ex %2,0(1)\n" /* execute stctl */ \
" lg 0,0(%1)\n" \
" ogr 0,%3\n" /* set the bit */ \
" stg 0,0(%1)\n" \
"1: ex %2,6(1)" /* execute lctl */ \
: "=m" (__dummy) \
: "a" ((((unsigned long) &__dummy) + 7) & ~7UL), \
"a" (cr*17), "a" (1L<<(bit)) \
: "cc", "0", "1" ); \
})
#define __ctl_clear_bit(cr, bit) ({ \
__u8 __dummy[24]; \
__u8 __dummy[16]; \
__asm__ __volatile__ ( \
" la 1,%0\n" /* align to 8 byte */ \
" aghi 1,7\n" \
" nill 1,0xfff8\n" \
" bras 2,0f\n" /* skip indirect insns */ \
" stctg 0,0,0(1)\n" \
" lctlg 0,0,0(1)\n" \
"0: ex %1,0(2)\n" /* execute stctl */ \
" lg 0,0(1)\n" \
" ngr 0,%2\n" /* set the bit */ \
" stg 0,0(1)\n" \
"1: ex %1,6(2)" /* execute lctl */ \
: "=m" (__dummy) : "a" (cr*17), "a" (~(1L<<(bit))) \
: "cc", "0", "1", "2"); \
" bras 1,0f\n" /* skip indirect insns */ \
" stctg 0,0,0(%1)\n" \
" lctlg 0,0,0(%1)\n" \
"0: ex %2,0(1)\n" /* execute stctl */ \
" lg 0,0(%1)\n" \
" ngr 0,%3\n" /* set the bit */ \
" stg 0,0(%1)\n" \
"1: ex %2,6(1)" /* execute lctl */ \
: "=m" (__dummy) \
: "a" ((((unsigned long) &__dummy) + 7) & ~7UL), \
"a" (cr*17), "a" (~(1L<<(bit))) \
: "cc", "0", "1" ); \
})
#else /* __s390x__ */
......@@ -360,58 +353,52 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
#define __ctl_load(array, low, high) ({ \
__asm__ __volatile__ ( \
" la 1,%0\n" \
" bras 2,0f\n" \
" lctl 0,0,0(1)\n" \
"0: ex %1,0(2)" \
: : "m" (array), "a" (((low)<<4)+(high)) : "1", "2" ); \
" bras 1,0f\n" \
" lctl 0,0,0(%0)\n" \
"0: ex %1,0(1)" \
: : "a" (&array), "a" (((low)<<4)+(high)) : "1" ); \
})
#define __ctl_store(array, low, high) ({ \
__asm__ __volatile__ ( \
" la 1,%0\n" \
" bras 2,0f\n" \
" stctl 0,0,0(1)\n" \
"0: ex %1,0(2)" \
: "=m" (array) : "a" (((low)<<4)+(high)): "1", "2" ); \
" bras 1,0f\n" \
" stctl 0,0,0(%1)\n" \
"0: ex %2,0(1)" \
: "=m" (array) : "a" (&array), "a" (((low)<<4)+(high)): "1" ); \
})
#define __ctl_set_bit(cr, bit) ({ \
__u8 __dummy[16]; \
__asm__ __volatile__ ( \
" la 1,%0\n" /* align to 8 byte */ \
" ahi 1,7\n" \
" srl 1,3\n" \
" sll 1,3\n" \
" bras 2,0f\n" /* skip indirect insns */ \
" stctl 0,0,0(1)\n" \
" lctl 0,0,0(1)\n" \
"0: ex %1,0(2)\n" /* execute stctl */ \
" l 0,0(1)\n" \
" or 0,%2\n" /* set the bit */ \
" st 0,0(1)\n" \
"1: ex %1,4(2)" /* execute lctl */ \
: "=m" (__dummy) : "a" (cr*17), "a" (1<<(bit)) \
: "cc", "0", "1", "2"); \
" bras 1,0f\n" /* skip indirect insns */ \
" stctl 0,0,0(%1)\n" \
" lctl 0,0,0(%1)\n" \
"0: ex %2,0(1)\n" /* execute stctl */ \
" l 0,0(%1)\n" \
" or 0,%3\n" /* set the bit */ \
" st 0,0(%1)\n" \
"1: ex %2,4(1)" /* execute lctl */ \
: "=m" (__dummy) \
: "a" ((((unsigned long) &__dummy) + 7) & ~7UL), \
"a" (cr*17), "a" (1<<(bit)) \
: "cc", "0", "1" ); \
})
#define __ctl_clear_bit(cr, bit) ({ \
__u8 __dummy[16]; \
__asm__ __volatile__ ( \
" la 1,%0\n" /* align to 8 byte */ \
" ahi 1,7\n" \
" srl 1,3\n" \
" sll 1,3\n" \
" bras 2,0f\n" /* skip indirect insns */ \
" stctl 0,0,0(1)\n" \
" lctl 0,0,0(1)\n" \
"0: ex %1,0(2)\n" /* execute stctl */ \
" l 0,0(1)\n" \
" nr 0,%2\n" /* set the bit */ \
" st 0,0(1)\n" \
"1: ex %1,4(2)" /* execute lctl */ \
: "=m" (__dummy) : "a" (cr*17), "a" (~(1<<(bit))) \
: "cc", "0", "1", "2"); \
" bras 1,0f\n" /* skip indirect insns */ \
" stctl 0,0,0(%1)\n" \
" lctl 0,0,0(%1)\n" \
"0: ex %2,0(1)\n" /* execute stctl */ \
" l 0,0(%1)\n" \
" nr 0,%3\n" /* set the bit */ \
" st 0,0(%1)\n" \
"1: ex %2,4(1)" /* execute lctl */ \
: "=m" (__dummy) \
: "a" ((((unsigned long) &__dummy) + 7) & ~7UL), \
"a" (cr*17), "a" (~(1<<(bit))) \
: "cc", "0", "1" ); \
})
#endif /* __s390x__ */
......
......@@ -76,13 +76,16 @@ static inline void global_flush_tlb(void)
}
#endif /* __s390x__ */
{
long dummy = 0;
register unsigned long addr asm("4");
long dummy;
dummy = 0;
addr = ((unsigned long) &dummy) + 1;
__asm__ __volatile__ (
" la 4,1(%0)\n"
" slr 2,2\n"
" slr 3,3\n"
" csp 2,4"
: : "a" (&dummy) : "cc", "2", "3", "4" );
" csp 2,%0"
: : "a" (addr) : "cc", "2", "3" );
}
}
......
......@@ -113,82 +113,83 @@ struct exception_table_entry
#define __put_user_asm_8(x, ptr, err) \
({ \
register __typeof__(x) const * __from asm("2"); \
register __typeof__(*(ptr)) * __to asm("4"); \
__from = &(x); \
__to = (ptr); \
__asm__ __volatile__ ( \
" sr %0,%0\n" \
" la 2,%2\n" \
" la 4,%1\n" \
" sacf 512\n" \
"0: mvc 0(8,4),0(2)\n" \
"0: mvc 0(8,%1),0(%2)\n" \
" sacf 0\n" \
"1:\n" \
__uaccess_fixup \
: "=&d" (err) \
: "m" (*(__u64*)(ptr)), "m" (x), "K" (-EFAULT) \
: "cc", "2", "4" ); \
: "a" (__to),"a" (__from),"K" (-EFAULT),"0" (0) \
: "cc" ); \
})
#else /* __s390x__ */
#define __put_user_asm_8(x, ptr, err) \
({ \
register __typeof__(*(ptr)) * __ptr asm("4"); \
__ptr = (ptr); \
__asm__ __volatile__ ( \
" sr %0,%0\n" \
" la 4,%1\n" \
" sacf 512\n" \
"0: stg %2,0(4)\n" \
"0: stg %2,0(%1)\n" \
" sacf 0\n" \
"1:\n" \
__uaccess_fixup \
: "=&d" (err) \
: "m" (*(__u64*)(ptr)), "d" (x), "K" (-EFAULT) \
: "cc", "4" ); \
: "a" (__ptr), "d" (x), "K" (-EFAULT), "0" (0) \
: "cc" ); \
})
#endif /* __s390x__ */
#define __put_user_asm_4(x, ptr, err) \
({ \
register __typeof__(*(ptr)) * __ptr asm("4"); \
__ptr = (ptr); \
__asm__ __volatile__ ( \
" sr %0,%0\n" \
" la 4,%1\n" \
" sacf 512\n" \
"0: st %2,0(4)\n" \
"0: st %2,0(%1)\n" \
" sacf 0\n" \
"1:\n" \
__uaccess_fixup \
: "=&d" (err) \
: "m" (*(__u32*)(ptr)), "d" (x), "K" (-EFAULT) \
: "cc", "4" ); \
: "a" (__ptr), "d" (x), "K" (-EFAULT), "0" (0) \
: "cc" ); \
})
#define __put_user_asm_2(x, ptr, err) \
({ \
register __typeof__(*(ptr)) * __ptr asm("4"); \
__ptr = (ptr); \
__asm__ __volatile__ ( \
" sr %0,%0\n" \
" la 4,%1\n" \
" sacf 512\n" \
"0: sth %2,0(4)\n" \
"0: sth %2,0(%1)\n" \
" sacf 0\n" \
"1:\n" \
__uaccess_fixup \
: "=&d" (err) \
: "m" (*(__u16*)(ptr)), "d" (x), "K" (-EFAULT) \
: "cc", "4" ); \
: "a" (__ptr), "d" (x), "K" (-EFAULT), "0" (0) \
: "cc" ); \
})
#define __put_user_asm_1(x, ptr, err) \
({ \
register __typeof__(*(ptr)) * __ptr asm("4"); \
__ptr = (ptr); \
__asm__ __volatile__ ( \
" sr %0,%0\n" \
" la 4,%1\n" \
" sacf 512\n" \
"0: stc %2,0(4)\n" \
"0: stc %2,0(%1)\n" \
" sacf 0\n" \
"1:\n" \
__uaccess_fixup \
: "=&d" (err) \
: "m" (*(__u8*)(ptr)), "d" (x), "K" (-EFAULT) \
: "cc", "4" ); \
: "a" (__ptr), "d" (x), "K" (-EFAULT), "0" (0) \
: "cc" ); \
})
#define __put_user(x, ptr) \
......@@ -223,35 +224,36 @@ extern int __put_user_bad(void);
#define __get_user_asm_8(x, ptr, err) \
({ \
register __typeof__(*(ptr)) const * __from asm("2"); \
register __typeof__(x) * __to asm("4"); \
__from = (ptr); \
__to = &(x); \
__asm__ __volatile__ ( \
" sr %0,%0\n" \
" la 2,%1\n" \
" la 4,%2\n" \
" sacf 512\n" \
"0: mvc 0(8,2),0(4)\n" \
"0: mvc 0(8,%1),0(%2)\n" \
" sacf 0\n" \
"1:\n" \
__uaccess_fixup \
: "=&d" (err), "=m" (x) \
: "m" (*(const __u64*)(ptr)),"K" (-EFAULT) \
: "cc", "2", "4" ); \
: "a" (__to),"a" (__from),"K" (-EFAULT),"0" (0) \
: "cc" ); \
})
#else /* __s390x__ */
#define __get_user_asm_8(x, ptr, err) \
({ \
register __typeof__(*(ptr)) const * __ptr asm("4"); \
__ptr = (ptr); \
__asm__ __volatile__ ( \
" sr %0,%0\n" \
" la 4,%2\n" \
" sacf 512\n" \
"0: lg %1,0(4)\n" \
"0: lg %1,0(%2)\n" \
" sacf 0\n" \
"1:\n" \
__uaccess_fixup \
: "=&d" (err), "=d" (x) \
: "m" (*(const __u64*)(ptr)),"K" (-EFAULT) \
: "cc", "4" ); \
: "a" (__ptr), "K" (-EFAULT), "0" (0) \
: "cc" ); \
})
#endif /* __s390x__ */
......@@ -259,48 +261,48 @@ extern int __put_user_bad(void);
#define __get_user_asm_4(x, ptr, err) \
({ \
register __typeof__(*(ptr)) const * __ptr asm("4"); \
__ptr = (ptr); \
__asm__ __volatile__ ( \
" sr %0,%0\n" \
" la 4,%2\n" \
" sacf 512\n" \
"0: l %1,0(4)\n" \
"0: l %1,0(%2)\n" \
" sacf 0\n" \
"1:\n" \
__uaccess_fixup \
: "=&d" (err), "=d" (x) \
: "m" (*(const __u32*)(ptr)),"K" (-EFAULT) \
: "cc", "4" ); \
: "a" (__ptr), "K" (-EFAULT), "0" (0) \
: "cc" ); \
})
#define __get_user_asm_2(x, ptr, err) \
({ \
register __typeof__(*(ptr)) const * __ptr asm("4"); \
__ptr = (ptr); \
__asm__ __volatile__ ( \
" sr %0,%0\n" \
" la 4,%2\n" \
" sacf 512\n" \
"0: lh %1,0(4)\n" \
"0: lh %1,0(%2)\n" \
" sacf 0\n" \
"1:\n" \
__uaccess_fixup \
: "=&d" (err), "=d" (x) \
: "m" (*(const __u16*)(ptr)),"K" (-EFAULT) \
: "cc", "4" ); \
: "a" (__ptr), "K" (-EFAULT), "0" (0) \
: "cc" ); \
})
#define __get_user_asm_1(x, ptr, err) \
({ \
register __typeof__(*(ptr)) const * __ptr asm("4"); \
__ptr = (ptr); \
__asm__ __volatile__ ( \
" sr %0,%0\n" \
" la 4,%2\n" \
" sr %1,%1\n" \
" sacf 512\n" \
"0: ic %1,0(4)\n" \
"0: ic %1,0(%2)\n" \
" sacf 0\n" \
"1:\n" \
__uaccess_fixup \
: "=&d" (err), "=d" (x) \
: "m" (*(const __u8*)(ptr)),"K" (-EFAULT) \
: "cc", "4" ); \
: "a" (__ptr), "K" (-EFAULT), "0" (0) \
: "cc" ); \
})
#define __get_user(x, ptr) \
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment