Commit 86e08d64 authored by Sven Schnelle, committed by Vasily Gorbik

s390/entry: Add base register to CHECK_VMAP_STACK/CHECK_STACK macro

In preparation for having the lowcore at an address other than zero,
add a base register argument to CHECK_VMAP_STACK and CHECK_STACK.
No functional change, because %r0 is passed to the macros.
Signed-off-by: Sven Schnelle <svens@linux.ibm.com>
Signed-off-by: Vasily Gorbik <gor@linux.ibm.com>
parent 6908f8f9
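A minimal sketch of why passing %r0 keeps the old behaviour (editorial note, not part of the commit): in s390 address generation a base register field of 0 contributes the constant 0, not the contents of %r0, so for lowcore offsets that fit into the displacement field the new la form computes the same value as the old lghi form.

	lghi	%r14,__LC_SAVE_AREA_SYNC	# old macro body: load the absolute lowcore offset
	la	%r14,__LC_SAVE_AREA_SYNC(%r0)	# new macro body with \lowcore = %r0: same value,
						# because base register 0 adds 0, not %r0's contents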
@@ -49,30 +49,30 @@ _LPP_OFFSET = __LC_LPP
 	ALT_FACILITY(193)
 	.endm
 
-	.macro CHECK_STACK savearea
+	.macro CHECK_STACK savearea, lowcore
 #ifdef CONFIG_CHECK_STACK
 	tml	%r15,THREAD_SIZE - CONFIG_STACK_GUARD
-	lghi	%r14,\savearea
+	la	%r14,\savearea(\lowcore)
 	jz	stack_overflow
 #endif
 	.endm
 
-	.macro CHECK_VMAP_STACK savearea,oklabel
+	.macro CHECK_VMAP_STACK savearea, lowcore, oklabel
 #ifdef CONFIG_VMAP_STACK
 	lgr	%r14,%r15
 	nill	%r14,0x10000 - THREAD_SIZE
 	oill	%r14,STACK_INIT_OFFSET
-	clg	%r14,__LC_KERNEL_STACK
+	clg	%r14,__LC_KERNEL_STACK(\lowcore)
 	je	\oklabel
-	clg	%r14,__LC_ASYNC_STACK
+	clg	%r14,__LC_ASYNC_STACK(\lowcore)
 	je	\oklabel
-	clg	%r14,__LC_MCCK_STACK
+	clg	%r14,__LC_MCCK_STACK(\lowcore)
 	je	\oklabel
-	clg	%r14,__LC_NODAT_STACK
+	clg	%r14,__LC_NODAT_STACK(\lowcore)
 	je	\oklabel
-	clg	%r14,__LC_RESTART_STACK
+	clg	%r14,__LC_RESTART_STACK(\lowcore)
 	je	\oklabel
-	lghi	%r14,\savearea
+	la	%r14,\savearea(\lowcore)
 	j	stack_overflow
 #else
 	j	\oklabel
@@ -331,10 +331,10 @@ SYM_CODE_START(pgm_check_handler)
 	jnz	2f			# -> enabled, can't be a double fault
 	tm	__LC_PGM_ILC+3,0x80	# check for per exception
 	jnz	.Lpgm_svcper		# -> single stepped svc
-2:	CHECK_STACK __LC_SAVE_AREA_SYNC
+2:	CHECK_STACK __LC_SAVE_AREA_SYNC,%r0
 	aghi	%r15,-(STACK_FRAME_OVERHEAD + __PT_SIZE)
 	# CHECK_VMAP_STACK branches to stack_overflow or 4f
-	CHECK_VMAP_STACK __LC_SAVE_AREA_SYNC,4f
+	CHECK_VMAP_STACK __LC_SAVE_AREA_SYNC,%r0,4f
 3:	lg	%r15,__LC_KERNEL_STACK
 4:	la	%r11,STACK_FRAME_OVERHEAD(%r15)
 	xc	__PT_FLAGS(8,%r11),__PT_FLAGS(%r11)
@@ -406,7 +406,7 @@ SYM_CODE_START(\name)
 	BPENTER	__SF_SIE_FLAGS(%r15),_TIF_ISOLATE_BP_GUEST
 	SIEEXIT __SF_SIE_CONTROL(%r15),%r0
 #endif
-0:	CHECK_STACK __LC_SAVE_AREA_ASYNC
+0:	CHECK_STACK __LC_SAVE_AREA_ASYNC,%r0
 	aghi	%r15,-(STACK_FRAME_OVERHEAD + __PT_SIZE)
 	j	2f
 1:	lctlg	%c1,%c1,__LC_KERNEL_ASCE
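Once the lowcore actually lives at a non-zero address, callers would pass a register holding the lowcore base instead of %r0. A hedged sketch of such call sites; the register choice %r13 and the assumption that earlier code loaded the lowcore base into it are hypothetical and not taken from this commit:

	# hypothetical later call sites, assuming prior code has put the
	# lowcore base address into %r13:
0:	CHECK_STACK __LC_SAVE_AREA_ASYNC,%r13		# save area now addressed relative to %r13
	CHECK_VMAP_STACK __LC_SAVE_AREA_SYNC,%r13,4f	# known stack pointers read from the %r13-based lowcore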