Commit 6ba3b554 authored by Mark Rutland, committed by Will Deacon

arm64: use alternative auto-nop

Make use of the new alternative_if and alternative_else_nop_endif and
get rid of our homebrew NOP sleds, making the code simpler to read.

Note that for cpu_do_switch_mm the ret has been moved out of the
alternative sequence, and in the default case there will be three
additional NOPs executed.
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Cc: Catalin Marinas <catalin.marinas@arm.com>
Cc: James Morse <james.morse@arm.com>
Cc: Will Deacon <will.deacon@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
parent 792d4737
...@@ -150,13 +150,7 @@ ...@@ -150,13 +150,7 @@
ldr x23, [sp, #S_SP] // load return stack pointer ldr x23, [sp, #S_SP] // load return stack pointer
msr sp_el0, x23 msr sp_el0, x23
#ifdef CONFIG_ARM64_ERRATUM_845719 #ifdef CONFIG_ARM64_ERRATUM_845719
alternative_if_not ARM64_WORKAROUND_845719 alternative_if ARM64_WORKAROUND_845719
nop
nop
#ifdef CONFIG_PID_IN_CONTEXTIDR
nop
#endif
alternative_else
tbz x22, #4, 1f tbz x22, #4, 1f
#ifdef CONFIG_PID_IN_CONTEXTIDR #ifdef CONFIG_PID_IN_CONTEXTIDR
mrs x29, contextidr_el1 mrs x29, contextidr_el1
...@@ -165,7 +159,7 @@ alternative_else ...@@ -165,7 +159,7 @@ alternative_else
msr contextidr_el1, xzr msr contextidr_el1, xzr
#endif #endif
1: 1:
alternative_endif alternative_else_nop_endif
#endif #endif
.endif .endif
msr elr_el1, x21 // set up the return data msr elr_el1, x21 // set up the return data
......
...@@ -29,14 +29,11 @@ ...@@ -29,14 +29,11 @@
* x1 - src * x1 - src
*/ */
ENTRY(copy_page) ENTRY(copy_page)
alternative_if_not ARM64_HAS_NO_HW_PREFETCH alternative_if ARM64_HAS_NO_HW_PREFETCH
nop
nop
alternative_else
# Prefetch two cache lines ahead. # Prefetch two cache lines ahead.
prfm pldl1strm, [x1, #128] prfm pldl1strm, [x1, #128]
prfm pldl1strm, [x1, #256] prfm pldl1strm, [x1, #256]
alternative_endif alternative_else_nop_endif
ldp x2, x3, [x1] ldp x2, x3, [x1]
ldp x4, x5, [x1, #16] ldp x4, x5, [x1, #16]
...@@ -52,11 +49,9 @@ alternative_endif ...@@ -52,11 +49,9 @@ alternative_endif
1: 1:
subs x18, x18, #128 subs x18, x18, #128
alternative_if_not ARM64_HAS_NO_HW_PREFETCH alternative_if ARM64_HAS_NO_HW_PREFETCH
nop
alternative_else
prfm pldl1strm, [x1, #384] prfm pldl1strm, [x1, #384]
alternative_endif alternative_else_nop_endif
stnp x2, x3, [x0] stnp x2, x3, [x0]
ldp x2, x3, [x1] ldp x2, x3, [x1]
......
...@@ -127,17 +127,12 @@ ENTRY(cpu_do_switch_mm) ...@@ -127,17 +127,12 @@ ENTRY(cpu_do_switch_mm)
bfi x0, x1, #48, #16 // set the ASID bfi x0, x1, #48, #16 // set the ASID
msr ttbr0_el1, x0 // set TTBR0 msr ttbr0_el1, x0 // set TTBR0
isb isb
alternative_if_not ARM64_WORKAROUND_CAVIUM_27456 alternative_if ARM64_WORKAROUND_CAVIUM_27456
ret
nop
nop
nop
alternative_else
ic iallu ic iallu
dsb nsh dsb nsh
isb isb
alternative_else_nop_endif
ret ret
alternative_endif
ENDPROC(cpu_do_switch_mm) ENDPROC(cpu_do_switch_mm)
.pushsection ".idmap.text", "ax" .pushsection ".idmap.text", "ax"
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.