Commit e176e267 authored by Mark Rutland, committed by Will Deacon

arm64: assembler: add set_this_cpu_offset

There should be no functional change as a result of this patch.
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Cc: Ard Biesheuvel <ardb@kernel.org>
Cc: Catalin Marinas <catalin.marinas@arm.com>
Cc: James Morse <james.morse@arm.com>
Cc: Marc Zyngier <maz@kernel.org>
Cc: Suzuki Poulose <suzuki.poulose@arm.com>
Cc: Will Deacon <will@kernel.org>
Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20210520115031.18509-3-mark.rutland@arm.com
Signed-off-by: Will Deacon <will@kernel.org>
parent 50355532
...@@ -232,15 +232,23 @@ lr .req x30 // link register ...@@ -232,15 +232,23 @@ lr .req x30 // link register
* @dst: destination register * @dst: destination register
*/ */
#if defined(__KVM_NVHE_HYPERVISOR__) || defined(__KVM_VHE_HYPERVISOR__) #if defined(__KVM_NVHE_HYPERVISOR__) || defined(__KVM_VHE_HYPERVISOR__)
.macro this_cpu_offset, dst .macro get_this_cpu_offset, dst
mrs \dst, tpidr_el2 mrs \dst, tpidr_el2
.endm .endm
#else #else
.macro this_cpu_offset, dst .macro get_this_cpu_offset, dst
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
mrs \dst, tpidr_el1 mrs \dst, tpidr_el1
alternative_else alternative_else
mrs \dst, tpidr_el2 mrs \dst, tpidr_el2
alternative_endif
.endm
.macro set_this_cpu_offset, src
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
msr tpidr_el1, \src
alternative_else
msr tpidr_el2, \src
alternative_endif alternative_endif
.endm .endm
#endif #endif
...@@ -253,7 +261,7 @@ alternative_endif ...@@ -253,7 +261,7 @@ alternative_endif
.macro adr_this_cpu, dst, sym, tmp .macro adr_this_cpu, dst, sym, tmp
adrp \tmp, \sym adrp \tmp, \sym
add \dst, \tmp, #:lo12:\sym add \dst, \tmp, #:lo12:\sym
this_cpu_offset \tmp get_this_cpu_offset \tmp
add \dst, \dst, \tmp add \dst, \dst, \tmp
.endm .endm
...@@ -264,7 +272,7 @@ alternative_endif ...@@ -264,7 +272,7 @@ alternative_endif
*/ */
.macro ldr_this_cpu dst, sym, tmp .macro ldr_this_cpu dst, sym, tmp
adr_l \dst, \sym adr_l \dst, \sym
this_cpu_offset \tmp get_this_cpu_offset \tmp
ldr \dst, [\dst, \tmp] ldr \dst, [\dst, \tmp]
.endm .endm
...@@ -745,7 +753,7 @@ USER(\label, ic ivau, \tmp2) // invalidate I line PoU ...@@ -745,7 +753,7 @@ USER(\label, ic ivau, \tmp2) // invalidate I line PoU
cbz \tmp, \lbl cbz \tmp, \lbl
#endif #endif
adr_l \tmp, irq_stat + IRQ_CPUSTAT_SOFTIRQ_PENDING adr_l \tmp, irq_stat + IRQ_CPUSTAT_SOFTIRQ_PENDING
this_cpu_offset \tmp2 get_this_cpu_offset \tmp2
ldr w\tmp, [\tmp, \tmp2] ldr w\tmp, [\tmp, \tmp2]
cbnz w\tmp, \lbl // yield on pending softirq in task context cbnz w\tmp, \lbl // yield on pending softirq in task context
.Lnoyield_\@: .Lnoyield_\@:
......
...@@ -83,11 +83,7 @@ SYM_FUNC_START(cpu_do_suspend) ...@@ -83,11 +83,7 @@ SYM_FUNC_START(cpu_do_suspend)
mrs x9, mdscr_el1 mrs x9, mdscr_el1
mrs x10, oslsr_el1 mrs x10, oslsr_el1
mrs x11, sctlr_el1 mrs x11, sctlr_el1
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN get_this_cpu_offset x12
mrs x12, tpidr_el1
alternative_else
mrs x12, tpidr_el2
alternative_endif
mrs x13, sp_el0 mrs x13, sp_el0
stp x2, x3, [x0] stp x2, x3, [x0]
stp x4, x5, [x0, #16] stp x4, x5, [x0, #16]
...@@ -145,11 +141,7 @@ SYM_FUNC_START(cpu_do_resume) ...@@ -145,11 +141,7 @@ SYM_FUNC_START(cpu_do_resume)
msr mdscr_el1, x10 msr mdscr_el1, x10
msr sctlr_el1, x12 msr sctlr_el1, x12
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN set_this_cpu_offset x13
msr tpidr_el1, x13
alternative_else
msr tpidr_el2, x13
alternative_endif
msr sp_el0, x14 msr sp_el0, x14
/* /*
* Restore oslsr_el1 by writing oslar_el1 * Restore oslsr_el1 by writing oslar_el1
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment