Commit 359c2ca7 authored by Christophe Leroy, committed by Michael Ellerman

powerpc: Don't handle ALTIVEC/SPE in ASM in _switch(). Do it in C.

_switch() saves and restores the ALTIVEC and SPE status registers.
For ALTIVEC this is redundant with what __switch_to() already does
via save_sprs(), restore_sprs() and giveup_all() before calling
_switch().

Add SPE support to save_sprs() and restore_sprs(), and remove the
corresponding code from _switch().
Signed-off-by: Christophe Leroy <christophe.leroy@csgroup.eu>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/8ab21fd93d6e0047aa71e6509e5e312f14b2991b.1620998075.git.christophe.leroy@csgroup.eu
parent 4423eff7
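
For context, the redundancy argument in the commit message rests on the call
ordering inside __switch_to() (arch/powerpc/kernel/process.c): the SPR and
FP/VEC/SPE state handling happens in C before the low-level stack switch.
A simplified sketch of that ordering follows -- function bodies and unrelated
steps are elided, and this is an illustration of the ordering only, not the
verbatim kernel code:

/*
 * Sketch only: the real __switch_to() does much more (TM, breakpoints,
 * debug regs, ...).  The point is the order the commit message relies on:
 * SPRs and FP/VEC/SPE state are dealt with in C before _switch() runs.
 */
struct task_struct *__switch_to(struct task_struct *prev,
				struct task_struct *new)
{
	struct thread_struct *new_thread = &new->thread;
	struct thread_struct *old_thread = &current->thread;
	struct task_struct *last;

	save_sprs(&prev->thread);	/* with this patch, also saves SPEFSCR */

	/* flush FPU, Altivec, VSX and SPE state to the thread_struct */
	giveup_all(prev);

	/* ... other bookkeeping elided ... */

	restore_sprs(old_thread, new_thread);	/* also restores SPEFSCR */

	last = _switch(old_thread, new_thread);

	/* ... */
	return last;
}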
@@ -116,7 +116,6 @@ int main(void)
 #ifdef CONFIG_ALTIVEC
 	OFFSET(THREAD_VRSTATE, thread_struct, vr_state.vr);
 	OFFSET(THREAD_VRSAVEAREA, thread_struct, vr_save_area);
-	OFFSET(THREAD_VRSAVE, thread_struct, vrsave);
 	OFFSET(THREAD_USED_VR, thread_struct, used_vr);
 	OFFSET(VRSTATE_VSCR, thread_vr_state, vscr);
 	OFFSET(THREAD_LOAD_VEC, thread_struct, load_vec);
@@ -147,7 +146,6 @@ int main(void)
 #ifdef CONFIG_SPE
 	OFFSET(THREAD_EVR0, thread_struct, evr[0]);
 	OFFSET(THREAD_ACC, thread_struct, acc);
-	OFFSET(THREAD_SPEFSCR, thread_struct, spefscr);
 	OFFSET(THREAD_USED_SPE, thread_struct, used_spe);
 #endif /* CONFIG_SPE */
 #endif /* CONFIG_PPC64 */
@@ -176,28 +176,6 @@ _GLOBAL(_switch)
 	/* r3-r12 are caller saved -- Cort */
 	SAVE_NVGPRS(r1)
 	stw	r0,_NIP(r1)	/* Return to switch caller */
-	mfmsr	r11
-	li	r0,MSR_FP	/* Disable floating-point */
-#ifdef CONFIG_ALTIVEC
-BEGIN_FTR_SECTION
-	oris	r0,r0,MSR_VEC@h	/* Disable altivec */
-	mfspr	r12,SPRN_VRSAVE	/* save vrsave register value */
-	stw	r12,THREAD+THREAD_VRSAVE(r2)
-END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
-#endif /* CONFIG_ALTIVEC */
-#ifdef CONFIG_SPE
-BEGIN_FTR_SECTION
-	oris	r0,r0,MSR_SPE@h	/* Disable SPE */
-	mfspr	r12,SPRN_SPEFSCR	/* save spefscr register value */
-	stw	r12,THREAD+THREAD_SPEFSCR(r2)
-END_FTR_SECTION_IFSET(CPU_FTR_SPE)
-#endif /* CONFIG_SPE */
-	and.	r0,r0,r11	/* FP or altivec or SPE enabled? */
-	beq+	1f
-	andc	r11,r11,r0
-	mtmsr	r11
-	isync
-1:	stw	r11,_MSR(r1)
 	mfcr	r10
 	stw	r10,_CCR(r1)
 	stw	r1,KSP(r3)	/* Set old stack pointer */
@@ -218,19 +196,6 @@ END_FTR_SECTION_IFSET(CPU_FTR_SPE)
 	mr	r3,r2
 	addi	r2,r4,-THREAD	/* Update current */
 
-#ifdef CONFIG_ALTIVEC
-BEGIN_FTR_SECTION
-	lwz	r0,THREAD+THREAD_VRSAVE(r2)
-	mtspr	SPRN_VRSAVE,r0		/* if G4, restore VRSAVE reg */
-END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
-#endif /* CONFIG_ALTIVEC */
-#ifdef CONFIG_SPE
-BEGIN_FTR_SECTION
-	lwz	r0,THREAD+THREAD_SPEFSCR(r2)
-	mtspr	SPRN_SPEFSCR,r0		/* restore SPEFSCR reg */
-END_FTR_SECTION_IFSET(CPU_FTR_SPE)
-#endif /* CONFIG_SPE */
-
 	lwz	r0,_CCR(r1)
 	mtcrf	0xFF,r0
 	/* r3-r12 are destroyed -- Cort */
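
Note on the assembly deleted above: besides saving and restoring VRSAVE and
SPEFSCR (now done by save_sprs()/restore_sprs(), see the process.c hunks
below), it also cleared MSR_FP/MSR_VEC/MSR_SPE around the switch. Per the
commit message that is redundant with giveup_all(), which __switch_to() calls
before _switch(). A rough, heavily simplified sketch of the shape of
giveup_all() -- the __giveup_fpu/__giveup_altivec/__giveup_spe helpers exist
in process.c, but the exact checks and #ifdefs are elided here:

/*
 * Rough sketch: giveup_all() flushes any live FP/Altivec/SPE state back
 * to the thread_struct and clears the corresponding MSR bits in the saved
 * user regs, so _switch() no longer has to touch MSR itself.
 */
void giveup_all(struct task_struct *tsk)
{
	unsigned long usermsr = tsk->thread.regs->msr;

	if (!(usermsr & (MSR_FP | MSR_VEC | MSR_SPE)))
		return;

	if (usermsr & MSR_FP)
		__giveup_fpu(tsk);	/* save FPRs, clear MSR_FP */
	if (usermsr & MSR_VEC)
		__giveup_altivec(tsk);	/* save VRs, clear MSR_VEC */
	if (usermsr & MSR_SPE)
		__giveup_spe(tsk);	/* save SPE regs, clear MSR_SPE */
}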
@@ -1129,6 +1129,10 @@ static inline void save_sprs(struct thread_struct *t)
 	if (cpu_has_feature(CPU_FTR_ALTIVEC))
 		t->vrsave = mfspr(SPRN_VRSAVE);
 #endif
+#ifdef CONFIG_SPE
+	if (cpu_has_feature(CPU_FTR_SPE))
+		t->spefscr = mfspr(SPRN_SPEFSCR);
+#endif
 #ifdef CONFIG_PPC_BOOK3S_64
 	if (cpu_has_feature(CPU_FTR_DSCR))
 		t->dscr = mfspr(SPRN_DSCR);
@@ -1159,6 +1163,11 @@ static inline void restore_sprs(struct thread_struct *old_thread,
 	    old_thread->vrsave != new_thread->vrsave)
 		mtspr(SPRN_VRSAVE, new_thread->vrsave);
 #endif
+#ifdef CONFIG_SPE
+	if (cpu_has_feature(CPU_FTR_SPE) &&
+	    old_thread->spefscr != new_thread->spefscr)
+		mtspr(SPRN_SPEFSCR, new_thread->spefscr);
+#endif
 #ifdef CONFIG_PPC_BOOK3S_64
 	if (cpu_has_feature(CPU_FTR_DSCR)) {
 		u64 dscr = get_paca()->dscr_default;
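
For reference, the fields touched by the two hunks above live in thread_struct.
A trimmed, approximate excerpt follows -- the real definition (in the powerpc
processor.h header) has many more members, and the field types shown are my
best understanding rather than a verified copy:

struct thread_struct {
	/* ... many fields elided ... */
#ifdef CONFIG_ALTIVEC
	unsigned long	vrsave;		/* saved/restored by save_sprs()/restore_sprs() */
#endif
#ifdef CONFIG_SPE
	unsigned long	evr[32];	/* upper halves of SPE GPRs */
	u64		acc;		/* SPE accumulator */
	unsigned long	spefscr;	/* SPE/eFP status, now handled in C */
	int		used_spe;
#endif
	/* ... */
};

Note also that restore_sprs() only writes SPEFSCR (and VRSAVE) when the value
actually differs between the outgoing and incoming thread, so most context
switches avoid the SPR write entirely.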