Commit 6f700d38 authored by Cyril Bur, committed by Michael Ellerman

powerpc: Remove facility loadups on transactional {fp, vec, vsx} unavailable

After handling a transactional FP, Altivec or VSX unavailable exception, the
return-to-userspace code will detect that the TIF_RESTORE_TM bit is set and
call restore_tm_state(). restore_tm_state() will in turn call restore_math()
to ensure that the correct facilities are loaded.

This means that all the loadup code in {fp,altivec,vsx}_unavailable_tm()
is doing pointless work and can simply be removed.
Signed-off-by: Cyril Bur <cyrilbur@gmail.com>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
parent eb5c3f1c
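
The flow described in the commit message can be illustrated with a small
stand-alone model. The C sketch below is not kernel code: it borrows the
kernel names (TIF_RESTORE_TM, restore_tm_state(), restore_math(),
fp_unavailable_tm()) purely as labels for simplified stand-in functions, to
show why a facility loadup inside the unavailable handler becomes redundant
once the return-to-userspace path reloads the math state.

/*
 * Stand-alone model of the path described above. All names are
 * simplified stand-ins for the real kernel symbols of the same name;
 * this is an illustration, not kernel code.
 */
#include <stdbool.h>
#include <stdio.h>

#define MSR_FP	0x1UL
#define MSR_VEC	0x2UL
#define MSR_VSX	0x4UL

struct task_model {
	bool tif_restore_tm;	/* stands in for the TIF_RESTORE_TM flag */
	unsigned long msr;	/* stands in for regs->msr */
};

/* Stand-in for restore_math(): loads whatever facilities the task needs. */
static void restore_math(struct task_model *t)
{
	t->msr |= MSR_FP | MSR_VEC | MSR_VSX;
	printf("restore_math: msr = 0x%lx\n", t->msr);
}

/* Stand-in for restore_tm_state(): runs on return to userspace when
 * TIF_RESTORE_TM is set and defers all facility loading to restore_math(). */
static void restore_tm_state(struct task_model *t)
{
	t->tif_restore_tm = false;
	restore_math(t);
}

/* Stand-in for fp_unavailable_tm() after this patch: reclaim and
 * recheckpoint would happen here, but no FP/VMX/VSX loadup is needed,
 * only the flag that makes the exit path call restore_tm_state(). */
static void fp_unavailable_tm(struct task_model *t)
{
	t->tif_restore_tm = true;
}

int main(void)
{
	struct task_model t = { .tif_restore_tm = false, .msr = 0 };

	fp_unavailable_tm(&t);		/* exception handler runs */
	if (t.tif_restore_tm)		/* checked on return to userspace */
		restore_tm_state(&t);	/* ends up loading the facilities */
	return 0;
}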
@@ -1663,12 +1663,6 @@ void facility_unavailable_exception(struct pt_regs *regs)
 void fp_unavailable_tm(struct pt_regs *regs)
 {
-	/*
-	 * Save the MSR now because tm_reclaim_current() is likely to
-	 * change it
-	 */
-	unsigned long orig_msr = regs->msr;
 	/* Note: This does not handle any kind of FP laziness. */
 	TM_DEBUG("FP Unavailable trap whilst transactional at 0x%lx, MSR=%lx\n",
@@ -1694,24 +1688,10 @@ void fp_unavailable_tm(struct pt_regs *regs)
 	 * so we don't want to load the VRs from the thread_struct.
 	 */
 	tm_recheckpoint(&current->thread);
-	/* If VMX is in use, get the transactional values back */
-	if (orig_msr & MSR_VEC) {
-		msr_check_and_set(MSR_VEC);
-		load_vr_state(&current->thread.vr_state);
-		/* At this point all the VSX state is loaded, so enable it */
-		regs->msr |= MSR_VSX;
-	}
 }
 void altivec_unavailable_tm(struct pt_regs *regs)
 {
-	/*
-	 * Save the MSR now because tm_reclaim_current() is likely to
-	 * change it
-	 */
-	unsigned long orig_msr = regs->msr;
 	/* See the comments in fp_unavailable_tm(). This function operates
 	 * the same way.
 	 */
@@ -1723,12 +1703,6 @@ void altivec_unavailable_tm(struct pt_regs *regs)
 	current->thread.load_vec = 1;
 	tm_recheckpoint(&current->thread);
 	current->thread.used_vr = 1;
-	if (orig_msr & MSR_FP) {
-		msr_check_and_set(MSR_FP);
-		load_fp_state(&current->thread.fp_state);
-		regs->msr |= MSR_VSX;
-	}
 }
 void vsx_unavailable_tm(struct pt_regs *regs)
@@ -1753,10 +1727,6 @@ void vsx_unavailable_tm(struct pt_regs *regs)
 	current->thread.load_fp = 1;
 	tm_recheckpoint(&current->thread);
-	msr_check_and_set(MSR_FP | MSR_VEC);
-	load_fp_state(&current->thread.fp_state);
-	load_vr_state(&current->thread.vr_state);
 }
 #endif /* CONFIG_PPC_TRANSACTIONAL_MEM */
...