Commit 34b0900d authored by Thomas Gleixner

x86: warn if arch_flush_lazy_mmu_cpu is called in preemptible context

Impact: Catch cases where lazy MMU state is active in a preemptible context

arch_flush_lazy_mmu_cpu() has been changed to disable preemption so
the checks in enter/leave will never trigger. Put the preemptible()
check into arch_flush_lazy_mmu_cpu() to catch such cases.
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
parent d85cf93d
@@ -273,6 +273,7 @@ void arch_flush_lazy_mmu_mode(void)
 	preempt_disable();
 	if (paravirt_get_lazy_mode() == PARAVIRT_LAZY_MMU) {
+		WARN_ON(preempt_count() == 1);
 		arch_leave_lazy_mmu_mode();
 		arch_enter_lazy_mmu_mode();
 	}
@@ -285,6 +286,7 @@ void arch_flush_lazy_cpu_mode(void)
 	preempt_disable();
 	if (paravirt_get_lazy_mode() == PARAVIRT_LAZY_CPU) {
+		WARN_ON(preempt_count() == 1);
 		arch_leave_lazy_cpu_mode();
 		arch_enter_lazy_cpu_mode();
 	}
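
For reference, a minimal sketch of what arch_flush_lazy_mmu_mode() looks like with this change applied. It is reconstructed from the hunk context above; the closing preempt_enable() and any code outside the shown context are assumptions, not part of the diff.

	/*
	 * Sketch of arch_flush_lazy_mmu_mode() after this patch,
	 * reconstructed from the hunk above; surrounding code assumed.
	 */
	void arch_flush_lazy_mmu_mode(void)
	{
		preempt_disable();

		if (paravirt_get_lazy_mode() == PARAVIRT_LAZY_MMU) {
			/*
			 * preempt_disable() above raised preempt_count() by
			 * one. If the count is now exactly 1, the caller
			 * entered with preemption enabled, i.e. lazy MMU
			 * mode was active in a preemptible context.
			 */
			WARN_ON(preempt_count() == 1);
			arch_leave_lazy_mmu_mode();
			arch_enter_lazy_mmu_mode();
		}

		preempt_enable();
	}

The check works because the function disables preemption itself: any legitimate caller must already hold a non-zero preempt_count(), so a count of exactly 1 at this point can only mean the caller was preemptible.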