Commit a4da3d86 authored by Ingo Molnar

Revert "x86/paravirt: Work around GCC inlining bugs when compiling paravirt ops"

This reverts commit 494b5168.

See this commit for details about the revert:

  e769742d ("Revert "x86/jump-labels: Macrofy inline assembly code to work around GCC inlining bugs"")
Reported-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Reviewed-by: Borislav Petkov <bp@alien8.de>
Reviewed-by: Thomas Gleixner <tglx@linutronix.de>
Cc: Juergen Gross <jgross@suse.com>
Cc: Richard Biener <rguenther@suse.de>
Cc: Kees Cook <keescook@chromium.org>
Cc: Segher Boessenkool <segher@kernel.crashing.org>
Cc: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Josh Poimboeuf <jpoimboe@redhat.com>
Cc: Nadav Amit <namit@vmware.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent 81a68455
arch/x86/include/asm/paravirt_types.h
@@ -348,11 +348,23 @@ extern struct paravirt_patch_template pv_ops;
 #define paravirt_clobber(clobber)  \
         [paravirt_clobber] "i" (clobber)
 
+/*
+ * Generate some code, and mark it as patchable by the
+ * apply_paravirt() alternate instruction patcher.
+ */
+#define _paravirt_alt(insn_string, type, clobber)  \
+        "771:\n\t" insn_string "\n" "772:\n"       \
+        ".pushsection .parainstructions,\"a\"\n"   \
+        _ASM_ALIGN "\n"                            \
+        _ASM_PTR " 771b\n"                         \
+        " .byte " type "\n"                        \
+        " .byte 772b-771b\n"                       \
+        " .short " clobber "\n"                    \
+        ".popsection\n"
+
 /* Generate patchable code, with the default asm parameters. */
-#define paravirt_call                                          \
-        "PARAVIRT_CALL type=\"%c[paravirt_typenum]\""          \
-        " clobber=\"%c[paravirt_clobber]\""                    \
-        " pv_opptr=\"%c[paravirt_opptr]\";"
+#define paravirt_alt(insn_string)                              \
+        _paravirt_alt(insn_string, "%c[paravirt_typenum]", "%c[paravirt_clobber]")
 
 /* Simple instruction patching code. */
 #define NATIVE_LABEL(a,x,b) "\n\t.globl " a #x "_" #b "\n" a #x "_" #b ":\n\t"
@@ -372,6 +384,16 @@ unsigned native_patch(u8 type, void *ibuf, unsigned long addr, unsigned len);
 
 int paravirt_disable_iospace(void);
 
+/*
+ * This generates an indirect call based on the operation type number.
+ * The type number, computed in PARAVIRT_PATCH, is derived from the
+ * offset into the paravirt_patch_template structure, and can therefore be
+ * freely converted back into a structure offset.
+ */
+#define PARAVIRT_CALL                   \
+        ANNOTATE_RETPOLINE_SAFE         \
+        "call *%c[paravirt_opptr];"
+
 /*
  * These macros are intended to wrap calls through one of the paravirt
  * ops structs, so that they can be later identified and patched at
@@ -509,7 +531,7 @@ int paravirt_disable_iospace(void);
                 /* since this condition will never hold */             \
                 if (sizeof(rettype) > sizeof(unsigned long)) {         \
                         asm volatile(pre                                \
-                                     paravirt_call                      \
+                                     paravirt_alt(PARAVIRT_CALL)        \
                                      post                               \
                                      : call_clbr, ASM_CALL_CONSTRAINT   \
                                      : paravirt_type(op),               \
@@ -519,7 +541,7 @@ int paravirt_disable_iospace(void);
                         __ret = (rettype)((((u64)__edx) << 32) | __eax); \
                 } else {                                                \
                         asm volatile(pre                                \
-                                     paravirt_call                      \
+                                     paravirt_alt(PARAVIRT_CALL)        \
                                      post                               \
                                      : call_clbr, ASM_CALL_CONSTRAINT   \
                                      : paravirt_type(op),               \
@@ -546,7 +568,7 @@ int paravirt_disable_iospace(void);
                 PVOP_VCALL_ARGS;                                        \
                 PVOP_TEST_NULL(op);                                     \
                 asm volatile(pre                                        \
-                             paravirt_call                              \
+                             paravirt_alt(PARAVIRT_CALL)                \
                              post                                       \
                              : call_clbr, ASM_CALL_CONSTRAINT           \
                              : paravirt_type(op),                       \
@@ -664,26 +686,6 @@ struct paravirt_patch_site {
 extern struct paravirt_patch_site __parainstructions[],
         __parainstructions_end[];
 
-#else   /* __ASSEMBLY__ */
-
-/*
- * This generates an indirect call based on the operation type number.
- * The type number, computed in PARAVIRT_PATCH, is derived from the
- * offset into the paravirt_patch_template structure, and can therefore be
- * freely converted back into a structure offset.
- */
-.macro PARAVIRT_CALL type:req clobber:req pv_opptr:req
-771:    ANNOTATE_RETPOLINE_SAFE
-        call *\pv_opptr
-772:    .pushsection .parainstructions,"a"
-        _ASM_ALIGN
-        _ASM_PTR 771b
-        .byte \type
-        .byte 772b-771b
-        .short \clobber
-        .popsection
-.endm
-
 #endif  /* __ASSEMBLY__ */
 #endif  /* _ASM_X86_PARAVIRT_TYPES_H */
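For reference, with the restored C macros a PVOP call site once again hands the compiler a single self-contained asm template. Expanding paravirt_alt(PARAVIRT_CALL) with its default parameters (ANNOTATE_RETPOLINE_SAFE and the surrounding constraint lists left out), the string passed to asm volatile() looks roughly like this sketch, assembled purely from the macros restored above:

  /* Illustrative expansion only -- pieced together from the hunk above. */
  "771:\n\t"
  "call *%c[paravirt_opptr];" "\n"
  "772:\n"
  ".pushsection .parainstructions,\"a\"\n"
  _ASM_ALIGN "\n"
  _ASM_PTR " 771b\n"
  " .byte %c[paravirt_typenum]\n"
  " .byte 772b-771b\n"
  " .short %c[paravirt_clobber]\n"
  ".popsection\n"

In other words, the indirect call sits between labels 771 and 772, and a .parainstructions record (site address, operation type, site length, clobber mask) is emitted alongside it for apply_paravirt() to consume.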
arch/x86/kernel/macros.S
@@ -10,4 +10,3 @@
 #include <asm/refcount.h>
 #include <asm/alternative-asm.h>
 #include <asm/bug.h>
-#include <asm/paravirt.h>
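The records emitted into .parainstructions are what apply_paravirt() walks between __parainstructions[] and __parainstructions_end[] (declared in the hunk above). A rough sketch of the per-site layout implied by the _ASM_PTR/.byte/.byte/.short directives, with illustrative field names only (the authoritative definition is struct paravirt_patch_site in the same header; u8/u16 come from <linux/types.h>):

  /* Sketch only: mirrors the directive sequence, not the real declaration. */
  struct parainstruction_record {
          void *instr;     /* _ASM_PTR 771b: address of the patchable call  */
          u8    type;      /* paravirt_typenum: pv_ops slot index            */
          u8    len;       /* 772b-771b: bytes available for patching        */
          u16   clobbers;  /* paravirt_clobber: register clobber mask        */
  };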