Commit 96af6cd0 authored by Ingo Molnar

Revert "x86/objtool: Use asm macros to work around GCC inlining bugs"

This reverts commit c06c4d80.

See this commit for details about the revert:

  e769742d ("Revert "x86/jump-labels: Macrofy inline assembly code to work around GCC inlining bugs"")

Reported-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Reviewed-by: Borislav Petkov <bp@alien8.de>
Reviewed-by: Thomas Gleixner <tglx@linutronix.de>
Cc: Juergen Gross <jgross@suse.com>
Cc: Richard Biener <rguenther@suse.de>
Cc: Kees Cook <keescook@chromium.org>
Cc: Segher Boessenkool <segher@kernel.crashing.org>
Cc: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Josh Poimboeuf <jpoimboe@redhat.com>
Cc: Nadav Amit <namit@vmware.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent ac180540
--- a/arch/x86/kernel/macros.S
+++ b/arch/x86/kernel/macros.S
@@ -5,5 +5,3 @@
  * commonly used. The macros are precompiled into assmebly file which is later
  * assembled together with each compiled file.
  */
-
-#include <linux/compiler.h>
--- a/include/linux/compiler.h
+++ b/include/linux/compiler.h
@@ -99,13 +99,22 @@ void ftrace_likely_update(struct ftrace_likely_data *f, int val,
  * unique, to convince GCC not to merge duplicate inline asm statements.
  */
 #define annotate_reachable() ({					\
-	asm volatile("ANNOTATE_REACHABLE counter=%c0"			\
-		     : : "i" (__COUNTER__));				\
+	asm volatile("%c0:\n\t"					\
+		     ".pushsection .discard.reachable\n\t"		\
+		     ".long %c0b - .\n\t"				\
+		     ".popsection\n\t" : : "i" (__COUNTER__));		\
 })
 #define annotate_unreachable() ({					\
-	asm volatile("ANNOTATE_UNREACHABLE counter=%c0"		\
-		     : : "i" (__COUNTER__));				\
+	asm volatile("%c0:\n\t"					\
+		     ".pushsection .discard.unreachable\n\t"		\
+		     ".long %c0b - .\n\t"				\
+		     ".popsection\n\t" : : "i" (__COUNTER__));		\
 })
+#define ASM_UNREACHABLE						\
+	"999:\n\t"							\
+	".pushsection .discard.unreachable\n\t"			\
+	".long 999b - .\n\t"						\
+	".popsection\n\t"
 #else
 #define annotate_reachable()
 #define annotate_unreachable()
@@ -293,45 +302,6 @@ static inline void *offset_to_ptr(const int *off)
 	return (void *)((unsigned long)off + *off);
 }
 
-#else /* __ASSEMBLY__ */
-
-#ifdef __KERNEL__
-#ifndef LINKER_SCRIPT
-
-#ifdef CONFIG_STACK_VALIDATION
-.macro ANNOTATE_UNREACHABLE counter:req
-\counter:
-	.pushsection .discard.unreachable
-	.long \counter\()b -.
-	.popsection
-.endm
-
-.macro ANNOTATE_REACHABLE counter:req
-\counter:
-	.pushsection .discard.reachable
-	.long \counter\()b -.
-	.popsection
-.endm
-
-.macro ASM_UNREACHABLE
-999:
-	.pushsection .discard.unreachable
-	.long 999b - .
-	.popsection
-.endm
-#else /* CONFIG_STACK_VALIDATION */
-.macro ANNOTATE_UNREACHABLE counter:req
-.endm
-
-.macro ANNOTATE_REACHABLE counter:req
-.endm
-
-.macro ASM_UNREACHABLE
-.endm
-#endif /* CONFIG_STACK_VALIDATION */
-#endif /* LINKER_SCRIPT */
-#endif /* __KERNEL__ */
-
 #endif /* __ASSEMBLY__ */
 
 /* Compile time object size, -1 for unknown */
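
For readers following the mechanics: the inline-asm form restored above emits, at each expansion, a unique numeric local label (from __COUNTER__ printed via the %c operand modifier) and a PC-relative entry pointing at that label in a .discard.* section, which objtool consumes and the linker later discards. Below is a minimal standalone sketch of that pattern; the names my_annotate_unreachable, mark_here and the file name sketch.c are hypothetical, and this is an illustration of the construct, not code from this commit.

  /*
   * Illustration only -- not part of this commit.  Mirrors the restored
   * inline-asm annotate_unreachable(): each expansion drops a unique local
   * label and records a PC-relative reference to it in .discard.unreachable.
   * Compiles standalone with GCC on x86: gcc -c sketch.c
   */
  #define my_annotate_unreachable() ({                                  \
          asm volatile("%c0:\n\t"                                       \
                       ".pushsection .discard.unreachable\n\t"          \
                       ".long %c0b - .\n\t"                             \
                       ".popsection\n\t" : : "i" (__COUNTER__));        \
  })

  void mark_here(void)
  {
          /* Emits "N:" here and ".long Nb - ." into .discard.unreachable,
           * where N comes from __COUNTER__ so repeated uses stay distinct. */
          my_annotate_unreachable();
  }

In the kernel the annotation is not normally used bare like this: annotate_unreachable() sits just before __builtin_unreachable() inside the unreachable() helper so objtool knows the instructions past that point are intentionally dead, and the string variant ASM_UNREACHABLE serves the same purpose when the annotation has to live inside an asm() template.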