Commit 4e3aa923 authored by Peter Zijlstra

x86/nospec: Unwreck the RSB stuffing

Commit 2b129932 ("x86/speculation: Add RSB VM Exit protections")
made a right mess of the RSB stuffing, rewrite the whole thing to not
suck.

Thanks to Andrew for the enlightening comment about Post-Barrier RSB
things so we can make this code less magical.

Cc: stable@vger.kernel.org
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Link: https://lkml.kernel.org/r/YvuNdDWoUZSBjYcm@worktop.programming.kicks-ass.net
parent 7df54884
@@ -35,33 +35,44 @@
/* Number of call/stack-adjust iterations needed to overwrite every RSB entry. */
#define RSB_CLEAR_LOOPS		32	/* To forcibly overwrite all entries */
/*
 * Common helper for __FILL_RETURN_BUFFER and __FILL_ONE_RETURN.
 *
 * Pushes one return address onto the stack via CALL and lands past an
 * int3 speculation trap, so the RSB gains one (bogus) entry while the
 * architectural return address is never used (the caller pops it with
 * an explicit stack adjustment).
 */
#define __FILL_RETURN_SLOT			\
	ANNOTATE_INTRA_FUNCTION_CALL;		\
	call	772f;				\
	int3;					\
772:
/*
 * Stuff the entire RSB.
 *
 * Google experimented with loop-unrolling and this turned out to be
 * the optimal version - two calls, each with their own speculation
 * trap should their return address end up getting used, in a loop.
 *
 * Each loop iteration creates two RSB entries via __FILL_RETURN_SLOT
 * and then discards both pushed return addresses with a single stack
 * adjustment, so nr/2 iterations fill nr entries.
 */
#define __FILL_RETURN_BUFFER(reg, nr)			\
	mov	$(nr/2), reg;				\
771:							\
	__FILL_RETURN_SLOT				\
	__FILL_RETURN_SLOT				\
	add	$(BITS_PER_LONG/8) * 2, %_ASM_SP;	\
	dec	reg;					\
	jnz	771b;					\
	/* barrier for jnz misprediction */		\
	lfence;

/*
 * Stuff a single RSB slot.
 *
 * To mitigate Post-Barrier RSB speculation, one CALL instruction must be
 * forced to retire before letting a RET instruction execute.
 *
 * On PBRSB-vulnerable CPUs, it is not safe for a RET to be executed
 * before this point.
 */
#define __FILL_ONE_RETURN				\
	__FILL_RETURN_SLOT				\
	add	$(BITS_PER_LONG/8), %_ASM_SP;		\
	lfence;
#ifdef __ASSEMBLY__ #ifdef __ASSEMBLY__
...@@ -132,28 +143,15 @@ ...@@ -132,28 +143,15 @@
#endif #endif
.endm .endm
/*
 * Emit one unbalanced CALL: the pushed return address is discarded by
 * the explicit stack adjustment below, so a CALL executes without a
 * matching RET.  The int3 is a speculation trap in case the bogus
 * return address is ever speculated to.  NOTE(review): per the commit
 * message this is a Post-Barrier RSB (PBRSB) guard -- a CALL must
 * retire before a later RET may execute; confirm against the PBRSB
 * advisory.
 */
.macro ISSUE_UNBALANCED_RET_GUARD
ANNOTATE_INTRA_FUNCTION_CALL
call .Lunbalanced_ret_guard_\@
int3 /* speculation trap: never reached architecturally */
.Lunbalanced_ret_guard_\@:
add $(BITS_PER_LONG/8), %_ASM_SP /* drop the return address pushed by the call */
lfence /* speculation barrier */
.endm
/*
 * A simpler FILL_RETURN_BUFFER macro. Don't make people use the CPP
 * monstrosity above, manually.
 *
 * \ftr selects full RSB stuffing; \ftr2 (defaulting to the
 * always-false alternative) selects the single-slot PBRSB variant.
 * If neither feature is enabled, the whole sequence is skipped.
 */
.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req ftr2=ALT_NOT(X86_FEATURE_ALWAYS)
	ALTERNATIVE_2 "jmp .Lskip_rsb_\@", \
		__stringify(__FILL_RETURN_BUFFER(\reg,\nr)), \ftr, \
		__stringify(__FILL_ONE_RETURN), \ftr2
.Lskip_rsb_\@:
.endm
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment