Commit a09a6e23 authored by Peter Zijlstra, committed by Borislav Petkov

objtool: Add entry UNRET validation

Since entry asm is tricky, add a validation pass that ensures the
retbleed mitigation has been done before the first actual RET
instruction.

Entry points are those that either have UNWIND_HINT_ENTRY, which acts
as UNWIND_HINT_EMPTY but marks the instruction as an entry point, or
those that have UNWIND_HINT_IRET_REGS at offset +0.

This is basically an intra-function variant of validate_branch(): it
follows all branches from the marked entry points and ensures that
every path reaches ANNOTATE_UNRET_END.

If a path hits a RET or an indirect call/jump before that, the path
fails and is reported.

There are 3 ANNOTATE_UNRET_END instances:

 - UNTRAIN_RET itself
 - exception from-kernel; this path doesn't need UNTRAIN_RET
 - all early exceptions; these also don't need UNTRAIN_RET
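
To illustrate the constraint the new pass checks, here is a minimal,
hypothetical entry stub (the symbol name and exact instruction sequence
are made up for illustration and are not part of this patch):

    SYM_CODE_START(example_entry)
        UNWIND_HINT_ENTRY       /* mark as entry point for the UNRET pass */
        ENDBR
        PUSH_AND_CLEAR_REGS
        UNTRAIN_RET             /* emits ANNOTATE_UNRET_END before the untrain calls */
        call    example_c_handler      /* placeholder handler */
        ...
        RET                     /* only reachable after UNTRAIN_RET */
    SYM_CODE_END(example_entry)

If the RET (or an indirect call/jump) were reachable before UNTRAIN_RET,
objtool --unret would flag the path ("RET before UNTRAIN" /
"early indirect call").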
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Borislav Petkov <bp@suse.de>
Reviewed-by: Josh Poimboeuf <jpoimboe@kernel.org>
Signed-off-by: Borislav Petkov <bp@suse.de>
parent 0fe4aeea
@@ -85,7 +85,7 @@
  */
 SYM_CODE_START(entry_SYSCALL_64)
-	UNWIND_HINT_EMPTY
+	UNWIND_HINT_ENTRY
 	ENDBR
 	swapgs
@@ -1095,6 +1095,7 @@ SYM_CODE_START_LOCAL(error_entry)
 .Lerror_entry_done_lfence:
 	FENCE_SWAPGS_KERNEL_ENTRY
 	leaq	8(%rsp), %rax			/* return pt_regs pointer */
+	ANNOTATE_UNRET_END
 	RET
 
 .Lbstep_iret:
...
@@ -49,7 +49,7 @@
  * 0(%ebp) arg6
  */
 SYM_CODE_START(entry_SYSENTER_compat)
-	UNWIND_HINT_EMPTY
+	UNWIND_HINT_ENTRY
 	ENDBR
 	/* Interrupts are off on entry. */
 	swapgs
@@ -179,7 +179,7 @@ SYM_CODE_END(entry_SYSENTER_compat)
 * 0(%esp) arg6
  */
 SYM_CODE_START(entry_SYSCALL_compat)
-	UNWIND_HINT_EMPTY
+	UNWIND_HINT_ENTRY
 	ENDBR
 	/* Interrupts are off on entry. */
 	swapgs
@@ -305,7 +305,7 @@ SYM_CODE_END(entry_SYSCALL_compat)
 * ebp arg6
  */
 SYM_CODE_START(entry_INT80_compat)
-	UNWIND_HINT_EMPTY
+	UNWIND_HINT_ENTRY
 	ENDBR
 	/*
 	 * Interrupts are off on entry.
...
@@ -81,6 +81,17 @@
  */
 #define ANNOTATE_UNRET_SAFE ANNOTATE_RETPOLINE_SAFE
 
+/*
+ * Abuse ANNOTATE_RETPOLINE_SAFE on a NOP to indicate UNRET_END, should
+ * eventually turn into it's own annotation.
+ */
+.macro ANNOTATE_UNRET_END
+#ifdef CONFIG_DEBUG_ENTRY
+	ANNOTATE_RETPOLINE_SAFE
+	nop
+#endif
+.endm
+
 /*
  * JMP_NOSPEC and CALL_NOSPEC macros can be used instead of a simple
  * indirect jmp/call which may be susceptible to the Spectre variant 2
@@ -131,6 +142,7 @@
  */
 .macro UNTRAIN_RET
 #ifdef CONFIG_RETPOLINE
+	ANNOTATE_UNRET_END
 	ALTERNATIVE_2 "", \
 		      "call zen_untrain_ret", X86_FEATURE_UNRET, \
 		      "call entry_ibpb", X86_FEATURE_ENTRY_IBPB
...
@@ -11,6 +11,10 @@
 	UNWIND_HINT sp_reg=ORC_REG_UNDEFINED type=UNWIND_HINT_TYPE_CALL end=1
 .endm
 
+.macro UNWIND_HINT_ENTRY
+	UNWIND_HINT sp_reg=ORC_REG_UNDEFINED type=UNWIND_HINT_TYPE_ENTRY end=1
+.endm
+
 .macro UNWIND_HINT_REGS base=%rsp offset=0 indirect=0 extra=1 partial=0
 	.if \base == %rsp
 		.if \indirect
...
@@ -389,6 +389,8 @@ SYM_CODE_START_NOALIGN(vc_boot_ghcb)
 	UNWIND_HINT_IRET_REGS offset=8
 	ENDBR
 
+	ANNOTATE_UNRET_END
+
 	/* Build pt_regs */
 	PUSH_AND_CLEAR_REGS
@@ -448,6 +450,7 @@ SYM_CODE_END(early_idt_handler_array)
 
 SYM_CODE_START_LOCAL(early_idt_handler_common)
 	UNWIND_HINT_IRET_REGS offset=16
+	ANNOTATE_UNRET_END
 	/*
 	 * The stack is the hardware frame, an error code or zero, and the
 	 * vector number.
@@ -497,6 +500,8 @@ SYM_CODE_START_NOALIGN(vc_no_ghcb)
 	UNWIND_HINT_IRET_REGS offset=8
 	ENDBR
 
+	ANNOTATE_UNRET_END
+
 	/* Build pt_regs */
 	PUSH_AND_CLEAR_REGS
...
@@ -121,7 +121,7 @@ SYM_FUNC_END(xen_read_cr2_direct);
 
 .macro xen_pv_trap name
 SYM_CODE_START(xen_\name)
-	UNWIND_HINT_EMPTY
+	UNWIND_HINT_ENTRY
 	ENDBR
 	pop %rcx
 	pop %r11
@@ -235,7 +235,7 @@ SYM_CODE_END(xenpv_restore_regs_and_return_to_usermode)
 
 /* Normal 64-bit system call target */
 SYM_CODE_START(xen_entry_SYSCALL_64)
-	UNWIND_HINT_EMPTY
+	UNWIND_HINT_ENTRY
 	ENDBR
 	popq %rcx
 	popq %r11
@@ -255,7 +255,7 @@ SYM_CODE_END(xen_entry_SYSCALL_64)
 
 /* 32-bit compat syscall target */
 SYM_CODE_START(xen_entry_SYSCALL_compat)
-	UNWIND_HINT_EMPTY
+	UNWIND_HINT_ENTRY
 	ENDBR
 	popq %rcx
 	popq %r11
@@ -273,7 +273,7 @@ SYM_CODE_END(xen_entry_SYSCALL_compat)
 
 /* 32-bit compat sysenter target */
 SYM_CODE_START(xen_entry_SYSENTER_compat)
-	UNWIND_HINT_EMPTY
+	UNWIND_HINT_ENTRY
 	ENDBR
 	/*
 	 * NB: Xen is polite and clears TF from EFLAGS for us.  This means
@@ -297,7 +297,7 @@ SYM_CODE_END(xen_entry_SYSENTER_compat)
 
 SYM_CODE_START(xen_entry_SYSCALL_compat)
 SYM_CODE_START(xen_entry_SYSENTER_compat)
-	UNWIND_HINT_EMPTY
+	UNWIND_HINT_ENTRY
 	ENDBR
 	lea 16(%rsp), %rsp	/* strip %rcx, %r11 */
 	mov $-ENOSYS, %rax
...
@@ -32,11 +32,14 @@ struct unwind_hint {
  *
  * UNWIND_HINT_FUNC: Generate the unwind metadata of a callable function.
  * Useful for code which doesn't have an ELF function annotation.
+ *
+ * UNWIND_HINT_ENTRY: machine entry without stack, SYSCALL/SYSENTER etc.
  */
 #define UNWIND_HINT_TYPE_CALL		0
 #define UNWIND_HINT_TYPE_REGS		1
 #define UNWIND_HINT_TYPE_REGS_PARTIAL	2
 #define UNWIND_HINT_TYPE_FUNC		3
+#define UNWIND_HINT_TYPE_ENTRY		4
 
 #ifdef CONFIG_OBJTOOL
...
@@ -44,7 +44,7 @@ objtool-enabled := $(or $(delay-objtool),$(CONFIG_NOINSTR_VALIDATION))
 
 objtool_args := \
 	$(if $(delay-objtool),$(objtool_args)) \
-	$(if $(CONFIG_NOINSTR_VALIDATION), --noinstr) \
+	$(if $(CONFIG_NOINSTR_VALIDATION), --noinstr $(if $(CONFIG_RETPOLINE), --unret)) \
 	$(if $(CONFIG_GCOV_KERNEL), --no-unreachable) \
 	--link
...
@@ -32,11 +32,14 @@ struct unwind_hint {
  *
  * UNWIND_HINT_FUNC: Generate the unwind metadata of a callable function.
  * Useful for code which doesn't have an ELF function annotation.
+ *
+ * UNWIND_HINT_ENTRY: machine entry without stack, SYSCALL/SYSENTER etc.
  */
 #define UNWIND_HINT_TYPE_CALL		0
 #define UNWIND_HINT_TYPE_REGS		1
 #define UNWIND_HINT_TYPE_REGS_PARTIAL	2
 #define UNWIND_HINT_TYPE_FUNC		3
+#define UNWIND_HINT_TYPE_ENTRY		4
 
 #ifdef CONFIG_OBJTOOL
...
@@ -68,6 +68,7 @@ const struct option check_options[] = {
 	OPT_BOOLEAN('n', "noinstr", &opts.noinstr, "validate noinstr rules"),
 	OPT_BOOLEAN('o', "orc", &opts.orc, "generate ORC metadata"),
 	OPT_BOOLEAN('r', "retpoline", &opts.retpoline, "validate and annotate retpoline usage"),
+	OPT_BOOLEAN(0, "unret", &opts.unret, "validate entry unret placement"),
 	OPT_BOOLEAN('l', "sls", &opts.sls, "validate straight-line-speculation mitigations"),
 	OPT_BOOLEAN('s', "stackval", &opts.stackval, "validate frame pointer rules"),
 	OPT_BOOLEAN('t', "static-call", &opts.static_call, "annotate static calls"),
@@ -163,6 +164,11 @@ static bool link_opts_valid(struct objtool_file *file)
 		return false;
 	}
 
+	if (opts.unret) {
+		ERROR("--unret requires --link");
+		return false;
+	}
+
 	return true;
 }
...
@@ -2032,16 +2032,24 @@ static int read_unwind_hints(struct objtool_file *file)
 
 		insn->hint = true;
 
-		if (opts.ibt && hint->type == UNWIND_HINT_TYPE_REGS_PARTIAL) {
+		if (hint->type == UNWIND_HINT_TYPE_REGS_PARTIAL) {
 			struct symbol *sym = find_symbol_by_offset(insn->sec, insn->offset);
 
-			if (sym && sym->bind == STB_GLOBAL &&
-			    insn->type != INSN_ENDBR && !insn->noendbr) {
-				WARN_FUNC("UNWIND_HINT_IRET_REGS without ENDBR",
-					  insn->sec, insn->offset);
+			if (sym && sym->bind == STB_GLOBAL) {
+				if (opts.ibt && insn->type != INSN_ENDBR && !insn->noendbr) {
+					WARN_FUNC("UNWIND_HINT_IRET_REGS without ENDBR",
						  insn->sec, insn->offset);
+				}
+
+				insn->entry = 1;
 			}
 		}
 
+		if (hint->type == UNWIND_HINT_TYPE_ENTRY) {
+			hint->type = UNWIND_HINT_TYPE_CALL;
+			insn->entry = 1;
+		}
+
 		if (hint->type == UNWIND_HINT_TYPE_FUNC) {
 			insn->cfi = &func_cfi;
 			continue;
@@ -2116,8 +2124,9 @@ static int read_retpoline_hints(struct objtool_file *file)
 
 		if (insn->type != INSN_JUMP_DYNAMIC &&
 		    insn->type != INSN_CALL_DYNAMIC &&
-		    insn->type != INSN_RETURN) {
-			WARN_FUNC("retpoline_safe hint not an indirect jump/call/ret",
+		    insn->type != INSN_RETURN &&
+		    insn->type != INSN_NOP) {
+			WARN_FUNC("retpoline_safe hint not an indirect jump/call/ret/nop",
 				  insn->sec, insn->offset);
 			return -1;
 		}
@@ -3305,8 +3314,8 @@ static int validate_branch(struct objtool_file *file, struct symbol *func,
 			return 1;
 		}
 
-		visited = 1 << state.uaccess;
-		if (insn->visited) {
+		visited = VISITED_BRANCH << state.uaccess;
+		if (insn->visited & VISITED_BRANCH_MASK) {
 			if (!insn->hint && !insn_cfi_match(insn, &state.cfi))
 				return 1;
@@ -3520,6 +3529,145 @@ static int validate_unwind_hints(struct objtool_file *file, struct section *sec)
 	return warnings;
 }
 
+/*
+ * Validate rethunk entry constraint: must untrain RET before the first RET.
+ *
+ * Follow every branch (intra-function) and ensure ANNOTATE_UNRET_END comes
+ * before an actual RET instruction.
+ */
+static int validate_entry(struct objtool_file *file, struct instruction *insn)
+{
+	struct instruction *next, *dest;
+	int ret, warnings = 0;
+
+	for (;;) {
+		next = next_insn_to_validate(file, insn);
+
+		if (insn->visited & VISITED_ENTRY)
+			return 0;
+
+		insn->visited |= VISITED_ENTRY;
+
+		if (!insn->ignore_alts && !list_empty(&insn->alts)) {
+			struct alternative *alt;
+			bool skip_orig = false;
+
+			list_for_each_entry(alt, &insn->alts, list) {
+				if (alt->skip_orig)
+					skip_orig = true;
+
+				ret = validate_entry(file, alt->insn);
+				if (ret) {
+					if (opts.backtrace)
+						BT_FUNC("(alt)", insn);
+					return ret;
+				}
+			}
+
+			if (skip_orig)
+				return 0;
+		}
+
+		switch (insn->type) {
+
+		case INSN_CALL_DYNAMIC:
+		case INSN_JUMP_DYNAMIC:
+		case INSN_JUMP_DYNAMIC_CONDITIONAL:
+			WARN_FUNC("early indirect call", insn->sec, insn->offset);
+			return 1;
+
+		case INSN_JUMP_UNCONDITIONAL:
+		case INSN_JUMP_CONDITIONAL:
+			if (!is_sibling_call(insn)) {
+				if (!insn->jump_dest) {
+					WARN_FUNC("unresolved jump target after linking?!?",
+						  insn->sec, insn->offset);
+					return -1;
+				}
+				ret = validate_entry(file, insn->jump_dest);
+				if (ret) {
+					if (opts.backtrace) {
+						BT_FUNC("(branch%s)", insn,
+							insn->type == INSN_JUMP_CONDITIONAL ? "-cond" : "");
+					}
+					return ret;
+				}
+
+				if (insn->type == INSN_JUMP_UNCONDITIONAL)
+					return 0;
+
+				break;
+			}
+
+			/* fallthrough */
+		case INSN_CALL:
+			dest = find_insn(file, insn->call_dest->sec,
+					 insn->call_dest->offset);
+			if (!dest) {
+				WARN("Unresolved function after linking!?: %s",
+				     insn->call_dest->name);
+				return -1;
+			}
+
+			ret = validate_entry(file, dest);
+			if (ret) {
+				if (opts.backtrace)
+					BT_FUNC("(call)", insn);
+				return ret;
+			}
+			/*
+			 * If a call returns without error, it must have seen UNTRAIN_RET.
+			 * Therefore any non-error return is a success.
+			 */
+			return 0;
+
+		case INSN_RETURN:
+			WARN_FUNC("RET before UNTRAIN", insn->sec, insn->offset);
+			return 1;
+
+		case INSN_NOP:
+			if (insn->retpoline_safe)
+				return 0;
+			break;
+
+		default:
+			break;
+		}
+
+		if (!next) {
+			WARN_FUNC("teh end!", insn->sec, insn->offset);
+			return -1;
+		}
+		insn = next;
+	}
+
+	return warnings;
+}
+
+/*
+ * Validate that all branches starting at 'insn->entry' encounter UNRET_END
+ * before RET.
+ */
+static int validate_unret(struct objtool_file *file)
+{
+	struct instruction *insn;
+	int ret, warnings = 0;
+
+	for_each_insn(file, insn) {
+		if (!insn->entry)
+			continue;
+
+		ret = validate_entry(file, insn);
+		if (ret < 0) {
+			WARN_FUNC("Failed UNRET validation", insn->sec, insn->offset);
+			return ret;
+		}
+		warnings += ret;
+	}
+
+	return warnings;
+}
+
 static int validate_retpoline(struct objtool_file *file)
 {
 	struct instruction *insn;
@@ -4039,6 +4187,17 @@ int check(struct objtool_file *file)
 		warnings += ret;
 	}
 
+	if (opts.unret) {
+		/*
+		 * Must be after validate_branch() and friends, it plays
+		 * further games with insn->visited.
+		 */
+		ret = validate_unret(file);
+		if (ret < 0)
+			return ret;
+		warnings += ret;
+	}
+
 	if (opts.ibt) {
 		ret = validate_ibt(file);
 		if (ret < 0)
...
@@ -19,6 +19,7 @@ struct opts {
 	bool noinstr;
 	bool orc;
 	bool retpoline;
+	bool unret;
 	bool sls;
 	bool stackval;
 	bool static_call;
...
@@ -51,8 +51,10 @@ struct instruction {
 	   ignore_alts	: 1,
 	   hint		: 1,
 	   retpoline_safe : 1,
-	   noendbr	: 1;
-		/* 2 bit hole */
+	   noendbr	: 1,
+	   entry	: 1;
+		/* 1 bit hole */
 	s8 instr;
 	u8 visited;
 	/* u8 hole */
@@ -69,6 +71,11 @@ struct instruction {
 	struct cfi_state *cfi;
 };
 
+#define VISITED_BRANCH		0x01
+#define VISITED_BRANCH_UACCESS	0x02
+#define VISITED_BRANCH_MASK	0x03
+#define VISITED_ENTRY		0x04
+
 static inline bool is_static_jump(struct instruction *insn)
 {
 	return insn->type == INSN_JUMP_CONDITIONAL ||
...