Commit 69fc06f7 authored by Linus Torvalds

Merge tag 'objtool-core-2020-06-01' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull objtool updates from Ingo Molnar:
 "There are a lot of objtool changes in this cycle, all across the map:

   - Speed up objtool significantly, especially when there are a large
     number of sections

   - Improve objtool's understanding of special instructions such as
     IRET, to reduce the number of annotations required

   - Implement 'noinstr' validation

   - Do baby steps for non-x86 objtool use

   - Simplify/fix retpoline decoding

   - Add vmlinux validation

   - Improve documentation

   - Fix various bugs and apply smaller cleanups"

* tag 'objtool-core-2020-06-01' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (54 commits)
  objtool: Enable compilation of objtool for all architectures
  objtool: Move struct objtool_file into arch-independent header
  objtool: Exit successfully when requesting help
  objtool: Add check_kcov_mode() to the uaccess safelist
  samples/ftrace: Fix asm function ELF annotations
  objtool: optimize add_dead_ends for split sections
  objtool: use gelf_getsymshndx to handle >64k sections
  objtool: Allow no-op CFI ops in alternatives
  x86/retpoline: Fix retpoline unwind
  x86: Change {JMP,CALL}_NOSPEC argument
  x86: Simplify retpoline declaration
  x86/speculation: Change FILL_RETURN_BUFFER to work with objtool
  objtool: Add support for intra-function calls
  objtool: Move the IRET hack into the arch decoder
  objtool: Remove INSN_STACK
  objtool: Make handle_insn_ops() unconditional
  objtool: Rework allocating stack_ops on decode
  objtool: UNWIND_HINT_RET_OFFSET should not check registers
  objtool: is_fentry_call() crashes if call has no destination
  x86,smap: Fix smap_{save,restore}() alternatives
  ...
parents 60056060 0decf1f8
......@@ -2758,7 +2758,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
pxor INC, STATE4
movdqu IV, 0x30(OUTP)
CALL_NOSPEC %r11
CALL_NOSPEC r11
movdqu 0x00(OUTP), INC
pxor INC, STATE1
......@@ -2803,7 +2803,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
_aesni_gf128mul_x_ble()
movups IV, (IVP)
CALL_NOSPEC %r11
CALL_NOSPEC r11
movdqu 0x40(OUTP), INC
pxor INC, STATE1
......
......@@ -1228,7 +1228,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_16way)
vpxor 14 * 16(%rax), %xmm15, %xmm14;
vpxor 15 * 16(%rax), %xmm15, %xmm15;
CALL_NOSPEC %r9;
CALL_NOSPEC r9;
addq $(16 * 16), %rsp;
......
......@@ -1339,7 +1339,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_32way)
vpxor 14 * 32(%rax), %ymm15, %ymm14;
vpxor 15 * 32(%rax), %ymm15, %ymm15;
CALL_NOSPEC %r9;
CALL_NOSPEC r9;
addq $(16 * 32), %rsp;
......
......@@ -75,7 +75,7 @@
.text
SYM_FUNC_START(crc_pcl)
#define bufp %rdi
#define bufp rdi
#define bufp_dw %edi
#define bufp_w %di
#define bufp_b %dil
......@@ -105,9 +105,9 @@ SYM_FUNC_START(crc_pcl)
## 1) ALIGN:
################################################################
mov bufp, bufptmp # rdi = *buf
neg bufp
and $7, bufp # calculate the unalignment amount of
mov %bufp, bufptmp # rdi = *buf
neg %bufp
and $7, %bufp # calculate the unalignment amount of
# the address
je proc_block # Skip if aligned
......@@ -123,13 +123,13 @@ SYM_FUNC_START(crc_pcl)
do_align:
#### Calculate CRC of unaligned bytes of the buffer (if any)
movq (bufptmp), tmp # load a quadword from the buffer
add bufp, bufptmp # align buffer pointer for quadword
add %bufp, bufptmp # align buffer pointer for quadword
# processing
sub bufp, len # update buffer length
sub %bufp, len # update buffer length
align_loop:
crc32b %bl, crc_init_dw # compute crc32 of 1-byte
shr $8, tmp # get next byte
dec bufp
dec %bufp
jne align_loop
proc_block:
......@@ -169,10 +169,10 @@ continue_block:
xor crc2, crc2
## branch into array
lea jump_table(%rip), bufp
movzxw (bufp, %rax, 2), len
lea crc_array(%rip), bufp
lea (bufp, len, 1), bufp
lea jump_table(%rip), %bufp
movzxw (%bufp, %rax, 2), len
lea crc_array(%rip), %bufp
lea (%bufp, len, 1), %bufp
JMP_NOSPEC bufp
################################################################
......@@ -218,9 +218,9 @@ LABEL crc_ %i
## 4) Combine three results:
################################################################
lea (K_table-8)(%rip), bufp # first entry is for idx 1
lea (K_table-8)(%rip), %bufp # first entry is for idx 1
shlq $3, %rax # rax *= 8
pmovzxdq (bufp,%rax), %xmm0 # 2 consts: K1:K2
pmovzxdq (%bufp,%rax), %xmm0 # 2 consts: K1:K2
leal (%eax,%eax,2), %eax # rax *= 3 (total *24)
subq %rax, tmp # tmp -= rax*24
......
......@@ -816,7 +816,7 @@ SYM_CODE_START(ret_from_fork)
/* kernel thread */
1: movl %edi, %eax
CALL_NOSPEC %ebx
CALL_NOSPEC ebx
/*
* A kernel thread is allowed to return here after successfully
* calling do_execve(). Exit to userspace to complete the execve()
......@@ -1501,7 +1501,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception_read_cr2)
TRACE_IRQS_OFF
movl %esp, %eax # pt_regs pointer
CALL_NOSPEC %edi
CALL_NOSPEC edi
jmp ret_from_exception
SYM_CODE_END(common_exception_read_cr2)
......@@ -1522,7 +1522,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception)
TRACE_IRQS_OFF
movl %esp, %eax # pt_regs pointer
CALL_NOSPEC %edi
CALL_NOSPEC edi
jmp ret_from_exception
SYM_CODE_END(common_exception)
......
......@@ -348,7 +348,7 @@ SYM_CODE_START(ret_from_fork)
/* kernel thread */
UNWIND_HINT_EMPTY
movq %r12, %rdi
CALL_NOSPEC %rbx
CALL_NOSPEC rbx
/*
* A kernel thread is allowed to return here after successfully
* calling do_execve(). Exit to userspace to complete the execve()
......
#ifdef CONFIG_64BIT
GEN(rax)
GEN(rbx)
GEN(rcx)
GEN(rdx)
GEN(rsi)
GEN(rdi)
GEN(rbp)
GEN(r8)
GEN(r9)
GEN(r10)
GEN(r11)
GEN(r12)
GEN(r13)
GEN(r14)
GEN(r15)
#else
GEN(eax)
GEN(ebx)
GEN(ecx)
GEN(edx)
GEN(esi)
GEN(edi)
GEN(ebp)
#endif
......@@ -17,24 +17,19 @@ extern void cmpxchg8b_emu(void);
#endif
#ifdef CONFIG_RETPOLINE
#ifdef CONFIG_X86_32
#define INDIRECT_THUNK(reg) extern asmlinkage void __x86_indirect_thunk_e ## reg(void);
#else
#define INDIRECT_THUNK(reg) extern asmlinkage void __x86_indirect_thunk_r ## reg(void);
INDIRECT_THUNK(8)
INDIRECT_THUNK(9)
INDIRECT_THUNK(10)
INDIRECT_THUNK(11)
INDIRECT_THUNK(12)
INDIRECT_THUNK(13)
INDIRECT_THUNK(14)
INDIRECT_THUNK(15)
#endif
INDIRECT_THUNK(ax)
INDIRECT_THUNK(bx)
INDIRECT_THUNK(cx)
INDIRECT_THUNK(dx)
INDIRECT_THUNK(si)
INDIRECT_THUNK(di)
INDIRECT_THUNK(bp)
#define DECL_INDIRECT_THUNK(reg) \
extern asmlinkage void __x86_indirect_thunk_ ## reg (void);
#define DECL_RETPOLINE(reg) \
extern asmlinkage void __x86_retpoline_ ## reg (void);
#undef GEN
#define GEN(reg) DECL_INDIRECT_THUNK(reg)
#include <asm/GEN-for-each-reg.h>
#undef GEN
#define GEN(reg) DECL_RETPOLINE(reg)
#include <asm/GEN-for-each-reg.h>
#endif /* CONFIG_RETPOLINE */
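For reference, on a CONFIG_64BIT build the two GEN passes above should expand to prototypes along these lines (a sketch of the preprocessor output, not text from this commit):
extern asmlinkage void __x86_indirect_thunk_rax(void);
extern asmlinkage void __x86_retpoline_rax(void);
/* ...and likewise for rbx, rcx, rdx, rsi, rdi, rbp and r8-r15 */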
......@@ -4,20 +4,13 @@
#define _ASM_X86_NOSPEC_BRANCH_H_
#include <linux/static_key.h>
#include <linux/frame.h>
#include <asm/alternative.h>
#include <asm/alternative-asm.h>
#include <asm/cpufeatures.h>
#include <asm/msr-index.h>
/*
* This should be used immediately before a retpoline alternative. It tells
* objtool where the retpolines are so that it can make sense of the control
* flow by just reading the original instruction(s) and ignoring the
* alternatives.
*/
#define ANNOTATE_NOSPEC_ALTERNATIVE \
ANNOTATE_IGNORE_ALTERNATIVE
#include <asm/unwind_hints.h>
/*
* Fill the CPU return stack buffer.
......@@ -46,21 +39,25 @@
#define __FILL_RETURN_BUFFER(reg, nr, sp) \
mov $(nr/2), reg; \
771: \
ANNOTATE_INTRA_FUNCTION_CALL; \
call 772f; \
773: /* speculation trap */ \
UNWIND_HINT_EMPTY; \
pause; \
lfence; \
jmp 773b; \
772: \
ANNOTATE_INTRA_FUNCTION_CALL; \
call 774f; \
775: /* speculation trap */ \
UNWIND_HINT_EMPTY; \
pause; \
lfence; \
jmp 775b; \
774: \
add $(BITS_PER_LONG/8) * 2, sp; \
dec reg; \
jnz 771b; \
add $(BITS_PER_LONG/8) * nr, sp;
jnz 771b;
#ifdef __ASSEMBLY__
......@@ -76,34 +73,6 @@
.popsection
.endm
/*
* These are the bare retpoline primitives for indirect jmp and call.
* Do not use these directly; they only exist to make the ALTERNATIVE
* invocation below less ugly.
*/
.macro RETPOLINE_JMP reg:req
call .Ldo_rop_\@
.Lspec_trap_\@:
pause
lfence
jmp .Lspec_trap_\@
.Ldo_rop_\@:
mov \reg, (%_ASM_SP)
ret
.endm
/*
* This is a wrapper around RETPOLINE_JMP so the called function in reg
* returns to the instruction after the macro.
*/
.macro RETPOLINE_CALL reg:req
jmp .Ldo_call_\@
.Ldo_retpoline_jmp_\@:
RETPOLINE_JMP \reg
.Ldo_call_\@:
call .Ldo_retpoline_jmp_\@
.endm
/*
* JMP_NOSPEC and CALL_NOSPEC macros can be used instead of a simple
* indirect jmp/call which may be susceptible to the Spectre variant 2
......@@ -111,23 +80,21 @@
*/
.macro JMP_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE
ANNOTATE_NOSPEC_ALTERNATIVE
ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *\reg), \
__stringify(RETPOLINE_JMP \reg), X86_FEATURE_RETPOLINE, \
__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *\reg), X86_FEATURE_RETPOLINE_AMD
ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), \
__stringify(jmp __x86_retpoline_\reg), X86_FEATURE_RETPOLINE, \
__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), X86_FEATURE_RETPOLINE_AMD
#else
jmp *\reg
jmp *%\reg
#endif
.endm
.macro CALL_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE
ANNOTATE_NOSPEC_ALTERNATIVE
ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; call *\reg), \
__stringify(RETPOLINE_CALL \reg), X86_FEATURE_RETPOLINE,\
__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; call *\reg), X86_FEATURE_RETPOLINE_AMD
ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; call *%\reg), \
__stringify(call __x86_retpoline_\reg), X86_FEATURE_RETPOLINE, \
__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; call *%\reg), X86_FEATURE_RETPOLINE_AMD
#else
call *\reg
call *%\reg
#endif
.endm
......@@ -137,10 +104,8 @@
*/
.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req
#ifdef CONFIG_RETPOLINE
ANNOTATE_NOSPEC_ALTERNATIVE
ALTERNATIVE "jmp .Lskip_rsb_\@", \
__stringify(__FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP)) \
\ftr
ALTERNATIVE "jmp .Lskip_rsb_\@", "", \ftr
__FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP)
.Lskip_rsb_\@:
#endif
.endm
......@@ -161,16 +126,16 @@
* which is ensured when CONFIG_RETPOLINE is defined.
*/
# define CALL_NOSPEC \
ANNOTATE_NOSPEC_ALTERNATIVE \
ALTERNATIVE_2( \
ANNOTATE_RETPOLINE_SAFE \
"call *%[thunk_target]\n", \
"call __x86_indirect_thunk_%V[thunk_target]\n", \
"call __x86_retpoline_%V[thunk_target]\n", \
X86_FEATURE_RETPOLINE, \
"lfence;\n" \
ANNOTATE_RETPOLINE_SAFE \
"call *%[thunk_target]\n", \
X86_FEATURE_RETPOLINE_AMD)
# define THUNK_TARGET(addr) [thunk_target] "r" (addr)
#else /* CONFIG_X86_32 */
......@@ -180,7 +145,6 @@
* here, anyway.
*/
# define CALL_NOSPEC \
ANNOTATE_NOSPEC_ALTERNATIVE \
ALTERNATIVE_2( \
ANNOTATE_RETPOLINE_SAFE \
"call *%[thunk_target]\n", \
......
......@@ -58,8 +58,7 @@
#define ORC_TYPE_CALL 0
#define ORC_TYPE_REGS 1
#define ORC_TYPE_REGS_IRET 2
#define UNWIND_HINT_TYPE_SAVE 3
#define UNWIND_HINT_TYPE_RESTORE 4
#define UNWIND_HINT_TYPE_RET_OFFSET 3
#ifndef __ASSEMBLY__
/*
......
......@@ -728,7 +728,6 @@ static inline void sync_core(void)
unsigned int tmp;
asm volatile (
UNWIND_HINT_SAVE
"mov %%ss, %0\n\t"
"pushq %q0\n\t"
"pushq %%rsp\n\t"
......@@ -738,7 +737,6 @@ static inline void sync_core(void)
"pushq %q0\n\t"
"pushq $1f\n\t"
"iretq\n\t"
UNWIND_HINT_RESTORE
"1:"
: "=&r" (tmp), ASM_CALL_CONSTRAINT : : "cc", "memory");
#endif
......
......@@ -57,8 +57,10 @@ static __always_inline unsigned long smap_save(void)
{
unsigned long flags;
asm volatile (ALTERNATIVE("", "pushf; pop %0; " __ASM_CLAC,
X86_FEATURE_SMAP)
asm volatile ("# smap_save\n\t"
ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP)
"pushf; pop %0; " __ASM_CLAC "\n\t"
"1:"
: "=rm" (flags) : : "memory", "cc");
return flags;
......@@ -66,7 +68,10 @@ static __always_inline unsigned long smap_save(void)
static __always_inline void smap_restore(unsigned long flags)
{
asm volatile (ALTERNATIVE("", "push %0; popf", X86_FEATURE_SMAP)
asm volatile ("# smap_restore\n\t"
ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP)
"push %0; popf\n\t"
"1:"
: : "g" (flags) : "memory", "cc");
}
......
......@@ -86,32 +86,15 @@
UNWIND_HINT sp_offset=\sp_offset
.endm
.macro UNWIND_HINT_SAVE
UNWIND_HINT type=UNWIND_HINT_TYPE_SAVE
.endm
.macro UNWIND_HINT_RESTORE
UNWIND_HINT type=UNWIND_HINT_TYPE_RESTORE
/*
* RET_OFFSET: Used on instructions that terminate a function; mostly RETURN
* and sibling calls. On these, sp_offset denotes the expected offset from
* initial_func_cfi.
*/
.macro UNWIND_HINT_RET_OFFSET sp_offset=8
UNWIND_HINT type=UNWIND_HINT_TYPE_RET_OFFSET sp_offset=\sp_offset
.endm
#else /* !__ASSEMBLY__ */
#define UNWIND_HINT(sp_reg, sp_offset, type, end) \
"987: \n\t" \
".pushsection .discard.unwind_hints\n\t" \
/* struct unwind_hint */ \
".long 987b - .\n\t" \
".short " __stringify(sp_offset) "\n\t" \
".byte " __stringify(sp_reg) "\n\t" \
".byte " __stringify(type) "\n\t" \
".byte " __stringify(end) "\n\t" \
".balign 4 \n\t" \
".popsection\n\t"
#define UNWIND_HINT_SAVE UNWIND_HINT(0, 0, UNWIND_HINT_TYPE_SAVE, 0)
#define UNWIND_HINT_RESTORE UNWIND_HINT(0, 0, UNWIND_HINT_TYPE_RESTORE, 0)
#endif /* __ASSEMBLY__ */
#endif /* _ASM_X86_UNWIND_HINTS_H */
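As a contrived sketch of where UNWIND_HINT_RET_OFFSET applies (the real user is the __x86_retpoline_<reg> thunk later in this diff; 'example_trampoline' and 'target' are made up), a function that returns with the stack one word deeper than at entry:
SYM_FUNC_START(example_trampoline)
	push	%rax			# stack is now 8 bytes below function entry
	movq	target(%rip), %rax
	movq	%rax, (%rsp)		# plant the destination as the "return address"
	UNWIND_HINT_RET_OFFSET		# default sp_offset=8 matches the extra word
	ret
SYM_FUNC_END(example_trampoline)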
......@@ -282,7 +282,8 @@ static inline void tramp_free(void *tramp) { }
/* Defined as markers to the end of the ftrace default trampolines */
extern void ftrace_regs_caller_end(void);
extern void ftrace_epilogue(void);
extern void ftrace_regs_caller_ret(void);
extern void ftrace_caller_end(void);
extern void ftrace_caller_op_ptr(void);
extern void ftrace_regs_caller_op_ptr(void);
......@@ -334,7 +335,7 @@ create_trampoline(struct ftrace_ops *ops, unsigned int *tramp_size)
call_offset = (unsigned long)ftrace_regs_call;
} else {
start_offset = (unsigned long)ftrace_caller;
end_offset = (unsigned long)ftrace_epilogue;
end_offset = (unsigned long)ftrace_caller_end;
op_offset = (unsigned long)ftrace_caller_op_ptr;
call_offset = (unsigned long)ftrace_call;
}
......@@ -366,6 +367,13 @@ create_trampoline(struct ftrace_ops *ops, unsigned int *tramp_size)
if (WARN_ON(ret < 0))
goto fail;
if (ops->flags & FTRACE_OPS_FL_SAVE_REGS) {
ip = trampoline + (ftrace_regs_caller_ret - ftrace_regs_caller);
ret = probe_kernel_read(ip, (void *)retq, RET_SIZE);
if (WARN_ON(ret < 0))
goto fail;
}
/*
* The address of the ftrace_ops that is used for this trampoline
* is stored at the end of the trampoline. This will be used to
......@@ -433,7 +441,7 @@ void set_ftrace_ops_ro(void)
end_offset = (unsigned long)ftrace_regs_caller_end;
} else {
start_offset = (unsigned long)ftrace_caller;
end_offset = (unsigned long)ftrace_epilogue;
end_offset = (unsigned long)ftrace_caller_end;
}
size = end_offset - start_offset;
size = size + RET_SIZE + sizeof(void *);
......
......@@ -189,5 +189,5 @@ return_to_handler:
movl %eax, %ecx
popl %edx
popl %eax
JMP_NOSPEC %ecx
JMP_NOSPEC ecx
#endif
......@@ -23,7 +23,7 @@
#endif /* CONFIG_FRAME_POINTER */
/* Size of stack used to save mcount regs in save_mcount_regs */
#define MCOUNT_REG_SIZE (SS+8 + MCOUNT_FRAME_SIZE)
#define MCOUNT_REG_SIZE (FRAME_SIZE + MCOUNT_FRAME_SIZE)
/*
* gcc -pg option adds a call to 'mcount' in most functions.
......@@ -77,7 +77,7 @@
/*
* We add enough stack to save all regs.
*/
subq $(MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE), %rsp
subq $(FRAME_SIZE), %rsp
movq %rax, RAX(%rsp)
movq %rcx, RCX(%rsp)
movq %rdx, RDX(%rsp)
......@@ -157,8 +157,12 @@ SYM_INNER_LABEL(ftrace_call, SYM_L_GLOBAL)
* think twice before adding any new code or changing the
* layout here.
*/
SYM_INNER_LABEL(ftrace_epilogue, SYM_L_GLOBAL)
SYM_INNER_LABEL(ftrace_caller_end, SYM_L_GLOBAL)
jmp ftrace_epilogue
SYM_FUNC_END(ftrace_caller);
SYM_FUNC_START(ftrace_epilogue)
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
SYM_INNER_LABEL(ftrace_graph_call, SYM_L_GLOBAL)
jmp ftrace_stub
......@@ -170,14 +174,12 @@ SYM_INNER_LABEL(ftrace_graph_call, SYM_L_GLOBAL)
*/
SYM_INNER_LABEL_ALIGN(ftrace_stub, SYM_L_WEAK)
retq
SYM_FUNC_END(ftrace_caller)
SYM_FUNC_END(ftrace_epilogue)
SYM_FUNC_START(ftrace_regs_caller)
/* Save the current flags before any operations that can change them */
pushfq
UNWIND_HINT_SAVE
/* added 8 bytes to save flags */
save_mcount_regs 8
/* save_mcount_regs fills in first two parameters */
......@@ -233,10 +235,13 @@ SYM_INNER_LABEL(ftrace_regs_call, SYM_L_GLOBAL)
movq ORIG_RAX(%rsp), %rax
movq %rax, MCOUNT_REG_SIZE-8(%rsp)
/* If ORIG_RAX is anything but zero, make this a call to that */
/*
* If ORIG_RAX is anything but zero, make this a call to that.
* See arch_ftrace_set_direct_caller().
*/
movq ORIG_RAX(%rsp), %rax
cmpq $0, %rax
je 1f
testq %rax, %rax
jz 1f
/* Swap the flags with orig_rax */
movq MCOUNT_REG_SIZE(%rsp), %rdi
......@@ -244,20 +249,14 @@ SYM_INNER_LABEL(ftrace_regs_call, SYM_L_GLOBAL)
movq %rax, MCOUNT_REG_SIZE(%rsp)
restore_mcount_regs 8
/* Restore flags */
popfq
jmp 2f
SYM_INNER_LABEL(ftrace_regs_caller_ret, SYM_L_GLOBAL);
UNWIND_HINT_RET_OFFSET
jmp ftrace_epilogue
1: restore_mcount_regs
2:
/*
* The stack layout is nondeterministic here, depending on which path was
* taken. This confuses objtool and ORC, rightfully so. For now,
* pretend the stack always looks like the non-direct case.
*/
UNWIND_HINT_RESTORE
/* Restore flags */
popfq
......@@ -268,7 +267,6 @@ SYM_INNER_LABEL(ftrace_regs_call, SYM_L_GLOBAL)
* to the return.
*/
SYM_INNER_LABEL(ftrace_regs_caller_end, SYM_L_GLOBAL)
jmp ftrace_epilogue
SYM_FUNC_END(ftrace_regs_caller)
......@@ -303,7 +301,7 @@ trace:
* function tracing is enabled.
*/
movq ftrace_trace_function, %r8
CALL_NOSPEC %r8
CALL_NOSPEC r8
restore_mcount_regs
jmp fgraph_trace
......@@ -340,6 +338,6 @@ SYM_CODE_START(return_to_handler)
movq 8(%rsp), %rdx
movq (%rsp), %rax
addq $24, %rsp
JMP_NOSPEC %rdi
JMP_NOSPEC rdi
SYM_CODE_END(return_to_handler)
#endif
......@@ -153,7 +153,7 @@ SYM_FUNC_START(csum_partial)
negl %ebx
lea 45f(%ebx,%ebx,2), %ebx
testl %esi, %esi
JMP_NOSPEC %ebx
JMP_NOSPEC ebx
# Handle 2-byte-aligned regions
20: addw (%esi), %ax
......@@ -436,7 +436,7 @@ SYM_FUNC_START(csum_partial_copy_generic)
andl $-32,%edx
lea 3f(%ebx,%ebx), %ebx
testl %esi, %esi
JMP_NOSPEC %ebx
JMP_NOSPEC ebx
1: addl $64,%esi
addl $64,%edi
SRC(movb -32(%edx),%bl) ; SRC(movb (%edx),%bl)
......
......@@ -7,15 +7,31 @@
#include <asm/alternative-asm.h>
#include <asm/export.h>
#include <asm/nospec-branch.h>
#include <asm/unwind_hints.h>
#include <asm/frame.h>
.macro THUNK reg
.section .text.__x86.indirect_thunk
.align 32
SYM_FUNC_START(__x86_indirect_thunk_\reg)
CFI_STARTPROC
JMP_NOSPEC %\reg
CFI_ENDPROC
JMP_NOSPEC \reg
SYM_FUNC_END(__x86_indirect_thunk_\reg)
SYM_FUNC_START_NOALIGN(__x86_retpoline_\reg)
ANNOTATE_INTRA_FUNCTION_CALL
call .Ldo_rop_\@
.Lspec_trap_\@:
UNWIND_HINT_EMPTY
pause
lfence
jmp .Lspec_trap_\@
.Ldo_rop_\@:
mov %\reg, (%_ASM_SP)
UNWIND_HINT_RET_OFFSET
ret
SYM_FUNC_END(__x86_retpoline_\reg)
.endm
/*
......@@ -24,25 +40,24 @@ SYM_FUNC_END(__x86_indirect_thunk_\reg)
* only see one instance of "__x86_indirect_thunk_\reg" rather
* than one per register with the correct names. So we do it
* the simple and nasty way...
*
* Worse, you can only have a single EXPORT_SYMBOL per line,
* and CPP can't insert newlines, so we have to repeat everything
* at least twice.
*/
#define __EXPORT_THUNK(sym) _ASM_NOKPROBE(sym); EXPORT_SYMBOL(sym)
#define EXPORT_THUNK(reg) __EXPORT_THUNK(__x86_indirect_thunk_ ## reg)
#define GENERATE_THUNK(reg) THUNK reg ; EXPORT_THUNK(reg)
GENERATE_THUNK(_ASM_AX)
GENERATE_THUNK(_ASM_BX)
GENERATE_THUNK(_ASM_CX)
GENERATE_THUNK(_ASM_DX)
GENERATE_THUNK(_ASM_SI)
GENERATE_THUNK(_ASM_DI)
GENERATE_THUNK(_ASM_BP)
#ifdef CONFIG_64BIT
GENERATE_THUNK(r8)
GENERATE_THUNK(r9)
GENERATE_THUNK(r10)
GENERATE_THUNK(r11)
GENERATE_THUNK(r12)
GENERATE_THUNK(r13)
GENERATE_THUNK(r14)
GENERATE_THUNK(r15)
#endif
#define __EXPORT_THUNK(sym) _ASM_NOKPROBE(sym); EXPORT_SYMBOL(sym)
#define EXPORT_THUNK(reg) __EXPORT_THUNK(__x86_indirect_thunk_ ## reg)
#define EXPORT_RETPOLINE(reg) __EXPORT_THUNK(__x86_retpoline_ ## reg)
#undef GEN
#define GEN(reg) THUNK reg
#include <asm/GEN-for-each-reg.h>
#undef GEN
#define GEN(reg) EXPORT_THUNK(reg)
#include <asm/GEN-for-each-reg.h>
#undef GEN
#define GEN(reg) EXPORT_RETPOLINE(reg)
#include <asm/GEN-for-each-reg.h>
......@@ -21,7 +21,7 @@ SYM_FUNC_START(__efi_call)
mov %r8, %r9
mov %rcx, %r8
mov %rsi, %rcx
CALL_NOSPEC %rdi
CALL_NOSPEC rdi
leave
ret
SYM_FUNC_END(__efi_call)
......@@ -15,9 +15,20 @@
static void __used __section(.discard.func_stack_frame_non_standard) \
*__func_stack_frame_non_standard_##func = func
/*
* This macro indicates that the following intra-function call is valid.
* Any non-annotated intra-function call will cause objtool to issue a warning.
*/
#define ANNOTATE_INTRA_FUNCTION_CALL \
999: \
.pushsection .discard.intra_function_calls; \
.long 999b; \
.popsection;
#else /* !CONFIG_STACK_VALIDATION */
#define STACK_FRAME_NON_STANDARD(func)
#define ANNOTATE_INTRA_FUNCTION_CALL
#endif /* CONFIG_STACK_VALIDATION */
......
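A minimal usage sketch in assembly (assuming CONFIG_STACK_VALIDATION), mirroring the retpoline and return-stack-buffer fills earlier in this diff — the annotation goes immediately before the offending call:
	ANNOTATE_INTRA_FUNCTION_CALL
	call	1f		# direct call to a label inside the same function
1: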
......@@ -369,6 +369,11 @@ config STACK_VALIDATION
For more information, see
tools/objtool/Documentation/stack-validation.txt.
config VMLINUX_VALIDATION
bool
depends on STACK_VALIDATION && DEBUG_ENTRY && !PARAVIRT
default y
config DEBUG_FORCE_WEAK_PER_CPU
bool "Force weak per-cpu definitions"
depends on DEBUG_KERNEL
......
......@@ -20,18 +20,22 @@ static unsigned long my_ip = (unsigned long)schedule;
asm (
" .pushsection .text, \"ax\", @progbits\n"
" .type my_tramp1, @function\n"
" my_tramp1:"
" pushq %rbp\n"
" movq %rsp, %rbp\n"
" call my_direct_func1\n"
" leave\n"
" .size my_tramp1, .-my_tramp1\n"
" ret\n"
" .type my_tramp2, @function\n"
" my_tramp2:"
" pushq %rbp\n"
" movq %rsp, %rbp\n"
" call my_direct_func2\n"
" leave\n"
" ret\n"
" .size my_tramp2, .-my_tramp2\n"
" .popsection\n"
);
......
......@@ -15,6 +15,7 @@ extern void my_tramp(void *);
asm (
" .pushsection .text, \"ax\", @progbits\n"
" .type my_tramp, @function\n"
" my_tramp:"
" pushq %rbp\n"
" movq %rsp, %rbp\n"
......@@ -27,6 +28,7 @@ asm (
" popq %rdi\n"
" leave\n"
" ret\n"
" .size my_tramp, .-my_tramp\n"
" .popsection\n"
);
......
......@@ -13,6 +13,7 @@ extern void my_tramp(void *);
asm (
" .pushsection .text, \"ax\", @progbits\n"
" .type my_tramp, @function\n"
" my_tramp:"
" pushq %rbp\n"
" movq %rsp, %rbp\n"
......@@ -21,6 +22,7 @@ asm (
" popq %rdi\n"
" leave\n"
" ret\n"
" .size my_tramp, .-my_tramp\n"
" .popsection\n"
);
......
......@@ -55,6 +55,29 @@ modpost_link()
${LD} ${KBUILD_LDFLAGS} -r -o ${1} ${objects}
}
objtool_link()
{
local objtoolopt;
if [ -n "${CONFIG_VMLINUX_VALIDATION}" ]; then
objtoolopt="check"
if [ -z "${CONFIG_FRAME_POINTER}" ]; then
objtoolopt="${objtoolopt} --no-fp"
fi
if [ -n "${CONFIG_GCOV_KERNEL}" ]; then
objtoolopt="${objtoolopt} --no-unreachable"
fi
if [ -n "${CONFIG_RETPOLINE}" ]; then
objtoolopt="${objtoolopt} --retpoline"
fi
if [ -n "${CONFIG_X86_SMAP}" ]; then
objtoolopt="${objtoolopt} --uaccess"
fi
info OBJTOOL ${1}
tools/objtool/objtool ${objtoolopt} ${1}
fi
}
# Link of vmlinux
# ${1} - output file
# ${2}, ${3}, ... - optional extra .o files
......@@ -251,6 +274,7 @@ ${MAKE} -f "${srctree}/scripts/Makefile.build" obj=init need-builtin=1
#link vmlinux.o
info LD vmlinux.o
modpost_link vmlinux.o
objtool_link vmlinux.o
# modpost vmlinux.o to check for section mismatches
${MAKE} -f "${srctree}/scripts/Makefile.modpost" MODPOST_VMLINUX=1
......
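With CONFIG_VMLINUX_VALIDATION=y the net effect is one extra link-time step; for a config with retpolines and SMAP enabled and frame pointers disabled, the function above ends up running roughly:
	tools/objtool/objtool check --no-fp --retpoline --uaccess vmlinux.o
(The exact flag set follows the .config, as the shell conditionals show.)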
......@@ -58,8 +58,7 @@
#define ORC_TYPE_CALL 0
#define ORC_TYPE_REGS 1
#define ORC_TYPE_REGS_IRET 2
#define UNWIND_HINT_TYPE_SAVE 3
#define UNWIND_HINT_TYPE_RESTORE 4
#define UNWIND_HINT_TYPE_RET_OFFSET 3
#ifndef __ASSEMBLY__
/*
......
objtool-y += arch/$(SRCARCH)/
objtool-y += weak.o
objtool-$(SUBCMD_CHECK) += check.o
objtool-$(SUBCMD_CHECK) += special.o
objtool-$(SUBCMD_ORC) += check.o
objtool-$(SUBCMD_ORC) += orc_gen.o
objtool-$(SUBCMD_ORC) += orc_dump.o
objtool-y += builtin-check.o
objtool-y += builtin-orc.o
objtool-y += check.o
objtool-y += orc_gen.o
objtool-y += orc_dump.o
objtool-y += elf.o
objtool-y += special.o
objtool-y += objtool.o
objtool-y += libstring.o
......
......@@ -289,6 +289,47 @@ they mean, and suggestions for how to fix them.
might be corrupt due to a gcc bug. For more details, see:
https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70646
9. file.o: warning: objtool: funcA() call to funcB() with UACCESS enabled
This means that an unexpected call to a non-whitelisted function exists
outside of arch-specific guards.
X86: SMAP (stac/clac): __uaccess_begin()/__uaccess_end()
ARM: PAN: uaccess_enable()/uaccess_disable()
These functions should be called to denote a minimal critical section around
access to __user variables. See also: https://lwn.net/Articles/517475/
The intention of the warning is to prevent calls to funcB() from eventually
calling schedule(), potentially leaking the AC flag state and not
restoring it correctly.
It also helps verify that there are no unexpected calls to funcB() which may
access user space pages with protections against doing so disabled.
To fix, either:
1) remove explicit calls to funcB() from funcA().
2) add the correct guards before and after calls to low level functions like
__get_user_size()/__put_user_size().
3) add funcB to uaccess_safe_builtin whitelist in tools/objtool/check.c, if
funcB obviously does not call schedule(), and is marked notrace (since
function tracing inserts additional calls, which is not obvious from the
sources).
10. file.o: warning: func()+0x5c: alternative modifies stack
This means that an alternative includes instructions that modify the
stack. The problem is that there is only one ORC unwind table, which means
that the ORC unwind entries must be valid for each of the alternatives.
The easiest way to enforce this is to ensure alternatives do not contain
any ORC entries, which in turn implies the above constraint.
11. file.o: warning: unannotated intra-function call
This warning means that a direct call is done to a destination which
is not at the beginning of a function. If this is a legit call, you
can remove this warning by putting the ANNOTATE_INTRA_FUNCTION_CALL
directive right before the call.
If the error doesn't seem to make sense, it could be a bug in objtool.
Feel free to ask the objtool maintainer for help.
......
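As a rough sketch of fix option 2 for warning 9 above (illustrative snippet, not from this commit; it uses the existing user_access_begin()/unsafe_put_user()/user_access_end() helpers to keep the STAC/CLAC window minimal and call-free):
	if (!user_access_begin(uptr, sizeof(*uptr)))
		return -EFAULT;
	unsafe_put_user(val, uptr, Efault);	/* no calls to non-whitelisted functions here */
	user_access_end();
	return 0;
Efault:
	user_access_end();
	return -EFAULT;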
......@@ -35,7 +35,8 @@ all: $(OBJTOOL)
INCLUDES := -I$(srctree)/tools/include \
-I$(srctree)/tools/arch/$(HOSTARCH)/include/uapi \
-I$(srctree)/tools/arch/$(SRCARCH)/include
-I$(srctree)/tools/arch/$(SRCARCH)/include \
-I$(srctree)/tools/objtool/arch/$(SRCARCH)/include
WARNINGS := $(EXTRA_WARNINGS) -Wno-switch-default -Wno-switch-enum -Wno-packed
CFLAGS := -Werror $(WARNINGS) $(KBUILD_HOSTCFLAGS) -g $(INCLUDES) $(LIBELF_FLAGS)
LDFLAGS += $(LIBELF_LIBS) $(LIBSUBCMD) $(KBUILD_HOSTLDFLAGS)
......@@ -45,14 +46,24 @@ elfshdr := $(shell echo '$(pound)include <libelf.h>' | $(CC) $(CFLAGS) -x c -E -
CFLAGS += $(if $(elfshdr),,-DLIBELF_USE_DEPRECATED)
AWK = awk
SUBCMD_CHECK := n
SUBCMD_ORC := n
ifeq ($(SRCARCH),x86)
SUBCMD_CHECK := y
SUBCMD_ORC := y
endif
export SUBCMD_CHECK SUBCMD_ORC
export srctree OUTPUT CFLAGS SRCARCH AWK
include $(srctree)/tools/build/Makefile.include
$(OBJTOOL_IN): fixdep FORCE
@$(CONFIG_SHELL) ./sync-check.sh
@$(MAKE) $(build)=objtool
$(OBJTOOL): $(LIBSUBCMD) $(OBJTOOL_IN)
@$(CONFIG_SHELL) ./sync-check.sh
$(QUIET_LINK)$(CC) $(OBJTOOL_IN) $(LDFLAGS) -o $@
......
......@@ -8,9 +8,11 @@
#include <stdbool.h>
#include <linux/list.h>
#include "elf.h"
#include "objtool.h"
#include "cfi.h"
#include <asm/orc_types.h>
enum insn_type {
INSN_JUMP_CONDITIONAL,
INSN_JUMP_UNCONDITIONAL,
......@@ -20,7 +22,6 @@ enum insn_type {
INSN_CALL_DYNAMIC,
INSN_RETURN,
INSN_CONTEXT_SWITCH,
INSN_STACK,
INSN_BUG,
INSN_NOP,
INSN_STAC,
......@@ -64,15 +65,23 @@ struct op_src {
struct stack_op {
struct op_dest dest;
struct op_src src;
struct list_head list;
};
void arch_initial_func_cfi_state(struct cfi_state *state);
struct instruction;
void arch_initial_func_cfi_state(struct cfi_init_state *state);
int arch_decode_instruction(struct elf *elf, struct section *sec,
int arch_decode_instruction(const struct elf *elf, const struct section *sec,
unsigned long offset, unsigned int maxlen,
unsigned int *len, enum insn_type *type,
unsigned long *immediate, struct stack_op *op);
unsigned long *immediate,
struct list_head *ops_list);
bool arch_callee_saved_reg(unsigned char reg);
unsigned long arch_jump_destination(struct instruction *insn);
unsigned long arch_dest_rela_offset(int addend);
#endif /* _ARCH_H */
......@@ -11,6 +11,7 @@
#include "../../../arch/x86/lib/inat.c"
#include "../../../arch/x86/lib/insn.c"
#include "../../check.h"
#include "../../elf.h"
#include "../../arch.h"
#include "../../warn.h"
......@@ -26,7 +27,7 @@ static unsigned char op_to_cfi_reg[][2] = {
{CFI_DI, CFI_R15},
};
static int is_x86_64(struct elf *elf)
static int is_x86_64(const struct elf *elf)
{
switch (elf->ehdr.e_machine) {
case EM_X86_64:
......@@ -66,16 +67,34 @@ bool arch_callee_saved_reg(unsigned char reg)
}
}
int arch_decode_instruction(struct elf *elf, struct section *sec,
unsigned long arch_dest_rela_offset(int addend)
{
return addend + 4;
}
unsigned long arch_jump_destination(struct instruction *insn)
{
return insn->offset + insn->len + insn->immediate;
}
#define ADD_OP(op) \
if (!(op = calloc(1, sizeof(*op)))) \
return -1; \
else for (list_add_tail(&op->list, ops_list); op; op = NULL)
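/*
 * Note on the ADD_OP() idiom above: it allocates a stack_op, queues it on
 * ops_list, and the "else for (...; op; op = NULL)" construct makes the
 * block following an ADD_OP(op) invocation execute exactly once with 'op'
 * in scope, so the decoder below can fill in the op like a statement.
 */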
int arch_decode_instruction(const struct elf *elf, const struct section *sec,
unsigned long offset, unsigned int maxlen,
unsigned int *len, enum insn_type *type,
unsigned long *immediate, struct stack_op *op)
unsigned long *immediate,
struct list_head *ops_list)
{
struct insn insn;
int x86_64, sign;
unsigned char op1, op2, rex = 0, rex_b = 0, rex_r = 0, rex_w = 0,
rex_x = 0, modrm = 0, modrm_mod = 0, modrm_rm = 0,
modrm_reg = 0, sib = 0;
struct stack_op *op = NULL;
struct symbol *sym;
x86_64 = is_x86_64(elf);
if (x86_64 == -1)
......@@ -85,7 +104,7 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
insn_get_length(&insn);
if (!insn_complete(&insn)) {
WARN_FUNC("can't decode instruction", sec, offset);
WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
return -1;
}
......@@ -123,40 +142,44 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
if (rex_w && !rex_b && modrm_mod == 3 && modrm_rm == 4) {
/* add/sub reg, %rsp */
*type = INSN_STACK;
op->src.type = OP_SRC_ADD;
op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
ADD_OP(op) {
op->src.type = OP_SRC_ADD;
op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
}
}
break;
case 0x50 ... 0x57:
/* push reg */
*type = INSN_STACK;
op->src.type = OP_SRC_REG;
op->src.reg = op_to_cfi_reg[op1 & 0x7][rex_b];
op->dest.type = OP_DEST_PUSH;
ADD_OP(op) {
op->src.type = OP_SRC_REG;
op->src.reg = op_to_cfi_reg[op1 & 0x7][rex_b];
op->dest.type = OP_DEST_PUSH;
}
break;
case 0x58 ... 0x5f:
/* pop reg */
*type = INSN_STACK;
op->src.type = OP_SRC_POP;
op->dest.type = OP_DEST_REG;
op->dest.reg = op_to_cfi_reg[op1 & 0x7][rex_b];
ADD_OP(op) {
op->src.type = OP_SRC_POP;
op->dest.type = OP_DEST_REG;
op->dest.reg = op_to_cfi_reg[op1 & 0x7][rex_b];
}
break;
case 0x68:
case 0x6a:
/* push immediate */
*type = INSN_STACK;
op->src.type = OP_SRC_CONST;
op->dest.type = OP_DEST_PUSH;
ADD_OP(op) {
op->src.type = OP_SRC_CONST;
op->dest.type = OP_DEST_PUSH;
}
break;
case 0x70 ... 0x7f:
......@@ -170,12 +193,13 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
if (modrm == 0xe4) {
/* and imm, %rsp */
*type = INSN_STACK;
op->src.type = OP_SRC_AND;
op->src.reg = CFI_SP;
op->src.offset = insn.immediate.value;
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
ADD_OP(op) {
op->src.type = OP_SRC_AND;
op->src.reg = CFI_SP;
op->src.offset = insn.immediate.value;
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
}
break;
}
......@@ -187,34 +211,37 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
break;
/* add/sub imm, %rsp */
*type = INSN_STACK;
op->src.type = OP_SRC_ADD;
op->src.reg = CFI_SP;
op->src.offset = insn.immediate.value * sign;
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
ADD_OP(op) {
op->src.type = OP_SRC_ADD;
op->src.reg = CFI_SP;
op->src.offset = insn.immediate.value * sign;
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
}
break;
case 0x89:
if (rex_w && !rex_r && modrm_mod == 3 && modrm_reg == 4) {
/* mov %rsp, reg */
*type = INSN_STACK;
op->src.type = OP_SRC_REG;
op->src.reg = CFI_SP;
op->dest.type = OP_DEST_REG;
op->dest.reg = op_to_cfi_reg[modrm_rm][rex_b];
ADD_OP(op) {
op->src.type = OP_SRC_REG;
op->src.reg = CFI_SP;
op->dest.type = OP_DEST_REG;
op->dest.reg = op_to_cfi_reg[modrm_rm][rex_b];
}
break;
}
if (rex_w && !rex_b && modrm_mod == 3 && modrm_rm == 4) {
/* mov reg, %rsp */
*type = INSN_STACK;
op->src.type = OP_SRC_REG;
op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
ADD_OP(op) {
op->src.type = OP_SRC_REG;
op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
}
break;
}
......@@ -224,22 +251,24 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
(modrm_mod == 1 || modrm_mod == 2) && modrm_rm == 5) {
/* mov reg, disp(%rbp) */
*type = INSN_STACK;
op->src.type = OP_SRC_REG;
op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
op->dest.type = OP_DEST_REG_INDIRECT;
op->dest.reg = CFI_BP;
op->dest.offset = insn.displacement.value;
ADD_OP(op) {
op->src.type = OP_SRC_REG;
op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
op->dest.type = OP_DEST_REG_INDIRECT;
op->dest.reg = CFI_BP;
op->dest.offset = insn.displacement.value;
}
} else if (rex_w && !rex_b && modrm_rm == 4 && sib == 0x24) {
/* mov reg, disp(%rsp) */
*type = INSN_STACK;
op->src.type = OP_SRC_REG;
op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
op->dest.type = OP_DEST_REG_INDIRECT;
op->dest.reg = CFI_SP;
op->dest.offset = insn.displacement.value;
ADD_OP(op) {
op->src.type = OP_SRC_REG;
op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
op->dest.type = OP_DEST_REG_INDIRECT;
op->dest.reg = CFI_SP;
op->dest.offset = insn.displacement.value;
}
}
break;
......@@ -248,23 +277,25 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
if (rex_w && !rex_b && modrm_mod == 1 && modrm_rm == 5) {
/* mov disp(%rbp), reg */
*type = INSN_STACK;
op->src.type = OP_SRC_REG_INDIRECT;
op->src.reg = CFI_BP;
op->src.offset = insn.displacement.value;
op->dest.type = OP_DEST_REG;
op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
ADD_OP(op) {
op->src.type = OP_SRC_REG_INDIRECT;
op->src.reg = CFI_BP;
op->src.offset = insn.displacement.value;
op->dest.type = OP_DEST_REG;
op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
}
} else if (rex_w && !rex_b && sib == 0x24 &&
modrm_mod != 3 && modrm_rm == 4) {
/* mov disp(%rsp), reg */
*type = INSN_STACK;
op->src.type = OP_SRC_REG_INDIRECT;
op->src.reg = CFI_SP;
op->src.offset = insn.displacement.value;
op->dest.type = OP_DEST_REG;
op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
ADD_OP(op) {
op->src.type = OP_SRC_REG_INDIRECT;
op->src.reg = CFI_SP;
op->src.offset = insn.displacement.value;
op->dest.type = OP_DEST_REG;
op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
}
}
break;
......@@ -272,28 +303,30 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
case 0x8d:
if (sib == 0x24 && rex_w && !rex_b && !rex_x) {
*type = INSN_STACK;
if (!insn.displacement.value) {
/* lea (%rsp), reg */
op->src.type = OP_SRC_REG;
} else {
/* lea disp(%rsp), reg */
op->src.type = OP_SRC_ADD;
op->src.offset = insn.displacement.value;
ADD_OP(op) {
if (!insn.displacement.value) {
/* lea (%rsp), reg */
op->src.type = OP_SRC_REG;
} else {
/* lea disp(%rsp), reg */
op->src.type = OP_SRC_ADD;
op->src.offset = insn.displacement.value;
}
op->src.reg = CFI_SP;
op->dest.type = OP_DEST_REG;
op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
}
op->src.reg = CFI_SP;
op->dest.type = OP_DEST_REG;
op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
} else if (rex == 0x48 && modrm == 0x65) {
/* lea disp(%rbp), %rsp */
*type = INSN_STACK;
op->src.type = OP_SRC_ADD;
op->src.reg = CFI_BP;
op->src.offset = insn.displacement.value;
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
ADD_OP(op) {
op->src.type = OP_SRC_ADD;
op->src.reg = CFI_BP;
op->src.offset = insn.displacement.value;
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
}
} else if (rex == 0x49 && modrm == 0x62 &&
insn.displacement.value == -8) {
......@@ -304,12 +337,13 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
* Restoring rsp back to its original value after a
* stack realignment.
*/
*type = INSN_STACK;
op->src.type = OP_SRC_ADD;
op->src.reg = CFI_R10;
op->src.offset = -8;
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
ADD_OP(op) {
op->src.type = OP_SRC_ADD;
op->src.reg = CFI_R10;
op->src.offset = -8;
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
}
} else if (rex == 0x49 && modrm == 0x65 &&
insn.displacement.value == -16) {
......@@ -320,21 +354,23 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
* Restoring rsp back to its original value after a
* stack realignment.
*/
*type = INSN_STACK;
op->src.type = OP_SRC_ADD;
op->src.reg = CFI_R13;
op->src.offset = -16;
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
ADD_OP(op) {
op->src.type = OP_SRC_ADD;
op->src.reg = CFI_R13;
op->src.offset = -16;
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
}
}
break;
case 0x8f:
/* pop to mem */
*type = INSN_STACK;
op->src.type = OP_SRC_POP;
op->dest.type = OP_DEST_MEM;
ADD_OP(op) {
op->src.type = OP_SRC_POP;
op->dest.type = OP_DEST_MEM;
}
break;
case 0x90:
......@@ -343,16 +379,18 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
case 0x9c:
/* pushf */
*type = INSN_STACK;
op->src.type = OP_SRC_CONST;
op->dest.type = OP_DEST_PUSHF;
ADD_OP(op) {
op->src.type = OP_SRC_CONST;
op->dest.type = OP_DEST_PUSHF;
}
break;
case 0x9d:
/* popf */
*type = INSN_STACK;
op->src.type = OP_SRC_POPF;
op->dest.type = OP_DEST_MEM;
ADD_OP(op) {
op->src.type = OP_SRC_POPF;
op->dest.type = OP_DEST_MEM;
}
break;
case 0x0f:
......@@ -387,16 +425,18 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
} else if (op2 == 0xa0 || op2 == 0xa8) {
/* push fs/gs */
*type = INSN_STACK;
op->src.type = OP_SRC_CONST;
op->dest.type = OP_DEST_PUSH;
ADD_OP(op) {
op->src.type = OP_SRC_CONST;
op->dest.type = OP_DEST_PUSH;
}
} else if (op2 == 0xa1 || op2 == 0xa9) {
/* pop fs/gs */
*type = INSN_STACK;
op->src.type = OP_SRC_POP;
op->dest.type = OP_DEST_MEM;
ADD_OP(op) {
op->src.type = OP_SRC_POP;
op->dest.type = OP_DEST_MEM;
}
}
break;
......@@ -409,8 +449,8 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
* mov bp, sp
* pop bp
*/
*type = INSN_STACK;
op->dest.type = OP_DEST_LEAVE;
ADD_OP(op)
op->dest.type = OP_DEST_LEAVE;
break;
......@@ -429,14 +469,41 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
*type = INSN_RETURN;
break;
case 0xcf: /* iret */
/*
* Handle sync_core(), which has an IRET to self.
* All other IRET are in STT_NONE entry code.
*/
sym = find_symbol_containing(sec, offset);
if (sym && sym->type == STT_FUNC) {
ADD_OP(op) {
/* add $40, %rsp */
op->src.type = OP_SRC_ADD;
op->src.reg = CFI_SP;
op->src.offset = 5*8;
op->dest.type = OP_DEST_REG;
op->dest.reg = CFI_SP;
}
break;
}
/* fallthrough */
case 0xca: /* retf */
case 0xcb: /* retf */
case 0xcf: /* iret */
*type = INSN_CONTEXT_SWITCH;
break;
case 0xe8:
*type = INSN_CALL;
/*
* For the impact on the stack, a CALL behaves like
* a PUSH of an immediate value (the return address).
*/
ADD_OP(op) {
op->src.type = OP_SRC_CONST;
op->dest.type = OP_DEST_PUSH;
}
break;
case 0xfc:
......@@ -464,9 +531,10 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
else if (modrm_reg == 6) {
/* push from mem */
*type = INSN_STACK;
op->src.type = OP_SRC_CONST;
op->dest.type = OP_DEST_PUSH;
ADD_OP(op) {
op->src.type = OP_SRC_CONST;
op->dest.type = OP_DEST_PUSH;
}
}
break;
......@@ -480,7 +548,7 @@ int arch_decode_instruction(struct elf *elf, struct section *sec,
return 0;
}
void arch_initial_func_cfi_state(struct cfi_state *state)
void arch_initial_func_cfi_state(struct cfi_init_state *state)
{
int i;
......
/* SPDX-License-Identifier: GPL-2.0-or-later */
#ifndef _OBJTOOL_CFI_REGS_H
#define _OBJTOOL_CFI_REGS_H
#define CFI_AX 0
#define CFI_DX 1
#define CFI_CX 2
#define CFI_BX 3
#define CFI_SI 4
#define CFI_DI 5
#define CFI_BP 6
#define CFI_SP 7
#define CFI_R8 8
#define CFI_R9 9
#define CFI_R10 10
#define CFI_R11 11
#define CFI_R12 12
#define CFI_R13 13
#define CFI_R14 14
#define CFI_R15 15
#define CFI_RA 16
#define CFI_NUM_REGS 17
#endif /* _OBJTOOL_CFI_REGS_H */
......@@ -14,10 +14,11 @@
*/
#include <subcmd/parse-options.h>
#include <string.h>
#include "builtin.h"
#include "check.h"
#include "objtool.h"
bool no_fp, no_unreachable, retpoline, module, backtrace, uaccess, stats;
bool no_fp, no_unreachable, retpoline, module, backtrace, uaccess, stats, validate_dup, vmlinux;
static const char * const check_usage[] = {
"objtool check [<options>] file.o",
......@@ -32,12 +33,14 @@ const struct option check_options[] = {
OPT_BOOLEAN('b', "backtrace", &backtrace, "unwind on error"),
OPT_BOOLEAN('a', "uaccess", &uaccess, "enable uaccess checking"),
OPT_BOOLEAN('s', "stats", &stats, "print statistics"),
OPT_BOOLEAN('d', "duplicate", &validate_dup, "duplicate validation for vmlinux.o"),
OPT_BOOLEAN('l', "vmlinux", &vmlinux, "vmlinux.o validation"),
OPT_END(),
};
int cmd_check(int argc, const char **argv)
{
const char *objname;
const char *objname, *s;
argc = parse_options(argc, argv, check_options, check_usage, 0);
......@@ -46,5 +49,9 @@ int cmd_check(int argc, const char **argv)
objname = argv[0];
s = strstr(objname, "vmlinux.o");
if (s && !s[9])
vmlinux = true;
return check(objname, false);
}
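Usage-wise, the new options added above can be given either way (sketch):
	tools/objtool/objtool check -d -l file.o
	tools/objtool/objtool check --duplicate --vmlinux file.o
and, per the strstr() check, an object whose name ends in "vmlinux.o" now gets --vmlinux behaviour implicitly.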
......@@ -14,8 +14,7 @@
#include <string.h>
#include "builtin.h"
#include "check.h"
#include "objtool.h"
static const char *orc_usage[] = {
"objtool orc generate [<options>] file.o",
......
......@@ -8,7 +8,7 @@
#include <subcmd/parse-options.h>
extern const struct option check_options[];
extern bool no_fp, no_unreachable, retpoline, module, backtrace, uaccess, stats;
extern bool no_fp, no_unreachable, retpoline, module, backtrace, uaccess, stats, validate_dup, vmlinux;
extern int cmd_check(int argc, const char **argv);
extern int cmd_orc(int argc, const char **argv);
......
......@@ -6,38 +6,33 @@
#ifndef _OBJTOOL_CFI_H
#define _OBJTOOL_CFI_H
#include "cfi_regs.h"
#define CFI_UNDEFINED -1
#define CFI_CFA -2
#define CFI_SP_INDIRECT -3
#define CFI_BP_INDIRECT -4
#define CFI_AX 0
#define CFI_DX 1
#define CFI_CX 2
#define CFI_BX 3
#define CFI_SI 4
#define CFI_DI 5
#define CFI_BP 6
#define CFI_SP 7
#define CFI_R8 8
#define CFI_R9 9
#define CFI_R10 10
#define CFI_R11 11
#define CFI_R12 12
#define CFI_R13 13
#define CFI_R14 14
#define CFI_R15 15
#define CFI_RA 16
#define CFI_NUM_REGS 17
struct cfi_reg {
int base;
int offset;
};
struct cfi_state {
struct cfi_init_state {
struct cfi_reg regs[CFI_NUM_REGS];
struct cfi_reg cfa;
};
struct cfi_state {
struct cfi_reg regs[CFI_NUM_REGS];
struct cfi_reg vals[CFI_NUM_REGS];
struct cfi_reg cfa;
int stack_size;
int drap_reg, drap_offset;
unsigned char type;
bool bp_scratch;
bool drap;
bool end;
};
#endif /* _OBJTOOL_CFI_H */
......@@ -7,10 +7,10 @@
#include <stdlib.h>
#include "builtin.h"
#include "cfi.h"
#include "arch.h"
#include "check.h"
#include "elf.h"
#include "special.h"
#include "arch.h"
#include "warn.h"
#include <linux/hashtable.h>
......@@ -27,16 +27,17 @@ struct alternative {
};
const char *objname;
struct cfi_state initial_func_cfi;
struct cfi_init_state initial_func_cfi;
struct instruction *find_insn(struct objtool_file *file,
struct section *sec, unsigned long offset)
{
struct instruction *insn;
hash_for_each_possible(file->insn_hash, insn, hash, offset)
hash_for_each_possible(file->insn_hash, insn, hash, sec_offset_hash(sec, offset)) {
if (insn->sec == sec && insn->offset == offset)
return insn;
}
return NULL;
}
......@@ -226,18 +227,31 @@ static bool dead_end_function(struct objtool_file *file, struct symbol *func)
return __dead_end_function(file, func, 0);
}
static void clear_insn_state(struct insn_state *state)
static void init_cfi_state(struct cfi_state *cfi)
{
int i;
memset(state, 0, sizeof(*state));
state->cfa.base = CFI_UNDEFINED;
for (i = 0; i < CFI_NUM_REGS; i++) {
state->regs[i].base = CFI_UNDEFINED;
state->vals[i].base = CFI_UNDEFINED;
cfi->regs[i].base = CFI_UNDEFINED;
cfi->vals[i].base = CFI_UNDEFINED;
}
state->drap_reg = CFI_UNDEFINED;
state->drap_offset = -1;
cfi->cfa.base = CFI_UNDEFINED;
cfi->drap_reg = CFI_UNDEFINED;
cfi->drap_offset = -1;
}
static void init_insn_state(struct insn_state *state, struct section *sec)
{
memset(state, 0, sizeof(*state));
init_cfi_state(&state->cfi);
/*
* We need the full vmlinux for noinstr validation, otherwise we can
* not correctly determine insn->call_dest->sec (external symbols do
* not have a section).
*/
if (vmlinux && sec)
state->noinstr = sec->noinstr;
}
/*
......@@ -263,6 +277,10 @@ static int decode_instructions(struct objtool_file *file)
strncmp(sec->name, ".discard.", 9))
sec->text = true;
if (!strcmp(sec->name, ".noinstr.text") ||
!strcmp(sec->name, ".entry.text"))
sec->noinstr = true;
for (offset = 0; offset < sec->len; offset += insn->len) {
insn = malloc(sizeof(*insn));
if (!insn) {
......@@ -271,7 +289,8 @@ static int decode_instructions(struct objtool_file *file)
}
memset(insn, 0, sizeof(*insn));
INIT_LIST_HEAD(&insn->alts);
clear_insn_state(&insn->state);
INIT_LIST_HEAD(&insn->stack_ops);
init_cfi_state(&insn->cfi);
insn->sec = sec;
insn->offset = offset;
......@@ -280,11 +299,11 @@ static int decode_instructions(struct objtool_file *file)
sec->len - offset,
&insn->len, &insn->type,
&insn->immediate,
&insn->stack_op);
&insn->stack_ops);
if (ret)
goto err;
hash_add(file->insn_hash, &insn->hash, insn->offset);
hash_add(file->insn_hash, &insn->hash, sec_offset_hash(sec, insn->offset));
list_add_tail(&insn->list, &file->insn_list);
nr_insns++;
}
......@@ -314,6 +333,19 @@ static int decode_instructions(struct objtool_file *file)
return ret;
}
static struct instruction *find_last_insn(struct objtool_file *file,
struct section *sec)
{
struct instruction *insn = NULL;
unsigned int offset;
unsigned int end = (sec->len > 10) ? sec->len - 10 : 0;
for (offset = sec->len - 1; offset >= end && !insn; offset--)
insn = find_insn(file, sec, offset);
return insn;
}
/*
* Mark "ud2" instructions and manually annotated dead ends.
*/
......@@ -322,7 +354,6 @@ static int add_dead_ends(struct objtool_file *file)
struct section *sec;
struct rela *rela;
struct instruction *insn;
bool found;
/*
* By default, "ud2" is a dead end unless otherwise annotated, because
......@@ -348,15 +379,8 @@ static int add_dead_ends(struct objtool_file *file)
if (insn)
insn = list_prev_entry(insn, list);
else if (rela->addend == rela->sym->sec->len) {
found = false;
list_for_each_entry_reverse(insn, &file->insn_list, list) {
if (insn->sec == rela->sym->sec) {
found = true;
break;
}
}
if (!found) {
insn = find_last_insn(file, rela->sym->sec);
if (!insn) {
WARN("can't find unreachable insn at %s+0x%x",
rela->sym->sec->name, rela->addend);
return -1;
......@@ -390,15 +414,8 @@ static int add_dead_ends(struct objtool_file *file)
if (insn)
insn = list_prev_entry(insn, list);
else if (rela->addend == rela->sym->sec->len) {
found = false;
list_for_each_entry_reverse(insn, &file->insn_list, list) {
if (insn->sec == rela->sym->sec) {
found = true;
break;
}
}
if (!found) {
insn = find_last_insn(file, rela->sym->sec);
if (!insn) {
WARN("can't find reachable insn at %s+0x%x",
rela->sym->sec->name, rela->addend);
return -1;
......@@ -490,6 +507,7 @@ static const char *uaccess_safe_builtin[] = {
"__asan_report_store16_noabort",
/* KCOV */
"write_comp_data",
"check_kcov_mode",
"__sanitizer_cov_trace_pc",
"__sanitizer_cov_trace_const_cmp1",
"__sanitizer_cov_trace_const_cmp2",
......@@ -585,13 +603,14 @@ static int add_jump_destinations(struct objtool_file *file)
insn->offset, insn->len);
if (!rela) {
dest_sec = insn->sec;
dest_off = insn->offset + insn->len + insn->immediate;
dest_off = arch_jump_destination(insn);
} else if (rela->sym->type == STT_SECTION) {
dest_sec = rela->sym->sec;
dest_off = rela->addend + 4;
dest_off = arch_dest_rela_offset(rela->addend);
} else if (rela->sym->sec->idx) {
dest_sec = rela->sym->sec;
dest_off = rela->sym->sym.st_value + rela->addend + 4;
dest_off = rela->sym->sym.st_value +
arch_dest_rela_offset(rela->addend);
} else if (strstr(rela->sym->name, "_indirect_thunk_")) {
/*
* Retpoline jumps are really dynamic jumps in
......@@ -665,6 +684,16 @@ static int add_jump_destinations(struct objtool_file *file)
return 0;
}
static void remove_insn_ops(struct instruction *insn)
{
struct stack_op *op, *tmp;
list_for_each_entry_safe(op, tmp, &insn->stack_ops, list) {
list_del(&op->list);
free(op);
}
}
/*
* Find the destination instructions for all calls.
*/
......@@ -681,7 +710,7 @@ static int add_call_destinations(struct objtool_file *file)
rela = find_rela_by_dest_range(file->elf, insn->sec,
insn->offset, insn->len);
if (!rela) {
dest_off = insn->offset + insn->len + insn->immediate;
dest_off = arch_jump_destination(insn);
insn->call_dest = find_func_by_offset(insn->sec, dest_off);
if (!insn->call_dest)
insn->call_dest = find_symbol_by_offset(insn->sec, dest_off);
......@@ -690,10 +719,7 @@ static int add_call_destinations(struct objtool_file *file)
continue;
if (!insn->call_dest) {
WARN_FUNC("unsupported intra-function call",
insn->sec, insn->offset);
if (retpoline)
WARN("If this is a retpoline, please patch it in with alternatives and annotate it with ANNOTATE_NOSPEC_ALTERNATIVE.");
WARN_FUNC("unannotated intra-function call", insn->sec, insn->offset);
return -1;
}
......@@ -704,17 +730,27 @@ static int add_call_destinations(struct objtool_file *file)
}
} else if (rela->sym->type == STT_SECTION) {
dest_off = arch_dest_rela_offset(rela->addend);
insn->call_dest = find_func_by_offset(rela->sym->sec,
rela->addend+4);
dest_off);
if (!insn->call_dest) {
WARN_FUNC("can't find call dest symbol at %s+0x%x",
WARN_FUNC("can't find call dest symbol at %s+0x%lx",
insn->sec, insn->offset,
rela->sym->sec->name,
rela->addend + 4);
dest_off);
return -1;
}
} else
insn->call_dest = rela->sym;
/*
* Whatever stack impact regular CALLs have, should be undone
* by the RETURN of the called function.
*
* Annotated intra-function calls retain the stack_ops but
* are converted to JUMP, see read_intra_function_calls().
*/
remove_insn_ops(insn);
}
return 0;
......@@ -742,7 +778,9 @@ static int handle_group_alt(struct objtool_file *file,
struct instruction *orig_insn,
struct instruction **new_insn)
{
static unsigned int alt_group_next_index = 1;
struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
unsigned int alt_group = alt_group_next_index++;
unsigned long dest_off;
last_orig_insn = NULL;
......@@ -751,7 +789,7 @@ static int handle_group_alt(struct objtool_file *file,
if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
break;
insn->alt_group = true;
insn->alt_group = alt_group;
last_orig_insn = insn;
}
......@@ -763,7 +801,8 @@ static int handle_group_alt(struct objtool_file *file,
}
memset(fake_jump, 0, sizeof(*fake_jump));
INIT_LIST_HEAD(&fake_jump->alts);
clear_insn_state(&fake_jump->state);
INIT_LIST_HEAD(&fake_jump->stack_ops);
init_cfi_state(&fake_jump->cfi);
fake_jump->sec = special_alt->new_sec;
fake_jump->offset = FAKE_JUMP_OFFSET;
......@@ -784,6 +823,7 @@ static int handle_group_alt(struct objtool_file *file,
}
last_new_insn = NULL;
alt_group = alt_group_next_index++;
insn = *new_insn;
sec_for_each_insn_from(file, insn) {
if (insn->offset >= special_alt->new_off + special_alt->new_len)
......@@ -793,6 +833,7 @@ static int handle_group_alt(struct objtool_file *file,
insn->ignore = orig_insn->ignore_alts;
insn->func = orig_insn->func;
insn->alt_group = alt_group;
/*
* Since alternative replacement code is copy/pasted by the
......@@ -821,7 +862,7 @@ static int handle_group_alt(struct objtool_file *file,
if (!insn->immediate)
continue;
dest_off = insn->offset + insn->len + insn->immediate;
dest_off = arch_jump_destination(insn);
if (dest_off == special_alt->new_off + special_alt->new_len) {
if (!fake_jump) {
WARN("%s: alternative jump to end of section",
......@@ -916,6 +957,12 @@ static int add_special_section_alts(struct objtool_file *file)
}
if (special_alt->group) {
if (!special_alt->orig_len) {
WARN_FUNC("empty alternative entry",
orig_insn->sec, orig_insn->offset);
continue;
}
ret = handle_group_alt(file, special_alt, orig_insn,
&new_insn);
if (ret)
......@@ -1253,15 +1300,10 @@ static int read_unwind_hints(struct objtool_file *file)
return -1;
}
cfa = &insn->state.cfa;
if (hint->type == UNWIND_HINT_TYPE_SAVE) {
insn->save = true;
continue;
cfa = &insn->cfi.cfa;
} else if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
insn->restore = true;
insn->hint = true;
if (hint->type == UNWIND_HINT_TYPE_RET_OFFSET) {
insn->ret_offset = hint->sp_offset;
continue;
}
......@@ -1299,8 +1341,8 @@ static int read_unwind_hints(struct objtool_file *file)
}
cfa->offset = hint->sp_offset;
insn->state.type = hint->type;
insn->state.end = hint->end;
insn->cfi.type = hint->type;
insn->cfi.end = hint->end;
}
return 0;
......@@ -1341,6 +1383,104 @@ static int read_retpoline_hints(struct objtool_file *file)
return 0;
}
static int read_instr_hints(struct objtool_file *file)
{
struct section *sec;
struct instruction *insn;
struct rela *rela;
sec = find_section_by_name(file->elf, ".rela.discard.instr_end");
if (!sec)
return 0;
list_for_each_entry(rela, &sec->rela_list, list) {
if (rela->sym->type != STT_SECTION) {
WARN("unexpected relocation symbol type in %s", sec->name);
return -1;
}
insn = find_insn(file, rela->sym->sec, rela->addend);
if (!insn) {
WARN("bad .discard.instr_end entry");
return -1;
}
insn->instr--;
}
sec = find_section_by_name(file->elf, ".rela.discard.instr_begin");
if (!sec)
return 0;
list_for_each_entry(rela, &sec->rela_list, list) {
if (rela->sym->type != STT_SECTION) {
WARN("unexpected relocation symbol type in %s", sec->name);
return -1;
}
insn = find_insn(file, rela->sym->sec, rela->addend);
if (!insn) {
WARN("bad .discard.instr_begin entry");
return -1;
}
insn->instr++;
}
return 0;
}
static int read_intra_function_calls(struct objtool_file *file)
{
struct instruction *insn;
struct section *sec;
struct rela *rela;
sec = find_section_by_name(file->elf, ".rela.discard.intra_function_calls");
if (!sec)
return 0;
list_for_each_entry(rela, &sec->rela_list, list) {
unsigned long dest_off;
if (rela->sym->type != STT_SECTION) {
WARN("unexpected relocation symbol type in %s",
sec->name);
return -1;
}
insn = find_insn(file, rela->sym->sec, rela->addend);
if (!insn) {
WARN("bad .discard.intra_function_call entry");
return -1;
}
if (insn->type != INSN_CALL) {
WARN_FUNC("intra_function_call not a direct call",
insn->sec, insn->offset);
return -1;
}
/*
* Treat intra-function CALLs as JMPs, but with a stack_op.
* See add_call_destinations(), which strips stack_ops from
* normal CALLs.
*/
insn->type = INSN_JUMP_UNCONDITIONAL;
dest_off = insn->offset + insn->len + insn->immediate;
insn->jump_dest = find_insn(file, insn->sec, dest_off);
if (!insn->jump_dest) {
WARN_FUNC("can't find call dest at %s+0x%lx",
insn->sec, insn->offset,
insn->sec->name, dest_off);
return -1;
}
}
return 0;
}
static void mark_rodata(struct objtool_file *file)
{
struct section *sec;
......@@ -1357,8 +1497,8 @@ static void mark_rodata(struct objtool_file *file)
* .rodata.str1.* sections are ignored; they don't contain jump tables.
*/
for_each_sec(file, sec) {
if ((!strncmp(sec->name, ".rodata", 7) && !strstr(sec->name, ".str1.")) ||
!strcmp(sec->name, C_JUMP_TABLE_SECTION)) {
if (!strncmp(sec->name, ".rodata", 7) &&
!strstr(sec->name, ".str1.")) {
sec->rodata = true;
found = true;
}
......@@ -1396,6 +1536,10 @@ static int decode_sections(struct objtool_file *file)
if (ret)
return ret;
ret = read_intra_function_calls(file);
if (ret)
return ret;
ret = add_call_destinations(file);
if (ret)
return ret;
......@@ -1412,12 +1556,16 @@ static int decode_sections(struct objtool_file *file)
if (ret)
return ret;
ret = read_instr_hints(file);
if (ret)
return ret;
return 0;
}
static bool is_fentry_call(struct instruction *insn)
{
if (insn->type == INSN_CALL &&
if (insn->type == INSN_CALL && insn->call_dest &&
insn->call_dest->type == STT_NOTYPE &&
!strcmp(insn->call_dest->name, "__fentry__"))
return true;
......@@ -1425,40 +1573,57 @@ static bool is_fentry_call(struct instruction *insn)
return false;
}
static bool has_modified_stack_frame(struct insn_state *state)
static bool has_modified_stack_frame(struct instruction *insn, struct insn_state *state)
{
u8 ret_offset = insn->ret_offset;
struct cfi_state *cfi = &state->cfi;
int i;
if (state->cfa.base != initial_func_cfi.cfa.base ||
state->cfa.offset != initial_func_cfi.cfa.offset ||
state->stack_size != initial_func_cfi.cfa.offset ||
state->drap)
if (cfi->cfa.base != initial_func_cfi.cfa.base || cfi->drap)
return true;
if (cfi->cfa.offset != initial_func_cfi.cfa.offset + ret_offset)
return true;
if (cfi->stack_size != initial_func_cfi.cfa.offset + ret_offset)
return true;
for (i = 0; i < CFI_NUM_REGS; i++)
if (state->regs[i].base != initial_func_cfi.regs[i].base ||
state->regs[i].offset != initial_func_cfi.regs[i].offset)
/*
* If there is a ret offset hint then don't check registers
* because a callee-saved register might have been pushed on
* the stack.
*/
if (ret_offset)
return false;
for (i = 0; i < CFI_NUM_REGS; i++) {
if (cfi->regs[i].base != initial_func_cfi.regs[i].base ||
cfi->regs[i].offset != initial_func_cfi.regs[i].offset)
return true;
}
return false;
}
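Editor's note: a worked example of the ret_offset relaxation, with assumed numbers. On x86-64 the initial CFA offset is 8 (just the return address), so an UNWIND_HINT_RET_OFFSET of 8 -- e.g. a helper that pushed one extra slot which its RET will consume -- makes a CFA offset and stack size of 16 acceptable, and the per-register checks are skipped entirely:

#include <stdio.h>

int main(void)
{
	int initial_cfa_offset = 8;	/* return address already on the stack */
	int ret_offset = 8;		/* one extra pushed slot, per the hint */

	printf("accepted cfa.offset = %d\n", initial_cfa_offset + ret_offset);	/* 16 */
	printf("accepted stack_size = %d\n", initial_cfa_offset + ret_offset);	/* 16 */
	return 0;
}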
static bool has_valid_stack_frame(struct insn_state *state)
{
if (state->cfa.base == CFI_BP && state->regs[CFI_BP].base == CFI_CFA &&
state->regs[CFI_BP].offset == -16)
struct cfi_state *cfi = &state->cfi;
if (cfi->cfa.base == CFI_BP && cfi->regs[CFI_BP].base == CFI_CFA &&
cfi->regs[CFI_BP].offset == -16)
return true;
if (state->drap && state->regs[CFI_BP].base == CFI_BP)
if (cfi->drap && cfi->regs[CFI_BP].base == CFI_BP)
return true;
return false;
}
static int update_insn_state_regs(struct instruction *insn, struct insn_state *state)
static int update_cfi_state_regs(struct instruction *insn,
struct cfi_state *cfi,
struct stack_op *op)
{
struct cfi_reg *cfa = &state->cfa;
struct stack_op *op = &insn->stack_op;
struct cfi_reg *cfa = &cfi->cfa;
if (cfa->base != CFI_SP && cfa->base != CFI_SP_INDIRECT)
return 0;
......@@ -1479,20 +1644,19 @@ static int update_insn_state_regs(struct instruction *insn, struct insn_state *s
return 0;
}
static void save_reg(struct insn_state *state, unsigned char reg, int base,
int offset)
static void save_reg(struct cfi_state *cfi, unsigned char reg, int base, int offset)
{
if (arch_callee_saved_reg(reg) &&
state->regs[reg].base == CFI_UNDEFINED) {
state->regs[reg].base = base;
state->regs[reg].offset = offset;
cfi->regs[reg].base == CFI_UNDEFINED) {
cfi->regs[reg].base = base;
cfi->regs[reg].offset = offset;
}
}
static void restore_reg(struct insn_state *state, unsigned char reg)
static void restore_reg(struct cfi_state *cfi, unsigned char reg)
{
state->regs[reg].base = CFI_UNDEFINED;
state->regs[reg].offset = 0;
cfi->regs[reg].base = initial_func_cfi.regs[reg].base;
cfi->regs[reg].offset = initial_func_cfi.regs[reg].offset;
}
/*
......@@ -1548,11 +1712,11 @@ static void restore_reg(struct insn_state *state, unsigned char reg)
* 41 5d pop %r13
* c3 retq
*/
static int update_insn_state(struct instruction *insn, struct insn_state *state)
static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi,
struct stack_op *op)
{
struct stack_op *op = &insn->stack_op;
struct cfi_reg *cfa = &state->cfa;
struct cfi_reg *regs = state->regs;
struct cfi_reg *cfa = &cfi->cfa;
struct cfi_reg *regs = cfi->regs;
/* stack operations don't make sense with an undefined CFA */
if (cfa->base == CFI_UNDEFINED) {
......@@ -1563,8 +1727,8 @@ static int update_insn_state(struct instruction *insn, struct insn_state *state)
return 0;
}
if (state->type == ORC_TYPE_REGS || state->type == ORC_TYPE_REGS_IRET)
return update_insn_state_regs(insn, state);
if (cfi->type == ORC_TYPE_REGS || cfi->type == ORC_TYPE_REGS_IRET)
return update_cfi_state_regs(insn, cfi, op);
switch (op->dest.type) {
......@@ -1579,16 +1743,16 @@ static int update_insn_state(struct instruction *insn, struct insn_state *state)
/* mov %rsp, %rbp */
cfa->base = op->dest.reg;
state->bp_scratch = false;
cfi->bp_scratch = false;
}
else if (op->src.reg == CFI_SP &&
op->dest.reg == CFI_BP && state->drap) {
op->dest.reg == CFI_BP && cfi->drap) {
/* drap: mov %rsp, %rbp */
regs[CFI_BP].base = CFI_BP;
regs[CFI_BP].offset = -state->stack_size;
state->bp_scratch = false;
regs[CFI_BP].offset = -cfi->stack_size;
cfi->bp_scratch = false;
}
else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
......@@ -1603,8 +1767,8 @@ static int update_insn_state(struct instruction *insn, struct insn_state *state)
* ...
* mov %rax, %rsp
*/
state->vals[op->dest.reg].base = CFI_CFA;
state->vals[op->dest.reg].offset = -state->stack_size;
cfi->vals[op->dest.reg].base = CFI_CFA;
cfi->vals[op->dest.reg].offset = -cfi->stack_size;
}
else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
......@@ -1615,14 +1779,14 @@ static int update_insn_state(struct instruction *insn, struct insn_state *state)
*
* Restore the original stack pointer (Clang).
*/
state->stack_size = -state->regs[CFI_BP].offset;
cfi->stack_size = -cfi->regs[CFI_BP].offset;
}
else if (op->dest.reg == cfa->base) {
/* mov %reg, %rsp */
if (cfa->base == CFI_SP &&
state->vals[op->src.reg].base == CFI_CFA) {
cfi->vals[op->src.reg].base == CFI_CFA) {
/*
* This is needed for the rare case
......@@ -1632,8 +1796,8 @@ static int update_insn_state(struct instruction *insn, struct insn_state *state)
* ...
* mov %rcx, %rsp
*/
cfa->offset = -state->vals[op->src.reg].offset;
state->stack_size = cfa->offset;
cfa->offset = -cfi->vals[op->src.reg].offset;
cfi->stack_size = cfa->offset;
} else {
cfa->base = CFI_UNDEFINED;
......@@ -1647,7 +1811,7 @@ static int update_insn_state(struct instruction *insn, struct insn_state *state)
if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {
/* add imm, %rsp */
state->stack_size -= op->src.offset;
cfi->stack_size -= op->src.offset;
if (cfa->base == CFI_SP)
cfa->offset -= op->src.offset;
break;
......@@ -1656,14 +1820,14 @@ static int update_insn_state(struct instruction *insn, struct insn_state *state)
if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
/* lea disp(%rbp), %rsp */
state->stack_size = -(op->src.offset + regs[CFI_BP].offset);
cfi->stack_size = -(op->src.offset + regs[CFI_BP].offset);
break;
}
if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
/* drap: lea disp(%rsp), %drap */
state->drap_reg = op->dest.reg;
cfi->drap_reg = op->dest.reg;
/*
* lea disp(%rsp), %reg
......@@ -1675,25 +1839,25 @@ static int update_insn_state(struct instruction *insn, struct insn_state *state)
* ...
* mov %rcx, %rsp
*/
state->vals[op->dest.reg].base = CFI_CFA;
state->vals[op->dest.reg].offset = \
-state->stack_size + op->src.offset;
cfi->vals[op->dest.reg].base = CFI_CFA;
cfi->vals[op->dest.reg].offset = \
-cfi->stack_size + op->src.offset;
break;
}
if (state->drap && op->dest.reg == CFI_SP &&
op->src.reg == state->drap_reg) {
if (cfi->drap && op->dest.reg == CFI_SP &&
op->src.reg == cfi->drap_reg) {
/* drap: lea disp(%drap), %rsp */
cfa->base = CFI_SP;
cfa->offset = state->stack_size = -op->src.offset;
state->drap_reg = CFI_UNDEFINED;
state->drap = false;
cfa->offset = cfi->stack_size = -op->src.offset;
cfi->drap_reg = CFI_UNDEFINED;
cfi->drap = false;
break;
}
if (op->dest.reg == state->cfa.base) {
if (op->dest.reg == cfi->cfa.base) {
WARN_FUNC("unsupported stack register modification",
insn->sec, insn->offset);
return -1;
......@@ -1703,18 +1867,18 @@ static int update_insn_state(struct instruction *insn, struct insn_state *state)
case OP_SRC_AND:
if (op->dest.reg != CFI_SP ||
(state->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
(state->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
(cfi->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
(cfi->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
WARN_FUNC("unsupported stack pointer realignment",
insn->sec, insn->offset);
return -1;
}
if (state->drap_reg != CFI_UNDEFINED) {
if (cfi->drap_reg != CFI_UNDEFINED) {
/* drap: and imm, %rsp */
cfa->base = state->drap_reg;
cfa->offset = state->stack_size = 0;
state->drap = true;
cfa->base = cfi->drap_reg;
cfa->offset = cfi->stack_size = 0;
cfi->drap = true;
}
/*
......@@ -1726,57 +1890,55 @@ static int update_insn_state(struct instruction *insn, struct insn_state *state)
case OP_SRC_POP:
case OP_SRC_POPF:
if (!state->drap && op->dest.type == OP_DEST_REG &&
op->dest.reg == cfa->base) {
if (!cfi->drap && op->dest.reg == cfa->base) {
/* pop %rbp */
cfa->base = CFI_SP;
}
if (state->drap && cfa->base == CFI_BP_INDIRECT &&
op->dest.type == OP_DEST_REG &&
op->dest.reg == state->drap_reg &&
state->drap_offset == -state->stack_size) {
if (cfi->drap && cfa->base == CFI_BP_INDIRECT &&
op->dest.reg == cfi->drap_reg &&
cfi->drap_offset == -cfi->stack_size) {
/* drap: pop %drap */
cfa->base = state->drap_reg;
cfa->base = cfi->drap_reg;
cfa->offset = 0;
state->drap_offset = -1;
cfi->drap_offset = -1;
} else if (regs[op->dest.reg].offset == -state->stack_size) {
} else if (regs[op->dest.reg].offset == -cfi->stack_size) {
/* pop %reg */
restore_reg(state, op->dest.reg);
restore_reg(cfi, op->dest.reg);
}
state->stack_size -= 8;
cfi->stack_size -= 8;
if (cfa->base == CFI_SP)
cfa->offset -= 8;
break;
case OP_SRC_REG_INDIRECT:
if (state->drap && op->src.reg == CFI_BP &&
op->src.offset == state->drap_offset) {
if (cfi->drap && op->src.reg == CFI_BP &&
op->src.offset == cfi->drap_offset) {
/* drap: mov disp(%rbp), %drap */
cfa->base = state->drap_reg;
cfa->base = cfi->drap_reg;
cfa->offset = 0;
state->drap_offset = -1;
cfi->drap_offset = -1;
}
if (state->drap && op->src.reg == CFI_BP &&
if (cfi->drap && op->src.reg == CFI_BP &&
op->src.offset == regs[op->dest.reg].offset) {
/* drap: mov disp(%rbp), %reg */
restore_reg(state, op->dest.reg);
restore_reg(cfi, op->dest.reg);
} else if (op->src.reg == cfa->base &&
op->src.offset == regs[op->dest.reg].offset + cfa->offset) {
/* mov disp(%rbp), %reg */
/* mov disp(%rsp), %reg */
restore_reg(state, op->dest.reg);
restore_reg(cfi, op->dest.reg);
}
break;
......@@ -1791,78 +1953,78 @@ static int update_insn_state(struct instruction *insn, struct insn_state *state)
case OP_DEST_PUSH:
case OP_DEST_PUSHF:
state->stack_size += 8;
cfi->stack_size += 8;
if (cfa->base == CFI_SP)
cfa->offset += 8;
if (op->src.type != OP_SRC_REG)
break;
if (state->drap) {
if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
if (cfi->drap) {
if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {
/* drap: push %drap */
cfa->base = CFI_BP_INDIRECT;
cfa->offset = -state->stack_size;
cfa->offset = -cfi->stack_size;
/* save drap so we know when to restore it */
state->drap_offset = -state->stack_size;
cfi->drap_offset = -cfi->stack_size;
} else if (op->src.reg == CFI_BP && cfa->base == state->drap_reg) {
} else if (op->src.reg == CFI_BP && cfa->base == cfi->drap_reg) {
/* drap: push %rbp */
state->stack_size = 0;
cfi->stack_size = 0;
} else if (regs[op->src.reg].base == CFI_UNDEFINED) {
/* drap: push %reg */
save_reg(state, op->src.reg, CFI_BP, -state->stack_size);
save_reg(cfi, op->src.reg, CFI_BP, -cfi->stack_size);
}
} else {
/* push %reg */
save_reg(state, op->src.reg, CFI_CFA, -state->stack_size);
save_reg(cfi, op->src.reg, CFI_CFA, -cfi->stack_size);
}
/* detect when asm code uses rbp as a scratch register */
if (!no_fp && insn->func && op->src.reg == CFI_BP &&
cfa->base != CFI_BP)
state->bp_scratch = true;
cfi->bp_scratch = true;
break;
case OP_DEST_REG_INDIRECT:
if (state->drap) {
if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
if (cfi->drap) {
if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {
/* drap: mov %drap, disp(%rbp) */
cfa->base = CFI_BP_INDIRECT;
cfa->offset = op->dest.offset;
/* save drap offset so we know when to restore it */
state->drap_offset = op->dest.offset;
cfi->drap_offset = op->dest.offset;
}
else if (regs[op->src.reg].base == CFI_UNDEFINED) {
/* drap: mov reg, disp(%rbp) */
save_reg(state, op->src.reg, CFI_BP, op->dest.offset);
save_reg(cfi, op->src.reg, CFI_BP, op->dest.offset);
}
} else if (op->dest.reg == cfa->base) {
/* mov reg, disp(%rbp) */
/* mov reg, disp(%rsp) */
save_reg(state, op->src.reg, CFI_CFA,
op->dest.offset - state->cfa.offset);
save_reg(cfi, op->src.reg, CFI_CFA,
op->dest.offset - cfi->cfa.offset);
}
break;
case OP_DEST_LEAVE:
if ((!state->drap && cfa->base != CFI_BP) ||
(state->drap && cfa->base != state->drap_reg)) {
if ((!cfi->drap && cfa->base != CFI_BP) ||
(cfi->drap && cfa->base != cfi->drap_reg)) {
WARN_FUNC("leave instruction with modified stack frame",
insn->sec, insn->offset);
return -1;
......@@ -1870,10 +2032,10 @@ static int update_insn_state(struct instruction *insn, struct insn_state *state)
/* leave (mov %rbp, %rsp; pop %rbp) */
state->stack_size = -state->regs[CFI_BP].offset - 8;
restore_reg(state, CFI_BP);
cfi->stack_size = -cfi->regs[CFI_BP].offset - 8;
restore_reg(cfi, CFI_BP);
if (!state->drap) {
if (!cfi->drap) {
cfa->base = CFI_SP;
cfa->offset -= 8;
}
......@@ -1888,7 +2050,7 @@ static int update_insn_state(struct instruction *insn, struct insn_state *state)
}
/* pop mem */
state->stack_size -= 8;
cfi->stack_size -= 8;
if (cfa->base == CFI_SP)
cfa->offset -= 8;
......@@ -1903,41 +2065,86 @@ static int update_insn_state(struct instruction *insn, struct insn_state *state)
return 0;
}
static bool insn_state_match(struct instruction *insn, struct insn_state *state)
static int handle_insn_ops(struct instruction *insn, struct insn_state *state)
{
struct stack_op *op;
list_for_each_entry(op, &insn->stack_ops, list) {
struct cfi_state old_cfi = state->cfi;
int res;
res = update_cfi_state(insn, &state->cfi, op);
if (res)
return res;
if (insn->alt_group && memcmp(&state->cfi, &old_cfi, sizeof(struct cfi_state))) {
WARN_FUNC("alternative modifies stack", insn->sec, insn->offset);
return -1;
}
if (op->dest.type == OP_DEST_PUSHF) {
if (!state->uaccess_stack) {
state->uaccess_stack = 1;
} else if (state->uaccess_stack >> 31) {
WARN_FUNC("PUSHF stack exhausted",
insn->sec, insn->offset);
return 1;
}
state->uaccess_stack <<= 1;
state->uaccess_stack |= state->uaccess;
}
if (op->src.type == OP_SRC_POPF) {
if (state->uaccess_stack) {
state->uaccess = state->uaccess_stack & 1;
state->uaccess_stack >>= 1;
if (state->uaccess_stack == 1)
state->uaccess_stack = 0;
}
}
}
return 0;
}
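Editor's note: state->uaccess_stack is a small shift register -- each PUSHF shifts the current uaccess bit in above a sentinel bit, each POPF shifts one bit back out. A minimal standalone model of that encoding (toy code, not objtool's; the 32-level overflow check is omitted):

#include <assert.h>
#include <stdbool.h>

static unsigned int uaccess_stack;
static bool uaccess;

static void pushf(void)
{
	if (!uaccess_stack)
		uaccess_stack = 1;		/* place the sentinel */
	uaccess_stack = (uaccess_stack << 1) | uaccess;
}

static void popf(void)
{
	if (uaccess_stack) {
		uaccess = uaccess_stack & 1;
		uaccess_stack >>= 1;
		if (uaccess_stack == 1)		/* only the sentinel left */
			uaccess_stack = 0;
	}
}

int main(void)
{
	uaccess = true;		/* STAC already executed */
	pushf();
	uaccess = false;	/* CLAC inside the pushf/popf region */
	popf();
	assert(uaccess == true);	/* restored from the saved flags bit */
	return 0;
}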
static bool insn_cfi_match(struct instruction *insn, struct cfi_state *cfi2)
{
struct insn_state *state1 = &insn->state, *state2 = state;
struct cfi_state *cfi1 = &insn->cfi;
int i;
if (memcmp(&state1->cfa, &state2->cfa, sizeof(state1->cfa))) {
if (memcmp(&cfi1->cfa, &cfi2->cfa, sizeof(cfi1->cfa))) {
WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
insn->sec, insn->offset,
state1->cfa.base, state1->cfa.offset,
state2->cfa.base, state2->cfa.offset);
cfi1->cfa.base, cfi1->cfa.offset,
cfi2->cfa.base, cfi2->cfa.offset);
} else if (memcmp(&state1->regs, &state2->regs, sizeof(state1->regs))) {
} else if (memcmp(&cfi1->regs, &cfi2->regs, sizeof(cfi1->regs))) {
for (i = 0; i < CFI_NUM_REGS; i++) {
if (!memcmp(&state1->regs[i], &state2->regs[i],
if (!memcmp(&cfi1->regs[i], &cfi2->regs[i],
sizeof(struct cfi_reg)))
continue;
WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
insn->sec, insn->offset,
i, state1->regs[i].base, state1->regs[i].offset,
i, state2->regs[i].base, state2->regs[i].offset);
i, cfi1->regs[i].base, cfi1->regs[i].offset,
i, cfi2->regs[i].base, cfi2->regs[i].offset);
break;
}
} else if (state1->type != state2->type) {
} else if (cfi1->type != cfi2->type) {
WARN_FUNC("stack state mismatch: type1=%d type2=%d",
insn->sec, insn->offset, state1->type, state2->type);
insn->sec, insn->offset, cfi1->type, cfi2->type);
} else if (cfi1->drap != cfi2->drap ||
(cfi1->drap && cfi1->drap_reg != cfi2->drap_reg) ||
(cfi1->drap && cfi1->drap_offset != cfi2->drap_offset)) {
} else if (state1->drap != state2->drap ||
(state1->drap && state1->drap_reg != state2->drap_reg) ||
(state1->drap && state1->drap_offset != state2->drap_offset)) {
WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
insn->sec, insn->offset,
state1->drap, state1->drap_reg, state1->drap_offset,
state2->drap, state2->drap_reg, state2->drap_offset);
cfi1->drap, cfi1->drap_reg, cfi1->drap_offset,
cfi2->drap, cfi2->drap_reg, cfi2->drap_offset);
} else
return true;
......@@ -1963,6 +2170,13 @@ static inline const char *call_dest_name(struct instruction *insn)
static int validate_call(struct instruction *insn, struct insn_state *state)
{
if (state->noinstr && state->instr <= 0 &&
(!insn->call_dest || !insn->call_dest->sec->noinstr)) {
WARN_FUNC("call to %s() leaves .noinstr.text section",
insn->sec, insn->offset, call_dest_name(insn));
return 1;
}
if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
WARN_FUNC("call to %s() with UACCESS enabled",
insn->sec, insn->offset, call_dest_name(insn));
......@@ -1980,7 +2194,7 @@ static int validate_call(struct instruction *insn, struct insn_state *state)
static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
{
if (has_modified_stack_frame(state)) {
if (has_modified_stack_frame(insn, state)) {
WARN_FUNC("sibling call from callable instruction with modified stack frame",
insn->sec, insn->offset);
return 1;
......@@ -1991,6 +2205,12 @@ static int validate_sibling_call(struct instruction *insn, struct insn_state *st
static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state)
{
if (state->noinstr && state->instr > 0) {
WARN_FUNC("return with instrumentation enabled",
insn->sec, insn->offset);
return 1;
}
if (state->uaccess && !func_uaccess_safe(func)) {
WARN_FUNC("return with UACCESS enabled",
insn->sec, insn->offset);
......@@ -2009,13 +2229,13 @@ static int validate_return(struct symbol *func, struct instruction *insn, struct
return 1;
}
if (func && has_modified_stack_frame(state)) {
if (func && has_modified_stack_frame(insn, state)) {
WARN_FUNC("return with modified stack frame",
insn->sec, insn->offset);
return 1;
}
if (state->bp_scratch) {
if (state->cfi.bp_scratch) {
WARN_FUNC("BP used as a scratch register",
insn->sec, insn->offset);
return 1;
......@@ -2024,6 +2244,30 @@ static int validate_return(struct symbol *func, struct instruction *insn, struct
return 0;
}
/*
* Alternatives should not contain any ORC entries; this in turn means they
* should not contain any CFI ops, which implies all instructions should have
* the same CFI state.
*
* It is possible to construct alternatives that have unreachable holes that go
* unreported (because they're NOPs); such holes would result in CFI_UNDEFINED
* states, which then result in ORC entries, which we just said we didn't want.
*
* Avoid them by copying the CFI entry of the first instruction into the whole
* alternative.
*/
static void fill_alternative_cfi(struct objtool_file *file, struct instruction *insn)
{
struct instruction *first_insn = insn;
int alt_group = insn->alt_group;
sec_for_each_insn_continue(file, insn) {
if (insn->alt_group != alt_group)
break;
insn->cfi = first_insn->cfi;
}
}
/*
* Follow the branch starting at the given instruction, and recursively follow
* any other branches (jumps). Meanwhile, track the frame pointer state at
......@@ -2031,23 +2275,16 @@ static int validate_return(struct symbol *func, struct instruction *insn, struct
* tools/objtool/Documentation/stack-validation.txt.
*/
static int validate_branch(struct objtool_file *file, struct symbol *func,
struct instruction *first, struct insn_state state)
struct instruction *insn, struct insn_state state)
{
struct alternative *alt;
struct instruction *insn, *next_insn;
struct instruction *next_insn;
struct section *sec;
u8 visited;
int ret;
insn = first;
sec = insn->sec;
if (insn->alt_group && list_empty(&insn->alts)) {
WARN_FUNC("don't know how to handle branch to middle of alternative instruction group",
sec, insn->offset);
return 1;
}
while (1) {
next_insn = next_insn_same_sec(file, insn);
......@@ -2065,59 +2302,24 @@ static int validate_branch(struct objtool_file *file, struct symbol *func,
visited = 1 << state.uaccess;
if (insn->visited) {
if (!insn->hint && !insn_state_match(insn, &state))
if (!insn->hint && !insn_cfi_match(insn, &state.cfi))
return 1;
if (insn->visited & visited)
return 0;
}
if (insn->hint) {
if (insn->restore) {
struct instruction *save_insn, *i;
i = insn;
save_insn = NULL;
sym_for_each_insn_continue_reverse(file, func, i) {
if (i->save) {
save_insn = i;
break;
}
}
if (!save_insn) {
WARN_FUNC("no corresponding CFI save for CFI restore",
sec, insn->offset);
return 1;
}
if (!save_insn->visited) {
/*
* Oops, no state to copy yet.
* Hopefully we can reach this
* instruction from another branch
* after the save insn has been
* visited.
*/
if (insn == first)
return 0;
WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
sec, insn->offset);
return 1;
}
insn->state = save_insn->state;
}
state = insn->state;
if (state.noinstr)
state.instr += insn->instr;
} else
insn->state = state;
if (insn->hint)
state.cfi = insn->cfi;
else
insn->cfi = state.cfi;
insn->visited |= visited;
if (!insn->ignore_alts) {
if (!insn->ignore_alts && !list_empty(&insn->alts)) {
bool skip_orig = false;
list_for_each_entry(alt, &insn->alts, list) {
......@@ -2132,10 +2334,16 @@ static int validate_branch(struct objtool_file *file, struct symbol *func,
}
}
if (insn->alt_group)
fill_alternative_cfi(file, insn);
if (skip_orig)
return 0;
}
if (handle_insn_ops(insn, &state))
return 1;
switch (insn->type) {
case INSN_RETURN:
......@@ -2202,32 +2410,6 @@ static int validate_branch(struct objtool_file *file, struct symbol *func,
}
return 0;
case INSN_STACK:
if (update_insn_state(insn, &state))
return 1;
if (insn->stack_op.dest.type == OP_DEST_PUSHF) {
if (!state.uaccess_stack) {
state.uaccess_stack = 1;
} else if (state.uaccess_stack >> 31) {
WARN_FUNC("PUSHF stack exhausted", sec, insn->offset);
return 1;
}
state.uaccess_stack <<= 1;
state.uaccess_stack |= state.uaccess;
}
if (insn->stack_op.src.type == OP_SRC_POPF) {
if (state.uaccess_stack) {
state.uaccess = state.uaccess_stack & 1;
state.uaccess_stack >>= 1;
if (state.uaccess_stack == 1)
state.uaccess_stack = 0;
}
}
break;
case INSN_STAC:
if (state.uaccess) {
WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
......@@ -2273,7 +2455,7 @@ static int validate_branch(struct objtool_file *file, struct symbol *func,
return 0;
if (!next_insn) {
if (state.cfa.base == CFI_UNDEFINED)
if (state.cfi.cfa.base == CFI_UNDEFINED)
return 0;
WARN("%s: unexpected end of section", sec->name);
return 1;
......@@ -2285,24 +2467,34 @@ static int validate_branch(struct objtool_file *file, struct symbol *func,
return 0;
}
static int validate_unwind_hints(struct objtool_file *file)
static int validate_unwind_hints(struct objtool_file *file, struct section *sec)
{
struct instruction *insn;
int ret, warnings = 0;
struct insn_state state;
int ret, warnings = 0;
if (!file->hints)
return 0;
clear_insn_state(&state);
init_insn_state(&state, sec);
for_each_insn(file, insn) {
if (sec) {
insn = find_insn(file, sec, 0);
if (!insn)
return 0;
} else {
insn = list_first_entry(&file->insn_list, typeof(*insn), list);
}
while (&insn->list != &file->insn_list && (!sec || insn->sec == sec)) {
if (insn->hint && !insn->visited) {
ret = validate_branch(file, insn->func, insn, state);
if (ret && backtrace)
BT_FUNC("<=== (hint)", insn);
warnings += ret;
}
insn = list_next_entry(insn, list);
}
return warnings;
......@@ -2417,43 +2609,69 @@ static bool ignore_unreachable_insn(struct instruction *insn)
return false;
}
static int validate_section(struct objtool_file *file, struct section *sec)
static int validate_symbol(struct objtool_file *file, struct section *sec,
struct symbol *sym, struct insn_state *state)
{
struct symbol *func;
struct instruction *insn;
struct insn_state state;
int ret, warnings = 0;
int ret;
if (!sym->len) {
WARN("%s() is missing an ELF size annotation", sym->name);
return 1;
}
if (sym->pfunc != sym || sym->alias != sym)
return 0;
clear_insn_state(&state);
insn = find_insn(file, sec, sym->offset);
if (!insn || insn->ignore || insn->visited)
return 0;
state->uaccess = sym->uaccess_safe;
ret = validate_branch(file, insn->func, insn, *state);
if (ret && backtrace)
BT_FUNC("<=== (sym)", insn);
return ret;
}
state.cfa = initial_func_cfi.cfa;
memcpy(&state.regs, &initial_func_cfi.regs,
CFI_NUM_REGS * sizeof(struct cfi_reg));
state.stack_size = initial_func_cfi.cfa.offset;
static int validate_section(struct objtool_file *file, struct section *sec)
{
struct insn_state state;
struct symbol *func;
int warnings = 0;
list_for_each_entry(func, &sec->symbol_list, list) {
if (func->type != STT_FUNC)
continue;
if (!func->len) {
WARN("%s() is missing an ELF size annotation",
func->name);
warnings++;
}
init_insn_state(&state, sec);
state.cfi.cfa = initial_func_cfi.cfa;
memcpy(&state.cfi.regs, &initial_func_cfi.regs,
CFI_NUM_REGS * sizeof(struct cfi_reg));
state.cfi.stack_size = initial_func_cfi.cfa.offset;
if (func->pfunc != func || func->alias != func)
continue;
warnings += validate_symbol(file, sec, func, &state);
}
insn = find_insn(file, sec, func->offset);
if (!insn || insn->ignore || insn->visited)
continue;
return warnings;
}
state.uaccess = func->uaccess_safe;
static int validate_vmlinux_functions(struct objtool_file *file)
{
struct section *sec;
int warnings = 0;
ret = validate_branch(file, func, insn, state);
if (ret && backtrace)
BT_FUNC("<=== (func)", insn);
warnings += ret;
sec = find_section_by_name(file->elf, ".noinstr.text");
if (sec) {
warnings += validate_section(file, sec);
warnings += validate_unwind_hints(file, sec);
}
sec = find_section_by_name(file->elf, ".entry.text");
if (sec) {
warnings += validate_section(file, sec);
warnings += validate_unwind_hints(file, sec);
}
return warnings;
......@@ -2464,8 +2682,12 @@ static int validate_functions(struct objtool_file *file)
struct section *sec;
int warnings = 0;
for_each_sec(file, sec)
for_each_sec(file, sec) {
if (!(sec->sh.sh_flags & SHF_EXECINSTR))
continue;
warnings += validate_section(file, sec);
}
return warnings;
}
......@@ -2496,7 +2718,7 @@ int check(const char *_objname, bool orc)
objname = _objname;
file.elf = elf_read(objname, orc ? O_RDWR : O_RDONLY);
file.elf = elf_open_read(objname, orc ? O_RDWR : O_RDONLY);
if (!file.elf)
return 1;
......@@ -2516,6 +2738,15 @@ int check(const char *_objname, bool orc)
if (list_empty(&file.insn_list))
goto out;
if (vmlinux && !validate_dup) {
ret = validate_vmlinux_functions(&file);
if (ret < 0)
goto out;
warnings += ret;
goto out;
}
if (retpoline) {
ret = validate_retpoline(&file);
if (ret < 0)
......@@ -2528,7 +2759,7 @@ int check(const char *_objname, bool orc)
goto out;
warnings += ret;
ret = validate_unwind_hints(&file);
ret = validate_unwind_hints(&file, NULL);
if (ret < 0)
goto out;
warnings += ret;
......
......@@ -7,22 +7,16 @@
#define _CHECK_H
#include <stdbool.h>
#include "elf.h"
#include "cfi.h"
#include "arch.h"
#include "orc.h"
#include <linux/hashtable.h>
struct insn_state {
struct cfi_reg cfa;
struct cfi_reg regs[CFI_NUM_REGS];
int stack_size;
unsigned char type;
bool bp_scratch;
bool drap, end, uaccess, df;
struct cfi_state cfi;
unsigned int uaccess_stack;
int drap_reg, drap_offset;
struct cfi_reg vals[CFI_NUM_REGS];
bool uaccess;
bool df;
bool noinstr;
s8 instr;
};
struct instruction {
......@@ -33,29 +27,24 @@ struct instruction {
unsigned int len;
enum insn_type type;
unsigned long immediate;
bool alt_group, dead_end, ignore, hint, save, restore, ignore_alts;
bool dead_end, ignore, ignore_alts;
bool hint;
bool retpoline_safe;
s8 instr;
u8 visited;
u8 ret_offset;
int alt_group;
struct symbol *call_dest;
struct instruction *jump_dest;
struct instruction *first_jump_src;
struct rela *jump_table;
struct list_head alts;
struct symbol *func;
struct stack_op stack_op;
struct insn_state state;
struct list_head stack_ops;
struct cfi_state cfi;
struct orc_entry orc;
};
struct objtool_file {
struct elf *elf;
struct list_head insn_list;
DECLARE_HASHTABLE(insn_hash, 20);
bool ignore_unreachables, c_file, hints, rodata;
};
int check(const char *objname, bool orc);
struct instruction *find_insn(struct objtool_file *file,
struct section *sec, unsigned long offset);
......
......@@ -27,6 +27,22 @@ static inline u32 str_hash(const char *str)
return jhash(str, strlen(str), 0);
}
static inline int elf_hash_bits(void)
{
return vmlinux ? ELF_HASH_BITS : 16;
}
#define elf_hash_add(hashtable, node, key) \
hlist_add_head(node, &hashtable[hash_min(key, elf_hash_bits())])
static void elf_hash_init(struct hlist_head *table)
{
__hash_init(table, 1U << elf_hash_bits());
}
#define elf_hash_for_each_possible(name, obj, member, key) \
hlist_for_each_entry(obj, &name[hash_min(key, elf_hash_bits())], member)
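Editor's note: elf_hash_bits() folds keys into 2^20 buckets when validating vmlinux and 2^16 otherwise; the stock hash_add()/hash_for_each_possible() helpers cannot do that because they take the bit count from the table declaration. A rough standalone sketch of the same runtime-sized-bucket pattern in plain C (toy hash and hypothetical names, not the objtool code):

#include <stdio.h>
#include <stdlib.h>

struct node { unsigned long key; struct node *next; };

static struct node **table;
static unsigned int table_bits;

static unsigned int hash_bits(int big) { return big ? 20 : 16; }

static void table_init(int big)
{
	table_bits = hash_bits(big);
	table = calloc(1UL << table_bits, sizeof(*table));
}

static void table_add(struct node *n)
{
	unsigned long b = n->key & ((1UL << table_bits) - 1);	/* toy bucket choice */

	n->next = table[b];
	table[b] = n;
}

int main(void)
{
	table_init(1);	/* "vmlinux" case: 2^20 buckets */
	struct node n = { .key = 42 };
	table_add(&n);
	printf("buckets = %lu\n", 1UL << table_bits);
	free(table);
	return 0;
}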
static void rb_add(struct rb_root *tree, struct rb_node *node,
int (*cmp)(struct rb_node *, const struct rb_node *))
{
......@@ -45,7 +61,7 @@ static void rb_add(struct rb_root *tree, struct rb_node *node,
rb_insert_color(node, tree);
}
static struct rb_node *rb_find_first(struct rb_root *tree, const void *key,
static struct rb_node *rb_find_first(const struct rb_root *tree, const void *key,
int (*cmp)(const void *key, const struct rb_node *))
{
struct rb_node *node = tree->rb_node;
......@@ -111,11 +127,11 @@ static int symbol_by_offset(const void *key, const struct rb_node *node)
return 0;
}
struct section *find_section_by_name(struct elf *elf, const char *name)
struct section *find_section_by_name(const struct elf *elf, const char *name)
{
struct section *sec;
hash_for_each_possible(elf->section_name_hash, sec, name_hash, str_hash(name))
elf_hash_for_each_possible(elf->section_name_hash, sec, name_hash, str_hash(name))
if (!strcmp(sec->name, name))
return sec;
......@@ -127,7 +143,7 @@ static struct section *find_section_by_index(struct elf *elf,
{
struct section *sec;
hash_for_each_possible(elf->section_hash, sec, hash, idx)
elf_hash_for_each_possible(elf->section_hash, sec, hash, idx)
if (sec->idx == idx)
return sec;
......@@ -138,7 +154,7 @@ static struct symbol *find_symbol_by_index(struct elf *elf, unsigned int idx)
{
struct symbol *sym;
hash_for_each_possible(elf->symbol_hash, sym, hash, idx)
elf_hash_for_each_possible(elf->symbol_hash, sym, hash, idx)
if (sym->idx == idx)
return sym;
......@@ -173,7 +189,7 @@ struct symbol *find_func_by_offset(struct section *sec, unsigned long offset)
return NULL;
}
struct symbol *find_symbol_containing(struct section *sec, unsigned long offset)
struct symbol *find_symbol_containing(const struct section *sec, unsigned long offset)
{
struct rb_node *node;
......@@ -201,18 +217,18 @@ struct symbol *find_func_containing(struct section *sec, unsigned long offset)
return NULL;
}
struct symbol *find_symbol_by_name(struct elf *elf, const char *name)
struct symbol *find_symbol_by_name(const struct elf *elf, const char *name)
{
struct symbol *sym;
hash_for_each_possible(elf->symbol_name_hash, sym, name_hash, str_hash(name))
elf_hash_for_each_possible(elf->symbol_name_hash, sym, name_hash, str_hash(name))
if (!strcmp(sym->name, name))
return sym;
return NULL;
}
struct rela *find_rela_by_dest_range(struct elf *elf, struct section *sec,
struct rela *find_rela_by_dest_range(const struct elf *elf, struct section *sec,
unsigned long offset, unsigned int len)
{
struct rela *rela, *r = NULL;
......@@ -224,7 +240,7 @@ struct rela *find_rela_by_dest_range(struct elf *elf, struct section *sec,
sec = sec->rela;
for_offset_range(o, offset, offset + len) {
hash_for_each_possible(elf->rela_hash, rela, hash,
elf_hash_for_each_possible(elf->rela_hash, rela, hash,
sec_offset_hash(sec, o)) {
if (rela->sec != sec)
continue;
......@@ -241,7 +257,7 @@ struct rela *find_rela_by_dest_range(struct elf *elf, struct section *sec,
return NULL;
}
struct rela *find_rela_by_dest(struct elf *elf, struct section *sec, unsigned long offset)
struct rela *find_rela_by_dest(const struct elf *elf, struct section *sec, unsigned long offset)
{
return find_rela_by_dest_range(elf, sec, offset, 1);
}
......@@ -309,8 +325,8 @@ static int read_sections(struct elf *elf)
sec->len = sec->sh.sh_size;
list_add_tail(&sec->list, &elf->sections);
hash_add(elf->section_hash, &sec->hash, sec->idx);
hash_add(elf->section_name_hash, &sec->name_hash, str_hash(sec->name));
elf_hash_add(elf->section_hash, &sec->hash, sec->idx);
elf_hash_add(elf->section_name_hash, &sec->name_hash, str_hash(sec->name));
}
if (stats)
......@@ -327,12 +343,14 @@ static int read_sections(struct elf *elf)
static int read_symbols(struct elf *elf)
{
struct section *symtab, *sec;
struct section *symtab, *symtab_shndx, *sec;
struct symbol *sym, *pfunc;
struct list_head *entry;
struct rb_node *pnode;
int symbols_nr, i;
char *coldstr;
Elf_Data *shndx_data = NULL;
Elf32_Word shndx;
symtab = find_section_by_name(elf, ".symtab");
if (!symtab) {
......@@ -340,6 +358,10 @@ static int read_symbols(struct elf *elf)
return -1;
}
symtab_shndx = find_section_by_name(elf, ".symtab_shndx");
if (symtab_shndx)
shndx_data = symtab_shndx->data;
symbols_nr = symtab->sh.sh_size / symtab->sh.sh_entsize;
for (i = 0; i < symbols_nr; i++) {
......@@ -353,8 +375,9 @@ static int read_symbols(struct elf *elf)
sym->idx = i;
if (!gelf_getsym(symtab->data, i, &sym->sym)) {
WARN_ELF("gelf_getsym");
if (!gelf_getsymshndx(symtab->data, shndx_data, i, &sym->sym,
&shndx)) {
WARN_ELF("gelf_getsymshndx");
goto err;
}
......@@ -368,10 +391,13 @@ static int read_symbols(struct elf *elf)
sym->type = GELF_ST_TYPE(sym->sym.st_info);
sym->bind = GELF_ST_BIND(sym->sym.st_info);
if (sym->sym.st_shndx > SHN_UNDEF &&
sym->sym.st_shndx < SHN_LORESERVE) {
sym->sec = find_section_by_index(elf,
sym->sym.st_shndx);
if ((sym->sym.st_shndx > SHN_UNDEF &&
sym->sym.st_shndx < SHN_LORESERVE) ||
(shndx_data && sym->sym.st_shndx == SHN_XINDEX)) {
if (sym->sym.st_shndx != SHN_XINDEX)
shndx = sym->sym.st_shndx;
sym->sec = find_section_by_index(elf, shndx);
if (!sym->sec) {
WARN("couldn't find section for symbol %s",
sym->name);
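Editor's note: an ELF symbol's st_shndx field cannot hold section indices at or above SHN_LORESERVE; objects with very many sections (e.g. built with -ffunction-sections) store SHN_XINDEX there and put the real 32-bit index in a parallel SHT_SYMTAB_SHNDX (.symtab_shndx) section, which is why the reader switches from gelf_getsym() to gelf_getsymshndx(). A rough standalone libelf sketch of that lookup (illustrative only, error handling omitted, link with -lelf):

#include <fcntl.h>
#include <gelf.h>
#include <stdio.h>
#include <unistd.h>

int main(int argc, char **argv)
{
	if (argc < 2)
		return 1;

	elf_version(EV_CURRENT);
	int fd = open(argv[1], O_RDONLY);
	Elf *elf = elf_begin(fd, ELF_C_READ, NULL);
	Elf_Scn *scn = NULL, *symtab = NULL, *shndx = NULL;
	GElf_Shdr shdr;

	while ((scn = elf_nextscn(elf, scn))) {
		gelf_getshdr(scn, &shdr);
		if (shdr.sh_type == SHT_SYMTAB)
			symtab = scn;
		else if (shdr.sh_type == SHT_SYMTAB_SHNDX)
			shndx = scn;
	}
	if (!symtab)
		return 1;

	gelf_getshdr(symtab, &shdr);
	Elf_Data *symdata = elf_getdata(symtab, NULL);
	Elf_Data *xdata = shndx ? elf_getdata(shndx, NULL) : NULL;

	for (size_t i = 0; i < shdr.sh_size / shdr.sh_entsize; i++) {
		GElf_Sym sym;
		Elf32_Word xndx;

		if (!gelf_getsymshndx(symdata, xdata, i, &sym, &xndx))
			break;
		/* the real section index, whether or not it fit in st_shndx */
		unsigned int ndx = (sym.st_shndx == SHN_XINDEX) ? xndx : sym.st_shndx;
		printf("sym %zu -> section %u\n", i, ndx);
	}

	elf_end(elf);
	close(fd);
	return 0;
}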
......@@ -394,8 +420,8 @@ static int read_symbols(struct elf *elf)
else
entry = &sym->sec->symbol_list;
list_add(&sym->list, entry);
hash_add(elf->symbol_hash, &sym->hash, sym->idx);
hash_add(elf->symbol_name_hash, &sym->name_hash, str_hash(sym->name));
elf_hash_add(elf->symbol_hash, &sym->hash, sym->idx);
elf_hash_add(elf->symbol_name_hash, &sym->name_hash, str_hash(sym->name));
}
if (stats)
......@@ -456,6 +482,14 @@ static int read_symbols(struct elf *elf)
return -1;
}
void elf_add_rela(struct elf *elf, struct rela *rela)
{
struct section *sec = rela->sec;
list_add_tail(&rela->list, &sec->rela_list);
elf_hash_add(elf->rela_hash, &rela->hash, rela_hash(rela));
}
static int read_relas(struct elf *elf)
{
struct section *sec;
......@@ -503,8 +537,7 @@ static int read_relas(struct elf *elf)
return -1;
}
list_add_tail(&rela->list, &sec->rela_list);
hash_add(elf->rela_hash, &rela->hash, rela_hash(rela));
elf_add_rela(elf, rela);
nr_rela++;
}
max_rela = max(max_rela, nr_rela);
......@@ -519,7 +552,7 @@ static int read_relas(struct elf *elf)
return 0;
}
struct elf *elf_read(const char *name, int flags)
struct elf *elf_open_read(const char *name, int flags)
{
struct elf *elf;
Elf_Cmd cmd;
......@@ -531,15 +564,16 @@ struct elf *elf_read(const char *name, int flags)
perror("malloc");
return NULL;
}
memset(elf, 0, sizeof(*elf));
memset(elf, 0, offsetof(struct elf, sections));
hash_init(elf->symbol_hash);
hash_init(elf->symbol_name_hash);
hash_init(elf->section_hash);
hash_init(elf->section_name_hash);
hash_init(elf->rela_hash);
INIT_LIST_HEAD(&elf->sections);
elf_hash_init(elf->symbol_hash);
elf_hash_init(elf->symbol_name_hash);
elf_hash_init(elf->section_hash);
elf_hash_init(elf->section_name_hash);
elf_hash_init(elf->rela_hash);
elf->fd = open(name, flags);
if (elf->fd == -1) {
fprintf(stderr, "objtool: Can't open '%s': %s\n",
......@@ -676,8 +710,8 @@ struct section *elf_create_section(struct elf *elf, const char *name,
shstrtab->changed = true;
list_add_tail(&sec->list, &elf->sections);
hash_add(elf->section_hash, &sec->hash, sec->idx);
hash_add(elf->section_name_hash, &sec->name_hash, str_hash(sec->name));
elf_hash_add(elf->section_hash, &sec->hash, sec->idx);
elf_hash_add(elf->section_name_hash, &sec->name_hash, str_hash(sec->name));
return sec;
}
......@@ -745,7 +779,7 @@ int elf_rebuild_rela_section(struct section *sec)
return 0;
}
int elf_write(struct elf *elf)
int elf_write(const struct elf *elf)
{
struct section *sec;
Elf_Scn *s;
......
......@@ -39,7 +39,7 @@ struct section {
char *name;
int idx;
unsigned int len;
bool changed, text, rodata;
bool changed, text, rodata, noinstr;
};
struct symbol {
......@@ -70,17 +70,19 @@ struct rela {
bool jump_table_start;
};
#define ELF_HASH_BITS 20
struct elf {
Elf *elf;
GElf_Ehdr ehdr;
int fd;
char *name;
struct list_head sections;
DECLARE_HASHTABLE(symbol_hash, 20);
DECLARE_HASHTABLE(symbol_name_hash, 20);
DECLARE_HASHTABLE(section_hash, 16);
DECLARE_HASHTABLE(section_name_hash, 16);
DECLARE_HASHTABLE(rela_hash, 20);
DECLARE_HASHTABLE(symbol_hash, ELF_HASH_BITS);
DECLARE_HASHTABLE(symbol_name_hash, ELF_HASH_BITS);
DECLARE_HASHTABLE(section_hash, ELF_HASH_BITS);
DECLARE_HASHTABLE(section_name_hash, ELF_HASH_BITS);
DECLARE_HASHTABLE(rela_hash, ELF_HASH_BITS);
};
#define OFFSET_STRIDE_BITS 4
......@@ -112,22 +114,23 @@ static inline u32 rela_hash(struct rela *rela)
return sec_offset_hash(rela->sec, rela->offset);
}
struct elf *elf_read(const char *name, int flags);
struct section *find_section_by_name(struct elf *elf, const char *name);
struct elf *elf_open_read(const char *name, int flags);
struct section *elf_create_section(struct elf *elf, const char *name, size_t entsize, int nr);
struct section *elf_create_rela_section(struct elf *elf, struct section *base);
void elf_add_rela(struct elf *elf, struct rela *rela);
int elf_write(const struct elf *elf);
void elf_close(struct elf *elf);
struct section *find_section_by_name(const struct elf *elf, const char *name);
struct symbol *find_func_by_offset(struct section *sec, unsigned long offset);
struct symbol *find_symbol_by_offset(struct section *sec, unsigned long offset);
struct symbol *find_symbol_by_name(struct elf *elf, const char *name);
struct symbol *find_symbol_containing(struct section *sec, unsigned long offset);
struct rela *find_rela_by_dest(struct elf *elf, struct section *sec, unsigned long offset);
struct rela *find_rela_by_dest_range(struct elf *elf, struct section *sec,
struct symbol *find_symbol_by_name(const struct elf *elf, const char *name);
struct symbol *find_symbol_containing(const struct section *sec, unsigned long offset);
struct rela *find_rela_by_dest(const struct elf *elf, struct section *sec, unsigned long offset);
struct rela *find_rela_by_dest_range(const struct elf *elf, struct section *sec,
unsigned long offset, unsigned int len);
struct symbol *find_func_containing(struct section *sec, unsigned long offset);
struct section *elf_create_section(struct elf *elf, const char *name, size_t
entsize, int nr);
struct section *elf_create_rela_section(struct elf *elf, struct section *base);
int elf_rebuild_rela_section(struct section *sec);
int elf_write(struct elf *elf);
void elf_close(struct elf *elf);
#define for_each_sec(file, sec) \
list_for_each_entry(sec, &file->elf->sections, list)
......
......@@ -58,7 +58,9 @@ static void cmd_usage(void)
printf("\n");
exit(129);
if (!help)
exit(129);
exit(0);
}
static void handle_options(int *argc, const char ***argv)
......
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
* Copyright (C) 2017 Josh Poimboeuf <jpoimboe@redhat.com>
* Copyright (C) 2020 Matt Helsley <mhelsley@vmware.com>
*/
#ifndef _ORC_H
#define _ORC_H
#ifndef _OBJTOOL_H
#define _OBJTOOL_H
#include <asm/orc_types.h>
#include <stdbool.h>
#include <linux/list.h>
#include <linux/hashtable.h>
struct objtool_file;
#include "elf.h"
int create_orc(struct objtool_file *file);
int create_orc_sections(struct objtool_file *file);
struct objtool_file {
struct elf *elf;
struct list_head insn_list;
DECLARE_HASHTABLE(insn_hash, 20);
bool ignore_unreachables, c_file, hints, rodata;
};
int check(const char *objname, bool orc);
int orc_dump(const char *objname);
int create_orc(struct objtool_file *file);
int create_orc_sections(struct objtool_file *file);
#endif /* _ORC_H */
#endif /* _OBJTOOL_H */
......@@ -4,7 +4,8 @@
*/
#include <unistd.h>
#include "orc.h"
#include <asm/orc_types.h>
#include "objtool.h"
#include "warn.h"
static const char *reg_name(unsigned int reg)
......
......@@ -6,7 +6,6 @@
#include <stdlib.h>
#include <string.h>
#include "orc.h"
#include "check.h"
#include "warn.h"
......@@ -16,10 +15,10 @@ int create_orc(struct objtool_file *file)
for_each_insn(file, insn) {
struct orc_entry *orc = &insn->orc;
struct cfi_reg *cfa = &insn->state.cfa;
struct cfi_reg *bp = &insn->state.regs[CFI_BP];
struct cfi_reg *cfa = &insn->cfi.cfa;
struct cfi_reg *bp = &insn->cfi.regs[CFI_BP];
orc->end = insn->state.end;
orc->end = insn->cfi.end;
if (cfa->base == CFI_UNDEFINED) {
orc->sp_reg = ORC_REG_UNDEFINED;
......@@ -75,7 +74,7 @@ int create_orc(struct objtool_file *file)
orc->sp_offset = cfa->offset;
orc->bp_offset = bp->offset;
orc->type = insn->state.type;
orc->type = insn->cfi.type;
}
return 0;
......@@ -130,8 +129,7 @@ static int create_orc_entry(struct elf *elf, struct section *u_sec, struct secti
rela->offset = idx * sizeof(int);
rela->sec = ip_relasec;
list_add_tail(&rela->list, &ip_relasec->rela_list);
hash_add(elf->rela_hash, &rela->hash, rela_hash(rela));
elf_add_rela(elf, rela);
return 0;
}
......
// SPDX-License-Identifier: GPL-2.0-or-later
/*
* Copyright (C) 2020 Matt Helsley <mhelsley@vmware.com>
* Weak definitions necessary to compile objtool without
* some subcommands (e.g. check, orc).
*/
#include <stdbool.h>
#include <errno.h>
#include "objtool.h"
#define __weak __attribute__((weak))
#define UNSUPPORTED(name) \
({ \
fprintf(stderr, "error: objtool: " name " not implemented\n"); \
return ENOSYS; \
})
const char __weak *objname;
int __weak check(const char *_objname, bool orc)
{
UNSUPPORTED("check subcommand");
}
int __weak orc_dump(const char *_objname)
{
UNSUPPORTED("orc");
}
int __weak create_orc(struct objtool_file *file)
{
UNSUPPORTED("orc");
}
int __weak create_orc_sections(struct objtool_file *file)
{
UNSUPPORTED("orc");
}
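Editor's note: the stubs above rely on weak linkage -- when a real subcommand is linked in (for example the strong check() definition), it silently overrides the weak stub; otherwise the stub reports ENOSYS. A minimal, self-contained illustration of the pattern with hypothetical names:

/*
 * weak_demo.c (hypothetical example, not objtool sources).
 *
 *   $ cc -o demo weak_demo.c             && ./demo   -> "stub"
 *   $ cc -o demo weak_demo.c strong.c    && ./demo   -> "real implementation"
 */
#include <stdio.h>

#define __weak __attribute__((weak))

int __weak do_check(void)
{
	puts("stub");
	return 0;
}

int main(void)
{
	return do_check();
}

/* strong.c (optional second translation unit):
 *
 *   #include <stdio.h>
 *   int do_check(void) { puts("real implementation"); return 0; }
 */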