Commit 69fc06f7 authored by Linus Torvalds

Merge tag 'objtool-core-2020-06-01' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull objtool updates from Ingo Molnar:
 "There are a lot of objtool changes in this cycle, all across the map:

   - Speed up objtool significantly, especially when there are large
     numbers of sections

   - Improve objtool's understanding of special instructions such as
     IRET, to reduce the number of annotations required

   - Implement 'noinstr' validation

   - Do baby steps for non-x86 objtool use

   - Simplify/fix retpoline decoding

   - Add vmlinux validation

   - Improve documentation

   - Fix various bugs and apply smaller cleanups"

* tag 'objtool-core-2020-06-01' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (54 commits)
  objtool: Enable compilation of objtool for all architectures
  objtool: Move struct objtool_file into arch-independent header
  objtool: Exit successfully when requesting help
  objtool: Add check_kcov_mode() to the uaccess safelist
  samples/ftrace: Fix asm function ELF annotations
  objtool: optimize add_dead_ends for split sections
  objtool: use gelf_getsymshndx to handle >64k sections
  objtool: Allow no-op CFI ops in alternatives
  x86/retpoline: Fix retpoline unwind
  x86: Change {JMP,CALL}_NOSPEC argument
  x86: Simplify retpoline declaration
  x86/speculation: Change FILL_RETURN_BUFFER to work with objtool
  objtool: Add support for intra-function calls
  objtool: Move the IRET hack into the arch decoder
  objtool: Remove INSN_STACK
  objtool: Make handle_insn_ops() unconditional
  objtool: Rework allocating stack_ops on decode
  objtool: UNWIND_HINT_RET_OFFSET should not check registers
  objtool: is_fentry_call() crashes if call has no destination
  x86,smap: Fix smap_{save,restore}() alternatives
  ...
parents 60056060 0decf1f8
......@@ -2758,7 +2758,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
pxor INC, STATE4
movdqu IV, 0x30(OUTP)
CALL_NOSPEC %r11
CALL_NOSPEC r11
movdqu 0x00(OUTP), INC
pxor INC, STATE1
......@@ -2803,7 +2803,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
_aesni_gf128mul_x_ble()
movups IV, (IVP)
CALL_NOSPEC %r11
CALL_NOSPEC r11
movdqu 0x40(OUTP), INC
pxor INC, STATE1
......
......@@ -1228,7 +1228,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_16way)
vpxor 14 * 16(%rax), %xmm15, %xmm14;
vpxor 15 * 16(%rax), %xmm15, %xmm15;
CALL_NOSPEC %r9;
CALL_NOSPEC r9;
addq $(16 * 16), %rsp;
......
......@@ -1339,7 +1339,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_32way)
vpxor 14 * 32(%rax), %ymm15, %ymm14;
vpxor 15 * 32(%rax), %ymm15, %ymm15;
CALL_NOSPEC %r9;
CALL_NOSPEC r9;
addq $(16 * 32), %rsp;
......
......@@ -75,7 +75,7 @@
.text
SYM_FUNC_START(crc_pcl)
#define bufp %rdi
#define bufp rdi
#define bufp_dw %edi
#define bufp_w %di
#define bufp_b %dil
......@@ -105,9 +105,9 @@ SYM_FUNC_START(crc_pcl)
## 1) ALIGN:
################################################################
mov bufp, bufptmp # rdi = *buf
neg bufp
and $7, bufp # calculate the unalignment amount of
mov %bufp, bufptmp # rdi = *buf
neg %bufp
and $7, %bufp # calculate the unalignment amount of
# the address
je proc_block # Skip if aligned
......@@ -123,13 +123,13 @@ SYM_FUNC_START(crc_pcl)
do_align:
#### Calculate CRC of unaligned bytes of the buffer (if any)
movq (bufptmp), tmp # load a quadword from the buffer
add bufp, bufptmp # align buffer pointer for quadword
add %bufp, bufptmp # align buffer pointer for quadword
# processing
sub bufp, len # update buffer length
sub %bufp, len # update buffer length
align_loop:
crc32b %bl, crc_init_dw # compute crc32 of 1-byte
shr $8, tmp # get next byte
dec bufp
dec %bufp
jne align_loop
proc_block:
......@@ -169,10 +169,10 @@ continue_block:
xor crc2, crc2
## branch into array
lea jump_table(%rip), bufp
movzxw (bufp, %rax, 2), len
lea crc_array(%rip), bufp
lea (bufp, len, 1), bufp
lea jump_table(%rip), %bufp
movzxw (%bufp, %rax, 2), len
lea crc_array(%rip), %bufp
lea (%bufp, len, 1), %bufp
JMP_NOSPEC bufp
################################################################
......@@ -218,9 +218,9 @@ LABEL crc_ %i
## 4) Combine three results:
################################################################
lea (K_table-8)(%rip), bufp # first entry is for idx 1
lea (K_table-8)(%rip), %bufp # first entry is for idx 1
shlq $3, %rax # rax *= 8
pmovzxdq (bufp,%rax), %xmm0 # 2 consts: K1:K2
pmovzxdq (%bufp,%rax), %xmm0 # 2 consts: K1:K2
leal (%eax,%eax,2), %eax # rax *= 3 (total *24)
subq %rax, tmp # tmp -= rax*24
......
......@@ -816,7 +816,7 @@ SYM_CODE_START(ret_from_fork)
/* kernel thread */
1: movl %edi, %eax
CALL_NOSPEC %ebx
CALL_NOSPEC ebx
/*
* A kernel thread is allowed to return here after successfully
* calling do_execve(). Exit to userspace to complete the execve()
......@@ -1501,7 +1501,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception_read_cr2)
TRACE_IRQS_OFF
movl %esp, %eax # pt_regs pointer
CALL_NOSPEC %edi
CALL_NOSPEC edi
jmp ret_from_exception
SYM_CODE_END(common_exception_read_cr2)
......@@ -1522,7 +1522,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception)
TRACE_IRQS_OFF
movl %esp, %eax # pt_regs pointer
CALL_NOSPEC %edi
CALL_NOSPEC edi
jmp ret_from_exception
SYM_CODE_END(common_exception)
......
......@@ -348,7 +348,7 @@ SYM_CODE_START(ret_from_fork)
/* kernel thread */
UNWIND_HINT_EMPTY
movq %r12, %rdi
CALL_NOSPEC %rbx
CALL_NOSPEC rbx
/*
* A kernel thread is allowed to return here after successfully
* calling do_execve(). Exit to userspace to complete the execve()
......
#ifdef CONFIG_64BIT
GEN(rax)
GEN(rbx)
GEN(rcx)
GEN(rdx)
GEN(rsi)
GEN(rdi)
GEN(rbp)
GEN(r8)
GEN(r9)
GEN(r10)
GEN(r11)
GEN(r12)
GEN(r13)
GEN(r14)
GEN(r15)
#else
GEN(eax)
GEN(ebx)
GEN(ecx)
GEN(edx)
GEN(esi)
GEN(edi)
GEN(ebp)
#endif
......@@ -17,24 +17,19 @@ extern void cmpxchg8b_emu(void);
#endif
#ifdef CONFIG_RETPOLINE
#ifdef CONFIG_X86_32
#define INDIRECT_THUNK(reg) extern asmlinkage void __x86_indirect_thunk_e ## reg(void);
#else
#define INDIRECT_THUNK(reg) extern asmlinkage void __x86_indirect_thunk_r ## reg(void);
INDIRECT_THUNK(8)
INDIRECT_THUNK(9)
INDIRECT_THUNK(10)
INDIRECT_THUNK(11)
INDIRECT_THUNK(12)
INDIRECT_THUNK(13)
INDIRECT_THUNK(14)
INDIRECT_THUNK(15)
#endif
INDIRECT_THUNK(ax)
INDIRECT_THUNK(bx)
INDIRECT_THUNK(cx)
INDIRECT_THUNK(dx)
INDIRECT_THUNK(si)
INDIRECT_THUNK(di)
INDIRECT_THUNK(bp)
#define DECL_INDIRECT_THUNK(reg) \
extern asmlinkage void __x86_indirect_thunk_ ## reg (void);
#define DECL_RETPOLINE(reg) \
extern asmlinkage void __x86_retpoline_ ## reg (void);
#undef GEN
#define GEN(reg) DECL_INDIRECT_THUNK(reg)
#include <asm/GEN-for-each-reg.h>
#undef GEN
#define GEN(reg) DECL_RETPOLINE(reg)
#include <asm/GEN-for-each-reg.h>
#endif /* CONFIG_RETPOLINE */
......@@ -4,20 +4,13 @@
#define _ASM_X86_NOSPEC_BRANCH_H_
#include <linux/static_key.h>
#include <linux/frame.h>
#include <asm/alternative.h>
#include <asm/alternative-asm.h>
#include <asm/cpufeatures.h>
#include <asm/msr-index.h>
/*
* This should be used immediately before a retpoline alternative. It tells
* objtool where the retpolines are so that it can make sense of the control
* flow by just reading the original instruction(s) and ignoring the
* alternatives.
*/
#define ANNOTATE_NOSPEC_ALTERNATIVE \
ANNOTATE_IGNORE_ALTERNATIVE
#include <asm/unwind_hints.h>
/*
* Fill the CPU return stack buffer.
......@@ -46,21 +39,25 @@
#define __FILL_RETURN_BUFFER(reg, nr, sp) \
mov $(nr/2), reg; \
771: \
ANNOTATE_INTRA_FUNCTION_CALL; \
call 772f; \
773: /* speculation trap */ \
UNWIND_HINT_EMPTY; \
pause; \
lfence; \
jmp 773b; \
772: \
ANNOTATE_INTRA_FUNCTION_CALL; \
call 774f; \
775: /* speculation trap */ \
UNWIND_HINT_EMPTY; \
pause; \
lfence; \
jmp 775b; \
774: \
add $(BITS_PER_LONG/8) * 2, sp; \
dec reg; \
jnz 771b; \
add $(BITS_PER_LONG/8) * nr, sp;
jnz 771b;
#ifdef __ASSEMBLY__
......@@ -76,34 +73,6 @@
.popsection
.endm
/*
* These are the bare retpoline primitives for indirect jmp and call.
* Do not use these directly; they only exist to make the ALTERNATIVE
* invocation below less ugly.
*/
.macro RETPOLINE_JMP reg:req
call .Ldo_rop_\@
.Lspec_trap_\@:
pause
lfence
jmp .Lspec_trap_\@
.Ldo_rop_\@:
mov \reg, (%_ASM_SP)
ret
.endm
/*
* This is a wrapper around RETPOLINE_JMP so the called function in reg
* returns to the instruction after the macro.
*/
.macro RETPOLINE_CALL reg:req
jmp .Ldo_call_\@
.Ldo_retpoline_jmp_\@:
RETPOLINE_JMP \reg
.Ldo_call_\@:
call .Ldo_retpoline_jmp_\@
.endm
/*
* JMP_NOSPEC and CALL_NOSPEC macros can be used instead of a simple
* indirect jmp/call which may be susceptible to the Spectre variant 2
......@@ -111,23 +80,21 @@
*/
.macro JMP_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE
ANNOTATE_NOSPEC_ALTERNATIVE
ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *\reg), \
__stringify(RETPOLINE_JMP \reg), X86_FEATURE_RETPOLINE, \
__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *\reg), X86_FEATURE_RETPOLINE_AMD
ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), \
__stringify(jmp __x86_retpoline_\reg), X86_FEATURE_RETPOLINE, \
__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), X86_FEATURE_RETPOLINE_AMD
#else
jmp *\reg
jmp *%\reg
#endif
.endm
.macro CALL_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE
ANNOTATE_NOSPEC_ALTERNATIVE
ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; call *\reg), \
__stringify(RETPOLINE_CALL \reg), X86_FEATURE_RETPOLINE,\
__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; call *\reg), X86_FEATURE_RETPOLINE_AMD
ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; call *%\reg), \
__stringify(call __x86_retpoline_\reg), X86_FEATURE_RETPOLINE, \
__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; call *%\reg), X86_FEATURE_RETPOLINE_AMD
#else
call *\reg
call *%\reg
#endif
.endm
......@@ -137,10 +104,8 @@
*/
.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req
#ifdef CONFIG_RETPOLINE
ANNOTATE_NOSPEC_ALTERNATIVE
ALTERNATIVE "jmp .Lskip_rsb_\@", \
__stringify(__FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP)) \
\ftr
ALTERNATIVE "jmp .Lskip_rsb_\@", "", \ftr
__FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP)
.Lskip_rsb_\@:
#endif
.endm
......@@ -161,16 +126,16 @@
* which is ensured when CONFIG_RETPOLINE is defined.
*/
# define CALL_NOSPEC \
ANNOTATE_NOSPEC_ALTERNATIVE \
ALTERNATIVE_2( \
ANNOTATE_RETPOLINE_SAFE \
"call *%[thunk_target]\n", \
"call __x86_indirect_thunk_%V[thunk_target]\n", \
"call __x86_retpoline_%V[thunk_target]\n", \
X86_FEATURE_RETPOLINE, \
"lfence;\n" \
ANNOTATE_RETPOLINE_SAFE \
"call *%[thunk_target]\n", \
X86_FEATURE_RETPOLINE_AMD)
# define THUNK_TARGET(addr) [thunk_target] "r" (addr)
#else /* CONFIG_X86_32 */
......@@ -180,7 +145,6 @@
* here, anyway.
*/
# define CALL_NOSPEC \
ANNOTATE_NOSPEC_ALTERNATIVE \
ALTERNATIVE_2( \
ANNOTATE_RETPOLINE_SAFE \
"call *%[thunk_target]\n", \
......
......@@ -58,8 +58,7 @@
#define ORC_TYPE_CALL 0
#define ORC_TYPE_REGS 1
#define ORC_TYPE_REGS_IRET 2
#define UNWIND_HINT_TYPE_SAVE 3
#define UNWIND_HINT_TYPE_RESTORE 4
#define UNWIND_HINT_TYPE_RET_OFFSET 3
#ifndef __ASSEMBLY__
/*
......
......@@ -728,7 +728,6 @@ static inline void sync_core(void)
unsigned int tmp;
asm volatile (
UNWIND_HINT_SAVE
"mov %%ss, %0\n\t"
"pushq %q0\n\t"
"pushq %%rsp\n\t"
......@@ -738,7 +737,6 @@ static inline void sync_core(void)
"pushq %q0\n\t"
"pushq $1f\n\t"
"iretq\n\t"
UNWIND_HINT_RESTORE
"1:"
: "=&r" (tmp), ASM_CALL_CONSTRAINT : : "cc", "memory");
#endif
......
......@@ -57,8 +57,10 @@ static __always_inline unsigned long smap_save(void)
{
unsigned long flags;
asm volatile (ALTERNATIVE("", "pushf; pop %0; " __ASM_CLAC,
X86_FEATURE_SMAP)
asm volatile ("# smap_save\n\t"
ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP)
"pushf; pop %0; " __ASM_CLAC "\n\t"
"1:"
: "=rm" (flags) : : "memory", "cc");
return flags;
......@@ -66,7 +68,10 @@ static __always_inline unsigned long smap_save(void)
static __always_inline void smap_restore(unsigned long flags)
{
asm volatile (ALTERNATIVE("", "push %0; popf", X86_FEATURE_SMAP)
asm volatile ("# smap_restore\n\t"
ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP)
"push %0; popf\n\t"
"1:"
: : "g" (flags) : "memory", "cc");
}
......
......@@ -86,32 +86,15 @@
UNWIND_HINT sp_offset=\sp_offset
.endm
.macro UNWIND_HINT_SAVE
UNWIND_HINT type=UNWIND_HINT_TYPE_SAVE
.endm
.macro UNWIND_HINT_RESTORE
UNWIND_HINT type=UNWIND_HINT_TYPE_RESTORE
/*
* RET_OFFSET: Used on instructions that terminate a function; mostly RETURN
* and sibling calls. On these, sp_offset denotes the expected offset from
* initial_func_cfi.
*/
.macro UNWIND_HINT_RET_OFFSET sp_offset=8
UNWIND_HINT type=UNWIND_HINT_TYPE_RET_OFFSET sp_offset=\sp_offset
.endm
#else /* !__ASSEMBLY__ */
#define UNWIND_HINT(sp_reg, sp_offset, type, end) \
"987: \n\t" \
".pushsection .discard.unwind_hints\n\t" \
/* struct unwind_hint */ \
".long 987b - .\n\t" \
".short " __stringify(sp_offset) "\n\t" \
".byte " __stringify(sp_reg) "\n\t" \
".byte " __stringify(type) "\n\t" \
".byte " __stringify(end) "\n\t" \
".balign 4 \n\t" \
".popsection\n\t"
#define UNWIND_HINT_SAVE UNWIND_HINT(0, 0, UNWIND_HINT_TYPE_SAVE, 0)
#define UNWIND_HINT_RESTORE UNWIND_HINT(0, 0, UNWIND_HINT_TYPE_RESTORE, 0)
#endif /* __ASSEMBLY__ */
#endif /* _ASM_X86_UNWIND_HINTS_H */
......@@ -282,7 +282,8 @@ static inline void tramp_free(void *tramp) { }
/* Defined as markers to the end of the ftrace default trampolines */
extern void ftrace_regs_caller_end(void);
extern void ftrace_epilogue(void);
extern void ftrace_regs_caller_ret(void);
extern void ftrace_caller_end(void);
extern void ftrace_caller_op_ptr(void);
extern void ftrace_regs_caller_op_ptr(void);
......@@ -334,7 +335,7 @@ create_trampoline(struct ftrace_ops *ops, unsigned int *tramp_size)
call_offset = (unsigned long)ftrace_regs_call;
} else {
start_offset = (unsigned long)ftrace_caller;
end_offset = (unsigned long)ftrace_epilogue;
end_offset = (unsigned long)ftrace_caller_end;
op_offset = (unsigned long)ftrace_caller_op_ptr;
call_offset = (unsigned long)ftrace_call;
}
......@@ -366,6 +367,13 @@ create_trampoline(struct ftrace_ops *ops, unsigned int *tramp_size)
if (WARN_ON(ret < 0))
goto fail;
if (ops->flags & FTRACE_OPS_FL_SAVE_REGS) {
ip = trampoline + (ftrace_regs_caller_ret - ftrace_regs_caller);
ret = probe_kernel_read(ip, (void *)retq, RET_SIZE);
if (WARN_ON(ret < 0))
goto fail;
}
/*
* The address of the ftrace_ops that is used for this trampoline
* is stored at the end of the trampoline. This will be used to
......@@ -433,7 +441,7 @@ void set_ftrace_ops_ro(void)
end_offset = (unsigned long)ftrace_regs_caller_end;
} else {
start_offset = (unsigned long)ftrace_caller;
end_offset = (unsigned long)ftrace_epilogue;
end_offset = (unsigned long)ftrace_caller_end;
}
size = end_offset - start_offset;
size = size + RET_SIZE + sizeof(void *);
......
......@@ -189,5 +189,5 @@ return_to_handler:
movl %eax, %ecx
popl %edx
popl %eax
JMP_NOSPEC %ecx
JMP_NOSPEC ecx
#endif
......@@ -23,7 +23,7 @@
#endif /* CONFIG_FRAME_POINTER */
/* Size of stack used to save mcount regs in save_mcount_regs */
#define MCOUNT_REG_SIZE (SS+8 + MCOUNT_FRAME_SIZE)
#define MCOUNT_REG_SIZE (FRAME_SIZE + MCOUNT_FRAME_SIZE)
/*
* gcc -pg option adds a call to 'mcount' in most functions.
......@@ -77,7 +77,7 @@
/*
* We add enough stack to save all regs.
*/
subq $(MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE), %rsp
subq $(FRAME_SIZE), %rsp
movq %rax, RAX(%rsp)
movq %rcx, RCX(%rsp)
movq %rdx, RDX(%rsp)
......@@ -157,8 +157,12 @@ SYM_INNER_LABEL(ftrace_call, SYM_L_GLOBAL)
* think twice before adding any new code or changing the
* layout here.
*/
SYM_INNER_LABEL(ftrace_epilogue, SYM_L_GLOBAL)
SYM_INNER_LABEL(ftrace_caller_end, SYM_L_GLOBAL)
jmp ftrace_epilogue
SYM_FUNC_END(ftrace_caller);
SYM_FUNC_START(ftrace_epilogue)
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
SYM_INNER_LABEL(ftrace_graph_call, SYM_L_GLOBAL)
jmp ftrace_stub
......@@ -170,14 +174,12 @@ SYM_INNER_LABEL(ftrace_graph_call, SYM_L_GLOBAL)
*/
SYM_INNER_LABEL_ALIGN(ftrace_stub, SYM_L_WEAK)
retq
SYM_FUNC_END(ftrace_caller)
SYM_FUNC_END(ftrace_epilogue)
SYM_FUNC_START(ftrace_regs_caller)
/* Save the current flags before any operations that can change them */
pushfq
UNWIND_HINT_SAVE
/* added 8 bytes to save flags */
save_mcount_regs 8
/* save_mcount_regs fills in first two parameters */
......@@ -233,10 +235,13 @@ SYM_INNER_LABEL(ftrace_regs_call, SYM_L_GLOBAL)
movq ORIG_RAX(%rsp), %rax
movq %rax, MCOUNT_REG_SIZE-8(%rsp)
/* If ORIG_RAX is anything but zero, make this a call to that */
/*
* If ORIG_RAX is anything but zero, make this a call to that.
* See arch_ftrace_set_direct_caller().
*/
movq ORIG_RAX(%rsp), %rax
cmpq $0, %rax
je 1f
testq %rax, %rax
jz 1f
/* Swap the flags with orig_rax */
movq MCOUNT_REG_SIZE(%rsp), %rdi
......@@ -244,20 +249,14 @@ SYM_INNER_LABEL(ftrace_regs_call, SYM_L_GLOBAL)
movq %rax, MCOUNT_REG_SIZE(%rsp)
restore_mcount_regs 8
/* Restore flags */
popfq
jmp 2f
SYM_INNER_LABEL(ftrace_regs_caller_ret, SYM_L_GLOBAL);
UNWIND_HINT_RET_OFFSET
jmp ftrace_epilogue
1: restore_mcount_regs
2:
/*
* The stack layout is nondetermistic here, depending on which path was
* taken. This confuses objtool and ORC, rightfully so. For now,
* pretend the stack always looks like the non-direct case.
*/
UNWIND_HINT_RESTORE
/* Restore flags */
popfq
......@@ -268,7 +267,6 @@ SYM_INNER_LABEL(ftrace_regs_call, SYM_L_GLOBAL)
* to the return.
*/
SYM_INNER_LABEL(ftrace_regs_caller_end, SYM_L_GLOBAL)
jmp ftrace_epilogue
SYM_FUNC_END(ftrace_regs_caller)
......@@ -303,7 +301,7 @@ trace:
* function tracing is enabled.
*/
movq ftrace_trace_function, %r8
CALL_NOSPEC %r8
CALL_NOSPEC r8
restore_mcount_regs
jmp fgraph_trace
......@@ -340,6 +338,6 @@ SYM_CODE_START(return_to_handler)
movq 8(%rsp), %rdx
movq (%rsp), %rax
addq $24, %rsp
JMP_NOSPEC %rdi
JMP_NOSPEC rdi
SYM_CODE_END(return_to_handler)
#endif
......@@ -153,7 +153,7 @@ SYM_FUNC_START(csum_partial)
negl %ebx
lea 45f(%ebx,%ebx,2), %ebx
testl %esi, %esi
JMP_NOSPEC %ebx
JMP_NOSPEC ebx
# Handle 2-byte-aligned regions
20: addw (%esi), %ax
......@@ -436,7 +436,7 @@ SYM_FUNC_START(csum_partial_copy_generic)
andl $-32,%edx
lea 3f(%ebx,%ebx), %ebx
testl %esi, %esi
JMP_NOSPEC %ebx
JMP_NOSPEC ebx
1: addl $64,%esi
addl $64,%edi
SRC(movb -32(%edx),%bl) ; SRC(movb (%edx),%bl)
......
......@@ -7,15 +7,31 @@
#include <asm/alternative-asm.h>
#include <asm/export.h>
#include <asm/nospec-branch.h>
#include <asm/unwind_hints.h>
#include <asm/frame.h>
.macro THUNK reg
.section .text.__x86.indirect_thunk
.align 32
SYM_FUNC_START(__x86_indirect_thunk_\reg)
CFI_STARTPROC
JMP_NOSPEC %\reg
CFI_ENDPROC
JMP_NOSPEC \reg
SYM_FUNC_END(__x86_indirect_thunk_\reg)
SYM_FUNC_START_NOALIGN(__x86_retpoline_\reg)
ANNOTATE_INTRA_FUNCTION_CALL
call .Ldo_rop_\@
.Lspec_trap_\@:
UNWIND_HINT_EMPTY
pause
lfence
jmp .Lspec_trap_\@
.Ldo_rop_\@:
mov %\reg, (%_ASM_SP)
UNWIND_HINT_RET_OFFSET
ret
SYM_FUNC_END(__x86_retpoline_\reg)
.endm
/*
......@@ -24,25 +40,24 @@ SYM_FUNC_END(__x86_indirect_thunk_\reg)
* only see one instance of "__x86_indirect_thunk_\reg" rather
* than one per register with the correct names. So we do it
* the simple and nasty way...
*
* Worse, you can only have a single EXPORT_SYMBOL per line,
* and CPP can't insert newlines, so we have to repeat everything
* at least twice.
*/
#define __EXPORT_THUNK(sym) _ASM_NOKPROBE(sym); EXPORT_SYMBOL(sym)
#define EXPORT_THUNK(reg) __EXPORT_THUNK(__x86_indirect_thunk_ ## reg)
#define GENERATE_THUNK(reg) THUNK reg ; EXPORT_THUNK(reg)
GENERATE_THUNK(_ASM_AX)
GENERATE_THUNK(_ASM_BX)
GENERATE_THUNK(_ASM_CX)
GENERATE_THUNK(_ASM_DX)
GENERATE_THUNK(_ASM_SI)
GENERATE_THUNK(_ASM_DI)
GENERATE_THUNK(_ASM_BP)
#ifdef CONFIG_64BIT
GENERATE_THUNK(r8)
GENERATE_THUNK(r9)
GENERATE_THUNK(r10)
GENERATE_THUNK(r11)
GENERATE_THUNK(r12)
GENERATE_THUNK(r13)
GENERATE_THUNK(r14)
GENERATE_THUNK(r15)
#endif
#define __EXPORT_THUNK(sym) _ASM_NOKPROBE(sym); EXPORT_SYMBOL(sym)
#define EXPORT_THUNK(reg) __EXPORT_THUNK(__x86_indirect_thunk_ ## reg)
#define EXPORT_RETPOLINE(reg) __EXPORT_THUNK(__x86_retpoline_ ## reg)
#undef GEN
#define GEN(reg) THUNK reg
#include <asm/GEN-for-each-reg.h>
#undef GEN
#define GEN(reg) EXPORT_THUNK(reg)
#include <asm/GEN-for-each-reg.h>
#undef GEN
#define GEN(reg) EXPORT_RETPOLINE(reg)
#include <asm/GEN-for-each-reg.h>
......@@ -21,7 +21,7 @@ SYM_FUNC_START(__efi_call)
mov %r8, %r9
mov %rcx, %r8
mov %rsi, %rcx
CALL_NOSPEC %rdi
CALL_NOSPEC rdi
leave
ret
SYM_FUNC_END(__efi_call)
......@@ -15,9 +15,20 @@
static void __used __section(.discard.func_stack_frame_non_standard) \
*__func_stack_frame_non_standard_##func = func
/*
* This macro indicates that the following intra-function call is valid.
* Any non-annotated intra-function call will cause objtool to issue a warning.
*/
#define ANNOTATE_INTRA_FUNCTION_CALL \
999: \
.pushsection .discard.intra_function_calls; \
.long 999b; \
.popsection;
#else /* !CONFIG_STACK_VALIDATION */
#define STACK_FRAME_NON_STANDARD(func)
#define ANNOTATE_INTRA_FUNCTION_CALL
#endif /* CONFIG_STACK_VALIDATION */
......
......@@ -369,6 +369,11 @@ config STACK_VALIDATION
For more information, see
tools/objtool/Documentation/stack-validation.txt.
config VMLINUX_VALIDATION
bool
depends on STACK_VALIDATION && DEBUG_ENTRY && !PARAVIRT
default y
config DEBUG_FORCE_WEAK_PER_CPU
bool "Force weak per-cpu definitions"
depends on DEBUG_KERNEL
......
......@@ -20,18 +20,22 @@ static unsigned long my_ip = (unsigned long)schedule;
asm (
" .pushsection .text, \"ax\", @progbits\n"
" .type my_tramp1, @function\n"
" my_tramp1:"
" pushq %rbp\n"
" movq %rsp, %rbp\n"
" call my_direct_func1\n"
" leave\n"
" .size my_tramp1, .-my_tramp1\n"
" ret\n"
" .type my_tramp2, @function\n"
" my_tramp2:"
" pushq %rbp\n"
" movq %rsp, %rbp\n"
" call my_direct_func2\n"
" leave\n"
" ret\n"
" .size my_tramp2, .-my_tramp2\n"
" .popsection\n"
);
......
......@@ -15,6 +15,7 @@ extern void my_tramp(void *);
asm (
" .pushsection .text, \"ax\", @progbits\n"
" .type my_tramp, @function\n"
" my_tramp:"
" pushq %rbp\n"
" movq %rsp, %rbp\n"
......@@ -27,6 +28,7 @@ asm (
" popq %rdi\n"
" leave\n"
" ret\n"
" .size my_tramp, .-my_tramp\n"
" .popsection\n"
);
......
......@@ -13,6 +13,7 @@ extern void my_tramp(void *);
asm (
" .pushsection .text, \"ax\", @progbits\n"
" .type my_tramp, @function\n"
" my_tramp:"
" pushq %rbp\n"
" movq %rsp, %rbp\n"
......@@ -21,6 +22,7 @@ asm (
" popq %rdi\n"
" leave\n"
" ret\n"
" .size my_tramp, .-my_tramp\n"
" .popsection\n"
);
......
......@@ -55,6 +55,29 @@ modpost_link()
${LD} ${KBUILD_LDFLAGS} -r -o ${1} ${objects}
}
objtool_link()
{
local objtoolopt;
if [ -n "${CONFIG_VMLINUX_VALIDATION}" ]; then
objtoolopt="check"
if [ -z "${CONFIG_FRAME_POINTER}" ]; then
objtoolopt="${objtoolopt} --no-fp"
fi
if [ -n "${CONFIG_GCOV_KERNEL}" ]; then
objtoolopt="${objtoolopt} --no-unreachable"
fi
if [ -n "${CONFIG_RETPOLINE}" ]; then
objtoolopt="${objtoolopt} --retpoline"
fi
if [ -n "${CONFIG_X86_SMAP}" ]; then
objtoolopt="${objtoolopt} --uaccess"
fi
info OBJTOOL ${1}
tools/objtool/objtool ${objtoolopt} ${1}
fi
}
# Link of vmlinux
# ${1} - output file
# ${2}, ${3}, ... - optional extra .o files
......@@ -251,6 +274,7 @@ ${MAKE} -f "${srctree}/scripts/Makefile.build" obj=init need-builtin=1
#link vmlinux.o
info LD vmlinux.o
modpost_link vmlinux.o
objtool_link vmlinux.o
# modpost vmlinux.o to check for section mismatches
${MAKE} -f "${srctree}/scripts/Makefile.modpost" MODPOST_VMLINUX=1
......
......@@ -58,8 +58,7 @@
#define ORC_TYPE_CALL 0
#define ORC_TYPE_REGS 1
#define ORC_TYPE_REGS_IRET 2
#define UNWIND_HINT_TYPE_SAVE 3
#define UNWIND_HINT_TYPE_RESTORE 4
#define UNWIND_HINT_TYPE_RET_OFFSET 3
#ifndef __ASSEMBLY__
/*
......
objtool-y += arch/$(SRCARCH)/
objtool-y += weak.o
objtool-$(SUBCMD_CHECK) += check.o
objtool-$(SUBCMD_CHECK) += special.o
objtool-$(SUBCMD_ORC) += check.o
objtool-$(SUBCMD_ORC) += orc_gen.o
objtool-$(SUBCMD_ORC) += orc_dump.o
objtool-y += builtin-check.o
objtool-y += builtin-orc.o
objtool-y += check.o
objtool-y += orc_gen.o
objtool-y += orc_dump.o
objtool-y += elf.o
objtool-y += special.o
objtool-y += objtool.o
objtool-y += libstring.o
......
......@@ -289,6 +289,47 @@ they mean, and suggestions for how to fix them.
might be corrupt due to a gcc bug. For more details, see:
https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70646
9. file.o: warning: objtool: funcA() call to funcB() with UACCESS enabled
This means that an unexpected call to a non-whitelisted function exists
outside of arch-specific guards.
X86: SMAP (stac/clac): __uaccess_begin()/__uaccess_end()
ARM: PAN: uaccess_enable()/uaccess_disable()
These functions should be called to denote a minimal critical section around
access to __user variables. See also: https://lwn.net/Articles/517475/
The intention of the warning is to prevent calls to funcB() from eventually
calling schedule(), potentially leaking the AC flags state, and not
restoring them correctly.
It also helps verify that there are no unexpected calls to funcB() which may
access user space pages with protections against doing so disabled.
To fix, either:
1) remove explicit calls to funcB() from funcA().
2) add the correct guards before and after calls to low level functions like
__get_user_size()/__put_user_size().
3) add funcB to uaccess_safe_builtin whitelist in tools/objtool/check.c, if
funcB obviously does not call schedule(), and is marked notrace (since
function tracing inserts additional calls, which is not obvious from the
sources).
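
For example, fix 2) amounts to wrapping the low-level access in the arch guards. A minimal C sketch of that pattern, using the generic user_access_begin()/unsafe_get_user()/user_access_end() helpers (the function name and error handling are illustrative, not taken from the kernel sources):

    static int read_user_flag(int __user *uptr, int *val)
    {
            if (!user_access_begin(uptr, sizeof(*uptr)))
                    return -EFAULT;
            /* the access itself happens inside the guarded region */
            unsafe_get_user(*val, uptr, efault);
            user_access_end();
            return 0;
    efault:
            user_access_end();
            return -EFAULT;
    }

Any function called between the begin/end pair must be on the uaccess safelist, which is exactly what this warning enforces.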
10. file.o: warning: func()+0x5c: alternative modifies stack
This means that an alternative includes instructions that modify the
stack. The problem is that there is only one ORC unwind table, this means
that the ORC unwind entries must be valid for each of the alternatives.
The easiest way to enforce this is to ensure alternatives do not contain
any ORC entries, which in turn implies the above constraint.
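
One way to restructure such code is the approach the smap_{save,restore}() rework in this series takes: keep only a branch inside the ALTERNATIVE and move the stack-modifying instructions into the shared instruction stream. A sketch modelled on that change (the function name is illustrative):

    static __always_inline unsigned long save_flags_if_smap(void)
    {
            unsigned long flags = 0;

            /*
             * Only "jmp 1f" lives inside the alternative; the stack-modifying
             * pushf/pop is shared text, so one ORC entry covers both variants.
             */
            asm volatile ("# save_flags_if_smap\n\t"
                          ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP)
                          "pushf; pop %0\n\t"
                          "1:"
                          : "=rm" (flags) : : "memory", "cc");
            return flags;
    }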
11. file.o: warning: unannotated intra-function call
This warning means that a direct call is done to a destination which
is not at the beginning of a function. If this is a legit call, you
can remove this warning by putting the ANNOTATE_INTRA_FUNCTION_CALL
directive right before the call.
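
For instance, the retpoline and return-stack-buffer code in this series annotates its "call into the middle of the function" trick this way; a minimal sketch (symbol and label names are illustrative):

    SYM_FUNC_START(example_get_ip)
            ANNOTATE_INTRA_FUNCTION_CALL
            call 1f                 /* direct call to a label inside this function */
    1:
            pop %rax                /* pushed return address, i.e. the address of 1: */
            ret
    SYM_FUNC_END(example_get_ip)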
If the error doesn't seem to make sense, it could be a bug in objtool.
Feel free to ask the objtool maintainer for help.
......
......@@ -35,7 +35,8 @@ all: $(OBJTOOL)
INCLUDES := -I$(srctree)/tools/include \
-I$(srctree)/tools/arch/$(HOSTARCH)/include/uapi \
-I$(srctree)/tools/arch/$(SRCARCH)/include
-I$(srctree)/tools/arch/$(SRCARCH)/include \
-I$(srctree)/tools/objtool/arch/$(SRCARCH)/include
WARNINGS := $(EXTRA_WARNINGS) -Wno-switch-default -Wno-switch-enum -Wno-packed
CFLAGS := -Werror $(WARNINGS) $(KBUILD_HOSTCFLAGS) -g $(INCLUDES) $(LIBELF_FLAGS)
LDFLAGS += $(LIBELF_LIBS) $(LIBSUBCMD) $(KBUILD_HOSTLDFLAGS)
......@@ -45,14 +46,24 @@ elfshdr := $(shell echo '$(pound)include <libelf.h>' | $(CC) $(CFLAGS) -x c -E -
CFLAGS += $(if $(elfshdr),,-DLIBELF_USE_DEPRECATED)
AWK = awk
SUBCMD_CHECK := n
SUBCMD_ORC := n
ifeq ($(SRCARCH),x86)
SUBCMD_CHECK := y
SUBCMD_ORC := y
endif
export SUBCMD_CHECK SUBCMD_ORC
export srctree OUTPUT CFLAGS SRCARCH AWK
include $(srctree)/tools/build/Makefile.include
$(OBJTOOL_IN): fixdep FORCE
@$(CONFIG_SHELL) ./sync-check.sh
@$(MAKE) $(build)=objtool
$(OBJTOOL): $(LIBSUBCMD) $(OBJTOOL_IN)
@$(CONFIG_SHELL) ./sync-check.sh
$(QUIET_LINK)$(CC) $(OBJTOOL_IN) $(LDFLAGS) -o $@
......
......@@ -8,9 +8,11 @@
#include <stdbool.h>
#include <linux/list.h>
#include "elf.h"
#include "objtool.h"
#include "cfi.h"
#include <asm/orc_types.h>
enum insn_type {
INSN_JUMP_CONDITIONAL,
INSN_JUMP_UNCONDITIONAL,
......@@ -20,7 +22,6 @@ enum insn_type {
INSN_CALL_DYNAMIC,
INSN_RETURN,
INSN_CONTEXT_SWITCH,
INSN_STACK,
INSN_BUG,
INSN_NOP,
INSN_STAC,
......@@ -64,15 +65,23 @@ struct op_src {
struct stack_op {
struct op_dest dest;
struct op_src src;
struct list_head list;
};
void arch_initial_func_cfi_state(struct cfi_state *state);
struct instruction;
void arch_initial_func_cfi_state(struct cfi_init_state *state);
int arch_decode_instruction(struct elf *elf, struct section *sec,
int arch_decode_instruction(const struct elf *elf, const struct section *sec,
unsigned long offset, unsigned int maxlen,
unsigned int *len, enum insn_type *type,
unsigned long *immediate, struct stack_op *op);
unsigned long *immediate,
struct list_head *ops_list);
bool arch_callee_saved_reg(unsigned char reg);
unsigned long arch_jump_destination(struct instruction *insn);
unsigned long arch_dest_rela_offset(int addend);
#endif /* _ARCH_H */
This diff is collapsed.
/* SPDX-License-Identifier: GPL-2.0-or-later */
#ifndef _OBJTOOL_CFI_REGS_H
#define _OBJTOOL_CFI_REGS_H
#define CFI_AX 0
#define CFI_DX 1
#define CFI_CX 2
#define CFI_BX 3
#define CFI_SI 4
#define CFI_DI 5
#define CFI_BP 6
#define CFI_SP 7
#define CFI_R8 8
#define CFI_R9 9
#define CFI_R10 10
#define CFI_R11 11
#define CFI_R12 12
#define CFI_R13 13
#define CFI_R14 14
#define CFI_R15 15
#define CFI_RA 16
#define CFI_NUM_REGS 17
#endif /* _OBJTOOL_CFI_REGS_H */
......@@ -14,10 +14,11 @@
*/
#include <subcmd/parse-options.h>
#include <string.h>
#include "builtin.h"
#include "check.h"
#include "objtool.h"
bool no_fp, no_unreachable, retpoline, module, backtrace, uaccess, stats;
bool no_fp, no_unreachable, retpoline, module, backtrace, uaccess, stats, validate_dup, vmlinux;
static const char * const check_usage[] = {
"objtool check [<options>] file.o",
......@@ -32,12 +33,14 @@ const struct option check_options[] = {
OPT_BOOLEAN('b', "backtrace", &backtrace, "unwind on error"),
OPT_BOOLEAN('a', "uaccess", &uaccess, "enable uaccess checking"),
OPT_BOOLEAN('s', "stats", &stats, "print statistics"),
OPT_BOOLEAN('d', "duplicate", &validate_dup, "duplicate validation for vmlinux.o"),
OPT_BOOLEAN('l', "vmlinux", &vmlinux, "vmlinux.o validation"),
OPT_END(),
};
int cmd_check(int argc, const char **argv)
{
const char *objname;
const char *objname, *s;
argc = parse_options(argc, argv, check_options, check_usage, 0);
......@@ -46,5 +49,9 @@ int cmd_check(int argc, const char **argv)
objname = argv[0];
s = strstr(objname, "vmlinux.o");
if (s && !s[9])
vmlinux = true;
return check(objname, false);
}
......@@ -14,8 +14,7 @@
#include <string.h>
#include "builtin.h"
#include "check.h"
#include "objtool.h"
static const char *orc_usage[] = {
"objtool orc generate [<options>] file.o",
......
......@@ -8,7 +8,7 @@
#include <subcmd/parse-options.h>
extern const struct option check_options[];
extern bool no_fp, no_unreachable, retpoline, module, backtrace, uaccess, stats;
extern bool no_fp, no_unreachable, retpoline, module, backtrace, uaccess, stats, validate_dup, vmlinux;
extern int cmd_check(int argc, const char **argv);
extern int cmd_orc(int argc, const char **argv);
......
......@@ -6,38 +6,33 @@
#ifndef _OBJTOOL_CFI_H
#define _OBJTOOL_CFI_H
#include "cfi_regs.h"
#define CFI_UNDEFINED -1
#define CFI_CFA -2
#define CFI_SP_INDIRECT -3
#define CFI_BP_INDIRECT -4
#define CFI_AX 0
#define CFI_DX 1
#define CFI_CX 2
#define CFI_BX 3
#define CFI_SI 4
#define CFI_DI 5
#define CFI_BP 6
#define CFI_SP 7
#define CFI_R8 8
#define CFI_R9 9
#define CFI_R10 10
#define CFI_R11 11
#define CFI_R12 12
#define CFI_R13 13
#define CFI_R14 14
#define CFI_R15 15
#define CFI_RA 16
#define CFI_NUM_REGS 17
struct cfi_reg {
int base;
int offset;
};
struct cfi_state {
struct cfi_init_state {
struct cfi_reg regs[CFI_NUM_REGS];
struct cfi_reg cfa;
};
struct cfi_state {
struct cfi_reg regs[CFI_NUM_REGS];
struct cfi_reg vals[CFI_NUM_REGS];
struct cfi_reg cfa;
int stack_size;
int drap_reg, drap_offset;
unsigned char type;
bool bp_scratch;
bool drap;
bool end;
};
#endif /* _OBJTOOL_CFI_H */
This diff is collapsed.
......@@ -7,22 +7,16 @@
#define _CHECK_H
#include <stdbool.h>
#include "elf.h"
#include "cfi.h"
#include "arch.h"
#include "orc.h"
#include <linux/hashtable.h>
struct insn_state {
struct cfi_reg cfa;
struct cfi_reg regs[CFI_NUM_REGS];
int stack_size;
unsigned char type;
bool bp_scratch;
bool drap, end, uaccess, df;
struct cfi_state cfi;
unsigned int uaccess_stack;
int drap_reg, drap_offset;
struct cfi_reg vals[CFI_NUM_REGS];
bool uaccess;
bool df;
bool noinstr;
s8 instr;
};
struct instruction {
......@@ -33,29 +27,24 @@ struct instruction {
unsigned int len;
enum insn_type type;
unsigned long immediate;
bool alt_group, dead_end, ignore, hint, save, restore, ignore_alts;
bool dead_end, ignore, ignore_alts;
bool hint;
bool retpoline_safe;
s8 instr;
u8 visited;
u8 ret_offset;
int alt_group;
struct symbol *call_dest;
struct instruction *jump_dest;
struct instruction *first_jump_src;
struct rela *jump_table;
struct list_head alts;
struct symbol *func;
struct stack_op stack_op;
struct insn_state state;
struct list_head stack_ops;
struct cfi_state cfi;
struct orc_entry orc;
};
struct objtool_file {
struct elf *elf;
struct list_head insn_list;
DECLARE_HASHTABLE(insn_hash, 20);
bool ignore_unreachables, c_file, hints, rodata;
};
int check(const char *objname, bool orc);
struct instruction *find_insn(struct objtool_file *file,
struct section *sec, unsigned long offset);
......
......@@ -27,6 +27,22 @@ static inline u32 str_hash(const char *str)
return jhash(str, strlen(str), 0);
}
static inline int elf_hash_bits(void)
{
return vmlinux ? ELF_HASH_BITS : 16;
}
#define elf_hash_add(hashtable, node, key) \
hlist_add_head(node, &hashtable[hash_min(key, elf_hash_bits())])
static void elf_hash_init(struct hlist_head *table)
{
__hash_init(table, 1U << elf_hash_bits());
}
#define elf_hash_for_each_possible(name, obj, member, key) \
hlist_for_each_entry(obj, &name[hash_min(key, elf_hash_bits())], member)
static void rb_add(struct rb_root *tree, struct rb_node *node,
int (*cmp)(struct rb_node *, const struct rb_node *))
{
......@@ -45,7 +61,7 @@ static void rb_add(struct rb_root *tree, struct rb_node *node,
rb_insert_color(node, tree);
}
static struct rb_node *rb_find_first(struct rb_root *tree, const void *key,
static struct rb_node *rb_find_first(const struct rb_root *tree, const void *key,
int (*cmp)(const void *key, const struct rb_node *))
{
struct rb_node *node = tree->rb_node;
......@@ -111,11 +127,11 @@ static int symbol_by_offset(const void *key, const struct rb_node *node)
return 0;
}
struct section *find_section_by_name(struct elf *elf, const char *name)
struct section *find_section_by_name(const struct elf *elf, const char *name)
{
struct section *sec;
hash_for_each_possible(elf->section_name_hash, sec, name_hash, str_hash(name))
elf_hash_for_each_possible(elf->section_name_hash, sec, name_hash, str_hash(name))
if (!strcmp(sec->name, name))
return sec;
......@@ -127,7 +143,7 @@ static struct section *find_section_by_index(struct elf *elf,
{
struct section *sec;
hash_for_each_possible(elf->section_hash, sec, hash, idx)
elf_hash_for_each_possible(elf->section_hash, sec, hash, idx)
if (sec->idx == idx)
return sec;
......@@ -138,7 +154,7 @@ static struct symbol *find_symbol_by_index(struct elf *elf, unsigned int idx)
{
struct symbol *sym;
hash_for_each_possible(elf->symbol_hash, sym, hash, idx)
elf_hash_for_each_possible(elf->symbol_hash, sym, hash, idx)
if (sym->idx == idx)
return sym;
......@@ -173,7 +189,7 @@ struct symbol *find_func_by_offset(struct section *sec, unsigned long offset)
return NULL;
}
struct symbol *find_symbol_containing(struct section *sec, unsigned long offset)
struct symbol *find_symbol_containing(const struct section *sec, unsigned long offset)
{
struct rb_node *node;
......@@ -201,18 +217,18 @@ struct symbol *find_func_containing(struct section *sec, unsigned long offset)
return NULL;
}
struct symbol *find_symbol_by_name(struct elf *elf, const char *name)
struct symbol *find_symbol_by_name(const struct elf *elf, const char *name)
{
struct symbol *sym;
hash_for_each_possible(elf->symbol_name_hash, sym, name_hash, str_hash(name))
elf_hash_for_each_possible(elf->symbol_name_hash, sym, name_hash, str_hash(name))
if (!strcmp(sym->name, name))
return sym;
return NULL;
}
struct rela *find_rela_by_dest_range(struct elf *elf, struct section *sec,
struct rela *find_rela_by_dest_range(const struct elf *elf, struct section *sec,
unsigned long offset, unsigned int len)
{
struct rela *rela, *r = NULL;
......@@ -224,7 +240,7 @@ struct rela *find_rela_by_dest_range(struct elf *elf, struct section *sec,
sec = sec->rela;
for_offset_range(o, offset, offset + len) {
hash_for_each_possible(elf->rela_hash, rela, hash,
elf_hash_for_each_possible(elf->rela_hash, rela, hash,
sec_offset_hash(sec, o)) {
if (rela->sec != sec)
continue;
......@@ -241,7 +257,7 @@ struct rela *find_rela_by_dest_range(struct elf *elf, struct section *sec,
return NULL;
}
struct rela *find_rela_by_dest(struct elf *elf, struct section *sec, unsigned long offset)
struct rela *find_rela_by_dest(const struct elf *elf, struct section *sec, unsigned long offset)
{
return find_rela_by_dest_range(elf, sec, offset, 1);
}
......@@ -309,8 +325,8 @@ static int read_sections(struct elf *elf)
sec->len = sec->sh.sh_size;
list_add_tail(&sec->list, &elf->sections);
hash_add(elf->section_hash, &sec->hash, sec->idx);
hash_add(elf->section_name_hash, &sec->name_hash, str_hash(sec->name));
elf_hash_add(elf->section_hash, &sec->hash, sec->idx);
elf_hash_add(elf->section_name_hash, &sec->name_hash, str_hash(sec->name));
}
if (stats)
......@@ -327,12 +343,14 @@ static int read_sections(struct elf *elf)
static int read_symbols(struct elf *elf)
{
struct section *symtab, *sec;
struct section *symtab, *symtab_shndx, *sec;
struct symbol *sym, *pfunc;
struct list_head *entry;
struct rb_node *pnode;
int symbols_nr, i;
char *coldstr;
Elf_Data *shndx_data = NULL;
Elf32_Word shndx;
symtab = find_section_by_name(elf, ".symtab");
if (!symtab) {
......@@ -340,6 +358,10 @@ static int read_symbols(struct elf *elf)
return -1;
}
symtab_shndx = find_section_by_name(elf, ".symtab_shndx");
if (symtab_shndx)
shndx_data = symtab_shndx->data;
symbols_nr = symtab->sh.sh_size / symtab->sh.sh_entsize;
for (i = 0; i < symbols_nr; i++) {
......@@ -353,8 +375,9 @@ static int read_symbols(struct elf *elf)
sym->idx = i;
if (!gelf_getsym(symtab->data, i, &sym->sym)) {
WARN_ELF("gelf_getsym");
if (!gelf_getsymshndx(symtab->data, shndx_data, i, &sym->sym,
&shndx)) {
WARN_ELF("gelf_getsymshndx");
goto err;
}
......@@ -368,10 +391,13 @@ static int read_symbols(struct elf *elf)
sym->type = GELF_ST_TYPE(sym->sym.st_info);
sym->bind = GELF_ST_BIND(sym->sym.st_info);
if (sym->sym.st_shndx > SHN_UNDEF &&
sym->sym.st_shndx < SHN_LORESERVE) {
sym->sec = find_section_by_index(elf,
sym->sym.st_shndx);
if ((sym->sym.st_shndx > SHN_UNDEF &&
sym->sym.st_shndx < SHN_LORESERVE) ||
(shndx_data && sym->sym.st_shndx == SHN_XINDEX)) {
if (sym->sym.st_shndx != SHN_XINDEX)
shndx = sym->sym.st_shndx;
sym->sec = find_section_by_index(elf, shndx);
if (!sym->sec) {
WARN("couldn't find section for symbol %s",
sym->name);
......@@ -394,8 +420,8 @@ static int read_symbols(struct elf *elf)
else
entry = &sym->sec->symbol_list;
list_add(&sym->list, entry);
hash_add(elf->symbol_hash, &sym->hash, sym->idx);
hash_add(elf->symbol_name_hash, &sym->name_hash, str_hash(sym->name));
elf_hash_add(elf->symbol_hash, &sym->hash, sym->idx);
elf_hash_add(elf->symbol_name_hash, &sym->name_hash, str_hash(sym->name));
}
if (stats)
......@@ -456,6 +482,14 @@ static int read_symbols(struct elf *elf)
return -1;
}
void elf_add_rela(struct elf *elf, struct rela *rela)
{
struct section *sec = rela->sec;
list_add_tail(&rela->list, &sec->rela_list);
elf_hash_add(elf->rela_hash, &rela->hash, rela_hash(rela));
}
static int read_relas(struct elf *elf)
{
struct section *sec;
......@@ -503,8 +537,7 @@ static int read_relas(struct elf *elf)
return -1;
}
list_add_tail(&rela->list, &sec->rela_list);
hash_add(elf->rela_hash, &rela->hash, rela_hash(rela));
elf_add_rela(elf, rela);
nr_rela++;
}
max_rela = max(max_rela, nr_rela);
......@@ -519,7 +552,7 @@ static int read_relas(struct elf *elf)
return 0;
}
struct elf *elf_read(const char *name, int flags)
struct elf *elf_open_read(const char *name, int flags)
{
struct elf *elf;
Elf_Cmd cmd;
......@@ -531,15 +564,16 @@ struct elf *elf_read(const char *name, int flags)
perror("malloc");
return NULL;
}
memset(elf, 0, sizeof(*elf));
memset(elf, 0, offsetof(struct elf, sections));
hash_init(elf->symbol_hash);
hash_init(elf->symbol_name_hash);
hash_init(elf->section_hash);
hash_init(elf->section_name_hash);
hash_init(elf->rela_hash);
INIT_LIST_HEAD(&elf->sections);
elf_hash_init(elf->symbol_hash);
elf_hash_init(elf->symbol_name_hash);
elf_hash_init(elf->section_hash);
elf_hash_init(elf->section_name_hash);
elf_hash_init(elf->rela_hash);
elf->fd = open(name, flags);
if (elf->fd == -1) {
fprintf(stderr, "objtool: Can't open '%s': %s\n",
......@@ -676,8 +710,8 @@ struct section *elf_create_section(struct elf *elf, const char *name,
shstrtab->changed = true;
list_add_tail(&sec->list, &elf->sections);
hash_add(elf->section_hash, &sec->hash, sec->idx);
hash_add(elf->section_name_hash, &sec->name_hash, str_hash(sec->name));
elf_hash_add(elf->section_hash, &sec->hash, sec->idx);
elf_hash_add(elf->section_name_hash, &sec->name_hash, str_hash(sec->name));
return sec;
}
......@@ -745,7 +779,7 @@ int elf_rebuild_rela_section(struct section *sec)
return 0;
}
int elf_write(struct elf *elf)
int elf_write(const struct elf *elf)
{
struct section *sec;
Elf_Scn *s;
......
......@@ -39,7 +39,7 @@ struct section {
char *name;
int idx;
unsigned int len;
bool changed, text, rodata;
bool changed, text, rodata, noinstr;
};
struct symbol {
......@@ -70,17 +70,19 @@ struct rela {
bool jump_table_start;
};
#define ELF_HASH_BITS 20
struct elf {
Elf *elf;
GElf_Ehdr ehdr;
int fd;
char *name;
struct list_head sections;
DECLARE_HASHTABLE(symbol_hash, 20);
DECLARE_HASHTABLE(symbol_name_hash, 20);
DECLARE_HASHTABLE(section_hash, 16);
DECLARE_HASHTABLE(section_name_hash, 16);
DECLARE_HASHTABLE(rela_hash, 20);
DECLARE_HASHTABLE(symbol_hash, ELF_HASH_BITS);
DECLARE_HASHTABLE(symbol_name_hash, ELF_HASH_BITS);
DECLARE_HASHTABLE(section_hash, ELF_HASH_BITS);
DECLARE_HASHTABLE(section_name_hash, ELF_HASH_BITS);
DECLARE_HASHTABLE(rela_hash, ELF_HASH_BITS);
};
#define OFFSET_STRIDE_BITS 4
......@@ -112,22 +114,23 @@ static inline u32 rela_hash(struct rela *rela)
return sec_offset_hash(rela->sec, rela->offset);
}
struct elf *elf_read(const char *name, int flags);
struct section *find_section_by_name(struct elf *elf, const char *name);
struct elf *elf_open_read(const char *name, int flags);
struct section *elf_create_section(struct elf *elf, const char *name, size_t entsize, int nr);
struct section *elf_create_rela_section(struct elf *elf, struct section *base);
void elf_add_rela(struct elf *elf, struct rela *rela);
int elf_write(const struct elf *elf);
void elf_close(struct elf *elf);
struct section *find_section_by_name(const struct elf *elf, const char *name);
struct symbol *find_func_by_offset(struct section *sec, unsigned long offset);
struct symbol *find_symbol_by_offset(struct section *sec, unsigned long offset);
struct symbol *find_symbol_by_name(struct elf *elf, const char *name);
struct symbol *find_symbol_containing(struct section *sec, unsigned long offset);
struct rela *find_rela_by_dest(struct elf *elf, struct section *sec, unsigned long offset);
struct rela *find_rela_by_dest_range(struct elf *elf, struct section *sec,
struct symbol *find_symbol_by_name(const struct elf *elf, const char *name);
struct symbol *find_symbol_containing(const struct section *sec, unsigned long offset);
struct rela *find_rela_by_dest(const struct elf *elf, struct section *sec, unsigned long offset);
struct rela *find_rela_by_dest_range(const struct elf *elf, struct section *sec,
unsigned long offset, unsigned int len);
struct symbol *find_func_containing(struct section *sec, unsigned long offset);
struct section *elf_create_section(struct elf *elf, const char *name, size_t
entsize, int nr);
struct section *elf_create_rela_section(struct elf *elf, struct section *base);
int elf_rebuild_rela_section(struct section *sec);
int elf_write(struct elf *elf);
void elf_close(struct elf *elf);
#define for_each_sec(file, sec) \
list_for_each_entry(sec, &file->elf->sections, list)
......
......@@ -58,7 +58,9 @@ static void cmd_usage(void)
printf("\n");
exit(129);
if (!help)
exit(129);
exit(0);
}
static void handle_options(int *argc, const char ***argv)
......
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
* Copyright (C) 2017 Josh Poimboeuf <jpoimboe@redhat.com>
* Copyright (C) 2020 Matt Helsley <mhelsley@vmware.com>
*/
#ifndef _ORC_H
#define _ORC_H
#ifndef _OBJTOOL_H
#define _OBJTOOL_H
#include <asm/orc_types.h>
#include <stdbool.h>
#include <linux/list.h>
#include <linux/hashtable.h>
struct objtool_file;
#include "elf.h"
int create_orc(struct objtool_file *file);
int create_orc_sections(struct objtool_file *file);
struct objtool_file {
struct elf *elf;
struct list_head insn_list;
DECLARE_HASHTABLE(insn_hash, 20);
bool ignore_unreachables, c_file, hints, rodata;
};
int check(const char *objname, bool orc);
int orc_dump(const char *objname);
int create_orc(struct objtool_file *file);
int create_orc_sections(struct objtool_file *file);
#endif /* _ORC_H */
#endif /* _OBJTOOL_H */
......@@ -4,7 +4,8 @@
*/
#include <unistd.h>
#include "orc.h"
#include <asm/orc_types.h>
#include "objtool.h"
#include "warn.h"
static const char *reg_name(unsigned int reg)
......
......@@ -6,7 +6,6 @@
#include <stdlib.h>
#include <string.h>
#include "orc.h"
#include "check.h"
#include "warn.h"
......@@ -16,10 +15,10 @@ int create_orc(struct objtool_file *file)
for_each_insn(file, insn) {
struct orc_entry *orc = &insn->orc;
struct cfi_reg *cfa = &insn->state.cfa;
struct cfi_reg *bp = &insn->state.regs[CFI_BP];
struct cfi_reg *cfa = &insn->cfi.cfa;
struct cfi_reg *bp = &insn->cfi.regs[CFI_BP];
orc->end = insn->state.end;
orc->end = insn->cfi.end;
if (cfa->base == CFI_UNDEFINED) {
orc->sp_reg = ORC_REG_UNDEFINED;
......@@ -75,7 +74,7 @@ int create_orc(struct objtool_file *file)
orc->sp_offset = cfa->offset;
orc->bp_offset = bp->offset;
orc->type = insn->state.type;
orc->type = insn->cfi.type;
}
return 0;
......@@ -130,8 +129,7 @@ static int create_orc_entry(struct elf *elf, struct section *u_sec, struct secti
rela->offset = idx * sizeof(int);
rela->sec = ip_relasec;
list_add_tail(&rela->list, &ip_relasec->rela_list);
hash_add(elf->rela_hash, &rela->hash, rela_hash(rela));
elf_add_rela(elf, rela);
return 0;
}
......
// SPDX-License-Identifier: GPL-2.0-or-later
/*
* Copyright (C) 2020 Matt Helsley <mhelsley@vmware.com>
* Weak definitions necessary to compile objtool without
* some subcommands (e.g. check, orc).
*/
#include <stdbool.h>
#include <errno.h>
#include "objtool.h"
#define __weak __attribute__((weak))
#define UNSUPPORTED(name) \
({ \
fprintf(stderr, "error: objtool: " name " not implemented\n"); \
return ENOSYS; \
})
const char __weak *objname;
int __weak check(const char *_objname, bool orc)
{
UNSUPPORTED("check subcommand");
}
int __weak orc_dump(const char *_objname)
{
UNSUPPORTED("orc");
}
int __weak create_orc(struct objtool_file *file)
{
UNSUPPORTED("orc");
}
int __weak create_orc_sections(struct objtool_file *file)
{
UNSUPPORTED("orc");
}