Commit 63fef14f authored by Masami Hiramatsu, committed by Ingo Molnar

kprobes/x86: Make insn buffer always ROX and use text_poke()

Make the instruction buffer always ROX and use text_poke() to write
the copied instructions instead of toggling permissions with set_memory_*().
This makes the instruction buffer more robust against other
kernel subsystems, because there is no longer a window of time
during which the buffer is writable.
Suggested-by: Ingo Molnar <mingo@kernel.org>
Signed-off-by: Masami Hiramatsu <mhiramat@kernel.org>
Cc: Ananth N Mavinakayanahalli <ananth@in.ibm.com>
Cc: Anil S Keshavamurthy <anil.s.keshavamurthy@intel.com>
Cc: David S . Miller <davem@davemloft.net>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: http://lkml.kernel.org/r/150304463032.17009.14195368040691676813.stgit@devbox
Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent 4f56186e
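The change follows one pattern throughout: the executable slot is made read-only as soon as it is allocated, the instruction bytes are prepared and fixed up in an ordinary writable buffer, and the finished bytes are written into the slot with text_poke(). The sketch below shows that pattern in isolation; it is not the patch itself, and the helper names (alloc_rox_slot, publish_insn) and the header choices are assumptions for a ~v4.13 x86 kernel.

#include <linux/types.h>
#include <linux/string.h>
#include <linux/moduleloader.h>         /* module_alloc() */
#include <asm/insn.h>                   /* MAX_INSN_SIZE */
#include <asm/set_memory.h>             /* set_memory_ro() */
#include <asm/text-patching.h>          /* text_poke() */

/* Allocate an executable slot and make it read-only for its whole lifetime. */
static void *alloc_rox_slot(void)
{
        void *page = module_alloc(PAGE_SIZE);

        if (page)
                set_memory_ro((unsigned long)page & PAGE_MASK, 1);
        return page;
}

/* Stage the bytes in RW memory, then write them into the RO slot in one go. */
static void publish_insn(void *slot, const u8 *insn, int len)
{
        u8 buf[MAX_INSN_SIZE];

        memcpy(buf, insn, len);         /* any fix-ups happen here, in RW memory */
        text_poke(slot, buf, len);      /* the only write path into the RO slot  */
}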
@@ -75,11 +75,11 @@ extern unsigned long recover_probed_instruction(kprobe_opcode_t *buf,
  * Copy an instruction and adjust the displacement if the instruction
  * uses the %rip-relative addressing mode.
  */
-extern int __copy_instruction(u8 *dest, u8 *src, struct insn *insn);
+extern int __copy_instruction(u8 *dest, u8 *src, u8 *real, struct insn *insn);
 
 /* Generate a relative-jump/call instruction */
-extern void synthesize_reljump(void *from, void *to);
-extern void synthesize_relcall(void *from, void *to);
+extern void synthesize_reljump(void *dest, void *from, void *to);
+extern void synthesize_relcall(void *dest, void *from, void *to);
 
 #ifdef CONFIG_OPTPROBES
 extern int setup_detour_execution(struct kprobe *p, struct pt_regs *regs, int reenter);
...
@@ -119,29 +119,29 @@ struct kretprobe_blackpoint kretprobe_blacklist[] = {
 const int kretprobe_blacklist_size = ARRAY_SIZE(kretprobe_blacklist);
 
 static nokprobe_inline void
-__synthesize_relative_insn(void *from, void *to, u8 op)
+__synthesize_relative_insn(void *dest, void *from, void *to, u8 op)
 {
         struct __arch_relative_insn {
                 u8 op;
                 s32 raddr;
         } __packed *insn;
 
-        insn = (struct __arch_relative_insn *)from;
+        insn = (struct __arch_relative_insn *)dest;
         insn->raddr = (s32)((long)(to) - ((long)(from) + 5));
         insn->op = op;
 }
 
 /* Insert a jump instruction at address 'from', which jumps to address 'to'.*/
-void synthesize_reljump(void *from, void *to)
+void synthesize_reljump(void *dest, void *from, void *to)
 {
-        __synthesize_relative_insn(from, to, RELATIVEJUMP_OPCODE);
+        __synthesize_relative_insn(dest, from, to, RELATIVEJUMP_OPCODE);
 }
 NOKPROBE_SYMBOL(synthesize_reljump);
 
 /* Insert a call instruction at address 'from', which calls address 'to'.*/
-void synthesize_relcall(void *from, void *to)
+void synthesize_relcall(void *dest, void *from, void *to)
 {
-        __synthesize_relative_insn(from, to, RELATIVECALL_OPCODE);
+        __synthesize_relative_insn(dest, from, to, RELATIVECALL_OPCODE);
 }
 NOKPROBE_SYMBOL(synthesize_relcall);
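The extra dest argument is the core of this hunk: __synthesize_relative_insn() now writes the opcode and offset into a staging buffer (dest), while the rel32 offset is still computed against the address the instruction will eventually execute from (from). Below is a self-contained illustration with made-up addresses (plain userspace C, not kernel code); 0xe9 is the opcode of a 5-byte jmp rel32, which is what RELATIVEJUMP_OPCODE expands to.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
        uint8_t  dest[5];                        /* staging buffer (RW scratch)     */
        uint64_t from = 0xffffffffa0002000ULL;   /* slot the jump will run from     */
        uint64_t to   = 0xffffffff81020304ULL;   /* jump target                     */
        /* rel32 is relative to the end of the 5-byte instruction at 'from' */
        int32_t  raddr = (int32_t)(to - (from + 5));

        dest[0] = 0xe9;                          /* jmp rel32 */
        memcpy(&dest[1], &raddr, sizeof(raddr)); /* bytes land in dest, not in from */
        printf("raddr = %d\n", (int)raddr);
        return 0;
}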
@@ -346,10 +346,11 @@ static int is_IF_modifier(kprobe_opcode_t *insn)
 /*
  * Copy an instruction with recovering modified instruction by kprobes
  * and adjust the displacement if the instruction uses the %rip-relative
- * addressing mode.
+ * addressing mode. Note that since @real will be the final place of copied
+ * instruction, displacement must be adjust by @real, not @dest.
  * This returns the length of copied instruction, or 0 if it has an error.
  */
-int __copy_instruction(u8 *dest, u8 *src, struct insn *insn)
+int __copy_instruction(u8 *dest, u8 *src, u8 *real, struct insn *insn)
 {
         kprobe_opcode_t buf[MAX_INSN_SIZE];
         unsigned long recovered_insn =
@@ -387,11 +388,11 @@ int __copy_instruction(u8 *dest, u8 *src, struct insn *insn)
          * have given.
          */
         newdisp = (u8 *) src + (s64) insn->displacement.value
-                                        - (u8 *) dest;
+                                        - (u8 *) real;
         if ((s64) (s32) newdisp != newdisp) {
                 pr_err("Kprobes error: new displacement does not fit into s32 (%llx)\n", newdisp);
                 pr_err("\tSrc: %p, Dest: %p, old disp: %x\n",
-                        src, dest, insn->displacement.value);
+                        src, real, insn->displacement.value);
                 return 0;
         }
         disp = (u8 *) dest + insn_offset_displacement(insn);
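The arithmetic behind the RIP-relative fix-up above is worth spelling out: the original instruction at src references src + insn_len + disp, and the copy will execute from real, so keeping the same absolute target requires real + insn_len + newdisp == src + insn_len + disp, i.e. newdisp = src + disp - real, independent of the instruction length. A tiny check with made-up addresses (plain userspace C, not kernel code):

#include <assert.h>
#include <stdint.h>

int main(void)
{
        uint64_t src  = 0xffffffff81000000ULL;  /* probed instruction            */
        uint64_t real = 0xffffffffa0002000ULL;  /* final slot of the copy        */
        int64_t  disp = 0x1000;                 /* original rel32 displacement   */
        int64_t  newdisp = (int64_t)(src + disp - real);
        unsigned int insn_len = 7;              /* arbitrary instruction length  */

        /* The original and the copy resolve to the same absolute address. */
        assert(src + insn_len + disp == real + insn_len + newdisp);
        return 0;
}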
@@ -402,20 +403,38 @@ int __copy_instruction(u8 *dest, u8 *src, struct insn *insn)
 }
 
 /* Prepare reljump right after instruction to boost */
-static void prepare_boost(struct kprobe *p, struct insn *insn)
+static int prepare_boost(kprobe_opcode_t *buf, struct kprobe *p,
+                         struct insn *insn)
 {
+        int len = insn->length;
+
         if (can_boost(insn, p->addr) &&
-            MAX_INSN_SIZE - insn->length >= RELATIVEJUMP_SIZE) {
+            MAX_INSN_SIZE - len >= RELATIVEJUMP_SIZE) {
                 /*
                  * These instructions can be executed directly if it
                  * jumps back to correct address.
                  */
-                synthesize_reljump(p->ainsn.insn + insn->length,
+                synthesize_reljump(buf + len, p->ainsn.insn + len,
                                    p->addr + insn->length);
+                len += RELATIVEJUMP_SIZE;
                 p->ainsn.boostable = true;
         } else {
                 p->ainsn.boostable = false;
         }
+
+        return len;
+}
+
+/* Make page to RO mode when allocate it */
+void *alloc_insn_page(void)
+{
+        void *page;
+
+        page = module_alloc(PAGE_SIZE);
+        if (page)
+                set_memory_ro((unsigned long)page & PAGE_MASK, 1);
+
+        return page;
 }
 
 /* Recover page to RW mode before releasing it */
@@ -429,12 +448,11 @@ void free_insn_page(void *page)
 static int arch_copy_kprobe(struct kprobe *p)
 {
         struct insn insn;
+        kprobe_opcode_t buf[MAX_INSN_SIZE];
         int len;
 
-        set_memory_rw((unsigned long)p->ainsn.insn & PAGE_MASK, 1);
-
         /* Copy an instruction with recovering if other optprobe modifies it.*/
-        len = __copy_instruction(p->ainsn.insn, p->addr, &insn);
+        len = __copy_instruction(buf, p->addr, p->ainsn.insn, &insn);
         if (!len)
                 return -EINVAL;
 
@@ -442,15 +460,16 @@ static int arch_copy_kprobe(struct kprobe *p)
          * __copy_instruction can modify the displacement of the instruction,
          * but it doesn't affect boostable check.
          */
-        prepare_boost(p, &insn);
-
-        set_memory_ro((unsigned long)p->ainsn.insn & PAGE_MASK, 1);
+        len = prepare_boost(buf, p, &insn);
 
         /* Check whether the instruction modifies Interrupt Flag or not */
-        p->ainsn.if_modifier = is_IF_modifier(p->ainsn.insn);
+        p->ainsn.if_modifier = is_IF_modifier(buf);
 
         /* Also, displacement change doesn't affect the first byte */
-        p->opcode = p->ainsn.insn[0];
+        p->opcode = buf[0];
+
+        /* OK, write back the instruction(s) into ROX insn buffer */
+        text_poke(p->ainsn.insn, buf, len);
 
         return 0;
 }
...
@@ -184,13 +184,13 @@ optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs)
 }
 NOKPROBE_SYMBOL(optimized_callback);
 
-static int copy_optimized_instructions(u8 *dest, u8 *src)
+static int copy_optimized_instructions(u8 *dest, u8 *src, u8 *real)
 {
         struct insn insn;
         int len = 0, ret;
 
         while (len < RELATIVEJUMP_SIZE) {
-                ret = __copy_instruction(dest + len, src + len, &insn);
+                ret = __copy_instruction(dest + len, src + len, real, &insn);
                 if (!ret || !can_boost(&insn, src + len))
                         return -EINVAL;
                 len += ret;
@@ -343,57 +343,66 @@ void arch_remove_optimized_kprobe(struct optimized_kprobe *op)
 int arch_prepare_optimized_kprobe(struct optimized_kprobe *op,
                                   struct kprobe *__unused)
 {
-        u8 *buf;
-        int ret;
+        u8 *buf = NULL, *slot;
+        int ret, len;
         long rel;
 
         if (!can_optimize((unsigned long)op->kp.addr))
                 return -EILSEQ;
 
-        op->optinsn.insn = get_optinsn_slot();
-        if (!op->optinsn.insn)
+        buf = kzalloc(MAX_OPTINSN_SIZE, GFP_KERNEL);
+        if (!buf)
                 return -ENOMEM;
 
+        op->optinsn.insn = slot = get_optinsn_slot();
+        if (!slot) {
+                ret = -ENOMEM;
+                goto out;
+        }
+
         /*
          * Verify if the address gap is in 2GB range, because this uses
          * a relative jump.
          */
-        rel = (long)op->optinsn.insn - (long)op->kp.addr + RELATIVEJUMP_SIZE;
+        rel = (long)slot - (long)op->kp.addr + RELATIVEJUMP_SIZE;
         if (abs(rel) > 0x7fffffff) {
-                __arch_remove_optimized_kprobe(op, 0);
-                return -ERANGE;
+                ret = -ERANGE;
+                goto err;
         }
 
-        buf = (u8 *)op->optinsn.insn;
-        set_memory_rw((unsigned long)buf & PAGE_MASK, 1);
+        /* Copy arch-dep-instance from template */
+        memcpy(buf, &optprobe_template_entry, TMPL_END_IDX);
 
         /* Copy instructions into the out-of-line buffer */
-        ret = copy_optimized_instructions(buf + TMPL_END_IDX, op->kp.addr);
-        if (ret < 0) {
-                __arch_remove_optimized_kprobe(op, 0);
-                return ret;
-        }
+        ret = copy_optimized_instructions(buf + TMPL_END_IDX, op->kp.addr,
+                                          slot + TMPL_END_IDX);
+        if (ret < 0)
+                goto err;
         op->optinsn.size = ret;
-
-        /* Copy arch-dep-instance from template */
-        memcpy(buf, &optprobe_template_entry, TMPL_END_IDX);
+        len = TMPL_END_IDX + op->optinsn.size;
 
         /* Set probe information */
         synthesize_set_arg1(buf + TMPL_MOVE_IDX, (unsigned long)op);
 
         /* Set probe function call */
-        synthesize_relcall(buf + TMPL_CALL_IDX, optimized_callback);
+        synthesize_relcall(buf + TMPL_CALL_IDX,
+                           slot + TMPL_CALL_IDX, optimized_callback);
 
         /* Set returning jmp instruction at the tail of out-of-line buffer */
-        synthesize_reljump(buf + TMPL_END_IDX + op->optinsn.size,
+        synthesize_reljump(buf + len, slot + len,
                            (u8 *)op->kp.addr + op->optinsn.size);
+        len += RELATIVEJUMP_SIZE;
 
-        set_memory_ro((unsigned long)buf & PAGE_MASK, 1);
-
-        flush_icache_range((unsigned long) buf,
-                           (unsigned long) buf + TMPL_END_IDX +
-                           op->optinsn.size + RELATIVEJUMP_SIZE);
-        return 0;
+        /* We have to use text_poke for instuction buffer because it is RO */
+        text_poke(slot, buf, len);
+        ret = 0;
out:
+        kfree(buf);
+        return ret;
+
err:
+        __arch_remove_optimized_kprobe(op, 0);
+        goto out;
 }
 
 /*
...
@@ -117,7 +117,7 @@ enum kprobe_slot_state {
         SLOT_USED       = 2,
 };
 
-static void *alloc_insn_page(void)
+void __weak *alloc_insn_page(void)
 {
         return module_alloc(PAGE_SIZE);
 }
...
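The last hunk turns the generic allocator into a weak default so that x86 can override it with a read-only version without any #ifdef. A condensed sketch of how the two definitions coexist (the function bodies follow the patch; placing them side by side here is only for illustration, and __weak expands to __attribute__((weak))):

/* Generic kprobes code: weak default, used when no architecture overrides it. */
void __weak *alloc_insn_page(void)
{
        return module_alloc(PAGE_SIZE);
}

/* x86 kprobes code: strong definition, picked by the linker over the weak one. */
void *alloc_insn_page(void)
{
        void *page = module_alloc(PAGE_SIZE);

        if (page)
                set_memory_ro((unsigned long)page & PAGE_MASK, 1);
        return page;
}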