Commit 3d9b938e authored by Nelson Elhage's avatar Nelson Elhage Committed by Avi Kivity

KVM: emulator: Use linearize() when fetching instructions

Since segments need to be handled slightly differently when fetching
instructions, we add a __linearize helper that accepts a new 'fetch' boolean.

[avi: fix oops caused by wrong segmented_address initialization order]
Signed-off-by: Nelson Elhage <nelhage@ksplice.com>
Signed-off-by: Avi Kivity <avi@redhat.com>
parent 7c4c0f4f
...@@ -265,7 +265,6 @@ struct x86_emulate_ctxt { ...@@ -265,7 +265,6 @@ struct x86_emulate_ctxt {
unsigned long eip; /* eip before instruction emulation */ unsigned long eip; /* eip before instruction emulation */
/* Emulated execution mode, represented by an X86EMUL_MODE value. */ /* Emulated execution mode, represented by an X86EMUL_MODE value. */
int mode; int mode;
u32 cs_base;
/* interruptibility state, as a result of execution of STI or MOV SS */ /* interruptibility state, as a result of execution of STI or MOV SS */
int interruptibility; int interruptibility;
......
...@@ -540,9 +540,9 @@ static int emulate_nm(struct x86_emulate_ctxt *ctxt) ...@@ -540,9 +540,9 @@ static int emulate_nm(struct x86_emulate_ctxt *ctxt)
return emulate_exception(ctxt, NM_VECTOR, 0, false); return emulate_exception(ctxt, NM_VECTOR, 0, false);
} }
static int linearize(struct x86_emulate_ctxt *ctxt, static int __linearize(struct x86_emulate_ctxt *ctxt,
struct segmented_address addr, struct segmented_address addr,
unsigned size, bool write, unsigned size, bool write, bool fetch,
ulong *linear) ulong *linear)
{ {
struct decode_cache *c = &ctxt->decode; struct decode_cache *c = &ctxt->decode;
...@@ -569,7 +569,7 @@ static int linearize(struct x86_emulate_ctxt *ctxt, ...@@ -569,7 +569,7 @@ static int linearize(struct x86_emulate_ctxt *ctxt,
if (((desc.type & 8) || !(desc.type & 2)) && write) if (((desc.type & 8) || !(desc.type & 2)) && write)
goto bad; goto bad;
/* unreadable code segment */ /* unreadable code segment */
if ((desc.type & 8) && !(desc.type & 2)) if (!fetch && (desc.type & 8) && !(desc.type & 2))
goto bad; goto bad;
lim = desc_limit_scaled(&desc); lim = desc_limit_scaled(&desc);
if ((desc.type & 8) || !(desc.type & 4)) { if ((desc.type & 8) || !(desc.type & 4)) {
...@@ -602,7 +602,7 @@ static int linearize(struct x86_emulate_ctxt *ctxt, ...@@ -602,7 +602,7 @@ static int linearize(struct x86_emulate_ctxt *ctxt,
} }
break; break;
} }
if (c->ad_bytes != 8) if (fetch ? ctxt->mode != X86EMUL_MODE_PROT64 : c->ad_bytes != 8)
la &= (u32)-1; la &= (u32)-1;
*linear = la; *linear = la;
return X86EMUL_CONTINUE; return X86EMUL_CONTINUE;
...@@ -613,6 +613,15 @@ static int linearize(struct x86_emulate_ctxt *ctxt, ...@@ -613,6 +613,15 @@ static int linearize(struct x86_emulate_ctxt *ctxt,
return emulate_gp(ctxt, addr.seg); return emulate_gp(ctxt, addr.seg);
} }
/*
 * Translate a segmented address into a linear address for a data
 * (non-instruction-fetch) access of 'size' bytes.
 *
 * Thin wrapper around __linearize() with fetch == false, so the
 * unreadable-code-segment check and the fetch-specific 64-bit
 * truncation rule in __linearize() apply in their data-access form.
 * Returns X86EMUL_CONTINUE on success and stores the result in
 * *linear; propagates __linearize()'s fault return otherwise.
 */
static int linearize(struct x86_emulate_ctxt *ctxt,
		     struct segmented_address addr,
		     unsigned size, bool write,
		     ulong *linear)
{
	return __linearize(ctxt, addr, size, write, false, linear);
}
static int segmented_read_std(struct x86_emulate_ctxt *ctxt, static int segmented_read_std(struct x86_emulate_ctxt *ctxt,
struct segmented_address addr, struct segmented_address addr,
void *data, void *data,
...@@ -637,11 +646,13 @@ static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt, ...@@ -637,11 +646,13 @@ static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
int size, cur_size; int size, cur_size;
if (eip == fc->end) { if (eip == fc->end) {
unsigned long linear = eip + ctxt->cs_base; unsigned long linear;
if (ctxt->mode != X86EMUL_MODE_PROT64) struct segmented_address addr = { .seg=VCPU_SREG_CS, .ea=eip};
linear &= (u32)-1;
cur_size = fc->end - fc->start; cur_size = fc->end - fc->start;
size = min(15UL - cur_size, PAGE_SIZE - offset_in_page(eip)); size = min(15UL - cur_size, PAGE_SIZE - offset_in_page(eip));
rc = __linearize(ctxt, addr, size, false, true, &linear);
if (rc != X86EMUL_CONTINUE)
return rc;
rc = ops->fetch(linear, fc->data + cur_size, rc = ops->fetch(linear, fc->data + cur_size,
size, ctxt->vcpu, &ctxt->exception); size, ctxt->vcpu, &ctxt->exception);
if (rc != X86EMUL_CONTINUE) if (rc != X86EMUL_CONTINUE)
...@@ -3127,7 +3138,6 @@ x86_decode_insn(struct x86_emulate_ctxt *ctxt, void *insn, int insn_len) ...@@ -3127,7 +3138,6 @@ x86_decode_insn(struct x86_emulate_ctxt *ctxt, void *insn, int insn_len)
c->fetch.end = c->fetch.start + insn_len; c->fetch.end = c->fetch.start + insn_len;
if (insn_len > 0) if (insn_len > 0)
memcpy(c->fetch.data, insn, insn_len); memcpy(c->fetch.data, insn, insn_len);
ctxt->cs_base = seg_base(ctxt, ops, VCPU_SREG_CS);
switch (mode) { switch (mode) {
case X86EMUL_MODE_REAL: case X86EMUL_MODE_REAL:
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment