Commit 8764b46e authored by Alexander Graf, committed by Avi Kivity

KVM: PPC: bookehv: remove negation for CONFIG_64BIT

Instead of doing

  #ifndef CONFIG_64BIT
  ...
  #else
  ...
  #endif

we should rather do

  #ifdef CONFIG_64BIT
  ...
  #else
  ...
  #endif

which is a lot easier to read. Change the bookehv implementation to
stick with this rule.
Signed-off-by: Alexander Graf <agraf@suse.de>
Signed-off-by: Avi Kivity <avi@redhat.com>
parent 73ede8d3
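
For illustration, the first hunk of the diff below ends up in the following shape; this is a minimal sketch copied from the new side of the diff, and the trailing comments are my annotation rather than part of the commit (on 32-bit big-endian builds the `+ 4` offset presumably selects the low word of the 64-bit shared MSR field):

  #ifdef CONFIG_64BIT
  	std	r6, (VCPU_SHARED_MSR)(r11)	/* 64-bit: store the whole doubleword */
  #else
  	stw	r6, (VCPU_SHARED_MSR + 4)(r11)	/* 32-bit: store only the low word */
  #endif

With the positive `#ifdef CONFIG_64BIT` first, the reader no longer has to mentally invert the condition to see which branch applies to 64-bit kernels.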
@@ -99,10 +99,10 @@
 	.endif
 
 	oris	r8, r6, MSR_CE@h
-#ifndef CONFIG_64BIT
-	stw	r6, (VCPU_SHARED_MSR + 4)(r11)
-#else
+#ifdef CONFIG_64BIT
 	std	r6, (VCPU_SHARED_MSR)(r11)
+#else
+	stw	r6, (VCPU_SHARED_MSR + 4)(r11)
 #endif
 	ori	r8, r8, MSR_ME | MSR_RI
 	PPC_STL	r5, VCPU_PC(r4)
@@ -344,10 +344,10 @@ _GLOBAL(kvmppc_resume_host)
 	stw	r5, VCPU_SHARED_MAS0(r11)
 	mfspr	r7, SPRN_MAS2
 	stw	r6, VCPU_SHARED_MAS1(r11)
-#ifndef CONFIG_64BIT
-	stw	r7, (VCPU_SHARED_MAS2 + 4)(r11)
-#else
+#ifdef CONFIG_64BIT
 	std	r7, (VCPU_SHARED_MAS2)(r11)
+#else
+	stw	r7, (VCPU_SHARED_MAS2 + 4)(r11)
 #endif
 	mfspr	r5, SPRN_MAS3
 	mfspr	r6, SPRN_MAS4
@@ -530,10 +530,10 @@ lightweight_exit:
 	stw	r3, VCPU_HOST_MAS6(r4)
 	lwz	r3, VCPU_SHARED_MAS0(r11)
 	lwz	r5, VCPU_SHARED_MAS1(r11)
-#ifndef CONFIG_64BIT
-	lwz	r6, (VCPU_SHARED_MAS2 + 4)(r11)
-#else
+#ifdef CONFIG_64BIT
 	ld	r6, (VCPU_SHARED_MAS2)(r11)
+#else
+	lwz	r6, (VCPU_SHARED_MAS2 + 4)(r11)
 #endif
 	lwz	r7, VCPU_SHARED_MAS7_3+4(r11)
 	lwz	r8, VCPU_SHARED_MAS4(r11)
@@ -572,10 +572,10 @@ lightweight_exit:
 	PPC_LL	r6, VCPU_CTR(r4)
 	PPC_LL	r7, VCPU_CR(r4)
 	PPC_LL	r8, VCPU_PC(r4)
-#ifndef CONFIG_64BIT
-	lwz	r9, (VCPU_SHARED_MSR + 4)(r11)
-#else
+#ifdef CONFIG_64BIT
 	ld	r9, (VCPU_SHARED_MSR)(r11)
+#else
+	lwz	r9, (VCPU_SHARED_MSR + 4)(r11)
 #endif
 	PPC_LL	r0, VCPU_GPR(r0)(r4)
 	PPC_LL	r1, VCPU_GPR(r1)(r4)