Commit c9dcda5c authored by Glauber de Oliveira Costa, committed by Ingo Molnar

x86: change write msr functions interface

This patch changes the interface of native_write_msr() and friends
to explicitly take two 32-bit registers instead of a single 64-bit
value, which will ease the merge with the 64-bit code. Since on i386
the 64-bit value is passed in two registers anyway, the PVOP_CALL
interface has to account for that and use low/high parameters;
otherwise the x86_64 version would be forced to differ.

The change does not make the generated i386 code any less efficient:
as noted above, the value would be fetched from two registers anyway.
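
As a minimal illustration (not part of the patch; the helper name and
payload below are invented), the split a caller performs under the new
interface is:

	/* hypothetical caller, assuming the declarations from msr.h */
	static void example_write(unsigned int msr, unsigned long long val)
	{
		unsigned low  = (unsigned)val;		/* low 32 bits, ends up in %eax */
		unsigned high = (unsigned)(val >> 32);	/* high 32 bits, ends up in %edx */

		native_write_msr(msr, low, high);	/* wrmsr consumes edx:eax */
	}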
Signed-off-by: Glauber de Oliveira Costa <gcosta@redhat.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
parent b8d1fae7
@@ -63,13 +63,14 @@ static inline unsigned long long native_read_msr_safe(unsigned int msr,
 	return val;
 }
 
-static inline void native_write_msr(unsigned int msr, unsigned long long val)
+static inline void native_write_msr(unsigned int msr,
+				    unsigned low, unsigned high)
 {
-	asm volatile("wrmsr" : : "c" (msr), "A"(val));
+	asm volatile("wrmsr" : : "c" (msr), "a"(low), "d" (high));
 }
 
 static inline int native_write_msr_safe(unsigned int msr,
-					unsigned long long val)
+					unsigned low, unsigned high)
 {
 	int err;
 	asm volatile("2: wrmsr ; xorl %0,%0\n"
@@ -82,7 +83,7 @@ static inline int native_write_msr_safe(unsigned int msr,
 		     " .long 2b,3b\n\t"
 		     ".previous"
 		     : "=a" (err)
-		     : "c" (msr), "0" ((u32)val), "d" ((u32)(val>>32)),
+		     : "c" (msr), "0" (low), "d" (high),
 		       "i" (-EFAULT));
 	return err;
 }
@@ -118,20 +119,20 @@ static inline unsigned long long native_read_pmc(int counter)
 		(val2) = (u32)(__val >> 32);	\
 	} while(0)
 
-static inline void wrmsr(u32 __msr, u32 __low, u32 __high)
+static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
 {
-	native_write_msr(__msr, ((u64)__high << 32) | __low);
+	native_write_msr(msr, low, high);
 }
 
 #define rdmsrl(msr,val) \
 	((val) = native_read_msr(msr))
 
-#define wrmsrl(msr,val)	native_write_msr(msr, val)
+#define wrmsrl(msr, val)	native_write_msr(msr, (u32)val, (u32)(val >> 32))
 
 /* wrmsr with exception handling */
-static inline int wrmsr_safe(u32 __msr, u32 __low, u32 __high)
+static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
 {
-	return native_write_msr_safe(__msr, ((u64)__high << 32) | __low);
+	return native_write_msr_safe(msr, low, high);
 }
 
 /* rdmsr with exception handling */
...
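
For reference, a caller-side sketch of the reworked msr.h interface
(the MSR choice and payload are invented; assumes <asm/msr.h> and
<linux/kernel.h>):

	static void example_msr_update(void)
	{
		u64 val = 0x12345678abcdef00ULL;	/* made-up payload */
		int err;

		/* unchecked write: wrmsrl() now splits the 64-bit value itself */
		wrmsrl(MSR_IA32_SYSENTER_ESP, val);

		/* checked write: pass the halves explicitly; returns 0 or -EFAULT */
		err = wrmsr_safe(MSR_IA32_SYSENTER_ESP, (u32)val, (u32)(val >> 32));
		if (err)
			printk(KERN_WARNING "wrmsr faulted\n");
	}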
@@ -115,7 +115,7 @@ struct pv_cpu_ops {
 	/* MSR, PMC and TSR operations.
 	   err = 0/-EFAULT.  wrmsr returns 0/-EFAULT. */
 	u64 (*read_msr)(unsigned int msr, int *err);
-	int (*write_msr)(unsigned int msr, u64 val);
+	int (*write_msr)(unsigned int msr, unsigned low, unsigned high);
 
 	u64 (*read_tsc)(void);
 	u64 (*read_pmc)(int counter);
...
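
To show what the new hook signature implies for a backend, a
hypothetical pv_cpu_ops wiring (the demo_* names are invented;
natively the hook would simply point at native_write_msr_safe()):

	static int demo_write_msr(unsigned int msr, unsigned low, unsigned high)
	{
		/* a hypervisor backend would forward low/high to the host here;
		   natively this is just the checked wrmsr with edx:eax = high:low */
		return native_write_msr_safe(msr, low, high);
	}

	static struct pv_cpu_ops demo_cpu_ops = {
		.write_msr	= demo_write_msr,
		/* remaining hooks elided */
	};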