Commit f51c9452 authored by Glauber de Oliveira Costa, committed by Linus Torvalds

x86_64: Use read and write crX in .c files

This patch uses the read and write functions provided in system.h
for control registers instead of writing raw assembly over and
over again in .c files. Functions to manipulate cr2 and cr8 were
added, as they were lacking.

Also removed some extra space after closing brackets.
Signed-off-by: Glauber de Oliveira Costa <gcosta@redhat.com>
Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
parent abd4f750
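
For illustration, a minimal before/after sketch of the conversion applied at each call site. read_cr2() is the accessor added to system.h in the hunks below; the wrapper function show_cr2_example() and its printk message are hypothetical and not part of the patch:

	/* Hypothetical example: reading %cr2 by hand versus through the
	 * system.h accessor introduced by this patch. */
	static void show_cr2_example(void)
	{
		unsigned long cr2;

		/* old style: each .c file open-codes the inline asm */
		asm("movq %%cr2,%0" : "=r" (cr2));

		/* new style: use the static inline helper from system.h */
		cr2 = read_cr2();

		printk("cr2: %016lx\n", cr2);
	}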
@@ -342,10 +342,10 @@ void __show_regs(struct pt_regs * regs)
 	rdmsrl(MSR_GS_BASE, gs);
 	rdmsrl(MSR_KERNEL_GS_BASE, shadowgs);
 
-	asm("movq %%cr0, %0": "=r" (cr0));
-	asm("movq %%cr2, %0": "=r" (cr2));
-	asm("movq %%cr3, %0": "=r" (cr3));
-	asm("movq %%cr4, %0": "=r" (cr4));
+	cr0 = read_cr0();
+	cr2 = read_cr2();
+	cr3 = read_cr3();
+	cr4 = read_cr4();
 
 	printk("FS: %016lx(%04x) GS:%016lx(%04x) knlGS:%016lx\n",
 	       fs,fsindex,gs,gsindex,shadowgs);
@@ -55,11 +55,11 @@ void __save_processor_state(struct saved_context *ctxt)
 	 * control registers
 	 */
 	rdmsrl(MSR_EFER, ctxt->efer);
-	asm volatile ("movq %%cr0, %0" : "=r" (ctxt->cr0));
-	asm volatile ("movq %%cr2, %0" : "=r" (ctxt->cr2));
-	asm volatile ("movq %%cr3, %0" : "=r" (ctxt->cr3));
-	asm volatile ("movq %%cr4, %0" : "=r" (ctxt->cr4));
-	asm volatile ("movq %%cr8, %0" : "=r" (ctxt->cr8));
+	ctxt->cr0 = read_cr0();
+	ctxt->cr2 = read_cr2();
+	ctxt->cr3 = read_cr3();
+	ctxt->cr4 = read_cr4();
+	ctxt->cr8 = read_cr8();
 }
 
 void save_processor_state(void)
@@ -81,11 +81,11 @@ void __restore_processor_state(struct saved_context *ctxt)
 	 * control registers
 	 */
 	wrmsrl(MSR_EFER, ctxt->efer);
-	asm volatile ("movq %0, %%cr8" :: "r" (ctxt->cr8));
-	asm volatile ("movq %0, %%cr4" :: "r" (ctxt->cr4));
-	asm volatile ("movq %0, %%cr3" :: "r" (ctxt->cr3));
-	asm volatile ("movq %0, %%cr2" :: "r" (ctxt->cr2));
-	asm volatile ("movq %0, %%cr0" :: "r" (ctxt->cr0));
+	write_cr8(ctxt->cr8);
+	write_cr4(ctxt->cr4);
+	write_cr3(ctxt->cr3);
+	write_cr2(ctxt->cr2);
+	write_cr0(ctxt->cr0);
 
 	/*
 	 * now restore the descriptor tables to their proper values
@@ -159,7 +159,7 @@ void dump_pagetable(unsigned long address)
 	pmd_t *pmd;
 	pte_t *pte;
 
-	asm("movq %%cr3,%0" : "=r" (pgd));
+	pgd = (pgd_t *)read_cr3();
 
 	pgd = __va((unsigned long)pgd & PHYSICAL_PAGE_MASK);
 	pgd += pgd_index(address);
@@ -316,7 +316,7 @@ asmlinkage void __kprobes do_page_fault(struct pt_regs *regs,
 	prefetchw(&mm->mmap_sem);
 
 	/* get the address */
-	__asm__("movq %%cr2,%0":"=r" (address));
+	address = read_cr2();
 
 	info.si_code = SEGV_MAPERR;
@@ -383,7 +383,7 @@ void __meminit init_memory_mapping(unsigned long start, unsigned long end)
 	}
 
 	if (!after_bootmem)
-		asm volatile("movq %%cr4,%0" : "=r" (mmu_cr4_features));
+		mmu_cr4_features = read_cr4();
 	__flush_tlb_all();
 }
@@ -82,6 +82,18 @@ static inline void write_cr0(unsigned long val)
 	asm volatile("movq %0,%%cr0" :: "r" (val));
 }
 
+static inline unsigned long read_cr2(void)
+{
+	unsigned long cr2;
+	asm("movq %%cr2,%0" : "=r" (cr2));
+	return cr2;
+}
+
+static inline void write_cr2(unsigned long val)
+{
+	asm volatile("movq %0,%%cr2" :: "r" (val));
+}
+
 static inline unsigned long read_cr3(void)
 {
 	unsigned long cr3;
@@ -106,6 +118,18 @@ static inline void write_cr4(unsigned long val)
 	asm volatile("movq %0,%%cr4" :: "r" (val) : "memory");
 }
 
+static inline unsigned long read_cr8(void)
+{
+	unsigned long cr8;
+	asm("movq %%cr8,%0" : "=r" (cr8));
+	return cr8;
+}
+
+static inline void write_cr8(unsigned long val)
+{
+	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
+}
+
 #define stts() write_cr0(8 | read_cr0())
 
 #define wbinvd() \