Commit 1f7afb08 authored by Thomas Gleixner, committed by Thomas Gleixner

x86: unify include/asm-x86/edac_32/64.h

Same file, except for whitespace, comment formatting and:

32-bit: unsigned long *virt_addr = va;
64-bit: unsigned int  *virt_addr = va;

Both can be safely replaced by:
	u32 i, *virt_addr = va;
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
parent 327c21bc
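Aside (not part of the commit): the replacement is safe because on x86-32 unsigned long is 4 bytes and on x86-64 unsigned int is 4 bytes, so both old variants dereference 32-bit words and advance virt_addr in 4-byte strides; u32 is 4 bytes on either architecture. A standalone compile-time sketch of that assumption, with uint32_t standing in for the kernel's u32:

#include <stdint.h>

typedef uint32_t u32;	/* stand-in for the kernel's u32 */

_Static_assert(sizeof(u32) == 4, "u32 is 4 bytes everywhere");
#if defined(__i386__)
/* the old 32-bit variant used: unsigned long *virt_addr */
_Static_assert(sizeof(unsigned long) == 4, "4-byte pointer stride on x86-32");
#elif defined(__x86_64__)
/* the old 64-bit variant used: unsigned int *virt_addr */
_Static_assert(sizeof(unsigned int) == 4, "4-byte pointer stride on x86-64");
#endif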
include/asm-x86/edac.h (old dispatch stub, replaced by the unified version below):

#ifdef CONFIG_X86_32
# include "edac_32.h"
#else
# include "edac_64.h"
#endif
include/asm-x86/edac.h (new unified version):

#ifndef _ASM_X86_EDAC_H
#define _ASM_X86_EDAC_H

/* ECC atomic, DMA, SMP and interrupt safe scrub function */

static __inline__ void atomic_scrub(void *va, u32 size)
{
	u32 i, *virt_addr = va;

	/*
	 * Very carefully read and write to memory atomically so we
	 * are interrupt, DMA and SMP safe.
	 */
	for (i = 0; i < size / 4; i++, virt_addr++)
		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
}

#endif
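Aside (not part of the commit): lock; addl $0 is an atomic read-modify-write that adds zero, so the word's value is unchanged, but the locked cycle forces the CPU to read the word and write it back, which rewrites the location and lets the memory controller correct a latent single-bit ECC error. A minimal x86-only userspace sketch of the same technique; the scrub() wrapper, buffer, and values are illustrative, not kernel code:

#include <inttypes.h>
#include <stdio.h>

/* Same trick as atomic_scrub(): atomically read each 32-bit word
 * and write it back unchanged via a lock-prefixed add of zero. */
static void scrub(void *va, uint32_t size)
{
	uint32_t i, *virt_addr = va;

	for (i = 0; i < size / 4; i++, virt_addr++)
		__asm__ __volatile__("lock; addl $0, %0" : "+m" (*virt_addr));
}

int main(void)
{
	uint32_t buf[4] = { 0xdeadbeef, 0x12345678 };	/* illustrative data */

	scrub(buf, sizeof(buf));
	/* the scrub is value-preserving: prints 0xdeadbeef 0x12345678 */
	printf("%#" PRIx32 " %#" PRIx32 "\n", buf[0], buf[1]);
	return 0;
}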
include/asm-x86/edac_32.h (deleted):

#ifndef ASM_EDAC_H
#define ASM_EDAC_H

/* ECC atomic, DMA, SMP and interrupt safe scrub function */

static __inline__ void atomic_scrub(void *va, u32 size)
{
	unsigned long *virt_addr = va;
	u32 i;

	for (i = 0; i < size / 4; i++, virt_addr++)
		/* Very carefully read and write to memory atomically
		 * so we are interrupt, DMA and SMP safe.
		 */
		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
}

#endif
include/asm-x86/edac_64.h (deleted):

#ifndef ASM_EDAC_H
#define ASM_EDAC_H

/* ECC atomic, DMA, SMP and interrupt safe scrub function */

static __inline__ void atomic_scrub(void *va, u32 size)
{
	unsigned int *virt_addr = va;
	u32 i;

	for (i = 0; i < size / 4; i++, virt_addr++)
		/* Very carefully read and write to memory atomically
		 * so we are interrupt, DMA and SMP safe.
		 */
		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
}

#endif