Commit 36d3295c authored by Paul Burton's avatar Paul Burton

MIPS: atomic: Fix whitespace in ATOMIC_OP macros

We define macros in asm/atomic.h which end each line with space
characters before a backslash to continue on the next line. Remove the
space characters leaving tabs as the whitespace used for conformity with
coding convention.
Signed-off-by: Paul Burton <paul.burton@mips.com>
Cc: linux-mips@vger.kernel.org
Cc: Huacai Chen <chenhc@lemote.com>
Cc: Jiaxun Yang <jiaxun.yang@flygoat.com>
Cc: linux-kernel@vger.kernel.org
parent 185d7d7a
@@ -42,102 +42,102 @@
*/ */
#define atomic_set(v, i) WRITE_ONCE((v)->counter, (i)) #define atomic_set(v, i) WRITE_ONCE((v)->counter, (i))
#define ATOMIC_OP(op, c_op, asm_op) \ #define ATOMIC_OP(op, c_op, asm_op) \
static __inline__ void atomic_##op(int i, atomic_t * v) \ static __inline__ void atomic_##op(int i, atomic_t * v) \
{ \ { \
if (kernel_uses_llsc) { \ if (kernel_uses_llsc) { \
int temp; \ int temp; \
\ \
loongson_llsc_mb(); \ loongson_llsc_mb(); \
__asm__ __volatile__( \ __asm__ __volatile__( \
" .set push \n" \ " .set push \n" \
" .set "MIPS_ISA_LEVEL" \n" \ " .set "MIPS_ISA_LEVEL" \n" \
"1: ll %0, %1 # atomic_" #op " \n" \ "1: ll %0, %1 # atomic_" #op " \n" \
" " #asm_op " %0, %2 \n" \ " " #asm_op " %0, %2 \n" \
" sc %0, %1 \n" \ " sc %0, %1 \n" \
"\t" __SC_BEQZ "%0, 1b \n" \ "\t" __SC_BEQZ "%0, 1b \n" \
" .set pop \n" \ " .set pop \n" \
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \ : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i) : __LLSC_CLOBBER); \ : "Ir" (i) : __LLSC_CLOBBER); \
} else { \ } else { \
unsigned long flags; \ unsigned long flags; \
\ \
raw_local_irq_save(flags); \ raw_local_irq_save(flags); \
v->counter c_op i; \ v->counter c_op i; \
raw_local_irq_restore(flags); \ raw_local_irq_restore(flags); \
} \ } \
} }
#define ATOMIC_OP_RETURN(op, c_op, asm_op) \ #define ATOMIC_OP_RETURN(op, c_op, asm_op) \
static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \ static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \
{ \ { \
int result; \ int result; \
\ \
if (kernel_uses_llsc) { \ if (kernel_uses_llsc) { \
int temp; \ int temp; \
\ \
loongson_llsc_mb(); \ loongson_llsc_mb(); \
__asm__ __volatile__( \ __asm__ __volatile__( \
" .set push \n" \ " .set push \n" \
" .set "MIPS_ISA_LEVEL" \n" \ " .set "MIPS_ISA_LEVEL" \n" \
"1: ll %1, %2 # atomic_" #op "_return \n" \ "1: ll %1, %2 # atomic_" #op "_return \n" \
" " #asm_op " %0, %1, %3 \n" \ " " #asm_op " %0, %1, %3 \n" \
" sc %0, %2 \n" \ " sc %0, %2 \n" \
"\t" __SC_BEQZ "%0, 1b \n" \ "\t" __SC_BEQZ "%0, 1b \n" \
" " #asm_op " %0, %1, %3 \n" \ " " #asm_op " %0, %1, %3 \n" \
" .set pop \n" \ " .set pop \n" \
: "=&r" (result), "=&r" (temp), \ : "=&r" (result), "=&r" (temp), \
"+" GCC_OFF_SMALL_ASM() (v->counter) \ "+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i) : __LLSC_CLOBBER); \ : "Ir" (i) : __LLSC_CLOBBER); \
} else { \ } else { \
unsigned long flags; \ unsigned long flags; \
\ \
raw_local_irq_save(flags); \ raw_local_irq_save(flags); \
result = v->counter; \ result = v->counter; \
result c_op i; \ result c_op i; \
v->counter = result; \ v->counter = result; \
raw_local_irq_restore(flags); \ raw_local_irq_restore(flags); \
} \ } \
\ \
return result; \ return result; \
} }
#define ATOMIC_FETCH_OP(op, c_op, asm_op) \ #define ATOMIC_FETCH_OP(op, c_op, asm_op) \
static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v) \ static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v) \
{ \ { \
int result; \ int result; \
\ \
if (kernel_uses_llsc) { \ if (kernel_uses_llsc) { \
int temp; \ int temp; \
\ \
loongson_llsc_mb(); \ loongson_llsc_mb(); \
__asm__ __volatile__( \ __asm__ __volatile__( \
" .set push \n" \ " .set push \n" \
" .set "MIPS_ISA_LEVEL" \n" \ " .set "MIPS_ISA_LEVEL" \n" \
"1: ll %1, %2 # atomic_fetch_" #op " \n" \ "1: ll %1, %2 # atomic_fetch_" #op " \n" \
" " #asm_op " %0, %1, %3 \n" \ " " #asm_op " %0, %1, %3 \n" \
" sc %0, %2 \n" \ " sc %0, %2 \n" \
"\t" __SC_BEQZ "%0, 1b \n" \ "\t" __SC_BEQZ "%0, 1b \n" \
" .set pop \n" \ " .set pop \n" \
" move %0, %1 \n" \ " move %0, %1 \n" \
: "=&r" (result), "=&r" (temp), \ : "=&r" (result), "=&r" (temp), \
"+" GCC_OFF_SMALL_ASM() (v->counter) \ "+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i) : __LLSC_CLOBBER); \ : "Ir" (i) : __LLSC_CLOBBER); \
} else { \ } else { \
unsigned long flags; \ unsigned long flags; \
\ \
raw_local_irq_save(flags); \ raw_local_irq_save(flags); \
result = v->counter; \ result = v->counter; \
v->counter c_op i; \ v->counter c_op i; \
raw_local_irq_restore(flags); \ raw_local_irq_restore(flags); \
} \ } \
\ \
return result; \ return result; \
} }
#define ATOMIC_OPS(op, c_op, asm_op) \ #define ATOMIC_OPS(op, c_op, asm_op) \
ATOMIC_OP(op, c_op, asm_op) \ ATOMIC_OP(op, c_op, asm_op) \
ATOMIC_OP_RETURN(op, c_op, asm_op) \ ATOMIC_OP_RETURN(op, c_op, asm_op) \
ATOMIC_FETCH_OP(op, c_op, asm_op) ATOMIC_FETCH_OP(op, c_op, asm_op)
ATOMIC_OPS(add, +=, addu) ATOMIC_OPS(add, +=, addu)
@@ -149,8 +149,8 @@ ATOMIC_OPS(sub, -=, subu)
#define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed #define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
#undef ATOMIC_OPS #undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op) \ #define ATOMIC_OPS(op, c_op, asm_op) \
ATOMIC_OP(op, c_op, asm_op) \ ATOMIC_OP(op, c_op, asm_op) \
ATOMIC_FETCH_OP(op, c_op, asm_op) ATOMIC_FETCH_OP(op, c_op, asm_op)
ATOMIC_OPS(and, &=, and) ATOMIC_OPS(and, &=, and)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment