Commit ddb3108e authored by Maciej W. Rozycki, committed by Ralf Baechle

MIPS: atomic.h: Reformat to fit in 79 columns

Signed-off-by: Maciej W. Rozycki <macro@codesourcery.com>
Cc: linux-mips@linux-mips.org
Patchwork: https://patchwork.linux-mips.org/patch/8484/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
parent 0e525e48
@@ -41,97 +41,97 @@
  */
 #define atomic_set(v, i)       ((v)->counter = (i))
 
 #define ATOMIC_OP(op, c_op, asm_op)                                   \
 static __inline__ void atomic_##op(int i, atomic_t * v)               \
 {                                                                     \
         if (kernel_uses_llsc && R10000_LLSC_WAR) {                    \
                 int temp;                                             \
                                                                       \
                 __asm__ __volatile__(                                 \
                 "       .set    arch=r4000                      \n"   \
                 "1:     ll      %0, %1          # atomic_" #op "\n"   \
                 "       " #asm_op " %0, %2                      \n"   \
                 "       sc      %0, %1                          \n"   \
                 "       beqzl   %0, 1b                          \n"   \
                 "       .set    mips0                           \n"   \
                 : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)      \
                 : "Ir" (i));                                          \
         } else if (kernel_uses_llsc) {                                \
                 int temp;                                             \
                                                                       \
                 do {                                                  \
                         __asm__ __volatile__(                         \
                         "       .set    arch=r4000              \n"   \
                         "       ll      %0, %1  # atomic_" #op "\n"   \
                         "       " #asm_op " %0, %2              \n"   \
                         "       sc      %0, %1                  \n"   \
                         "       .set    mips0                   \n"   \
                         : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \
                         : "Ir" (i));                                  \
                 } while (unlikely(!temp));                            \
         } else {                                                      \
                 unsigned long flags;                                  \
                                                                       \
                 raw_local_irq_save(flags);                            \
                 v->counter c_op i;                                    \
                 raw_local_irq_restore(flags);                         \
         }                                                             \
-}                                                                     \
+}
 
 #define ATOMIC_OP_RETURN(op, c_op, asm_op)                            \
 static __inline__ int atomic_##op##_return(int i, atomic_t * v)       \
 {                                                                     \
         int result;                                                   \
                                                                       \
         smp_mb__before_llsc();                                        \
                                                                       \
         if (kernel_uses_llsc && R10000_LLSC_WAR) {                    \
                 int temp;                                             \
                                                                       \
                 __asm__ __volatile__(                                 \
                 "       .set    arch=r4000                      \n"   \
                 "1:     ll      %1, %2  # atomic_" #op "_return \n"   \
                 "       " #asm_op " %0, %1, %3                  \n"   \
                 "       sc      %0, %2                          \n"   \
                 "       beqzl   %0, 1b                          \n"   \
                 "       " #asm_op " %0, %1, %3                  \n"   \
                 "       .set    mips0                           \n"   \
                 : "=&r" (result), "=&r" (temp),                       \
                   "+" GCC_OFF12_ASM() (v->counter)                    \
                 : "Ir" (i));                                          \
         } else if (kernel_uses_llsc) {                                \
                 int temp;                                             \
                                                                       \
                 do {                                                  \
                         __asm__ __volatile__(                         \
                         "       .set    arch=r4000              \n"   \
                         "       ll      %1, %2  # atomic_" #op "_return \n" \
                         "       " #asm_op " %0, %1, %3          \n"   \
                         "       sc      %0, %2                  \n"   \
                         "       .set    mips0                   \n"   \
                         : "=&r" (result), "=&r" (temp),               \
                           "+" GCC_OFF12_ASM() (v->counter)            \
                         : "Ir" (i));                                  \
                 } while (unlikely(!result));                          \
                                                                       \
                 result = temp; result c_op i;                         \
         } else {                                                      \
                 unsigned long flags;                                  \
                                                                       \
                 raw_local_irq_save(flags);                            \
                 result = v->counter;                                  \
                 result c_op i;                                        \
                 v->counter = result;                                  \
                 raw_local_irq_restore(flags);                         \
         }                                                             \
                                                                       \
         smp_llsc_mb();                                                \
                                                                       \
         return result;                                                \
 }
 
 #define ATOMIC_OPS(op, c_op, asm_op)                                  \
         ATOMIC_OP(op, c_op, asm_op)                                   \
         ATOMIC_OP_RETURN(op, c_op, asm_op)
 
 ATOMIC_OPS(add, +=, addu)
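
For review context, this is roughly what ATOMIC_OP(add, +=, addu) from the hunk above expands to on a configuration where kernel_uses_llsc is true and the R10000 workaround does not apply. It is a sketch rather than a literal preprocessor dump: the ll/sc pair retries until the store-conditional succeeds, with temp doubling as the success flag.

/* Approximate expansion of ATOMIC_OP(add, +=, addu), plain LL/SC case. */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	int temp;

	do {
		__asm__ __volatile__(
		"	.set	arch=r4000		\n"
		"	ll	%0, %1	# atomic_add	\n"	/* load-linked from v->counter */
		"	addu	%0, %2			\n"	/* temp += i */
		"	sc	%0, %1			\n"	/* store-conditional; temp becomes 1 on success, 0 on failure */
		"	.set	mips0			\n"
		: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)
		: "Ir" (i));
	} while (unlikely(!temp));	/* another CPU intervened: retry */
}

On R10000-class CPUs the first branch is used instead: beqzl, the branch-likely form, keeps the retry loop entirely inside the asm block as a workaround for that core's ll/sc erratum.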
@@ -320,98 +320,98 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
  */
 #define atomic64_set(v, i)      ((v)->counter = (i))
 
 #define ATOMIC64_OP(op, c_op, asm_op)                                 \
 static __inline__ void atomic64_##op(long i, atomic64_t * v)          \
 {                                                                     \
         if (kernel_uses_llsc && R10000_LLSC_WAR) {                    \
                 long temp;                                            \
                                                                       \
                 __asm__ __volatile__(                                 \
                 "       .set    arch=r4000                        \n" \
                 "1:     lld     %0, %1          # atomic64_" #op "\n" \
                 "       " #asm_op " %0, %2                        \n" \
                 "       scd     %0, %1                            \n" \
                 "       beqzl   %0, 1b                            \n" \
                 "       .set    mips0                             \n" \
                 : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)      \
                 : "Ir" (i));                                          \
         } else if (kernel_uses_llsc) {                                \
                 long temp;                                            \
                                                                       \
                 do {                                                  \
                         __asm__ __volatile__(                         \
                         "       .set    arch=r4000                \n" \
                         "       lld     %0, %1  # atomic64_" #op "\n" \
                         "       " #asm_op " %0, %2                \n" \
                         "       scd     %0, %1                    \n" \
                         "       .set    mips0                     \n" \
                         : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \
                         : "Ir" (i));                                  \
                 } while (unlikely(!temp));                            \
         } else {                                                      \
                 unsigned long flags;                                  \
                                                                       \
                 raw_local_irq_save(flags);                            \
                 v->counter c_op i;                                    \
                 raw_local_irq_restore(flags);                         \
         }                                                             \
-}                                                                     \
+}
 
 #define ATOMIC64_OP_RETURN(op, c_op, asm_op)                          \
 static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
 {                                                                     \
         long result;                                                  \
                                                                       \
         smp_mb__before_llsc();                                        \
                                                                       \
         if (kernel_uses_llsc && R10000_LLSC_WAR) {                    \
                 long temp;                                            \
                                                                       \
                 __asm__ __volatile__(                                 \
                 "       .set    arch=r4000                        \n" \
                 "1:     lld     %1, %2  # atomic64_" #op "_return\n"  \
                 "       " #asm_op " %0, %1, %3                    \n" \
                 "       scd     %0, %2                            \n" \
                 "       beqzl   %0, 1b                            \n" \
                 "       " #asm_op " %0, %1, %3                    \n" \
                 "       .set    mips0                             \n" \
                 : "=&r" (result), "=&r" (temp),                       \
                   "+" GCC_OFF12_ASM() (v->counter)                    \
                 : "Ir" (i));                                          \
         } else if (kernel_uses_llsc) {                                \
                 long temp;                                            \
                                                                       \
                 do {                                                  \
                         __asm__ __volatile__(                         \
                         "       .set    arch=r4000                \n" \
                         "       lld     %1, %2  # atomic64_" #op "_return\n" \
                         "       " #asm_op " %0, %1, %3            \n" \
                         "       scd     %0, %2                    \n" \
                         "       .set    mips0                     \n" \
                         : "=&r" (result), "=&r" (temp),               \
                           "=" GCC_OFF12_ASM() (v->counter)            \
                         : "Ir" (i), GCC_OFF12_ASM() (v->counter)      \
                         : "memory");                                  \
                 } while (unlikely(!result));                          \
                                                                       \
                 result = temp; result c_op i;                         \
         } else {                                                      \
                 unsigned long flags;                                  \
                                                                       \
                 raw_local_irq_save(flags);                            \
                 result = v->counter;                                  \
                 result c_op i;                                        \
                 v->counter = result;                                  \
                 raw_local_irq_restore(flags);                         \
         }                                                             \
                                                                       \
         smp_llsc_mb();                                                \
                                                                       \
         return result;                                                \
 }
 
 #define ATOMIC64_OPS(op, c_op, asm_op)                                \
         ATOMIC64_OP(op, c_op, asm_op)                                 \
         ATOMIC64_OP_RETURN(op, c_op, asm_op)
 
 ATOMIC64_OPS(add, +=, daddu)
@@ -422,7 +422,8 @@ ATOMIC64_OPS(sub, -=, dsubu)
 #undef ATOMIC64_OP
 
 /*
- * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
+ * atomic64_sub_if_positive - conditionally subtract integer from atomic
+ * variable
  * @i: integer value to subtract
  * @v: pointer of type atomic64_t
  *
...
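
To make the 64-bit flavour concrete, here is a sketch (again an approximate expansion, not preprocessor output) of the return variant that ATOMIC64_OPS(add, +=, daddu) generates, showing the plain LL/SC path and the interrupt-masking fallback. Note that scd leaves its success flag in result, so the post-operation value is recomputed in C from the loaded temp after the loop.

/* Approximate expansion of ATOMIC64_OP_RETURN(add, +=, daddu). */
static __inline__ long atomic64_add_return(long i, atomic64_t *v)
{
	long result;

	smp_mb__before_llsc();		/* order earlier accesses before the lld */

	if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	arch=r4000			\n"
			"	lld	%1, %2	# atomic64_add_return	\n"
			"	daddu	%0, %1, %3			\n"
			"	scd	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp),
			  "=" GCC_OFF12_ASM() (v->counter)
			: "Ir" (i), GCC_OFF12_ASM() (v->counter)
			: "memory");
		} while (unlikely(!result));	/* scd left 0 in result on failure */

		result = temp; result += i;	/* recompute the new value in C */
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);	/* fallback: plain load/modify/store */
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();			/* order the ll/sc before later accesses */

	return result;
}

The smp_mb__before_llsc()/smp_llsc_mb() pair gives the _return variants full-barrier semantics, which the void atomic64_##op operations above deliberately omit.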