Commit 365bf8ac authored by Nicolas Pitre, committed by Russell King

[ARM] 3311/1: clean up include/asm-arm/mutex.h

Patch from Nicolas Pitre

Since:

	if (unlikely(__res || __ex_flag))

produces worse code on ARM than:

	if (unlikely(__res | __ex_flag))

I therefore made it more explicit:

	__res |= __ex_flag;
	if (unlikely(__res != 0))

so it is not seen as a typo again.
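
For illustration, the old and new fastpath endings side by side:

	/* before: short-circuit test, conceptually one check per operand */
	if (unlikely(__res || __ex_flag))
		fail_fn(count);

	/* after: both values are used only as zero/non-zero flags, so a
	 * single bitwise OR followed by one test is equivalent and lets
	 * the compiler emit one compare and one branch */
	__res |= __ex_flag;
	if (unlikely(__res != 0))
		fail_fn(count);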

Also made everything static inline rather than macros for better readability
(both produce the same code after all).
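
As a side effect, the function prototype now does the type checking that
the macro version had to request explicitly via typecheck() and
typecheck_fn(). A minimal illustrative pair (not the kernel code):

	/* macro: argument type is unchecked */
	#define dec_and_test_macro(v)	(--(*(v)) == 0)

	/* inline function: the prototype rejects a wrongly typed argument */
	static inline int dec_and_test_inline(int *v)
	{
		return --(*v) == 0;
	}

Both forms compile to the same object code; only the function catches a
bad argument type at the call site.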

And finally added the missing \t to multi-line assembly code.
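
The \t matters because gcc pastes the asm template verbatim into the
generated .s file: a bare "\n" makes the next instruction start in
column 0 of the assembler listing, while "\n\t" keeps every line
tab-indented. A rough sketch of the emitted text (register numbers
illustrative), first with "\n" only, then with "\n\t":

	ldrex	r0, [r2]
sub	r0, r0, #1
strex	r1, r0, [r2]

	ldrex	r0, [r2]
	sub	r0, r0, #1
	strex	r1, r0, [r2]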
Signed-off-by: Nicolas Pitre <nico@cam.org>
Acked-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
parent 5964eae8
@@ -23,72 +23,71 @@
  * simply bail out immediately through the slow path where the lock will be
  * reattempted until it succeeds.
  */
-#define __mutex_fastpath_lock(count, fail_fn)				\
-do {									\
-	int __ex_flag, __res;						\
-									\
-	typecheck(atomic_t *, count);					\
-	typecheck_fn(fastcall void (*)(atomic_t *), fail_fn);		\
-									\
-	__asm__ (							\
-		"ldrex	%0, [%2]	\n"				\
-		"sub	%0, %0, #1	\n"				\
-		"strex	%1, %0, [%2]	\n"				\
-									\
-		: "=&r" (__res), "=&r" (__ex_flag)			\
-		: "r" (&(count)->counter)				\
-		: "cc","memory" );					\
-									\
-	if (unlikely(__res || __ex_flag))				\
-		fail_fn(count);						\
-} while (0)
+static inline void
+__mutex_fastpath_lock(atomic_t *count, fastcall void (*fail_fn)(atomic_t *))
+{
+	int __ex_flag, __res;
+
+	__asm__ (
+
+		"ldrex	%0, [%2]	\n\t"
+		"sub	%0, %0, #1	\n\t"
+		"strex	%1, %0, [%2]	"
+
+		: "=&r" (__res), "=&r" (__ex_flag)
+		: "r" (&(count)->counter)
+		: "cc","memory" );
+
+	__res |= __ex_flag;
+	if (unlikely(__res != 0))
+		fail_fn(count);
+}
 
-#define __mutex_fastpath_lock_retval(count, fail_fn)			\
-({									\
-	int __ex_flag, __res;						\
-									\
-	typecheck(atomic_t *, count);					\
-	typecheck_fn(fastcall int (*)(atomic_t *), fail_fn);		\
-									\
-	__asm__ (							\
-		"ldrex	%0, [%2]	\n"				\
-		"sub	%0, %0, #1	\n"				\
-		"strex	%1, %0, [%2]	\n"				\
-									\
-		: "=&r" (__res), "=&r" (__ex_flag)			\
-		: "r" (&(count)->counter)				\
-		: "cc","memory" );					\
-									\
-	__res |= __ex_flag;						\
-	if (unlikely(__res != 0))					\
-		__res = fail_fn(count);					\
-	__res;								\
-})
+static inline int
+__mutex_fastpath_lock_retval(atomic_t *count, fastcall int (*fail_fn)(atomic_t *))
+{
+	int __ex_flag, __res;
+
+	__asm__ (
+
+		"ldrex	%0, [%2]	\n\t"
+		"sub	%0, %0, #1	\n\t"
+		"strex	%1, %0, [%2]	"
+
+		: "=&r" (__res), "=&r" (__ex_flag)
+		: "r" (&(count)->counter)
+		: "cc","memory" );
+
+	__res |= __ex_flag;
+	if (unlikely(__res != 0))
+		__res = fail_fn(count);
+	return __res;
+}
 
 /*
  * Same trick is used for the unlock fast path. However the original value,
  * rather than the result, is used to test for success in order to have
  * better generated assembly.
  */
-#define __mutex_fastpath_unlock(count, fail_fn)			\
-do {									\
-	int __ex_flag, __res, __orig;					\
-									\
-	typecheck(atomic_t *, count);					\
-	typecheck_fn(fastcall void (*)(atomic_t *), fail_fn);		\
-									\
-	__asm__ (							\
-		"ldrex	%0, [%3]	\n"				\
-		"add	%1, %0, #1	\n"				\
-		"strex	%2, %1, [%3]	\n"				\
-									\
-		: "=&r" (__orig), "=&r" (__res), "=&r" (__ex_flag)	\
-		: "r" (&(count)->counter)				\
-		: "cc","memory" );					\
-									\
-	if (unlikely(__orig || __ex_flag))				\
-		fail_fn(count);						\
-} while (0)
+static inline void
+__mutex_fastpath_unlock(atomic_t *count, fastcall void (*fail_fn)(atomic_t *))
+{
+	int __ex_flag, __res, __orig;
+
+	__asm__ (
+
+		"ldrex	%0, [%3]	\n\t"
+		"add	%1, %0, #1	\n\t"
+		"strex	%2, %1, [%3]	"
+
+		: "=&r" (__orig), "=&r" (__res), "=&r" (__ex_flag)
+		: "r" (&(count)->counter)
+		: "cc","memory" );
+
+	__orig |= __ex_flag;
+	if (unlikely(__orig != 0))
+		fail_fn(count);
+}
 
 /*
  * If the unlock was done on a contended lock, or if the unlock simply fails
@@ -110,12 +109,12 @@ __mutex_fastpath_trylock(atomic_t *count, int (*fail_fn)(atomic_t *))
 	__asm__ (
 
-		"1: ldrex	%0, [%3]	\n"
-		"subs		%1, %0, #1	\n"
-		"strexeq	%2, %1, [%3]	\n"
-		"movlt		%0, #0		\n"
-		"cmpeq		%2, #0		\n"
-		"bgt		1b		\n"
+		"1: ldrex	%0, [%3]	\n\t"
+		"subs		%1, %0, #1	\n\t"
+		"strexeq	%2, %1, [%3]	\n\t"
+		"movlt		%0, #0		\n\t"
+		"cmpeq		%2, #0		\n\t"
+		"bgt		1b		"
 
 	: "=&r" (__orig), "=&r" (__res), "=&r" (__ex_flag)
 	: "r" (&count->counter)