Commit b4bcb4c2 authored by Michel Lespinasse's avatar Michel Lespinasse Committed by H. Peter Anvin

x86, rwsem: Minor cleanups

Clarified a few comments and made initialization of %edx/%rdx more uniform
across __down_write_nested, __up_read and __up_write functions.
Signed-off-by: Michel Lespinasse <walken@google.com>
LKML-Reference: <201007202219.o6KMJkiA021048@imap1.linux-foundation.org>
Acked-by: David Howells <dhowells@redhat.com>
Cc: Mike Waychison <mikew@google.com>
Cc: Suleiman Souhlal <suleiman@google.com>
Cc: Ying Han <yinghan@google.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
parent a751bd85
...@@ -118,7 +118,7 @@ static inline void __down_read(struct rw_semaphore *sem) ...@@ -118,7 +118,7 @@ static inline void __down_read(struct rw_semaphore *sem)
{ {
asm volatile("# beginning down_read\n\t" asm volatile("# beginning down_read\n\t"
LOCK_PREFIX _ASM_INC "(%1)\n\t" LOCK_PREFIX _ASM_INC "(%1)\n\t"
/* adds 0x00000001, returns the old value */ /* adds 0x00000001 */
" jns 1f\n" " jns 1f\n"
" call call_rwsem_down_read_failed\n" " call call_rwsem_down_read_failed\n"
"1:\n\t" "1:\n\t"
...@@ -156,11 +156,9 @@ static inline int __down_read_trylock(struct rw_semaphore *sem) ...@@ -156,11 +156,9 @@ static inline int __down_read_trylock(struct rw_semaphore *sem)
static inline void __down_write_nested(struct rw_semaphore *sem, int subclass) static inline void __down_write_nested(struct rw_semaphore *sem, int subclass)
{ {
rwsem_count_t tmp; rwsem_count_t tmp;
tmp = RWSEM_ACTIVE_WRITE_BIAS;
asm volatile("# beginning down_write\n\t" asm volatile("# beginning down_write\n\t"
LOCK_PREFIX " xadd %1,(%2)\n\t" LOCK_PREFIX " xadd %1,(%2)\n\t"
/* subtract 0x0000ffff, returns the old value */ /* adds 0xffff0001, returns the old value */
" test %1,%1\n\t" " test %1,%1\n\t"
/* was the count 0 before? */ /* was the count 0 before? */
" jz 1f\n" " jz 1f\n"
...@@ -168,7 +166,7 @@ static inline void __down_write_nested(struct rw_semaphore *sem, int subclass) ...@@ -168,7 +166,7 @@ static inline void __down_write_nested(struct rw_semaphore *sem, int subclass)
"1:\n" "1:\n"
"# ending down_write" "# ending down_write"
: "+m" (sem->count), "=d" (tmp) : "+m" (sem->count), "=d" (tmp)
: "a" (sem), "1" (tmp) : "a" (sem), "1" (RWSEM_ACTIVE_WRITE_BIAS)
: "memory", "cc"); : "memory", "cc");
} }
...@@ -195,16 +193,16 @@ static inline int __down_write_trylock(struct rw_semaphore *sem) ...@@ -195,16 +193,16 @@ static inline int __down_write_trylock(struct rw_semaphore *sem)
*/ */
static inline void __up_read(struct rw_semaphore *sem) static inline void __up_read(struct rw_semaphore *sem)
{ {
rwsem_count_t tmp = -RWSEM_ACTIVE_READ_BIAS; rwsem_count_t tmp;
asm volatile("# beginning __up_read\n\t" asm volatile("# beginning __up_read\n\t"
LOCK_PREFIX " xadd %1,(%2)\n\t" LOCK_PREFIX " xadd %1,(%2)\n\t"
/* subtracts 1, returns the old value */ /* subtracts 1, returns the old value */
" jns 1f\n\t" " jns 1f\n\t"
" call call_rwsem_wake\n" " call call_rwsem_wake\n" /* expects old value in %edx */
"1:\n" "1:\n"
"# ending __up_read\n" "# ending __up_read\n"
: "+m" (sem->count), "=d" (tmp) : "+m" (sem->count), "=d" (tmp)
: "a" (sem), "1" (tmp) : "a" (sem), "1" (-RWSEM_ACTIVE_READ_BIAS)
: "memory", "cc"); : "memory", "cc");
} }
...@@ -218,7 +216,7 @@ static inline void __up_write(struct rw_semaphore *sem) ...@@ -218,7 +216,7 @@ static inline void __up_write(struct rw_semaphore *sem)
LOCK_PREFIX " xadd %1,(%2)\n\t" LOCK_PREFIX " xadd %1,(%2)\n\t"
/* subtracts 0xffff0001, returns the old value */ /* subtracts 0xffff0001, returns the old value */
" jns 1f\n\t" " jns 1f\n\t"
" call call_rwsem_wake\n" " call call_rwsem_wake\n" /* expects old value in %edx */
"1:\n\t" "1:\n\t"
"# ending __up_write\n" "# ending __up_write\n"
: "+m" (sem->count), "=d" (tmp) : "+m" (sem->count), "=d" (tmp)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment