Commit a33fff3a authored by Thomas Gleixner, committed by Ingo Molnar

x86: fix asm constraints in spinlock_32/64.h

Use the correct constraints for the spinlock assembler functions.

read (modify) write functions need "+m" instead of "=m"
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
parent 2fed0c50
@@ -99,7 +99,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
-	asm volatile("movb $1,%0" : "+m" (lock->slock) :: "memory");
+	asm volatile("movb $1,%0" : "=m" (lock->slock) :: "memory");
 }
 #else
 ...
@@ -34,7 +34,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 		"jle 3b\n\t"
 		"jmp 1b\n"
 		"2:\t"
-		: "=m" (lock->slock) : : "memory");
+		: "+m" (lock->slock) : : "memory");
 }
 /*
@@ -80,7 +80,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 	asm volatile(
 		"xchgl %0,%1"
-		:"=q" (oldval), "=m" (lock->slock)
+		:"=q" (oldval), "+m" (lock->slock)
 		:"0" (0) : "memory");
 	return oldval > 0;
@@ -162,13 +162,13 @@ static inline int __raw_write_trylock(raw_rwlock_t *lock)
 static inline void __raw_read_unlock(raw_rwlock_t *rw)
 {
-	asm volatile(LOCK_PREFIX "incl %0" :"=m" (rw->lock) : : "memory");
+	asm volatile(LOCK_PREFIX "incl %0" :"+m" (rw->lock) : : "memory");
 }
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
-	asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ",%0"
-		     : "=m" (rw->lock) : : "memory");
+	asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ", %0"
+		     : "+m" (rw->lock) : : "memory");
 }
 #define _raw_spin_relax(lock)	cpu_relax()
 ...
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment