From 6514f93a2ce643ef5914eae7ce49b978e1d356aa Mon Sep 17 00:00:00 2001
From: Thomas Gleixner
Date: Wed, 30 Jan 2008 13:30:34 +0100
Subject: x86: use immediates instead of RW_LOCK_BIAS_STR

Use immediates instead of RW_LOCK_BIAS_STR. This makes the code more
readable and gets rid of the string constant.

Signed-off-by: Thomas Gleixner
Signed-off-by: Ingo Molnar
---
 include/asm-x86/rwlock.h      | 1 -
 include/asm-x86/spinlock_32.h | 8 ++++----
 include/asm-x86/spinlock_64.h | 6 +++---
 3 files changed, 7 insertions(+), 8 deletions(-)

(limited to 'include')

diff --git a/include/asm-x86/rwlock.h b/include/asm-x86/rwlock.h
index f2b64a429e6..6a8c0d64510 100644
--- a/include/asm-x86/rwlock.h
+++ b/include/asm-x86/rwlock.h
@@ -2,7 +2,6 @@
 #define _ASM_X86_RWLOCK_H
 
 #define RW_LOCK_BIAS		 0x01000000
-#define RW_LOCK_BIAS_STR	"0x01000000"
 
 /* Actual code is in asm/spinlock.h or in arch/x86/lib/rwlock.S */
 
diff --git a/include/asm-x86/spinlock_32.h b/include/asm-x86/spinlock_32.h
index fca124a1103..e7a14ab906e 100644
--- a/include/asm-x86/spinlock_32.h
+++ b/include/asm-x86/spinlock_32.h
@@ -156,11 +156,11 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
 
 static inline void __raw_write_lock(raw_rwlock_t *rw)
 {
-	asm volatile(LOCK_PREFIX " subl $" RW_LOCK_BIAS_STR ",(%0)\n\t"
+	asm volatile(LOCK_PREFIX " subl %1,(%0)\n\t"
 		     "jz 1f\n"
 		     "call __write_lock_failed\n\t"
 		     "1:\n"
-		     ::"a" (rw) : "memory");
+		     ::"a" (rw), "i" (RW_LOCK_BIAS) : "memory");
 }
 
 static inline int __raw_read_trylock(raw_rwlock_t *lock)
@@ -191,8 +191,8 @@ static inline void __raw_read_unlock(raw_rwlock_t *rw)
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
-	asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ", %0"
-		     : "+m" (rw->lock) : : "memory");
+	asm volatile(LOCK_PREFIX "addl %1, %0"
+		     : "+m" (rw->lock) : "i" (RW_LOCK_BIAS) : "memory");
 }
 
 #define _raw_spin_relax(lock)	cpu_relax()
diff --git a/include/asm-x86/spinlock_64.h b/include/asm-x86/spinlock_64.h
index e81f6c18d87..ab50e7f5105 100644
--- a/include/asm-x86/spinlock_64.h
+++ b/include/asm-x86/spinlock_64.h
@@ -127,7 +127,7 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
 		     "jns 1f\n"
 		     "call __read_lock_failed\n\t"
 		     "1:\n"
-		     ::"D" (rw), "i" (RW_LOCK_BIAS) : "memory");
+		     ::"D" (rw) : "memory");
 }
 
 static inline void __raw_write_lock(raw_rwlock_t *rw)
@@ -167,8 +167,8 @@ static inline void __raw_read_unlock(raw_rwlock_t *rw)
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
-	asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ", %0"
-		     : "+m" (rw->lock) : : "memory");
+	asm volatile(LOCK_PREFIX "addl %1, %0"
+		     : "+m" (rw->lock) : "i" (RW_LOCK_BIAS) : "memory");
 }
 
 #define _raw_spin_relax(lock)	cpu_relax()
-- 
cgit v1.2.3
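
For reference, a minimal user-space sketch of the technique the patch applies: passing a
constant into inline asm through an "i" (immediate) constraint instead of pasting a
stringified macro into the template. This is not kernel code; BIAS, sub_bias_str() and
sub_bias_imm() are made-up names, the operand is a plain int rather than a raw_rwlock_t,
and LOCK_PREFIX is omitted. It should build with gcc on x86 or x86-64.

    /* Illustrative only: contrast the two styles converted by the patch above. */
    #include <stdio.h>

    #define BIAS 0x01000000		/* stands in for RW_LOCK_BIAS */

    /* Old style: the constant is duplicated as a string pasted into the template. */
    static inline void sub_bias_str(int *p)
    {
            asm volatile("subl $0x01000000, %0" : "+m" (*p) : : "memory");
    }

    /*
     * New style: the constant is passed as an "i" (immediate) input operand;
     * the compiler substitutes the $-prefixed immediate for %1, so no string
     * copy of the value is needed and the explicit '$' leaves the template.
     */
    static inline void sub_bias_imm(int *p)
    {
            asm volatile("subl %1, %0" : "+m" (*p) : "i" (BIAS) : "memory");
    }

    int main(void)
    {
            int v = 2 * BIAS;

            sub_bias_str(&v);
            sub_bias_imm(&v);
            printf("%d\n", v);	/* prints 0: both immediates were subtracted */
            return 0;
    }

With the "i" constraint the assembler still sees an immediate operand (gcc emits it with
the leading '$'), but the value now has a single definition, so RW_LOCK_BIAS and a
stringified copy of it can no longer drift apart.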