author     Ingo Molnar <mingo@elte.hu>    2008-02-29 10:29:38 +0100
committer  Ingo Molnar <mingo@elte.hu>    2008-04-17 17:41:29 +0200
commit     7fda20f146d5d217684ffbc37c6b6c5f82c2dffd (patch)
tree       ec72cbdbdc05c95b7be5922694fa069af12ff22b /include
parent     d93c870bad38e8daaaf9f7e900a13431f24becbb (diff)
x86: spinlock ops are always-inlined
Signed-off-by: Ingo Molnar <mingo@elte.hu>
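
For context: __always_inline is the kernel's wrapper around GCC's always_inline attribute. Unlike plain "inline", which is only a hint that GCC's heuristics may decline (notably under CONFIG_CC_OPTIMIZE_FOR_SIZE / -Os), it forces the body into every call site. A minimal sketch of the definition as include/linux/compiler-gcc.h provided it at the time:

    /* Force inlining even when GCC's heuristics would decline;
     * plain "inline" is merely a suggestion to the compiler. */
    #ifndef __always_inline
    # define __always_inline inline __attribute__((always_inline))
    #endif
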
Diffstat (limited to 'include')
-rw-r--r--  include/asm-x86/spinlock.h  |  12
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/include/asm-x86/spinlock.h b/include/asm-x86/spinlock.h
index 47dfe2607bb..bc6376f1bc5 100644
--- a/include/asm-x86/spinlock.h
+++ b/include/asm-x86/spinlock.h
@@ -78,7 +78,7 @@ static inline int __raw_spin_is_contended(raw_spinlock_t *lock)
return (((tmp >> 8) & 0xff) - (tmp & 0xff)) > 1;
}
-static inline void __raw_spin_lock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_lock(raw_spinlock_t *lock)
{
short inc = 0x0100;
@@ -99,7 +99,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
-static inline int __raw_spin_trylock(raw_spinlock_t *lock)
+static __always_inline int __raw_spin_trylock(raw_spinlock_t *lock)
{
int tmp;
short new;
@@ -120,7 +120,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
return tmp;
}
-static inline void __raw_spin_unlock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_unlock(raw_spinlock_t *lock)
{
asm volatile(UNLOCK_LOCK_PREFIX "incb %0"
: "+m" (lock->slock)
@@ -142,7 +142,7 @@ static inline int __raw_spin_is_contended(raw_spinlock_t *lock)
return (((tmp >> 16) & 0xffff) - (tmp & 0xffff)) > 1;
}
-static inline void __raw_spin_lock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_lock(raw_spinlock_t *lock)
{
int inc = 0x00010000;
int tmp;
@@ -165,7 +165,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
-static inline int __raw_spin_trylock(raw_spinlock_t *lock)
+static __always_inline int __raw_spin_trylock(raw_spinlock_t *lock)
{
int tmp;
int new;
@@ -187,7 +187,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
return tmp;
}
-static inline void __raw_spin_unlock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_unlock(raw_spinlock_t *lock)
{
asm volatile(UNLOCK_LOCK_PREFIX "incw %0"
: "+m" (lock->slock)