path: root/include/asm-powerpc/bitops.h
Diffstat (limited to 'include/asm-powerpc/bitops.h')
-rw-r--r--  include/asm-powerpc/bitops.h  |  49
1 file changed, 49 insertions, 0 deletions
diff --git a/include/asm-powerpc/bitops.h b/include/asm-powerpc/bitops.h
index 8144a2788db..733b4af7f4f 100644
--- a/include/asm-powerpc/bitops.h
+++ b/include/asm-powerpc/bitops.h
@@ -38,6 +38,10 @@
#ifdef __KERNEL__

+#ifndef _LINUX_BITOPS_H
+#error only <linux/bitops.h> can be included directly
+#endif
+
#include <linux/compiler.h>
#include <asm/asm-compat.h>
#include <asm/synch.h>
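The guard added in this hunk means the architecture header can no longer be included directly; only the generic <linux/bitops.h> wrapper, which defines _LINUX_BITOPS_H before pulling in <asm/bitops.h>, gets past the #error. A minimal sketch of the effect (illustrative, not part of the patch):

/* OK: the wrapper defines _LINUX_BITOPS_H and then includes
 * <asm/bitops.h>, so the guard above is satisfied. */
#include <linux/bitops.h>

/* Broken: including the arch header directly now fails at compile
 * time with "only <linux/bitops.h> can be included directly". */
/* #include <asm/bitops.h> */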
@@ -86,6 +90,24 @@ static __inline__ void clear_bit(int nr, volatile unsigned long *addr)
: "cc" );
}

+static __inline__ void clear_bit_unlock(int nr, volatile unsigned long *addr)
+{
+ unsigned long old;
+ unsigned long mask = BITOP_MASK(nr);
+ unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+
+ __asm__ __volatile__(
+ LWSYNC_ON_SMP
+"1:" PPC_LLARX "%0,0,%3 # clear_bit_unlock\n"
+ "andc %0,%0,%2\n"
+ PPC405_ERR77(0,%3)
+ PPC_STLCX "%0,0,%3\n"
+ "bne- 1b"
+ : "=&r" (old), "+m" (*p)
+ : "r" (mask), "r" (p)
+ : "cc", "memory");
+}
+
static __inline__ void change_bit(int nr, volatile unsigned long *addr)
{
unsigned long old;
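The new clear_bit_unlock() is clear_bit() with release semantics: the lwsync emitted by LWSYNC_ON_SMP orders every store made while the bit was held before the atomic andc loop that clears it. A hedged sketch of why that ordering matters, in kernel context (flags and payload are hypothetical names, not part of the patch):

static unsigned long flags;	/* hypothetical word whose bit 0 acts as a lock */
static int payload;		/* hypothetical data protected by bit 0 */

static void publish(int value)
{
	payload = value;		/* plain store in the critical section */
	clear_bit_unlock(0, &flags);	/* lwsync: the store to payload is
					 * visible before bit 0 reads as clear */
}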
@@ -125,6 +147,27 @@ static __inline__ int test_and_set_bit(unsigned long nr,
return (old & mask) != 0;
}

+static __inline__ int test_and_set_bit_lock(unsigned long nr,
+ volatile unsigned long *addr)
+{
+ unsigned long old, t;
+ unsigned long mask = BITOP_MASK(nr);
+ unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+
+ __asm__ __volatile__(
+"1:" PPC_LLARX "%0,0,%3 # test_and_set_bit_lock\n"
+ "or %1,%0,%2 \n"
+ PPC405_ERR77(0,%3)
+ PPC_STLCX "%1,0,%3 \n"
+ "bne- 1b"
+ ISYNC_ON_SMP
+ : "=&r" (old), "=&r" (t)
+ : "r" (mask), "r" (p)
+ : "cc", "memory");
+
+ return (old & mask) != 0;
+}
+
static __inline__ int test_and_clear_bit(unsigned long nr,
volatile unsigned long *addr)
{
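test_and_set_bit_lock() is the acquire-side counterpart. Unlike plain test_and_set_bit() it emits no leading barrier, only the trailing isync from ISYNC_ON_SMP, which keeps the critical section from starting before the lock bit is actually owned; that makes acquisition cheaper while still giving acquire semantics. Together the pair forms a complete bit lock. A hedged sketch in kernel context (LOCK_BIT, my_bit_lock and my_bit_unlock are hypothetical names):

#include <linux/bitops.h>

#define LOCK_BIT 0			/* hypothetical bit used as the lock */

static void my_bit_lock(unsigned long *word)
{
	/* Spin until the bit was previously clear; a successful
	 * acquisition has acquire semantics (the trailing isync). */
	while (test_and_set_bit_lock(LOCK_BIT, word))
		cpu_relax();
}

static void my_bit_unlock(unsigned long *word)
{
	clear_bit_unlock(LOCK_BIT, word);	/* release semantics */
}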
@@ -185,6 +228,12 @@ static __inline__ void set_bits(unsigned long mask, unsigned long *addr)
#include <asm-generic/bitops/non-atomic.h>

+static __inline__ void __clear_bit_unlock(int nr, volatile unsigned long *addr)
+{
+ __asm__ __volatile__(LWSYNC_ON_SMP "" ::: "memory");
+ __clear_bit(nr, addr);
+}
+
/*
* Return the zero-based bit position (LE, not IBM bit numbering) of
* the most significant 1-bit in a double word.
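__clear_bit_unlock() keeps the release barrier (LWSYNC_ON_SMP) but clears the bit with the non-atomic __clear_bit(), skipping the larx/stcx. loop. That is only safe when no other CPU can concurrently update other bits of the same word, e.g. when every writer to the word does so while holding this bit lock. A hedged sketch of when the cheaper form applies (lock_word and fast_unlock are hypothetical names):

static unsigned long lock_word;	/* hypothetical: bit 0 is the lock, and all
				 * other updates to this word happen only
				 * while bit 0 is held */

static void fast_unlock(void)
{
	/* Safe here: nothing else can modify lock_word while bit 0 is
	 * held, so the plain read-modify-write inside __clear_bit()
	 * cannot lose another CPU's update. */
	__clear_bit_unlock(0, &lock_word);
}

If other CPUs may atomically set or clear other bits of the word at any time, the fully atomic clear_bit_unlock() must be used instead.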