author | Dave Airlie <airlied@starflyer.(none)> | 2005-07-30 14:37:43 +1000
---|---|---
committer | Dave Airlie <airlied@linux.ie> | 2005-07-30 14:37:43 +1000
commit | bdf242eeb0f69567fe43eba93889d80ecacbfe94 (patch) |
tree | dc402eeeb75fd51e92b4f4a63712c6e64ac4c2fb /include/asm-s390 |
parent | 836cf0465c422ee6d654060edd7c620d9cf0c09c (diff) |
parent | b0825488a642cadcf39709961dde61440cb0731c (diff) |
Merge ../linux-2.6/
Diffstat (limited to 'include/asm-s390')
-rw-r--r-- | include/asm-s390/atomic.h | 8
-rw-r--r-- | include/asm-s390/bitops.h | 440
-rw-r--r-- | include/asm-s390/emergency-restart.h | 6
-rw-r--r-- | include/asm-s390/lowcore.h | 4
-rw-r--r-- | include/asm-s390/processor.h | 5
-rw-r--r-- | include/asm-s390/spinlock.h | 252
6 files changed, 233 insertions, 482 deletions
diff --git a/include/asm-s390/atomic.h b/include/asm-s390/atomic.h index d5a05cf4716..9d86ba6f12d 100644 --- a/include/asm-s390/atomic.h +++ b/include/asm-s390/atomic.h @@ -123,19 +123,19 @@ typedef struct { #define atomic64_read(v) ((v)->counter) #define atomic64_set(v,i) (((v)->counter) = (i)) -static __inline__ void atomic64_add(int i, atomic64_t * v) +static __inline__ void atomic64_add(long long i, atomic64_t * v) { __CSG_LOOP(v, i, "agr"); } -static __inline__ long long atomic64_add_return(int i, atomic64_t * v) +static __inline__ long long atomic64_add_return(long long i, atomic64_t * v) { return __CSG_LOOP(v, i, "agr"); } -static __inline__ long long atomic64_add_negative(int i, atomic64_t * v) +static __inline__ long long atomic64_add_negative(long long i, atomic64_t * v) { return __CSG_LOOP(v, i, "agr") < 0; } -static __inline__ void atomic64_sub(int i, atomic64_t * v) +static __inline__ void atomic64_sub(long long i, atomic64_t * v) { __CSG_LOOP(v, i, "sgr"); } diff --git a/include/asm-s390/bitops.h b/include/asm-s390/bitops.h index 16bb08499c7..8651524217f 100644 --- a/include/asm-s390/bitops.h +++ b/include/asm-s390/bitops.h @@ -527,13 +527,64 @@ __constant_test_bit(unsigned long nr, const volatile unsigned long *addr) { __constant_test_bit((nr),(addr)) : \ __test_bit((nr),(addr)) ) -#ifndef __s390x__ +/* + * ffz = Find First Zero in word. Undefined if no zero exists, + * so code should check against ~0UL first.. + */ +static inline unsigned long ffz(unsigned long word) +{ + unsigned long bit = 0; + +#ifdef __s390x__ + if (likely((word & 0xffffffff) == 0xffffffff)) { + word >>= 32; + bit += 32; + } +#endif + if (likely((word & 0xffff) == 0xffff)) { + word >>= 16; + bit += 16; + } + if (likely((word & 0xff) == 0xff)) { + word >>= 8; + bit += 8; + } + return bit + _zb_findmap[word & 0xff]; +} + +/* + * __ffs = find first bit in word. Undefined if no bit exists, + * so code should check against 0UL first.. + */ +static inline unsigned long __ffs (unsigned long word) +{ + unsigned long bit = 0; + +#ifdef __s390x__ + if (likely((word & 0xffffffff) == 0)) { + word >>= 32; + bit += 32; + } +#endif + if (likely((word & 0xffff) == 0)) { + word >>= 16; + bit += 16; + } + if (likely((word & 0xff) == 0)) { + word >>= 8; + bit += 8; + } + return bit + _sb_findmap[word & 0xff]; +} /* * Find-bit routines.. */ + +#ifndef __s390x__ + static inline int -find_first_zero_bit(const unsigned long * addr, unsigned int size) +find_first_zero_bit(const unsigned long * addr, unsigned long size) { typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype; unsigned long cmp, count; @@ -548,7 +599,7 @@ find_first_zero_bit(const unsigned long * addr, unsigned int size) " srl %2,5\n" "0: c %1,0(%0,%4)\n" " jne 1f\n" - " ahi %0,4\n" + " la %0,4(%0)\n" " brct %2,0b\n" " lr %0,%3\n" " j 4f\n" @@ -574,7 +625,7 @@ find_first_zero_bit(const unsigned long * addr, unsigned int size) } static inline int -find_first_bit(const unsigned long * addr, unsigned int size) +find_first_bit(const unsigned long * addr, unsigned long size) { typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype; unsigned long cmp, count; @@ -589,7 +640,7 @@ find_first_bit(const unsigned long * addr, unsigned int size) " srl %2,5\n" "0: c %1,0(%0,%4)\n" " jne 1f\n" - " ahi %0,4\n" + " la %0,4(%0)\n" " brct %2,0b\n" " lr %0,%3\n" " j 4f\n" @@ -614,89 +665,8 @@ find_first_bit(const unsigned long * addr, unsigned int size) return (res < size) ? 
res : size; } -static inline int -find_next_zero_bit (const unsigned long * addr, int size, int offset) -{ - unsigned long * p = ((unsigned long *) addr) + (offset >> 5); - unsigned long bitvec, reg; - int set, bit = offset & 31, res; - - if (bit) { - /* - * Look for zero in first word - */ - bitvec = (*p) >> bit; - __asm__(" slr %0,%0\n" - " lhi %2,0xff\n" - " tml %1,0xffff\n" - " jno 0f\n" - " ahi %0,16\n" - " srl %1,16\n" - "0: tml %1,0x00ff\n" - " jno 1f\n" - " ahi %0,8\n" - " srl %1,8\n" - "1: nr %1,%2\n" - " ic %1,0(%1,%3)\n" - " alr %0,%1" - : "=&d" (set), "+a" (bitvec), "=&d" (reg) - : "a" (&_zb_findmap) : "cc" ); - if (set < (32 - bit)) - return set + offset; - offset += 32 - bit; - p++; - } - /* - * No zero yet, search remaining full words for a zero - */ - res = find_first_zero_bit (p, size - 32 * (p - (unsigned long *) addr)); - return (offset + res); -} - -static inline int -find_next_bit (const unsigned long * addr, int size, int offset) -{ - unsigned long * p = ((unsigned long *) addr) + (offset >> 5); - unsigned long bitvec, reg; - int set, bit = offset & 31, res; - - if (bit) { - /* - * Look for set bit in first word - */ - bitvec = (*p) >> bit; - __asm__(" slr %0,%0\n" - " lhi %2,0xff\n" - " tml %1,0xffff\n" - " jnz 0f\n" - " ahi %0,16\n" - " srl %1,16\n" - "0: tml %1,0x00ff\n" - " jnz 1f\n" - " ahi %0,8\n" - " srl %1,8\n" - "1: nr %1,%2\n" - " ic %1,0(%1,%3)\n" - " alr %0,%1" - : "=&d" (set), "+a" (bitvec), "=&d" (reg) - : "a" (&_sb_findmap) : "cc" ); - if (set < (32 - bit)) - return set + offset; - offset += 32 - bit; - p++; - } - /* - * No set bit yet, search remaining full words for a bit - */ - res = find_first_bit (p, size - 32 * (p - (unsigned long *) addr)); - return (offset + res); -} - #else /* __s390x__ */ -/* - * Find-bit routines.. - */ static inline unsigned long find_first_zero_bit(const unsigned long * addr, unsigned long size) { @@ -712,7 +682,7 @@ find_first_zero_bit(const unsigned long * addr, unsigned long size) " srlg %2,%2,6\n" "0: cg %1,0(%0,%4)\n" " jne 1f\n" - " aghi %0,8\n" + " la %0,8(%0)\n" " brct %2,0b\n" " lgr %0,%3\n" " j 5f\n" @@ -785,143 +755,66 @@ find_first_bit(const unsigned long * addr, unsigned long size) return (res < size) ? 
res : size; } -static inline unsigned long -find_next_zero_bit (const unsigned long * addr, unsigned long size, unsigned long offset) -{ - unsigned long * p = ((unsigned long *) addr) + (offset >> 6); - unsigned long bitvec, reg; - unsigned long set, bit = offset & 63, res; - - if (bit) { - /* - * Look for zero in first word - */ - bitvec = (*p) >> bit; - __asm__(" lhi %2,-1\n" - " slgr %0,%0\n" - " clr %1,%2\n" - " jne 0f\n" - " aghi %0,32\n" - " srlg %1,%1,32\n" - "0: lghi %2,0xff\n" - " tmll %1,0xffff\n" - " jno 1f\n" - " aghi %0,16\n" - " srlg %1,%1,16\n" - "1: tmll %1,0x00ff\n" - " jno 2f\n" - " aghi %0,8\n" - " srlg %1,%1,8\n" - "2: ngr %1,%2\n" - " ic %1,0(%1,%3)\n" - " algr %0,%1" - : "=&d" (set), "+a" (bitvec), "=&d" (reg) - : "a" (&_zb_findmap) : "cc" ); - if (set < (64 - bit)) - return set + offset; - offset += 64 - bit; - p++; - } - /* - * No zero yet, search remaining full words for a zero - */ - res = find_first_zero_bit (p, size - 64 * (p - (unsigned long *) addr)); - return (offset + res); -} - -static inline unsigned long -find_next_bit (const unsigned long * addr, unsigned long size, unsigned long offset) -{ - unsigned long * p = ((unsigned long *) addr) + (offset >> 6); - unsigned long bitvec, reg; - unsigned long set, bit = offset & 63, res; - - if (bit) { - /* - * Look for zero in first word - */ - bitvec = (*p) >> bit; - __asm__(" slgr %0,%0\n" - " ltr %1,%1\n" - " jnz 0f\n" - " aghi %0,32\n" - " srlg %1,%1,32\n" - "0: lghi %2,0xff\n" - " tmll %1,0xffff\n" - " jnz 1f\n" - " aghi %0,16\n" - " srlg %1,%1,16\n" - "1: tmll %1,0x00ff\n" - " jnz 2f\n" - " aghi %0,8\n" - " srlg %1,%1,8\n" - "2: ngr %1,%2\n" - " ic %1,0(%1,%3)\n" - " algr %0,%1" - : "=&d" (set), "+a" (bitvec), "=&d" (reg) - : "a" (&_sb_findmap) : "cc" ); - if (set < (64 - bit)) - return set + offset; - offset += 64 - bit; - p++; - } - /* - * No set bit yet, search remaining full words for a bit - */ - res = find_first_bit (p, size - 64 * (p - (unsigned long *) addr)); - return (offset + res); -} - #endif /* __s390x__ */ -/* - * ffz = Find First Zero in word. Undefined if no zero exists, - * so code should check against ~0UL first.. - */ -static inline unsigned long ffz(unsigned long word) +static inline int +find_next_zero_bit (const unsigned long * addr, unsigned long size, + unsigned long offset) { - unsigned long bit = 0; - -#ifdef __s390x__ - if (likely((word & 0xffffffff) == 0xffffffff)) { - word >>= 32; - bit += 32; - } -#endif - if (likely((word & 0xffff) == 0xffff)) { - word >>= 16; - bit += 16; + const unsigned long *p; + unsigned long bit, set; + + if (offset >= size) + return size; + bit = offset & (__BITOPS_WORDSIZE - 1); + offset -= bit; + size -= offset; + p = addr + offset / __BITOPS_WORDSIZE; + if (bit) { + /* + * s390 version of ffz returns __BITOPS_WORDSIZE + * if no zero bit is present in the word. + */ + set = ffz(*p >> bit) + bit; + if (set >= size) + return size + offset; + if (set < __BITOPS_WORDSIZE) + return set + offset; + offset += __BITOPS_WORDSIZE; + size -= __BITOPS_WORDSIZE; + p++; } - if (likely((word & 0xff) == 0xff)) { - word >>= 8; - bit += 8; - } - return bit + _zb_findmap[word & 0xff]; + return offset + find_first_zero_bit(p, size); } -/* - * __ffs = find first bit in word. Undefined if no bit exists, - * so code should check against 0UL first.. 
- */ -static inline unsigned long __ffs (unsigned long word) +static inline int +find_next_bit (const unsigned long * addr, unsigned long size, + unsigned long offset) { - unsigned long bit = 0; - -#ifdef __s390x__ - if (likely((word & 0xffffffff) == 0)) { - word >>= 32; - bit += 32; + const unsigned long *p; + unsigned long bit, set; + + if (offset >= size) + return size; + bit = offset & (__BITOPS_WORDSIZE - 1); + offset -= bit; + size -= offset; + p = addr + offset / __BITOPS_WORDSIZE; + if (bit) { + /* + * s390 version of __ffs returns __BITOPS_WORDSIZE + * if no one bit is present in the word. + */ + set = __ffs(*p & (~0UL << bit)); + if (set >= size) + return size + offset; + if (set < __BITOPS_WORDSIZE) + return set + offset; + offset += __BITOPS_WORDSIZE; + size -= __BITOPS_WORDSIZE; + p++; } -#endif - if (likely((word & 0xffff) == 0)) { - word >>= 16; - bit += 16; - } - if (likely((word & 0xff) == 0)) { - word >>= 8; - bit += 8; - } - return bit + _sb_findmap[word & 0xff]; + return offset + find_first_bit(p, size); } /* @@ -1031,49 +924,6 @@ ext2_find_first_zero_bit(void *vaddr, unsigned int size) return (res < size) ? res : size; } -static inline int -ext2_find_next_zero_bit(void *vaddr, unsigned int size, unsigned offset) -{ - unsigned long *addr = vaddr; - unsigned long *p = addr + (offset >> 5); - unsigned long word, reg; - unsigned int bit = offset & 31UL, res; - - if (offset >= size) - return size; - - if (bit) { - __asm__(" ic %0,0(%1)\n" - " icm %0,2,1(%1)\n" - " icm %0,4,2(%1)\n" - " icm %0,8,3(%1)" - : "=&a" (word) : "a" (p) : "cc" ); - word >>= bit; - res = bit; - /* Look for zero in first longword */ - __asm__(" lhi %2,0xff\n" - " tml %1,0xffff\n" - " jno 0f\n" - " ahi %0,16\n" - " srl %1,16\n" - "0: tml %1,0x00ff\n" - " jno 1f\n" - " ahi %0,8\n" - " srl %1,8\n" - "1: nr %1,%2\n" - " ic %1,0(%1,%3)\n" - " alr %0,%1" - : "+&d" (res), "+&a" (word), "=&d" (reg) - : "a" (&_zb_findmap) : "cc" ); - if (res < 32) - return (p - addr)*32 + res; - p++; - } - /* No zero yet, search remaining full bytes for a zero */ - res = ext2_find_first_zero_bit (p, size - 32 * (p - addr)); - return (p - addr) * 32 + res; -} - #else /* __s390x__ */ static inline unsigned long @@ -1120,56 +970,46 @@ ext2_find_first_zero_bit(void *vaddr, unsigned long size) return (res < size) ? 
res : size; } -static inline unsigned long +#endif /* __s390x__ */ + +static inline int ext2_find_next_zero_bit(void *vaddr, unsigned long size, unsigned long offset) { - unsigned long *addr = vaddr; - unsigned long *p = addr + (offset >> 6); - unsigned long word, reg; - unsigned long bit = offset & 63UL, res; + unsigned long *addr = vaddr, *p; + unsigned long word, bit, set; if (offset >= size) return size; - + bit = offset & (__BITOPS_WORDSIZE - 1); + offset -= bit; + size -= offset; + p = addr + offset / __BITOPS_WORDSIZE; if (bit) { - __asm__(" lrvg %0,%1" /* load reversed, neat instruction */ - : "=a" (word) : "m" (*p) ); - word >>= bit; - res = bit; - /* Look for zero in first 8 byte word */ - __asm__(" lghi %2,0xff\n" - " tmll %1,0xffff\n" - " jno 2f\n" - " ahi %0,16\n" - " srlg %1,%1,16\n" - "0: tmll %1,0xffff\n" - " jno 2f\n" - " ahi %0,16\n" - " srlg %1,%1,16\n" - "1: tmll %1,0xffff\n" - " jno 2f\n" - " ahi %0,16\n" - " srl %1,16\n" - "2: tmll %1,0x00ff\n" - " jno 3f\n" - " ahi %0,8\n" - " srl %1,8\n" - "3: ngr %1,%2\n" - " ic %1,0(%1,%3)\n" - " alr %0,%1" - : "+&d" (res), "+a" (word), "=&d" (reg) - : "a" (&_zb_findmap) : "cc" ); - if (res < 64) - return (p - addr)*64 + res; - p++; +#ifndef __s390x__ + asm(" ic %0,0(%1)\n" + " icm %0,2,1(%1)\n" + " icm %0,4,2(%1)\n" + " icm %0,8,3(%1)" + : "=&a" (word) : "a" (p), "m" (*p) : "cc" ); +#else + asm(" lrvg %0,%1" : "=a" (word) : "m" (*p) ); +#endif + /* + * s390 version of ffz returns __BITOPS_WORDSIZE + * if no zero bit is present in the word. + */ + set = ffz(word >> bit) + bit; + if (set >= size) + return size + offset; + if (set < __BITOPS_WORDSIZE) + return set + offset; + offset += __BITOPS_WORDSIZE; + size -= __BITOPS_WORDSIZE; + p++; } - /* No zero yet, search remaining full bytes for a zero */ - res = ext2_find_first_zero_bit (p, size - 64 * (p - addr)); - return (p - addr) * 64 + res; + return offset + ext2_find_first_zero_bit(p, size); } -#endif /* __s390x__ */ - /* Bitmap functions for the minix filesystem. */ /* FIXME !!! 
*/ #define minix_test_and_set_bit(nr,addr) \ diff --git a/include/asm-s390/emergency-restart.h b/include/asm-s390/emergency-restart.h new file mode 100644 index 00000000000..108d8c48e42 --- /dev/null +++ b/include/asm-s390/emergency-restart.h @@ -0,0 +1,6 @@ +#ifndef _ASM_EMERGENCY_RESTART_H +#define _ASM_EMERGENCY_RESTART_H + +#include <asm-generic/emergency-restart.h> + +#endif /* _ASM_EMERGENCY_RESTART_H */ diff --git a/include/asm-s390/lowcore.h b/include/asm-s390/lowcore.h index 76b5b19c0ae..afe6a9f9b0a 100644 --- a/include/asm-s390/lowcore.h +++ b/include/asm-s390/lowcore.h @@ -90,7 +90,6 @@ #define __LC_SYSTEM_TIMER 0x278 #define __LC_LAST_UPDATE_CLOCK 0x280 #define __LC_STEAL_CLOCK 0x288 -#define __LC_DIAG44_OPCODE 0x290 #define __LC_KERNEL_STACK 0xD40 #define __LC_THREAD_INFO 0xD48 #define __LC_ASYNC_STACK 0xD50 @@ -286,8 +285,7 @@ struct _lowcore __u64 system_timer; /* 0x278 */ __u64 last_update_clock; /* 0x280 */ __u64 steal_clock; /* 0x288 */ - __u32 diag44_opcode; /* 0x290 */ - __u8 pad8[0xc00-0x294]; /* 0x294 */ + __u8 pad8[0xc00-0x290]; /* 0x290 */ /* System info area */ __u64 save_area[16]; /* 0xc00 */ __u8 pad9[0xd40-0xc80]; /* 0xc80 */ diff --git a/include/asm-s390/processor.h b/include/asm-s390/processor.h index 8bd14de69e3..4ec652ebb3b 100644 --- a/include/asm-s390/processor.h +++ b/include/asm-s390/processor.h @@ -203,7 +203,10 @@ unsigned long get_wchan(struct task_struct *p); # define cpu_relax() asm volatile ("diag 0,0,68" : : : "memory") #else /* __s390x__ */ # define cpu_relax() \ - asm volatile ("ex 0,%0" : : "i" (__LC_DIAG44_OPCODE) : "memory") + do { \ + if (MACHINE_HAS_DIAG44) \ + asm volatile ("diag 0,0,68" : : : "memory"); \ + } while (0) #endif /* __s390x__ */ /* diff --git a/include/asm-s390/spinlock.h b/include/asm-s390/spinlock.h index 53cc736b982..8ff10300f7e 100644 --- a/include/asm-s390/spinlock.h +++ b/include/asm-s390/spinlock.h @@ -11,21 +11,16 @@ #ifndef __ASM_SPINLOCK_H #define __ASM_SPINLOCK_H -#ifdef __s390x__ -/* - * Grmph, take care of %&#! user space programs that include - * asm/spinlock.h. The diagnose is only available in kernel - * context. - */ -#ifdef __KERNEL__ -#include <asm/lowcore.h> -#define __DIAG44_INSN "ex" -#define __DIAG44_OPERAND __LC_DIAG44_OPCODE -#else -#define __DIAG44_INSN "#" -#define __DIAG44_OPERAND 0 -#endif -#endif /* __s390x__ */ +static inline int +_raw_compare_and_swap(volatile unsigned int *lock, + unsigned int old, unsigned int new) +{ + asm volatile ("cs %0,%3,0(%4)" + : "=d" (old), "=m" (*lock) + : "0" (old), "d" (new), "a" (lock), "m" (*lock) + : "cc", "memory" ); + return old; +} /* * Simple spin lock operations. 
There are two variants, one clears IRQ's @@ -41,58 +36,35 @@ typedef struct { #endif } __attribute__ ((aligned (4))) spinlock_t; -#define SPIN_LOCK_UNLOCKED (spinlock_t) { 0 } -#define spin_lock_init(lp) do { (lp)->lock = 0; } while(0) +#define SPIN_LOCK_UNLOCKED (spinlock_t) { 0 } +#define spin_lock_init(lp) do { (lp)->lock = 0; } while(0) #define spin_unlock_wait(lp) do { barrier(); } while(((volatile spinlock_t *)(lp))->lock) -#define spin_is_locked(x) ((x)->lock != 0) +#define spin_is_locked(x) ((x)->lock != 0) #define _raw_spin_lock_flags(lock, flags) _raw_spin_lock(lock) -extern inline void _raw_spin_lock(spinlock_t *lp) +extern void _raw_spin_lock_wait(spinlock_t *lp, unsigned int pc); +extern int _raw_spin_trylock_retry(spinlock_t *lp, unsigned int pc); + +static inline void _raw_spin_lock(spinlock_t *lp) { -#ifndef __s390x__ - unsigned int reg1, reg2; - __asm__ __volatile__(" bras %0,1f\n" - "0: diag 0,0,68\n" - "1: slr %1,%1\n" - " cs %1,%0,0(%3)\n" - " jl 0b\n" - : "=&d" (reg1), "=&d" (reg2), "=m" (lp->lock) - : "a" (&lp->lock), "m" (lp->lock) - : "cc", "memory" ); -#else /* __s390x__ */ - unsigned long reg1, reg2; - __asm__ __volatile__(" bras %1,1f\n" - "0: " __DIAG44_INSN " 0,%4\n" - "1: slr %0,%0\n" - " cs %0,%1,0(%3)\n" - " jl 0b\n" - : "=&d" (reg1), "=&d" (reg2), "=m" (lp->lock) - : "a" (&lp->lock), "i" (__DIAG44_OPERAND), - "m" (lp->lock) : "cc", "memory" ); -#endif /* __s390x__ */ + unsigned long pc = (unsigned long) __builtin_return_address(0); + + if (unlikely(_raw_compare_and_swap(&lp->lock, 0, pc) != 0)) + _raw_spin_lock_wait(lp, pc); } -extern inline int _raw_spin_trylock(spinlock_t *lp) +static inline int _raw_spin_trylock(spinlock_t *lp) { - unsigned long reg; - unsigned int result; - - __asm__ __volatile__(" basr %1,0\n" - "0: cs %0,%1,0(%3)" - : "=d" (result), "=&d" (reg), "=m" (lp->lock) - : "a" (&lp->lock), "m" (lp->lock), "0" (0) - : "cc", "memory" ); - return !result; + unsigned long pc = (unsigned long) __builtin_return_address(0); + + if (likely(_raw_compare_and_swap(&lp->lock, 0, pc) == 0)) + return 1; + return _raw_spin_trylock_retry(lp, pc); } -extern inline void _raw_spin_unlock(spinlock_t *lp) +static inline void _raw_spin_unlock(spinlock_t *lp) { - unsigned int old; - - __asm__ __volatile__("cs %0,%3,0(%4)" - : "=d" (old), "=m" (lp->lock) - : "0" (lp->lock), "d" (0), "a" (lp) - : "cc", "memory" ); + _raw_compare_and_swap(&lp->lock, lp->lock, 0); } /* @@ -106,7 +78,7 @@ extern inline void _raw_spin_unlock(spinlock_t *lp) * read-locks. 
*/ typedef struct { - volatile unsigned long lock; + volatile unsigned int lock; volatile unsigned long owner_pc; #ifdef CONFIG_PREEMPT unsigned int break_lock; @@ -129,123 +101,55 @@ typedef struct { */ #define write_can_lock(x) ((x)->lock == 0) -#ifndef __s390x__ -#define _raw_read_lock(rw) \ - asm volatile(" l 2,0(%1)\n" \ - " j 1f\n" \ - "0: diag 0,0,68\n" \ - "1: la 2,0(2)\n" /* clear high (=write) bit */ \ - " la 3,1(2)\n" /* one more reader */ \ - " cs 2,3,0(%1)\n" /* try to write new value */ \ - " jl 0b" \ - : "=m" ((rw)->lock) : "a" (&(rw)->lock), \ - "m" ((rw)->lock) : "2", "3", "cc", "memory" ) -#else /* __s390x__ */ -#define _raw_read_lock(rw) \ - asm volatile(" lg 2,0(%1)\n" \ - " j 1f\n" \ - "0: " __DIAG44_INSN " 0,%2\n" \ - "1: nihh 2,0x7fff\n" /* clear high (=write) bit */ \ - " la 3,1(2)\n" /* one more reader */ \ - " csg 2,3,0(%1)\n" /* try to write new value */ \ - " jl 0b" \ - : "=m" ((rw)->lock) \ - : "a" (&(rw)->lock), "i" (__DIAG44_OPERAND), \ - "m" ((rw)->lock) : "2", "3", "cc", "memory" ) -#endif /* __s390x__ */ - -#ifndef __s390x__ -#define _raw_read_unlock(rw) \ - asm volatile(" l 2,0(%1)\n" \ - " j 1f\n" \ - "0: diag 0,0,68\n" \ - "1: lr 3,2\n" \ - " ahi 3,-1\n" /* one less reader */ \ - " cs 2,3,0(%1)\n" \ - " jl 0b" \ - : "=m" ((rw)->lock) : "a" (&(rw)->lock), \ - "m" ((rw)->lock) : "2", "3", "cc", "memory" ) -#else /* __s390x__ */ -#define _raw_read_unlock(rw) \ - asm volatile(" lg 2,0(%1)\n" \ - " j 1f\n" \ - "0: " __DIAG44_INSN " 0,%2\n" \ - "1: lgr 3,2\n" \ - " bctgr 3,0\n" /* one less reader */ \ - " csg 2,3,0(%1)\n" \ - " jl 0b" \ - : "=m" ((rw)->lock) \ - : "a" (&(rw)->lock), "i" (__DIAG44_OPERAND), \ - "m" ((rw)->lock) : "2", "3", "cc", "memory" ) -#endif /* __s390x__ */ - -#ifndef __s390x__ -#define _raw_write_lock(rw) \ - asm volatile(" lhi 3,1\n" \ - " sll 3,31\n" /* new lock value = 0x80000000 */ \ - " j 1f\n" \ - "0: diag 0,0,68\n" \ - "1: slr 2,2\n" /* old lock value must be 0 */ \ - " cs 2,3,0(%1)\n" \ - " jl 0b" \ - : "=m" ((rw)->lock) : "a" (&(rw)->lock), \ - "m" ((rw)->lock) : "2", "3", "cc", "memory" ) -#else /* __s390x__ */ -#define _raw_write_lock(rw) \ - asm volatile(" llihh 3,0x8000\n" /* new lock value = 0x80...0 */ \ - " j 1f\n" \ - "0: " __DIAG44_INSN " 0,%2\n" \ - "1: slgr 2,2\n" /* old lock value must be 0 */ \ - " csg 2,3,0(%1)\n" \ - " jl 0b" \ - : "=m" ((rw)->lock) \ - : "a" (&(rw)->lock), "i" (__DIAG44_OPERAND), \ - "m" ((rw)->lock) : "2", "3", "cc", "memory" ) -#endif /* __s390x__ */ - -#ifndef __s390x__ -#define _raw_write_unlock(rw) \ - asm volatile(" slr 3,3\n" /* new lock value = 0 */ \ - " j 1f\n" \ - "0: diag 0,0,68\n" \ - "1: lhi 2,1\n" \ - " sll 2,31\n" /* old lock value must be 0x80000000 */ \ - " cs 2,3,0(%1)\n" \ - " jl 0b" \ - : "=m" ((rw)->lock) : "a" (&(rw)->lock), \ - "m" ((rw)->lock) : "2", "3", "cc", "memory" ) -#else /* __s390x__ */ -#define _raw_write_unlock(rw) \ - asm volatile(" slgr 3,3\n" /* new lock value = 0 */ \ - " j 1f\n" \ - "0: " __DIAG44_INSN " 0,%2\n" \ - "1: llihh 2,0x8000\n" /* old lock value must be 0x8..0 */\ - " csg 2,3,0(%1)\n" \ - " jl 0b" \ - : "=m" ((rw)->lock) \ - : "a" (&(rw)->lock), "i" (__DIAG44_OPERAND), \ - "m" ((rw)->lock) : "2", "3", "cc", "memory" ) -#endif /* __s390x__ */ - -#define _raw_read_trylock(lock) generic_raw_read_trylock(lock) - -extern inline int _raw_write_trylock(rwlock_t *rw) +extern void _raw_read_lock_wait(rwlock_t *lp); +extern int _raw_read_trylock_retry(rwlock_t *lp); +extern void _raw_write_lock_wait(rwlock_t *lp); +extern int 
_raw_write_trylock_retry(rwlock_t *lp); + +static inline void _raw_read_lock(rwlock_t *rw) +{ + unsigned int old; + old = rw->lock & 0x7fffffffU; + if (_raw_compare_and_swap(&rw->lock, old, old + 1) != old) + _raw_read_lock_wait(rw); +} + +static inline void _raw_read_unlock(rwlock_t *rw) +{ + unsigned int old, cmp; + + old = rw->lock; + do { + cmp = old; + old = _raw_compare_and_swap(&rw->lock, old, old - 1); + } while (cmp != old); +} + +static inline void _raw_write_lock(rwlock_t *rw) +{ + if (unlikely(_raw_compare_and_swap(&rw->lock, 0, 0x80000000) != 0)) + _raw_write_lock_wait(rw); +} + +static inline void _raw_write_unlock(rwlock_t *rw) +{ + _raw_compare_and_swap(&rw->lock, 0x80000000, 0); +} + +static inline int _raw_read_trylock(rwlock_t *rw) +{ + unsigned int old; + old = rw->lock & 0x7fffffffU; + if (likely(_raw_compare_and_swap(&rw->lock, old, old + 1) == old)) + return 1; + return _raw_read_trylock_retry(rw); +} + +static inline int _raw_write_trylock(rwlock_t *rw) { - unsigned long result, reg; - - __asm__ __volatile__( -#ifndef __s390x__ - " lhi %1,1\n" - " sll %1,31\n" - " cs %0,%1,0(%3)" -#else /* __s390x__ */ - " llihh %1,0x8000\n" - "0: csg %0,%1,0(%3)\n" -#endif /* __s390x__ */ - : "=d" (result), "=&d" (reg), "=m" (rw->lock) - : "a" (&rw->lock), "m" (rw->lock), "0" (0UL) - : "cc", "memory" ); - return result == 0; + if (likely(_raw_compare_and_swap(&rw->lock, 0, 0x80000000) == 0)) + return 1; + return _raw_write_trylock_retry(rw); } #endif /* __ASM_SPINLOCK_H */ |
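A few notes on the changes above. The bitops.h hunks replace the separate 31-bit and 64-bit assembler versions of ffz()/__ffs() with a single C implementation that narrows the word half by half and finishes with a byte-indexed lookup table (_zb_findmap/_sb_findmap, defined elsewhere in the kernel). Below is a minimal user-space sketch of that narrowing scheme, assuming a 64-bit unsigned long; the run-time-built zb_findmap[] table here is only a stand-in for the kernel's precomputed one.

```c
#include <stdio.h>

/* Stand-in for the kernel's _zb_findmap[]: index of the first zero bit
 * in each byte value (8 for 0xff, where the result is undefined). */
static unsigned char zb_findmap[256];

static void init_findmap(void)
{
	for (int v = 0; v < 256; v++) {
		int bit = 0;

		while (bit < 8 && (v & (1 << bit)))
			bit++;
		zb_findmap[v] = bit;
	}
}

/* ffz: index of the first zero bit; undefined if word == ~0UL.
 * Same halving scheme as the patched s390 helper (64-bit long assumed). */
static unsigned long ffz(unsigned long word)
{
	unsigned long bit = 0;

	if ((word & 0xffffffffUL) == 0xffffffffUL) {
		word >>= 32;
		bit += 32;
	}
	if ((word & 0xffff) == 0xffff) {
		word >>= 16;
		bit += 16;
	}
	if ((word & 0xff) == 0xff) {
		word >>= 8;
		bit += 8;
	}
	return bit + zb_findmap[word & 0xff];
}

int main(void)
{
	init_findmap();
	printf("%lu\n", ffz(0x0000ffffUL));	/* 16 */
	printf("%lu\n", ffz(0xfffffff7UL));	/* 3 */
	return 0;
}
```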
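find_next_zero_bit()/find_next_bit() likewise lose their inline-assembler bodies: only the partially used first word is handled inline (via ffz/__ffs), and the remainder of the bitmap is delegated to find_first_zero_bit()/find_first_bit(). The following is a hedged, generic-C sketch of that control flow; __builtin_ctzl and the naive find_first_zero_bit() are stand-ins, not the kernel's helpers.

```c
#include <stdio.h>

#define WORDSIZE (8 * sizeof(unsigned long))

/* Naive reference scan: first zero bit in addr[0..size), or size if none. */
static unsigned long find_first_zero_bit(const unsigned long *addr,
					 unsigned long size)
{
	unsigned long i;

	for (i = 0; i < size; i++)
		if (!(addr[i / WORDSIZE] & (1UL << (i % WORDSIZE))))
			return i;
	return size;
}

/* Same shape as the patched s390 helper: finish the partially used first
 * word, then delegate the rest of the bitmap to find_first_zero_bit(). */
static unsigned long find_next_zero_bit(const unsigned long *addr,
					unsigned long size,
					unsigned long offset)
{
	const unsigned long *p;
	unsigned long bit, set;

	if (offset >= size)
		return size;
	bit = offset & (WORDSIZE - 1);
	offset -= bit;
	size -= offset;
	p = addr + offset / WORDSIZE;
	if (bit) {
		/* ffz(x) == ctz(~x); the guard keeps ctz well-defined
		 * even when inv happens to be 0. */
		unsigned long inv = ~(*p >> bit);

		set = (inv ? (unsigned long)__builtin_ctzl(inv) : WORDSIZE) + bit;
		if (set >= size)
			return size + offset;
		if (set < WORDSIZE)
			return set + offset;
		offset += WORDSIZE;
		size -= WORDSIZE;
		p++;
	}
	return offset + find_first_zero_bit(p, size);
}

int main(void)
{
	unsigned long map[2] = { ~0UL, ~0UL ^ (1UL << 3) };

	printf("%lu\n", find_next_zero_bit(map, 128, 5));	/* 67: word 1, bit 3 */
	printf("%lu\n", find_next_zero_bit(map, 64, 5));	/* 64: nothing in range */
	return 0;
}
```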
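The spinlock.h rewrite is the largest piece: every diag-44 assembler loop is replaced by the single CS-based _raw_compare_and_swap() helper, with the uncontended path inlined and the contended path moved out of line (_raw_spin_lock_wait() and friends, implemented in arch code). The lock word is 0 when free and holds the caller's return address while locked; rwlocks keep a reader count with 0x80000000 as the writer bit. The sketch below is a rough user-space model of that protocol only: __sync_val_compare_and_swap stands in for the CS helper, a constant tag stands in for __builtin_return_address(0), and the out-of-line wait/retry paths are reduced to simple spins.

```c
#include <stdio.h>

typedef struct { volatile unsigned int lock; } spinlock_t;
typedef struct { volatile unsigned int lock; } rwlock_t;

/* Model of _raw_compare_and_swap(): atomically replace *lock with new
 * if it still equals old, and return the value found (CS semantics). */
static inline unsigned int cas(volatile unsigned int *lock,
			       unsigned int old, unsigned int new)
{
	return __sync_val_compare_and_swap(lock, old, new);
}

static void spin_lock(spinlock_t *lp)
{
	/* 0 = free; a non-zero owner tag (the kernel stores the caller's
	 * return address) marks the lock as held. */
	unsigned int pc = 1;

	while (cas(&lp->lock, 0, pc) != 0)
		;	/* _raw_spin_lock_wait() in the real code */
}

static void spin_unlock(spinlock_t *lp)
{
	cas(&lp->lock, lp->lock, 0);
}

static void read_lock(rwlock_t *rw)
{
	unsigned int old;

	for (;;) {
		old = rw->lock & 0x7fffffffU;	/* drop the writer bit */
		if (cas(&rw->lock, old, old + 1) == old)
			return;			/* one more reader */
	}
}

static void read_unlock(rwlock_t *rw)
{
	unsigned int old, cmp;

	old = rw->lock;
	do {
		cmp = old;
		old = cas(&rw->lock, old, old - 1);	/* one less reader */
	} while (cmp != old);
}

static void write_lock(rwlock_t *rw)
{
	/* A writer may only enter when no readers or writers hold the lock. */
	while (cas(&rw->lock, 0, 0x80000000U) != 0)
		;	/* _raw_write_lock_wait() in the real code */
}

static void write_unlock(rwlock_t *rw)
{
	cas(&rw->lock, 0x80000000U, 0);
}

int main(void)
{
	spinlock_t sl = { 0 };
	rwlock_t rw = { 0 };

	spin_lock(&sl);
	spin_unlock(&sl);
	read_lock(&rw);
	read_lock(&rw);
	printf("readers: %u\n", rw.lock);	/* 2 */
	read_unlock(&rw);
	read_unlock(&rw);
	write_lock(&rw);
	write_unlock(&rw);
	printf("final: %u\n", rw.lock);		/* 0 */
	return 0;
}
```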