Diffstat (limited to 'include/asm-mips/atomic.h')
-rw-r--r--	include/asm-mips/atomic.h	40
1 files changed, 40 insertions, 0 deletions
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index c0bd8d014e1..6202eb8a14b 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -62,20 +62,24 @@ static __inline__ void atomic_add(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	ll	%0, %1		# atomic_add		\n"
 		"	addu	%0, %2					\n"
 		"	sc	%0, %1					\n"
 		"	beqzl	%0, 1b					\n"
+		"	.set	mips0					\n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
 	} else if (cpu_has_llsc) {
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	ll	%0, %1		# atomic_add		\n"
 		"	addu	%0, %2					\n"
 		"	sc	%0, %1					\n"
 		"	beqz	%0, 1b					\n"
+		"	.set	mips0					\n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
 	} else {
@@ -100,20 +104,24 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	ll	%0, %1		# atomic_sub		\n"
 		"	subu	%0, %2					\n"
 		"	sc	%0, %1					\n"
 		"	beqzl	%0, 1b					\n"
+		"	.set	mips0					\n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
 	} else if (cpu_has_llsc) {
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	ll	%0, %1		# atomic_sub		\n"
 		"	subu	%0, %2					\n"
 		"	sc	%0, %1					\n"
 		"	beqz	%0, 1b					\n"
+		"	.set	mips0					\n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
 	} else {
@@ -136,12 +144,14 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	ll	%1, %2		# atomic_add_return	\n"
 		"	addu	%0, %1, %3				\n"
 		"	sc	%0, %2					\n"
 		"	beqzl	%0, 1b					\n"
 		"	addu	%0, %1, %3				\n"
 		"	sync						\n"
+		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
@@ -149,12 +159,14 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	ll	%1, %2		# atomic_add_return	\n"
 		"	addu	%0, %1, %3				\n"
 		"	sc	%0, %2					\n"
 		"	beqz	%0, 1b					\n"
 		"	addu	%0, %1, %3				\n"
 		"	sync						\n"
+		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
@@ -179,12 +191,14 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	ll	%1, %2		# atomic_sub_return	\n"
 		"	subu	%0, %1, %3				\n"
 		"	sc	%0, %2					\n"
 		"	beqzl	%0, 1b					\n"
 		"	subu	%0, %1, %3				\n"
 		"	sync						\n"
+		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
@@ -192,12 +206,14 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	ll	%1, %2		# atomic_sub_return	\n"
 		"	subu	%0, %1, %3				\n"
 		"	sc	%0, %2					\n"
 		"	beqz	%0, 1b					\n"
 		"	subu	%0, %1, %3				\n"
 		"	sync						\n"
+		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
@@ -229,6 +245,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
 		"	subu	%0, %1, %3				\n"
 		"	bltz	%0, 1f					\n"
@@ -236,6 +253,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		"	beqzl	%0, 1b					\n"
 		"	sync						\n"
 		"1:							\n"
+		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
@@ -243,6 +261,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
 		"	subu	%0, %1, %3				\n"
 		"	bltz	%0, 1f					\n"
@@ -250,6 +269,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		"	beqz	%0, 1b					\n"
 		"	sync						\n"
 		"1:							\n"
+		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
@@ -367,20 +387,24 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	lld	%0, %1		# atomic64_add		\n"
 		"	addu	%0, %2					\n"
 		"	scd	%0, %1					\n"
 		"	beqzl	%0, 1b					\n"
+		"	.set	mips0					\n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
 	} else if (cpu_has_llsc) {
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	lld	%0, %1		# atomic64_add		\n"
 		"	addu	%0, %2					\n"
 		"	scd	%0, %1					\n"
 		"	beqz	%0, 1b					\n"
+		"	.set	mips0					\n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
 	} else {
@@ -405,20 +429,24 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	lld	%0, %1		# atomic64_sub		\n"
 		"	subu	%0, %2					\n"
 		"	scd	%0, %1					\n"
 		"	beqzl	%0, 1b					\n"
+		"	.set	mips0					\n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
 	} else if (cpu_has_llsc) {
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	lld	%0, %1		# atomic64_sub		\n"
 		"	subu	%0, %2					\n"
 		"	scd	%0, %1					\n"
 		"	beqz	%0, 1b					\n"
+		"	.set	mips0					\n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
 	} else {
@@ -441,12 +469,14 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	lld	%1, %2		# atomic64_add_return	\n"
 		"	addu	%0, %1, %3				\n"
 		"	scd	%0, %2					\n"
 		"	beqzl	%0, 1b					\n"
 		"	addu	%0, %1, %3				\n"
 		"	sync						\n"
+		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
@@ -454,12 +484,14 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	lld	%1, %2		# atomic64_add_return	\n"
 		"	addu	%0, %1, %3				\n"
 		"	scd	%0, %2					\n"
 		"	beqz	%0, 1b					\n"
 		"	addu	%0, %1, %3				\n"
 		"	sync						\n"
+		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
@@ -484,12 +516,14 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	lld	%1, %2		# atomic64_sub_return	\n"
 		"	subu	%0, %1, %3				\n"
 		"	scd	%0, %2					\n"
 		"	beqzl	%0, 1b					\n"
 		"	subu	%0, %1, %3				\n"
 		"	sync						\n"
+		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
@@ -497,12 +531,14 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	lld	%1, %2		# atomic64_sub_return	\n"
 		"	subu	%0, %1, %3				\n"
 		"	scd	%0, %2					\n"
 		"	beqz	%0, 1b					\n"
 		"	subu	%0, %1, %3				\n"
 		"	sync						\n"
+		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
@@ -534,6 +570,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
 		"	dsubu	%0, %1, %3				\n"
 		"	bltz	%0, 1f					\n"
@@ -541,6 +578,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		"	beqzl	%0, 1b					\n"
 		"	sync						\n"
 		"1:							\n"
+		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
@@ -548,6 +586,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
 		"	dsubu	%0, %1, %3				\n"
 		"	bltz	%0, 1f					\n"
@@ -555,6 +594,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		"	beqz	%0, 1b					\n"
 		"	sync						\n"
 		"1:							\n"
+		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
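The patch is purely mechanical: every ll/sc (and lld/scd) loop gains a ".set mips3" directive before the load-linked and a ".set mips0" after the final branch or label, so the assembler accepts the MIPS II/III load-linked/store-conditional opcodes even when the ISA level selected by the compiler flags is lower, and then reverts to the command-line ISA. The following is a minimal sketch of the same pattern outside the kernel's atomic_t machinery; the function name atomic_add_demo and the plain int counter are illustrative only, not part of the patch.

/*
 * Sketch of the ll/sc retry loop wrapped in .set mips3 / .set mips0,
 * mirroring the constraints used in the patched atomic_add().
 * ".set mips3" lets the assembler emit ll/sc regardless of the -march
 * level in effect; ".set mips0" restores the command-line ISA.
 */
static inline void atomic_add_demo(int i, volatile int *counter)
{
	int temp;

	__asm__ __volatile__(
	"	.set	mips3					\n"
	"1:	ll	%0, %1		# load-linked		\n"
	"	addu	%0, %2					\n"
	"	sc	%0, %1		# store-conditional	\n"
	"	beqz	%0, 1b		# retry if sc failed	\n"
	"	.set	mips0					\n"
	: "=&r" (temp), "=m" (*counter)
	: "Ir" (i), "m" (*counter));
}

The beqz (or beqzl) retry is the standard ll/sc idiom: sc writes 1 to its register operand if the store succeeded and 0 if the reservation was lost to another CPU, in which case the sequence restarts at label 1.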