author		Harvey Harrison <harvey.harrison@gmail.com>	2008-01-30 13:31:26 +0100
committer	Ingo Molnar <mingo@elte.hu>			2008-01-30 13:31:26 +0100
commit		8ee5797a91bdb713b4031741b33bd035f9c43870
tree		1ce447909c7b2d43f88631180b72d67bd02d6f83
parent		992b95920a311db3267659ea17160e4812a05830
x86: introduce asm helpers in local_{32|64}.h
Handle the use of long (l-suffixed) instructions on X86_32 and quad (q-suffixed) instructions on X86_64 through a single set of _ASM_* helper macros.
Signed-off-by: Harvey Harrison <harvey.harrison@gmail.com>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
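
For illustration, a minimal user-space sketch of the technique these helpers rely on: each _ASM_* macro expands to a string literal, and C string-literal concatenation splices it into the inline-asm template, so a single local_inc() body emits incl on X86_32 and incq on X86_64. The my_local_t type and my_local_inc() below are stand-ins invented for this sketch; only the _ASM_INC definition mirrors the patch.

/* Standalone sketch (assumes GCC or Clang on x86; not kernel code). */
#include <stdio.h>

#ifdef __x86_64__
# define _ASM_INC " incq "	/* 64-bit build: quad-word increment */
#else
# define _ASM_INC " incl "	/* 32-bit build: long-word increment */
#endif

typedef struct { volatile long counter; } my_local_t;	/* stand-in for local_t */

static inline void my_local_inc(my_local_t *l)
{
	/* _ASM_INC "%0" concatenates to " incq %0" (or " incl %0"),
	 * matching the patched local_inc() in local_{32|64}.h. */
	__asm__ __volatile__(
		_ASM_INC "%0"
		:"+m" (l->counter));
}

int main(void)
{
	my_local_t v = { 41 };

	my_local_inc(&v);
	printf("%ld\n", v.counter);	/* prints 42 */
	return 0;
}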
-rw-r--r--	include/asm-x86/asm.h		| 12
-rw-r--r--	include/asm-x86/local_32.h	| 18
-rw-r--r--	include/asm-x86/local_64.h	| 18
3 files changed, 30 insertions(+), 18 deletions(-)
diff --git a/include/asm-x86/asm.h b/include/asm-x86/asm.h
index 8661ae75488..1a6980a60fc 100644
--- a/include/asm-x86/asm.h
+++ b/include/asm-x86/asm.h
@@ -8,6 +8,12 @@
 # define _ASM_ALIGN " .balign 4 "
 # define _ASM_MOV_UL " movl "
 
+# define _ASM_INC " incl "
+# define _ASM_DEC " decl "
+# define _ASM_ADD " addl "
+# define _ASM_SUB " subl "
+# define _ASM_XADD " xaddl "
+
 #else
 /* 64 bits */
 
@@ -15,6 +21,12 @@
 # define _ASM_ALIGN " .balign 8 "
 # define _ASM_MOV_UL " movq "
 
+# define _ASM_INC " incq "
+# define _ASM_DEC " decq "
+# define _ASM_ADD " addq "
+# define _ASM_SUB " subq "
+# define _ASM_XADD " xaddq "
+
 #endif /* CONFIG_X86_32 */
 
 #endif /* _ASM_X86_ASM_H */
diff --git a/include/asm-x86/local_32.h b/include/asm-x86/local_32.h
index 33d9c7bf463..c219fe56b3d 100644
--- a/include/asm-x86/local_32.h
+++ b/include/asm-x86/local_32.h
@@ -4,21 +4,21 @@
 static inline void local_inc(local_t *l)
 {
 	__asm__ __volatile__(
-		"incl %0"
+		_ASM_INC "%0"
 		:"+m" (l->a.counter));
 }
 
 static inline void local_dec(local_t *l)
 {
 	__asm__ __volatile__(
-		"decl %0"
+		_ASM_DEC "%0"
 		:"+m" (l->a.counter));
 }
 
 static inline void local_add(long i, local_t *l)
 {
 	__asm__ __volatile__(
-		"addl %1,%0"
+		_ASM_ADD "%1,%0"
 		:"+m" (l->a.counter)
 		:"ir" (i));
 }
@@ -26,7 +26,7 @@ static inline void local_add(long i, local_t *l)
 static inline void local_sub(long i, local_t *l)
 {
 	__asm__ __volatile__(
-		"subl %1,%0"
+		_ASM_SUB "%1,%0"
 		:"+m" (l->a.counter)
 		:"ir" (i));
 }
@@ -45,7 +45,7 @@ static inline int local_sub_and_test(long i, local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"subl %2,%0; sete %1"
+		_ASM_SUB "%2,%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		:"ir" (i) : "memory");
 	return c;
@@ -64,7 +64,7 @@ static inline int local_dec_and_test(local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"decl %0; sete %1"
+		_ASM_DEC "%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		: : "memory");
 	return c != 0;
@@ -83,7 +83,7 @@ static inline int local_inc_and_test(local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"incl %0; sete %1"
+		_ASM_INC "%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		: : "memory");
 	return c != 0;
@@ -103,7 +103,7 @@ static inline int local_add_negative(long i, local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"addl %2,%0; sets %1"
+		_ASM_ADD "%2,%0; sets %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		:"ir" (i) : "memory");
 	return c;
@@ -127,7 +127,7 @@ static inline long local_add_return(long i, local_t *l)
 	/* Modern 486+ processor */
 	__i = i;
 	__asm__ __volatile__(
-		"xaddl %0, %1;"
+		_ASM_XADD "%0, %1;"
 		:"+r" (i), "+m" (l->a.counter)
 		: : "memory");
 	return i + __i;
diff --git a/include/asm-x86/local_64.h b/include/asm-x86/local_64.h
index 50e99eddd62..d685cd7e014 100644
--- a/include/asm-x86/local_64.h
+++ b/include/asm-x86/local_64.h
@@ -4,21 +4,21 @@
 static inline void local_inc(local_t *l)
 {
 	__asm__ __volatile__(
-		"incq %0"
+		_ASM_INC "%0"
 		:"+m" (l->a.counter));
 }
 
 static inline void local_dec(local_t *l)
 {
 	__asm__ __volatile__(
-		"decq %0"
+		_ASM_DEC "%0"
 		:"+m" (l->a.counter));
 }
 
 static inline void local_add(long i, local_t *l)
 {
 	__asm__ __volatile__(
-		"addq %1,%0"
+		_ASM_ADD "%1,%0"
 		:"+m" (l->a.counter)
 		:"ir" (i));
 }
@@ -26,7 +26,7 @@ static inline void local_add(long i, local_t *l)
 static inline void local_sub(long i, local_t *l)
 {
 	__asm__ __volatile__(
-		"subq %1,%0"
+		_ASM_SUB "%1,%0"
 		:"+m" (l->a.counter)
 		:"ir" (i));
 }
@@ -45,7 +45,7 @@ static inline int local_sub_and_test(long i, local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"subq %2,%0; sete %1"
+		_ASM_SUB "%2,%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		:"ir" (i) : "memory");
 	return c;
@@ -64,7 +64,7 @@ static inline int local_dec_and_test(local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"decq %0; sete %1"
+		_ASM_DEC "%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		: : "memory");
 	return c != 0;
@@ -83,7 +83,7 @@ static inline int local_inc_and_test(local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"incq %0; sete %1"
+		_ASM_INC "%0; sete %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		: : "memory");
 	return c != 0;
@@ -103,7 +103,7 @@ static inline int local_add_negative(long i, local_t *l)
 	unsigned char c;
 
 	__asm__ __volatile__(
-		"addq %2,%0; sets %1"
+		_ASM_ADD "%2,%0; sets %1"
 		:"+m" (l->a.counter), "=qm" (c)
 		:"ir" (i) : "memory");
 	return c;
@@ -120,7 +120,7 @@ static inline long local_add_return(long i, local_t *l)
 {
 	long __i = i;
 	__asm__ __volatile__(
-		"xaddq %0, %1;"
+		_ASM_XADD "%0, %1;"
 		:"+r" (i), "+m" (l->a.counter)
 		: : "memory");
 	return i + __i;