From: Thomas Gleixner
Date: Wed, 30 Jan 2008 12:30:34 +0000 (+0100)
Subject: x86: spinlock_32/64 substitute types and instructions
X-Git-Tag: v2.6.25-rc1~1143^2~772
X-Git-Url: http://pilppa.com/gitweb/?a=commitdiff_plain;h=cf244e30f5b50763cbe85f7de30923d12999e38d;p=linux-2.6-omap-h63xx.git

x86: spinlock_32/64 substitute types and instructions

Use _slock_t for the spinlock data type and replace the instruction
mnemonics with string defines, which makes the code of the 32-bit and
64-bit versions more or less identical.

Signed-off-by: Thomas Gleixner
Signed-off-by: Ingo Molnar
---

(An illustrative sketch of the substitution technique follows the diff.)

diff --git a/include/asm-x86/spinlock_32.h b/include/asm-x86/spinlock_32.h
index 4ef626d5368..2de9b8b8990 100644
--- a/include/asm-x86/spinlock_32.h
+++ b/include/asm-x86/spinlock_32.h
@@ -17,20 +17,27 @@
  * (the type definitions are in asm/spinlock_types.h)
  */
 
+typedef char _slock_t;
+#define LOCK_INS_DEC "decb"
+#define LOCK_INS_XCH "xchgb"
+#define LOCK_INS_MOV "movb"
+#define LOCK_INS_CMP "cmpb"
+#define LOCK_PTR_REG "a"
+
 static inline int __raw_spin_is_locked(raw_spinlock_t *lock)
 {
-	return *(volatile signed char *)(&(lock)->slock) <= 0;
+	return *(volatile _slock_t *)(&(lock)->slock) <= 0;
 }
 
 static inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
 	asm volatile(
 		"\n1:\t"
-		LOCK_PREFIX " ; decb %0\n\t"
+		LOCK_PREFIX " ; " LOCK_INS_DEC " %0\n\t"
 		"jns 3f\n"
 		"2:\t"
 		"rep;nop\n\t"
-		"cmpb $0,%0\n\t"
+		LOCK_INS_CMP " $0,%0\n\t"
 		"jle 2b\n\t"
 		"jmp 1b\n"
 		"3:\n\t"
@@ -51,25 +58,25 @@ static inline void __raw_spin_lock_flags(raw_spinlock_t *lock,
 {
 	asm volatile(
 		"\n1:\t"
-		LOCK_PREFIX " ; decb %[slock]\n\t"
+		LOCK_PREFIX " ; " LOCK_INS_DEC " %[slock]\n\t"
 		"jns 5f\n"
 		"testl $0x200, %[flags]\n\t"
 		"jz 4f\n\t"
 		STI_STRING "\n"
 		"3:\t"
 		"rep;nop\n\t"
-		"cmpb $0, %[slock]\n\t"
+		LOCK_INS_CMP " $0, %[slock]\n\t"
 		"jle 3b\n\t"
 		CLI_STRING "\n\t"
 		"jmp 1b\n"
 		"4:\t"
 		"rep;nop\n\t"
-		"cmpb $0, %[slock]\n\t"
+		LOCK_INS_CMP " $0, %[slock]\n\t"
 		"jg 1b\n\t"
 		"jmp 4b\n"
 		"5:\n\t"
 		: [slock] "+m" (lock->slock)
-		: [flags] "r" (flags)
+		: [flags] "r" ((u32)flags)
 		  CLI_STI_INPUT_ARGS
 		: "memory" CLI_STI_CLOBBERS);
 }
@@ -77,10 +84,10 @@ static inline void __raw_spin_lock_flags(raw_spinlock_t *lock,
 
 static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
-	signed char oldval;
+	_slock_t oldval;
 
 	asm volatile(
-		"xchgb %b0,%1"
+		LOCK_INS_XCH " %0,%1"
 		:"=q" (oldval), "+m" (lock->slock)
 		:"0" (0) : "memory");
 
@@ -98,7 +105,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
-	asm volatile("movb $1,%0" : "=m" (lock->slock) :: "memory");
+	asm volatile(LOCK_INS_MOV " $1,%0" : "=m" (lock->slock) :: "memory");
 }
 
 #else
@@ -150,7 +157,7 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
 		     "jns 1f\n"
 		     "call __read_lock_failed\n\t"
 		     "1:\n"
-		     ::"a" (rw) : "memory");
+		     ::LOCK_PTR_REG (rw) : "memory");
 }
 
 static inline void __raw_write_lock(raw_rwlock_t *rw)
@@ -159,7 +166,7 @@ static inline void __raw_write_lock(raw_rwlock_t *rw)
 		     "jz 1f\n"
 		     "call __write_lock_failed\n\t"
 		     "1:\n"
-		     ::"a" (rw), "i" (RW_LOCK_BIAS) : "memory");
+		     ::LOCK_PTR_REG (rw), "i" (RW_LOCK_BIAS) : "memory");
 }
 
 static inline int __raw_read_trylock(raw_rwlock_t *lock)
diff --git a/include/asm-x86/spinlock_64.h b/include/asm-x86/spinlock_64.h
index 19d483c8018..f5ba90b7335 100644
--- a/include/asm-x86/spinlock_64.h
+++ b/include/asm-x86/spinlock_64.h
@@ -17,20 +17,27 @@
  * (the type definitions are in asm/spinlock_types.h)
  */
 
+typedef int _slock_t;
+#define LOCK_INS_DEC "decl"
+#define LOCK_INS_XCH "xchgl"
+#define LOCK_INS_MOV "movl"
+#define LOCK_INS_CMP "cmpl"
+#define LOCK_PTR_REG "D"
+
 static inline int __raw_spin_is_locked(raw_spinlock_t *lock)
 {
-	return *(volatile signed int *)(&(lock)->slock) <= 0;
+	return *(volatile _slock_t *)(&(lock)->slock) <= 0;
 }
 
 static inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
 	asm volatile(
 		"\n1:\t"
-		LOCK_PREFIX " ; decl %0\n\t"
+		LOCK_PREFIX " ; " LOCK_INS_DEC " %0\n\t"
 		"jns 3f\n"
 		"2:\t"
 		"rep;nop\n\t"
-		"cmpl $0,%0\n\t"
+		LOCK_INS_CMP " $0,%0\n\t"
 		"jle 2b\n\t"
 		"jmp 1b\n"
 		"3:\n\t"
@@ -51,25 +58,25 @@ static inline void __raw_spin_lock_flags(raw_spinlock_t *lock,
 {
 	asm volatile(
 		"\n1:\t"
-		LOCK_PREFIX " ; decl %[slock]\n\t"
+		LOCK_PREFIX " ; " LOCK_INS_DEC " %[slock]\n\t"
 		"jns 5f\n"
 		"testl $0x200, %[flags]\n\t"
 		"jz 4f\n\t"
 		STI_STRING "\n"
 		"3:\t"
 		"rep;nop\n\t"
-		"cmpl $0, %[slock]\n\t"
+		LOCK_INS_CMP " $0, %[slock]\n\t"
 		"jle 3b\n\t"
 		CLI_STRING "\n\t"
 		"jmp 1b\n"
 		"4:\t"
 		"rep;nop\n\t"
-		"cmpl $0, %[slock]\n\t"
+		LOCK_INS_CMP " $0, %[slock]\n\t"
 		"jg 1b\n\t"
 		"jmp 4b\n"
 		"5:\n\t"
 		: [slock] "+m" (lock->slock)
-		: [flags] "r" ((unsigned)flags)
+		: [flags] "r" ((u32)flags)
 		  CLI_STI_INPUT_ARGS
 		: "memory" CLI_STI_CLOBBERS);
 }
@@ -77,10 +84,10 @@ static inline void __raw_spin_lock_flags(raw_spinlock_t *lock,
 
 static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
-	int oldval;
+	_slock_t oldval;
 
 	asm volatile(
-		"xchgl %0,%1"
+		LOCK_INS_XCH " %0,%1"
 		:"=q" (oldval), "+m" (lock->slock)
 		:"0" (0) : "memory");
 
@@ -89,7 +96,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
-	asm volatile("movl $1,%0" : "=m" (lock->slock) :: "memory");
+	asm volatile(LOCK_INS_MOV " $1,%0" : "=m" (lock->slock) :: "memory");
 }
 
 static inline void __raw_spin_unlock_wait(raw_spinlock_t *lock)
@@ -128,7 +135,7 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
 		     "jns 1f\n"
 		     "call __read_lock_failed\n\t"
 		     "1:\n"
-		     ::"D" (rw) : "memory");
+		     ::LOCK_PTR_REG (rw) : "memory");
 }
 
 static inline void __raw_write_lock(raw_rwlock_t *rw)
@@ -137,7 +144,7 @@ static inline void __raw_write_lock(raw_rwlock_t *rw)
 		     "jz 1f\n"
 		     "call __write_lock_failed\n\t"
 		     "1:\n"
-		     ::"D" (rw), "i" (RW_LOCK_BIAS) : "memory");
+		     ::LOCK_PTR_REG (rw), "i" (RW_LOCK_BIAS) : "memory");
 }
 
 static inline int __raw_read_trylock(raw_rwlock_t *lock)
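
---

For readers unfamiliar with the trick, here is a minimal, self-contained
userspace sketch of the same substitution technique.  It is not part of the
patch: the demo_spinlock/demo_trylock/demo_unlock names are made up for
illustration, it unconditionally uses the 64-bit ("l"-suffixed) variants,
and it assumes an x86 target built with GCC or Clang style inline asm.  The
kernel headers above additionally depend on LOCK_PREFIX, the CLI/STI string
macros and raw_spinlock_t, all of which are omitted here.

/* demo.c: illustrative sketch, not the kernel header */
#include <stdio.h>

/* Width-dependent pieces live behind defines, so the function bodies
 * below would be byte-for-byte identical in a "b"-suffixed variant. */
typedef int _slock_t;
#define LOCK_INS_XCH "xchgl"
#define LOCK_INS_MOV "movl"

struct demo_spinlock {
	_slock_t slock;		/* > 0 means unlocked, <= 0 means locked */
};

static inline int demo_trylock(struct demo_spinlock *lock)
{
	_slock_t oldval;

	/* Same shape as __raw_spin_trylock(): atomically exchange 0 into
	 * the lock word; a positive old value means the lock was free. */
	asm volatile(LOCK_INS_XCH " %0,%1"
		     : "=q" (oldval), "+m" (lock->slock)
		     : "0" (0)
		     : "memory");
	return oldval > 0;
}

static inline void demo_unlock(struct demo_spinlock *lock)
{
	/* Mirrors __raw_spin_unlock(): store 1, i.e. unlocked. */
	asm volatile(LOCK_INS_MOV " $1,%0" : "=m" (lock->slock) :: "memory");
}

int main(void)
{
	struct demo_spinlock lock = { .slock = 1 };	/* start unlocked */

	printf("trylock: %d\n", demo_trylock(&lock));	/* 1: acquired */
	printf("trylock: %d\n", demo_trylock(&lock));	/* 0: already held */
	demo_unlock(&lock);
	printf("trylock: %d\n", demo_trylock(&lock));	/* 1: acquired again */
	return 0;
}

Note that xchg with a memory operand is implicitly locked on x86, so the
trylock path needs no LOCK_PREFIX; the same holds for the kernel's
__raw_spin_trylock() in the diff above.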