Commit 8c749327 authored by Andi Kleen, committed by Linus Torvalds

[PATCH] i386: Remove alternative_smp

The .fill directive causes miscompilations with some binutils versions.

Instead just patch the lock prefix in the lock constructs. That is the
majority of the cost and should be good enough.

Cc: Gerd Hoffmann <kraxel@suse.de>
Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
parent 841be8dd
...@@ -88,9 +88,6 @@ static inline void alternatives_smp_switch(int smp) {} ...@@ -88,9 +88,6 @@ static inline void alternatives_smp_switch(int smp) {}
/* /*
* Alternative inline assembly for SMP. * Alternative inline assembly for SMP.
* *
* alternative_smp() takes two versions (SMP first, UP second) and is
* for more complex stuff such as spinlocks.
*
* The LOCK_PREFIX macro defined here replaces the LOCK and * The LOCK_PREFIX macro defined here replaces the LOCK and
* LOCK_PREFIX macros used everywhere in the source tree. * LOCK_PREFIX macros used everywhere in the source tree.
* *
...@@ -110,21 +107,6 @@ static inline void alternatives_smp_switch(int smp) {} ...@@ -110,21 +107,6 @@ static inline void alternatives_smp_switch(int smp) {}
*/ */
#ifdef CONFIG_SMP #ifdef CONFIG_SMP
#define alternative_smp(smpinstr, upinstr, args...) \
asm volatile ("661:\n\t" smpinstr "\n662:\n" \
".section .smp_altinstructions,\"a\"\n" \
" .align 4\n" \
" .long 661b\n" /* label */ \
" .long 663f\n" /* new instruction */ \
" .byte " __stringify(X86_FEATURE_UP) "\n" \
" .byte 662b-661b\n" /* sourcelen */ \
" .byte 664f-663f\n" /* replacementlen */ \
".previous\n" \
".section .smp_altinstr_replacement,\"awx\"\n" \
"663:\n\t" upinstr "\n" /* replacement */ \
"664:\n\t.fill 662b-661b,1,0x42\n" /* space for original */ \
".previous" : args)
#define LOCK_PREFIX \ #define LOCK_PREFIX \
".section .smp_locks,\"a\"\n" \ ".section .smp_locks,\"a\"\n" \
" .align 4\n" \ " .align 4\n" \
...@@ -133,8 +115,6 @@ static inline void alternatives_smp_switch(int smp) {} ...@@ -133,8 +115,6 @@ static inline void alternatives_smp_switch(int smp) {}
"661:\n\tlock; " "661:\n\tlock; "
#else /* ! CONFIG_SMP */ #else /* ! CONFIG_SMP */
#define alternative_smp(smpinstr, upinstr, args...) \
asm volatile (upinstr : args)
#define LOCK_PREFIX "" #define LOCK_PREFIX ""
#endif #endif
......
...@@ -21,22 +21,20 @@ ...@@ -21,22 +21,20 @@
#define RW_LOCK_BIAS_STR "0x01000000" #define RW_LOCK_BIAS_STR "0x01000000"
#define __build_read_lock_ptr(rw, helper) \ #define __build_read_lock_ptr(rw, helper) \
alternative_smp("lock; subl $1,(%0)\n\t" \ asm volatile(LOCK_PREFIX " ; subl $1,(%0)\n\t" \
"jns 1f\n" \ "jns 1f\n" \
"call " helper "\n\t" \ "call " helper "\n\t" \
"1:\n", \ "1:\n" \
"subl $1,(%0)\n\t", \
:"a" (rw) : "memory") :"a" (rw) : "memory")
#define __build_read_lock_const(rw, helper) \ #define __build_read_lock_const(rw, helper) \
alternative_smp("lock; subl $1,%0\n\t" \ asm volatile(LOCK_PREFIX " ; subl $1,%0\n\t" \
"jns 1f\n" \ "jns 1f\n" \
"pushl %%eax\n\t" \ "pushl %%eax\n\t" \
"leal %0,%%eax\n\t" \ "leal %0,%%eax\n\t" \
"call " helper "\n\t" \ "call " helper "\n\t" \
"popl %%eax\n\t" \ "popl %%eax\n\t" \
"1:\n", \ "1:\n" : \
"subl $1,%0\n\t", \
"+m" (*(volatile int *)rw) : : "memory") "+m" (*(volatile int *)rw) : : "memory")
#define __build_read_lock(rw, helper) do { \ #define __build_read_lock(rw, helper) do { \
...@@ -47,7 +45,7 @@ ...@@ -47,7 +45,7 @@
} while (0) } while (0)
#define __build_write_lock_ptr(rw, helper) \ #define __build_write_lock_ptr(rw, helper) \
alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \ asm volatile(LOCK_PREFIX " ; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
"jz 1f\n" \ "jz 1f\n" \
"call " helper "\n\t" \ "call " helper "\n\t" \
"1:\n", \ "1:\n", \
...@@ -55,7 +53,7 @@ ...@@ -55,7 +53,7 @@
:"a" (rw) : "memory") :"a" (rw) : "memory")
#define __build_write_lock_const(rw, helper) \ #define __build_write_lock_const(rw, helper) \
alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",%0\n\t" \ asm volatile(LOCK_PREFIX " ; subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
"jz 1f\n" \ "jz 1f\n" \
"pushl %%eax\n\t" \ "pushl %%eax\n\t" \
"leal %0,%%eax\n\t" \ "leal %0,%%eax\n\t" \
......
...@@ -22,7 +22,7 @@ ...@@ -22,7 +22,7 @@
#define __raw_spin_lock_string \ #define __raw_spin_lock_string \
"\n1:\t" \ "\n1:\t" \
"lock ; decb %0\n\t" \ LOCK_PREFIX " ; decb %0\n\t" \
"jns 3f\n" \ "jns 3f\n" \
"2:\t" \ "2:\t" \
"rep;nop\n\t" \ "rep;nop\n\t" \
...@@ -38,7 +38,7 @@ ...@@ -38,7 +38,7 @@
*/ */
#define __raw_spin_lock_string_flags \ #define __raw_spin_lock_string_flags \
"\n1:\t" \ "\n1:\t" \
"lock ; decb %0\n\t" \ LOCK_PREFIX " ; decb %0\n\t" \
"jns 5f\n" \ "jns 5f\n" \
"2:\t" \ "2:\t" \
"testl $0x200, %1\n\t" \ "testl $0x200, %1\n\t" \
...@@ -57,15 +57,9 @@ ...@@ -57,15 +57,9 @@
"jmp 4b\n" \ "jmp 4b\n" \
"5:\n\t" "5:\n\t"
#define __raw_spin_lock_string_up \
"\n\tdecb %0"
static inline void __raw_spin_lock(raw_spinlock_t *lock) static inline void __raw_spin_lock(raw_spinlock_t *lock)
{ {
alternative_smp( asm(__raw_spin_lock_string : "+m" (lock->slock) : : "memory");
__raw_spin_lock_string,
__raw_spin_lock_string_up,
"+m" (lock->slock) : : "memory");
} }
/* /*
...@@ -76,10 +70,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock) ...@@ -76,10 +70,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
#ifndef CONFIG_PROVE_LOCKING #ifndef CONFIG_PROVE_LOCKING
static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags) static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
{ {
alternative_smp( asm(__raw_spin_lock_string_flags : "+m" (lock->slock) : "r" (flags) : "memory");
__raw_spin_lock_string_flags,
__raw_spin_lock_string_up,
"+m" (lock->slock) : "r" (flags) : "memory");
} }
#endif #endif
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment