/*
 * Alternative inline assembly for SMP.
 *
 * The LOCK_PREFIX macro defined here replaces the LOCK and
 * LOCK_PREFIX macros used everywhere in the source tree.
 *
 */
 
#ifdef CONFIG_SMP
/*
 * On SMP a "lock; " prefix is emitted, and the address of each lock
 * prefix is recorded in the .smp_locks section so the prefixes can be
 * patched out at runtime on a uniprocessor machine.
 *
 * NOTE(review): the entry must record the address of the 661: label
 * (.quad 661f) and the section must be closed with .previous before
 * the prefix is emitted; without these two lines the lock instruction
 * itself would land inside the .smp_locks data section.
 */
#define LOCK_PREFIX \
		".section .smp_locks,\"a\"\n"	\
		"  .align 8\n"			\
		"  .quad 661f\n" /* address */	\
		".previous\n"			\
		"661:\n\tlock; "

#else /* ! CONFIG_SMP */
/* On UP the lock prefix is unnecessary; omit it entirely. */
#define LOCK_PREFIX ""
#endif
 
 
 
/*
 * Assembly template for the spin-lock slow path: atomically decrement
 * the lock word; if it goes negative (lock already held), spin in an
 * out-of-line section until the word is positive again, then retry.
 *
 * NOTE(review): the spin-loop tail after the "2:" label falls outside
 * the visible hunk of this patch; it is restored here from the
 * canonical x86-64 implementation -- verify against the full file.
 */
#define __raw_spin_lock_string \
	"\n1:\t" \
	LOCK_PREFIX " ; decl %0\n\t" \
	"js 2f\n" \
	LOCK_SECTION_START("") \
	"2:\t" \
	"rep;nop\n\t" \
	"cmpl $0,%0\n\t" \
	"jle 2b\n\t" \
	"jmp 1b\n" \
	LOCK_SECTION_END
/*
 * Acquire the spinlock, spinning until it becomes available.
 * The old alternative_smp() two-version (SMP/UP) scheme is gone:
 * the UP optimization is now handled entirely by LOCK_PREFIX.
 */
static inline void __raw_spin_lock(raw_spinlock_t *lock)
{
	asm volatile(__raw_spin_lock_string : "=m" (lock->slock) : : "memory");
}

/* The flags argument is unused on this architecture. */
#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
 
/*
 * Drop a reader reference: atomically increment the lock count.
 * Uses LOCK_PREFIX (instead of a hard-coded "lock") so the prefix
 * can be patched out on UP.
 */
static inline void __raw_read_unlock(raw_rwlock_t *rw)
{
	asm volatile(LOCK_PREFIX " ; incl %0" :"=m" (rw->lock) : : "memory");
}
 
/*
 * Release the write lock: atomically add the full bias back.
 * Uses LOCK_PREFIX (instead of a hard-coded "lock") so the prefix
 * can be patched out on UP.
 */
static inline void __raw_write_unlock(raw_rwlock_t *rw)
{
	asm volatile(LOCK_PREFIX " ; addl $" RW_LOCK_BIAS_STR ",%0"
				: "=m" (rw->lock) : : "memory");
}