.macro altinstruction_entry orig alt feature orig_len alt_len
        .align 8
-       .quad \orig
-       .quad \alt
+       .long \orig - .
+       .long \alt - .
        .word \feature
        .byte \orig_len
        .byte \alt_len
 
 #endif
 
 struct alt_instr {
-       u8 *instr;              /* original instruction */
-       u8 *replacement;
+       s32 instr_offset;       /* offset to original instruction */
+       s32 repl_offset;        /* offset to replacement instruction */
        u16 cpuid;              /* cpuid bit set for replacement */
        u8  instrlen;           /* length of original instruction */
        u8  replacementlen;     /* length of new instruction, <= instrlen */
       "661:\n\t" oldinstr "\n662:\n"                                   \
       ".section .altinstructions,\"a\"\n"                              \
       _ASM_ALIGN "\n"                                                  \
-      _ASM_PTR "661b\n"                                /* label           */   \
-      _ASM_PTR "663f\n"                                /* new instruction */   \
+      "         .long 661b - .\n"                      /* label           */   \
+      "         .long 663f - .\n"                      /* new instruction */   \
       "         .word " __stringify(feature) "\n"      /* feature bit     */   \
       "         .byte 662b-661b\n"                     /* sourcelen       */   \
       "         .byte 664f-663f\n"                     /* replacementlen  */   \
 
                         "2:\n"
                         ".section .altinstructions,\"a\"\n"
                         _ASM_ALIGN "\n"
-                        _ASM_PTR "1b\n"
-                        _ASM_PTR "0\n"         /* no replacement */
+                        " .long 1b - .\n"
+                        " .long 0\n"           /* no replacement (replacementlen is 0) */
                         " .word %P0\n"         /* feature bit */
                         " .byte 2b - 1b\n"     /* source len */
                         " .byte 0\n"           /* replacement len */
                             "2:\n"
                             ".section .altinstructions,\"a\"\n"
                             _ASM_ALIGN "\n"
-                            _ASM_PTR "1b\n"
-                            _ASM_PTR "3f\n"
+                            " .long 1b - .\n"
+                            " .long 3f - .\n"
                             " .word %P1\n"             /* feature bit */
                             " .byte 2b - 1b\n"         /* source len */
                             " .byte 4f - 3f\n"         /* replacement len */
 
                                         struct alt_instr *end)
 {
        struct alt_instr *a;
+       u8 *instr, *replacement;
        u8 insnbuf[MAX_PATCH_LEN];
 
        DPRINTK("%s: alt table %p -> %p\n", __func__, start, end);
         * order.
         */
        for (a = start; a < end; a++) {
-               u8 *instr = a->instr;
+               instr = (u8 *)&a->instr_offset + a->instr_offset;
+               replacement = (u8 *)&a->repl_offset + a->repl_offset;
                BUG_ON(a->replacementlen > a->instrlen);
                BUG_ON(a->instrlen > sizeof(insnbuf));
                BUG_ON(a->cpuid >= NCAPINTS*32);
                if (!boot_cpu_has(a->cpuid))
                        continue;
+
+               memcpy(insnbuf, replacement, a->replacementlen);
+
+               /* 0xe8 is a near relative CALL opcode; fix its rel32 offset. */
+               if (*insnbuf == 0xe8 && a->replacementlen == 5)
+                   *(s32 *)(insnbuf + 1) += replacement - instr;
+
+               add_nops(insnbuf + a->replacementlen,
+                        a->instrlen - a->replacementlen);
+
 #ifdef CONFIG_X86_64
                /* vsyscall code is not mapped yet. resolve it manually. */
                if (instr >= (u8 *)VSYSCALL_START && instr < (u8*)VSYSCALL_END) {
                        instr = __va(instr - (u8*)VSYSCALL_START + (u8*)__pa_symbol(&__vsyscall_0));
-                       DPRINTK("%s: vsyscall fixup: %p => %p\n",
-                               __func__, a->instr, instr);
                }
 #endif
-               memcpy(insnbuf, a->replacement, a->replacementlen);
-               if (*insnbuf == 0xe8 && a->replacementlen == 5)
-                   *(s32 *)(insnbuf + 1) += a->replacement - a->instr;
-               add_nops(insnbuf + a->replacementlen,
-                        a->instrlen - a->replacementlen);
                text_poke_early(instr, insnbuf, a->instrlen);
        }
 }
 
 
 #include <linux/linkage.h>
 #include <asm/dwarf2.h>
+#include <asm/alternative-asm.h>
 
        ALIGN
 copy_page_c:
 2:
        .previous
        .section .altinstructions,"a"
-       .align 8
-       .quad copy_page
-       .quad 1b
-       .word X86_FEATURE_REP_GOOD
-       .byte .Lcopy_page_end - copy_page
-       .byte 2b - 1b
+       altinstruction_entry copy_page, 1b, X86_FEATURE_REP_GOOD,       \
+               .Lcopy_page_end-copy_page, 2b-1b
        .previous
 
 #include <linux/linkage.h>
 #include <asm/dwarf2.h>
 #include <asm/cpufeature.h>
+#include <asm/alternative-asm.h>
 
 #undef memmove
 
        .previous
 
        .section .altinstructions,"a"
-       .align 8
-       .quad .Lmemmove_begin_forward
-       .quad .Lmemmove_begin_forward_efs
-       .word X86_FEATURE_ERMS
-       .byte .Lmemmove_end_forward-.Lmemmove_begin_forward
-       .byte .Lmemmove_end_forward_efs-.Lmemmove_begin_forward_efs
+       altinstruction_entry .Lmemmove_begin_forward,           \
+               .Lmemmove_begin_forward_efs,X86_FEATURE_ERMS,   \
+               .Lmemmove_end_forward-.Lmemmove_begin_forward,  \
+               .Lmemmove_end_forward_efs-.Lmemmove_begin_forward_efs
        .previous
 ENDPROC(memmove)