#if !defined(CONFIG_64BIT)
 
 #define __put_kernel_asm64(__val,ptr) do {                 \
-       u64 __val64 = (u64)(__val);                         \
-       u32 hi = (__val64) >> 32;                           \
-       u32 lo = (__val64) & 0xffffffff;                    \
        __asm__ __volatile__ (                              \
                "\n1:\tstw %2,0(%1)"                        \
-               "\n2:\tstw %3,4(%1)\n\t"                    \
+               "\n2:\tstw %R2,4(%1)\n\t"                   \
                ASM_EXCEPTIONTABLE_ENTRY(1b,fixup_put_user_skip_2)\
                ASM_EXCEPTIONTABLE_ENTRY(2b,fixup_put_user_skip_1)\
                : "=r"(__pu_err)                            \
-               : "r"(ptr), "r"(hi), "r"(lo), "0"(__pu_err) \
+               : "r"(ptr), "r"(__val), "0"(__pu_err)      \
                : "r1");                                    \
 } while (0)
 
 #define __put_user_asm64(__val,ptr) do {                   \
-       u64 __val64 = (u64)(__val);                         \
-       u32 hi = (__val64) >> 32;                           \
-       u32 lo = (__val64) & 0xffffffff;                    \
        __asm__ __volatile__ (                              \
                "\n1:\tstw %2,0(%%sr3,%1)"                  \
-               "\n2:\tstw %3,4(%%sr3,%1)\n\t"              \
+               "\n2:\tstw %R2,4(%%sr3,%1)\n\t"             \
                ASM_EXCEPTIONTABLE_ENTRY(1b,fixup_put_user_skip_2)\
                ASM_EXCEPTIONTABLE_ENTRY(2b,fixup_put_user_skip_1)\
                : "=r"(__pu_err)                            \
-               : "r"(ptr), "r"(hi), "r"(lo), "0"(__pu_err) \
+               : "r"(ptr), "r"(__val), "0"(__pu_err)      \
                : "r1");                                    \
 } while (0)