#ifdef CONFIG_ALTIVEC
        mflr    r0
-       std     r3,STK_PARAM(R3)(r1)
-       std     r4,STK_PARAM(R4)(r1)
+       std     r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
+       std     r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
        std     r0,16(r1)
        stdu    r1,-STACKFRAMESIZE(r1)
        bl      enter_vmx_copy
        cmpwi   r3,0
        ld      r0,STACKFRAMESIZE+16(r1)
-       ld      r3,STACKFRAMESIZE+STK_PARAM(R3)(r1)
-       ld      r4,STACKFRAMESIZE+STK_PARAM(R4)(r1)
+       ld      r3,STK_REG(R31)(r1)
+       ld      r4,STK_REG(R30)(r1)
        mtlr    r0
 
        li      r0,(PAGE_SIZE/128)
 
 .Lexit:
        addi    r1,r1,STACKFRAMESIZE
 .Ldo_err1:
-       ld      r3,STK_PARAM(R3)(r1)
-       ld      r4,STK_PARAM(R4)(r1)
-       ld      r5,STK_PARAM(R5)(r1)
+       ld      r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
+       ld      r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
+       ld      r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
        b       __copy_tofrom_user_base
 
 
        cmpldi  r5,16
        cmpldi  cr1,r5,4096
 
-       std     r3,STK_PARAM(R3)(r1)
-       std     r4,STK_PARAM(R4)(r1)
-       std     r5,STK_PARAM(R5)(r1)
+       std     r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
+       std     r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
+       std     r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
 
        blt     .Lshort_copy
        bgt     cr1,.Lvmx_copy
 #else
        cmpldi  r5,16
 
-       std     r3,STK_PARAM(R3)(r1)
-       std     r4,STK_PARAM(R4)(r1)
-       std     r5,STK_PARAM(R5)(r1)
+       std     r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
+       std     r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
+       std     r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
 
        blt     .Lshort_copy
 #endif
        bl      enter_vmx_usercopy
        cmpwi   cr1,r3,0
        ld      r0,STACKFRAMESIZE+16(r1)
-       ld      r3,STACKFRAMESIZE+STK_PARAM(R3)(r1)
-       ld      r4,STACKFRAMESIZE+STK_PARAM(R4)(r1)
-       ld      r5,STACKFRAMESIZE+STK_PARAM(R5)(r1)
+       ld      r3,STK_REG(R31)(r1)
+       ld      r4,STK_REG(R30)(r1)
+       ld      r5,STK_REG(R29)(r1)
        mtlr    r0
 
        /*
 
        .align  7
 _GLOBAL(memcpy)
 BEGIN_FTR_SECTION
-       std     r3,STK_PARAM(R3)(r1)    /* save destination pointer for return value */
+       std     r3,-STACKFRAMESIZE+STK_REG(R31)(r1)     /* save destination pointer for return value */
 FTR_SECTION_ELSE
 #ifndef SELFTEST
        b       memcpy_power7
 2:     bf      cr7*4+3,3f
        lbz     r9,8(r4)
        stb     r9,0(r3)
-3:     ld      r3,STK_PARAM(R3)(r1)    /* return dest pointer */
+3:     ld      r3,-STACKFRAMESIZE+STK_REG(R31)(r1)     /* return dest pointer */
        blr
 
 .Lsrc_unaligned:
 2:     bf      cr7*4+3,3f
        rotldi  r9,r9,8
        stb     r9,0(r3)
-3:     ld      r3,STK_PARAM(R3)(r1)    /* return dest pointer */
+3:     ld      r3,-STACKFRAMESIZE+STK_REG(R31)(r1)     /* return dest pointer */
        blr
 
 .Ldst_unaligned:
 3:     bf      cr7*4+3,4f
        lbz     r0,0(r4)
        stb     r0,0(r3)
-4:     ld      r3,STK_PARAM(R3)(r1)    /* return dest pointer */
+4:     ld      r3,-STACKFRAMESIZE+STK_REG(R31)(r1)     /* return dest pointer */
        blr
 
        cmpldi  r5,16
        cmpldi  cr1,r5,4096
 
-       std     r3,STK_PARAM(R1)(r1)
+       std     r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
 
        blt     .Lshort_copy
        bgt     cr1,.Lvmx_copy
 #else
        cmpldi  r5,16
 
-       std     r3,STK_PARAM(R1)(r1)
+       std     r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
 
        blt     .Lshort_copy
 #endif
        lbz     r0,0(r4)
        stb     r0,0(r3)
 
-15:    ld      r3,STK_PARAM(R3)(r1)
+15:    ld      r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
        blr
 
 .Lunwind_stack_nonvmx_copy:
 #ifdef CONFIG_ALTIVEC
 .Lvmx_copy:
        mflr    r0
-       std     r4,STK_PARAM(R4)(r1)
-       std     r5,STK_PARAM(R5)(r1)
+       std     r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
+       std     r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
        std     r0,16(r1)
        stdu    r1,-STACKFRAMESIZE(r1)
        bl      enter_vmx_copy
        cmpwi   cr1,r3,0
        ld      r0,STACKFRAMESIZE+16(r1)
-       ld      r3,STACKFRAMESIZE+STK_PARAM(R3)(r1)
-       ld      r4,STACKFRAMESIZE+STK_PARAM(R4)(r1)
-       ld      r5,STACKFRAMESIZE+STK_PARAM(R5)(r1)
+       ld      r3,STK_REG(R31)(r1)
+       ld      r4,STK_REG(R30)(r1)
+       ld      r5,STK_REG(R29)(r1)
        mtlr    r0
 
        /*
        stb     r0,0(r3)
 
 15:    addi    r1,r1,STACKFRAMESIZE
-       ld      r3,STK_PARAM(R3)(r1)
+       ld      r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
        b       exit_vmx_copy           /* tail call optimise */
 
 .Lvmx_unaligned_copy:
        stb     r0,0(r3)
 
 15:    addi    r1,r1,STACKFRAMESIZE
-       ld      r3,STK_PARAM(R3)(r1)
+       ld      r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
        b       exit_vmx_copy           /* tail call optimise */
 #endif /* CONFIG_ALTIVEC */
 
 _GLOBAL(opal_query_takeover)
        mfcr    r0
        stw     r0,8(r1)
+       stdu    r1,-STACKFRAMESIZE(r1)
        std     r3,STK_PARAM(R3)(r1)
        std     r4,STK_PARAM(R4)(r1)
        li      r3,H_HAL_TAKEOVER
        li      r4,H_HAL_TAKEOVER_QUERY_MAGIC
        HVSC
        ld      r10,STK_PARAM(R3)(r1)
        std     r4,0(r10)
        ld      r10,STK_PARAM(R4)(r1)
+       addi    r1,r1,STACKFRAMESIZE