                set_thread_flag(TIF_RESTORE_TM);
        }
 }
+
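+/*
+ * Helper to test whether the MSR indicates an active (or suspended)
+ * transaction; the !CONFIG_PPC_TRANSACTIONAL_MEM stub below lets
+ * callers avoid #ifdef clutter.
+ */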
+static inline bool msr_tm_active(unsigned long msr)
+{
+       return MSR_TM_ACTIVE(msr);
+}
 #else
+static inline bool msr_tm_active(unsigned long msr) { return false; }
 static inline void check_if_tm_restore_required(struct task_struct *tsk) { }
 #endif /* CONFIG_PPC_TRANSACTIONAL_MEM */
 
 EXPORT_SYMBOL(enable_kernel_fp);
 
 static int restore_fp(struct task_struct *tsk) {
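+       /*
+        * While a transaction is active (or suspended) the FP state must
+        * be restored even when load_fp is clear, so an active TM state
+        * also triggers the restore.
+        */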
-       if (tsk->thread.load_fp) {
+       if (tsk->thread.load_fp || msr_tm_active(tsk->thread.regs->msr)) {
                load_fp_state(&current->thread.fp_state);
                current->thread.load_fp++;
                return 1;
 
 static int restore_altivec(struct task_struct *tsk)
 {
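+       /* As in restore_fp(): an active transaction also forces a restore */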
-       if (cpu_has_feature(CPU_FTR_ALTIVEC) && tsk->thread.load_vec) {
+       if (cpu_has_feature(CPU_FTR_ALTIVEC) &&
+               (tsk->thread.load_vec || msr_tm_active(tsk->thread.regs->msr))) {
                load_vr_state(&tsk->thread.vr_state);
                tsk->thread.used_vr = 1;
                tsk->thread.load_vec++;
 {
        unsigned long msr;
 
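+       /*
+        * An active transaction requires the FP/VEC state to be restored
+        * regardless of the lazy-restore counters, so don't bail out early.
+        */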
-       if (!current->thread.load_fp && !loadvec(current->thread))
+       if (!msr_tm_active(regs->msr) &&
+               !current->thread.load_fp && !loadvec(current->thread))
                return;
 
        msr = regs->msr;
        msr_diff = current->thread.ckpt_regs.msr & ~regs->msr;
        msr_diff &= MSR_FP | MSR_VEC | MSR_VSX;
 
+       /* Ensure that restore_math() will restore */
+       if (msr_diff & MSR_FP)
+               current->thread.load_fp = 1;
+#ifdef CONFIG_ALTIVEC
+       if (cpu_has_feature(CPU_FTR_ALTIVEC) && msr_diff & MSR_VEC)
+               current->thread.load_vec = 1;
+#endif
        restore_math(regs);
 
        regs->msr |= msr_diff;