        preempt_disable();
        pagefault_disable();
-       enable_kernel_altivec();
        enable_kernel_vsx();
        ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
        ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
        } else {
                preempt_disable();
                pagefault_disable();
-               enable_kernel_altivec();
                enable_kernel_vsx();
                aes_p8_encrypt(src, dst, &ctx->enc_key);
                pagefault_enable();
        } else {
                preempt_disable();
                pagefault_disable();
-               enable_kernel_altivec();
                enable_kernel_vsx();
                aes_p8_decrypt(src, dst, &ctx->dec_key);
                pagefault_enable();
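
The three hunks above look like the setkey, encrypt, and decrypt paths of
drivers/crypto/vmx/aes.c. Since enable_kernel_vsx() on powerpc now makes the
FP and AltiVec state available as well, the separate enable_kernel_altivec()
call becomes redundant. A minimal sketch of how the encrypt path reads after
the change; p8_aes_ctx and the fallback cipher are assumptions taken from the
surrounding driver code, not shown in the hunks:

static void p8_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (in_interrupt()) {
		/* VSX state must not be touched in interrupt context */
		crypto_cipher_encrypt_one(ctx->fallback, dst, src);
	} else {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();	/* now covers FP and AltiVec too */
		aes_p8_encrypt(src, dst, &ctx->enc_key);
		pagefault_enable();
		preempt_enable();
	}
}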
 
 
        preempt_disable();
        pagefault_disable();
-       enable_kernel_altivec();
        enable_kernel_vsx();
        ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
        ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
        } else {
                preempt_disable();
                pagefault_disable();
-               enable_kernel_altivec();
                enable_kernel_vsx();
 
                blkcipher_walk_init(&walk, dst, src, nbytes);
        } else {
                preempt_disable();
                pagefault_disable();
-               enable_kernel_altivec();
                enable_kernel_vsx();
 
                blkcipher_walk_init(&walk, dst, src, nbytes);
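
The CBC hunks (apparently drivers/crypto/vmx/aes_cbc.c) follow the same
pattern, but here the single enable_kernel_vsx() guards a whole blkcipher
walk rather than one block operation. A hedged sketch of the encrypt side,
with the walk loop reconstructed from the driver around these hunks:

static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	int ret;
	struct blkcipher_walk walk;
	struct p8_aes_cbc_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();	/* one call instead of altivec + vsx */

	blkcipher_walk_init(&walk, dst, src, nbytes);
	ret = blkcipher_walk_virt(desc, &walk);
	while ((nbytes = walk.nbytes)) {
		aes_p8_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
				   nbytes & ~(AES_BLOCK_SIZE - 1),
				   &ctx->enc_key, walk.iv, 1);
		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, &walk, nbytes);
	}

	pagefault_enable();
	preempt_enable();
	return ret;
}

The in_interrupt() fallback branch is omitted here for brevity; the driver
dispatches to a software blkcipher in that case.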
 
        struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
        pagefault_disable();
-       enable_kernel_altivec();
        enable_kernel_vsx();
        ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
        pagefault_enable();
        unsigned int nbytes = walk->nbytes;
 
        pagefault_disable();
-       enable_kernel_altivec();
        enable_kernel_vsx();
        aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
        pagefault_enable();
                ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
                while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                        pagefault_disable();
-                       enable_kernel_altivec();
                        enable_kernel_vsx();
                        aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
                                                    walk.dst.virt.addr,
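
The CTR hunks (seemingly drivers/crypto/vmx/aes_ctr.c; the last one is cut
off mid-call above) differ in one detail: these paths bracket the VSX region
with pagefault_disable() only and had no preempt_disable() at this point. A
sketch of the final-block helper, which encrypts the counter block into a
keystream and XORs it over the partial tail; the helper names are assumed
from the driver:

static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
			     struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	pagefault_disable();
	enable_kernel_vsx();	/* enable_kernel_altivec() dropped */
	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
	pagefault_enable();

	/* XOR the keystream over the tail, then bump the counter */
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}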
 
 
        preempt_disable();
        pagefault_disable();
-       enable_kernel_altivec();
        enable_kernel_vsx();
-       enable_kernel_fp();
        gcm_init_p8(ctx->htable, (const u64 *) key);
        pagefault_enable();
        preempt_enable();
                               GHASH_DIGEST_SIZE - dctx->bytes);
                        preempt_disable();
                        pagefault_disable();
-                       enable_kernel_altivec();
                        enable_kernel_vsx();
-                       enable_kernel_fp();
                        gcm_ghash_p8(dctx->shash, ctx->htable,
                                     dctx->buffer, GHASH_DIGEST_SIZE);
                        pagefault_enable();
                if (len) {
                        preempt_disable();
                        pagefault_disable();
-                       enable_kernel_altivec();
                        enable_kernel_vsx();
-                       enable_kernel_fp();
                        gcm_ghash_p8(dctx->shash, ctx->htable, src, len);
                        pagefault_enable();
                        preempt_enable();
                                dctx->buffer[i] = 0;
                        preempt_disable();
                        pagefault_disable();
-                       enable_kernel_altivec();
                        enable_kernel_vsx();
-                       enable_kernel_fp();
                        gcm_ghash_p8(dctx->shash, ctx->htable,
                                     dctx->buffer, GHASH_DIGEST_SIZE);
                        pagefault_enable();
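
The GHASH hunks (apparently drivers/crypto/vmx/ghash.c) remove two calls per
site, enable_kernel_altivec() and enable_kernel_fp(), since the single
enable_kernel_vsx() now makes FP, VMX and VSX all usable. A hedged sketch of
the setkey path after the change; GHASH_KEY_LEN and the shash fallback are
assumptions taken from the driver:

static int p8_ghash_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(tfm));

	if (keylen != GHASH_KEY_LEN)
		return -EINVAL;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();	/* replaces the altivec + fp + vsx trio */
	gcm_init_p8(ctx->htable, (const u64 *) key);
	pagefault_enable();
	preempt_enable();

	return crypto_shash_setkey(ctx->fallback, key, keylen);
}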