 }
        aesni_ctr_enc_tfm = aesni_ctr_enc;
 #ifdef CONFIG_AS_AVX
-       if (cpu_has_avx) {
+       if (boot_cpu_has(X86_FEATURE_AVX)) {
                /* optimize performance of ctr mode encryption transform */
                aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
                pr_info("AES CTR mode by8 optimization enabled\n");
 
 {
        const char *feature_name;
 
-       if (!boot_cpu_has(X86_FEATURE_AVX2) || !cpu_has_avx ||
+       if (!boot_cpu_has(X86_FEATURE_AVX) ||
+           !boot_cpu_has(X86_FEATURE_AVX2) ||
            !boot_cpu_has(X86_FEATURE_AES) ||
            !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
                pr_info("AVX2 or AES-NI instructions are not detected.\n");
 
 {
        const char *feature_name;
 
-       if (!cpu_has_avx ||
+       if (!boot_cpu_has(X86_FEATURE_AVX) ||
            !boot_cpu_has(X86_FEATURE_AES) ||
            !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
                pr_info("AVX or AES-NI instructions are not detected.\n");
 
                return -ENODEV;
 
 #ifdef CONFIG_AS_AVX2
-       chacha20_use_avx2 = cpu_has_avx && boot_cpu_has(X86_FEATURE_AVX2) &&
+       chacha20_use_avx2 = boot_cpu_has(X86_FEATURE_AVX) &&
+                           boot_cpu_has(X86_FEATURE_AVX2) &&
                            cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL);
 #endif
        return crypto_register_alg(&alg);
 
                return -ENODEV;
 
 #ifdef CONFIG_AS_AVX2
-       poly1305_use_avx2 = cpu_has_avx && boot_cpu_has(X86_FEATURE_AVX2) &&
+       poly1305_use_avx2 = boot_cpu_has(X86_FEATURE_AVX) &&
+                           boot_cpu_has(X86_FEATURE_AVX2) &&
                            cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL);
        alg.descsize = sizeof(struct poly1305_simd_desc_ctx);
        if (poly1305_use_avx2)
 
 static bool avx_usable(void)
 {
        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
-               if (cpu_has_avx)
+               if (boot_cpu_has(X86_FEATURE_AVX))
                        pr_info("AVX detected but unusable.\n");
                return false;
        }
 
 static bool avx_usable(void)
 {
        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
-               if (cpu_has_avx)
+               if (boot_cpu_has(X86_FEATURE_AVX))
                        pr_info("AVX detected but unusable.\n");
                return false;
        }
 
 static bool avx_usable(void)
 {
        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
-               if (cpu_has_avx)
+               if (boot_cpu_has(X86_FEATURE_AVX))
                        pr_info("AVX detected but unusable.\n");
                return false;
        }
 
 #define cpu_has_apic           boot_cpu_has(X86_FEATURE_APIC)
 #define cpu_has_fxsr           boot_cpu_has(X86_FEATURE_FXSR)
 #define cpu_has_xmm            boot_cpu_has(X86_FEATURE_XMM)
-#define cpu_has_avx            boot_cpu_has(X86_FEATURE_AVX)
 #define cpu_has_xsave          boot_cpu_has(X86_FEATURE_XSAVE)
 #define cpu_has_xsaves         boot_cpu_has(X86_FEATURE_XSAVES)
 /*
 
 
 #define AVX_XOR_SPEED \
 do { \
-       if (cpu_has_avx && boot_cpu_has(X86_FEATURE_OSXSAVE)) \
+       if (boot_cpu_has(X86_FEATURE_AVX) && boot_cpu_has(X86_FEATURE_OSXSAVE)) \
                xor_speed(&xor_block_avx); \
 } while (0)
 
 #define AVX_SELECT(FASTEST) \
-       (cpu_has_avx && boot_cpu_has(X86_FEATURE_OSXSAVE) ? &xor_block_avx : FASTEST)
+       (boot_cpu_has(X86_FEATURE_AVX) && boot_cpu_has(X86_FEATURE_OSXSAVE) ? &xor_block_avx : FASTEST)
 
 #else
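
For reference, a minimal sketch (not part of the diff above) of the gating pattern these hunks converge on, written as plain C. The helper name example_avx_usable() is hypothetical, and the include list is an assumption based on what these glue files typically pull in; only calls already shown in the patch (boot_cpu_has(), cpu_has_xfeatures(), pr_info()) are used:

#include <linux/printk.h>	/* pr_info() - assumed include */
#include <asm/cpufeature.h>	/* boot_cpu_has(), X86_FEATURE_* - assumed include */
#include <asm/fpu/api.h>	/* cpu_has_xfeatures() - assumed include */
#include <asm/fpu/xstate.h>	/* XFEATURE_MASK_SSE/YMM - assumed include */

/* Hypothetical helper mirroring the avx_usable()/OSXSAVE checks above. */
static bool example_avx_usable(void)
{
	/* CPUID: the CPU implements AVX and the OS has enabled XSAVE. */
	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE))
		return false;

	/* The kernel's XSAVE handling covers the SSE and YMM state. */
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}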