                                         unsigned int num_blks);
 EXPORT_SYMBOL(sha256_block_data_order);
 
-static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
-                                     int blocks)
+static void sha256_arm64_transform(struct sha256_state *sst, u8 const *src,
+                                  int blocks)
 {
        sha256_block_data_order(sst->state, src, blocks);
 }
 asmlinkage void sha256_block_neon(u32 *digest, const void *data,
                                  unsigned int num_blks);
 
-static void __sha256_block_neon(struct sha256_state *sst, u8 const *src,
-                               int blocks)
+static void sha256_neon_transform(struct sha256_state *sst, u8 const *src,
+                                 int blocks)
 {
        sha256_block_neon(sst->state, src, blocks);
 }
 static int crypto_sha256_arm64_update(struct shash_desc *desc, const u8 *data,
                                      unsigned int len)
 {
-       return sha256_base_do_update(desc, data, len,
-                                    __sha256_block_data_order);
+       return sha256_base_do_update(desc, data, len, sha256_arm64_transform);
 }
 
 static int crypto_sha256_arm64_finup(struct shash_desc *desc, const u8 *data,
                                     unsigned int len, u8 *out)
 {
        if (len)
-               sha256_base_do_update(desc, data, len,
-                                     __sha256_block_data_order);
-       sha256_base_do_finalize(desc, __sha256_block_data_order);
+               sha256_base_do_update(desc, data, len, sha256_arm64_transform);
+       sha256_base_do_finalize(desc, sha256_arm64_transform);
 
        return sha256_base_finish(desc, out);
 }
 
 static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
                               unsigned int len)
 {
        struct sha256_state *sctx = shash_desc_ctx(desc);
 
        if (!crypto_simd_usable())
                return sha256_base_do_update(desc, data, len,
-                               __sha256_block_data_order);
+                               sha256_arm64_transform);
 
        while (len > 0) {
                unsigned int chunk = len;
 
                /*
                 * Don't hog the CPU for the entire time it takes to process all
                 * input when running on a preemptible kernel, but process the
                 * data block by block instead.
                 */
                if (IS_ENABLED(CONFIG_PREEMPTION) &&
                    chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)
                        chunk = SHA256_BLOCK_SIZE -
                                sctx->count % SHA256_BLOCK_SIZE;
 
                kernel_neon_begin();
-               sha256_base_do_update(desc, data, chunk, __sha256_block_neon);
+               sha256_base_do_update(desc, data, chunk, sha256_neon_transform);
                kernel_neon_end();
                data += chunk;
                len -= chunk;
        }
        return 0;
 }
 
 static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
                              unsigned int len, u8 *out)
 {
        if (!crypto_simd_usable()) {
                if (len)
                        sha256_base_do_update(desc, data, len,
-                               __sha256_block_data_order);
-               sha256_base_do_finalize(desc, __sha256_block_data_order);
+                               sha256_arm64_transform);
+               sha256_base_do_finalize(desc, sha256_arm64_transform);
        } else {
                if (len)
                        sha256_update_neon(desc, data, len);
                kernel_neon_begin();
-               sha256_base_do_finalize(desc, __sha256_block_neon);
+               sha256_base_do_finalize(desc, sha256_neon_transform);
                kernel_neon_end();
        }
        return sha256_base_finish(desc, out);