}
 EXPORT_SYMBOL(crc32_be_arch);
 
+u32 crc32_optimizations(void)
+{
+       if (alternative_has_cap_likely(ARM64_HAS_CRC32))
+               return CRC32_LE_OPTIMIZATION |
+                      CRC32_BE_OPTIMIZATION |
+                      CRC32C_OPTIMIZATION;
+       return 0;
+}
+EXPORT_SYMBOL(crc32_optimizations);
+
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("arm64-optimized CRC32 functions");
 
 }
 EXPORT_SYMBOL(crc32_be_arch);
 
+u32 crc32_optimizations(void)
+{
+       if (riscv_has_extension_likely(RISCV_ISA_EXT_ZBC))
+               return CRC32_LE_OPTIMIZATION |
+                      CRC32_BE_OPTIMIZATION |
+                      CRC32C_OPTIMIZATION;
+       return 0;
+}
+EXPORT_SYMBOL(crc32_optimizations);
+
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Accelerated CRC32 implementation with Zbc extension");
 
        return crc32c_le_base(crc, p, len);
 }
 
+/*
+ * crc32_optimizations() returns flags that indicate which CRC32 library
+ * functions are using architecture-specific optimizations.  Unlike
+ * IS_ENABLED(CONFIG_CRC32_ARCH) it takes into account the different CRC32
+ * variants and also whether any needed CPU features are available at runtime.
+ */
+#define CRC32_LE_OPTIMIZATION  BIT(0) /* crc32_le() is optimized */
+#define CRC32_BE_OPTIMIZATION  BIT(1) /* crc32_be() is optimized */
+#define CRC32C_OPTIMIZATION    BIT(2) /* __crc32c_le() is optimized */
+#if IS_ENABLED(CONFIG_CRC32_ARCH)
+u32 crc32_optimizations(void);
+#else
+static inline u32 crc32_optimizations(void) { return 0; }
+#endif
+
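For context, a minimal hypothetical example of how a caller might consult crc32_optimizations(); the driver state and function names below are made up and not part of this patch. The idea is to offload CRC32C to a device-side engine only when the kernel would otherwise fall back to the generic table-based implementation:

	#include <linux/crc32.h>

	/* Made-up driver state, for illustration only. */
	static bool my_use_hw_crc32c;

	static void my_pick_csum_impl(void)
	{
		/*
		 * Prefer the device engine only when __crc32c_le() is not
		 * CPU-optimized; an optimized CPU implementation typically
		 * beats a round trip to the device.
		 */
		my_use_hw_crc32c =
			!(crc32_optimizations() & CRC32C_OPTIMIZATION);
	}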
 /**
  * crc32_le_combine - Combine two crc32 check values into one. For two
  *                   sequences of bytes, seq1 and seq2 with lengths len1