@@ -83,7 +83,17 @@ u32 __pure crc32_be_arch(u32 crc, const u8 *p, size_t len)
return crc32_be_arm64(crc, p, len);
}
EXPORT_SYMBOL(crc32_be_arch);

+u32 crc32_optimizations(void)
+{
+	if (alternative_has_cap_likely(ARM64_HAS_CRC32))
+		return CRC32_LE_OPTIMIZATION |
+		       CRC32_BE_OPTIMIZATION |
+		       CRC32C_OPTIMIZATION;
+	return 0;
+}
+EXPORT_SYMBOL(crc32_optimizations);
+
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("arm64-optimized CRC32 functions");
@@ -295,7 +295,17 @@ u32 __pure crc32_be_arch(u32 crc, const u8 *p, size_t len)
legacy:
return crc32_be_base(crc, p, len);
}
EXPORT_SYMBOL(crc32_be_arch);

+u32 crc32_optimizations(void)
+{
+	if (riscv_has_extension_likely(RISCV_ISA_EXT_ZBC))
+		return CRC32_LE_OPTIMIZATION |
+		       CRC32_BE_OPTIMIZATION |
+		       CRC32C_OPTIMIZATION;
+	return 0;
+}
+EXPORT_SYMBOL(crc32_optimizations);
+
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Accelerated CRC32 implementation with Zbc extension");
@@ -35,10 +35,25 @@ static inline u32 __pure __crc32c_le(u32 crc, const u8 *p, size_t len)
if (IS_ENABLED(CONFIG_CRC32_ARCH))
return crc32c_le_arch(crc, p, len);
return crc32c_le_base(crc, p, len);
}

+/*
+ * crc32_optimizations() returns flags that indicate which CRC32 library
+ * functions are using architecture-specific optimizations. Unlike
+ * IS_ENABLED(CONFIG_CRC32_ARCH) it takes into account the different CRC32
+ * variants and also whether any needed CPU features are available at runtime.
+ */
+#define CRC32_LE_OPTIMIZATION BIT(0) /* crc32_le() is optimized */
+#define CRC32_BE_OPTIMIZATION BIT(1) /* crc32_be() is optimized */
+#define CRC32C_OPTIMIZATION BIT(2) /* __crc32c_le() is optimized */
+#if IS_ENABLED(CONFIG_CRC32_ARCH)
+u32 crc32_optimizations(void);
+#else
+static inline u32 crc32_optimizations(void) { return 0; }
+#endif
+
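
On the caller side, this lets code that cares about CRC throughput key its behaviour off the actual runtime situation rather than just IS_ENABLED(CONFIG_CRC32_ARCH). A hedged example of the intended call pattern (the helper and warning text below are illustrative, not taken from any in-tree user):

#include <linux/crc32.h>
#include <linux/printk.h>

/* Illustrative only: warn once if __crc32c_le() falls back to the generic code. */
static void example_check_crc32c_speed(void)
{
	if (!(crc32_optimizations() & CRC32C_OPTIMIZATION))
		pr_warn_once("crc32c is not hardware-accelerated; checksumming may be slow\n");
}
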
/**
* crc32_le_combine - Combine two crc32 check values into one. For two
* sequences of bytes, seq1 and seq2 with lengths len1
* and len2, crc32_le() check values were calculated
* for each, crc1 and crc2.