diff --git a/crypto/Kconfig b/crypto/Kconfig
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -682,8 +682,19 @@ config CRYPTO_GHASH
GHASH is the hash function used in GCM (Galois/Counter Mode).
It is not a general-purpose cryptographic hash function.
+config CRYPTO_ARCH_HAVE_LIB_POLY1305
+ tristate
+
+config CRYPTO_LIB_POLY1305_RSIZE
+ int
+ default 1
+
+config CRYPTO_LIB_POLY1305
+ tristate
+
config CRYPTO_LIB_POLY1305_GENERIC
tristate
+ default CRYPTO_LIB_POLY1305 if !CRYPTO_ARCH_HAVE_LIB_POLY1305
config CRYPTO_POLY1305
tristate "Poly1305 authenticator algorithm"
diff --git a/crypto/poly1305_generic.c b/crypto/poly1305_generic.c
--- a/crypto/poly1305_generic.c
+++ b/crypto/poly1305_generic.c
@@ -85,31 +85,11 @@ EXPORT_SYMBOL_GPL(crypto_poly1305_update);
int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
{
struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
- __le32 digest[4];
- u64 f = 0;
if (unlikely(!dctx->sset))
return -ENOKEY;
- if (unlikely(dctx->buflen)) {
- dctx->buf[dctx->buflen++] = 1;
- memset(dctx->buf + dctx->buflen, 0,
- POLY1305_BLOCK_SIZE - dctx->buflen);
- poly1305_core_blocks(&dctx->h, dctx->r, dctx->buf, 1, 0);
- }
-
- poly1305_core_emit(&dctx->h, digest);
-
- /* mac = (h + s) % (2^128) */
- f = (f >> 32) + le32_to_cpu(digest[0]) + dctx->s[0];
- put_unaligned_le32(f, dst + 0);
- f = (f >> 32) + le32_to_cpu(digest[1]) + dctx->s[1];
- put_unaligned_le32(f, dst + 4);
- f = (f >> 32) + le32_to_cpu(digest[2]) + dctx->s[2];
- put_unaligned_le32(f, dst + 8);
- f = (f >> 32) + le32_to_cpu(digest[3]) + dctx->s[3];
- put_unaligned_le32(f, dst + 12);
-
+ poly1305_final_generic(dctx, dst);
return 0;
}
EXPORT_SYMBOL_GPL(crypto_poly1305_final);
diff --git a/include/crypto/internal/poly1305.h b/include/crypto/internal/poly1305.h
--- a/include/crypto/internal/poly1305.h
+++ b/include/crypto/internal/poly1305.h
@@ -28,6 +28,13 @@ void poly1305_core_blocks(struct poly1305_state *state,
unsigned int nblocks, u32 hibit);
void poly1305_core_emit(const struct poly1305_state *state, void *dst);
+void poly1305_init_generic(struct poly1305_desc_ctx *desc, const u8 *key);
+
+void poly1305_update_generic(struct poly1305_desc_ctx *desc, const u8 *src,
+ unsigned int nbytes);
+
+void poly1305_final_generic(struct poly1305_desc_ctx *desc, u8 *digest);
+
/* Crypto API helper functions for the Poly1305 MAC */
int crypto_poly1305_init(struct shash_desc *desc);
diff --git a/include/crypto/poly1305.h b/include/crypto/poly1305.h
--- a/include/crypto/poly1305.h
+++ b/include/crypto/poly1305.h
@@ -35,7 +35,13 @@ struct poly1305_desc_ctx {
/* accumulator */
struct poly1305_state h;
/* key */
- struct poly1305_key r[1];
+ struct poly1305_key r[CONFIG_CRYPTO_LIB_POLY1305_RSIZE];
};
+void poly1305_init(struct poly1305_desc_ctx *desc, const u8 *key);
+
+void poly1305_update(struct poly1305_desc_ctx *desc, const u8 *src,
+ unsigned int nbytes);
+void poly1305_final(struct poly1305_desc_ctx *desc, u8 *digest);
+
#endif
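[Illustrative aside, not part of the patch: a minimal sketch of how a caller could use the init/update/final interface declared above, once it is exported from lib/crypto/poly1305.c in the next hunk. The helper name is made up for the example; POLY1305_KEY_SIZE and POLY1305_DIGEST_SIZE are the existing constants from <crypto/poly1305.h>.]

#include <crypto/poly1305.h>

/* Hypothetical one-shot MAC helper built on the new library interface. */
static void example_poly1305_mac(const u8 key[POLY1305_KEY_SIZE],
				 const u8 *data, unsigned int len,
				 u8 mac[POLY1305_DIGEST_SIZE])
{
	struct poly1305_desc_ctx desc;

	poly1305_init(&desc, key);		/* absorbs the 32-byte (r, s) key */
	poly1305_update(&desc, data, len);	/* buffers partial blocks internally */
	poly1305_final(&desc, mac);		/* writes the 16-byte tag; the generic code also wipes desc */
}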
diff --git a/lib/crypto/poly1305.c b/lib/crypto/poly1305.c
--- a/lib/crypto/poly1305.c
+++ b/lib/crypto/poly1305.c
@@ -154,5 +154,95 @@ void poly1305_core_emit(const struct poly1305_state *state, void *dst)
}
EXPORT_SYMBOL_GPL(poly1305_core_emit);
+void poly1305_init_generic(struct poly1305_desc_ctx *desc, const u8 *key)
+{
+ poly1305_core_setkey(desc->r, key);
+ desc->s[0] = get_unaligned_le32(key + 16);
+ desc->s[1] = get_unaligned_le32(key + 20);
+ desc->s[2] = get_unaligned_le32(key + 24);
+ desc->s[3] = get_unaligned_le32(key + 28);
+ poly1305_core_init(&desc->h);
+ desc->buflen = 0;
+ desc->sset = true;
+ desc->rset = 1;
+}
+EXPORT_SYMBOL_GPL(poly1305_init_generic);
+
+void poly1305_update_generic(struct poly1305_desc_ctx *desc, const u8 *src,
+ unsigned int nbytes)
+{
+ unsigned int bytes;
+
+ if (unlikely(desc->buflen)) {
+ bytes = min(nbytes, POLY1305_BLOCK_SIZE - desc->buflen);
+ memcpy(desc->buf + desc->buflen, src, bytes);
+ src += bytes;
+ nbytes -= bytes;
+ desc->buflen += bytes;
+
+ if (desc->buflen == POLY1305_BLOCK_SIZE) {
+ poly1305_core_blocks(&desc->h, desc->r, desc->buf, 1, 1);
+ desc->buflen = 0;
+ }
+ }
+
+ if (likely(nbytes >= POLY1305_BLOCK_SIZE)) {
+ poly1305_core_blocks(&desc->h, desc->r, src,
+ nbytes / POLY1305_BLOCK_SIZE, 1);
+ src += nbytes - (nbytes % POLY1305_BLOCK_SIZE);
+ nbytes %= POLY1305_BLOCK_SIZE;
+ }
+
+ if (unlikely(nbytes)) {
+ desc->buflen = nbytes;
+ memcpy(desc->buf, src, nbytes);
+ }
+}
+EXPORT_SYMBOL_GPL(poly1305_update_generic);
+
+void poly1305_final_generic(struct poly1305_desc_ctx *desc, u8 *dst)
+{
+ __le32 digest[4];
+ u64 f = 0;
+
+ if (unlikely(desc->buflen)) {
+ desc->buf[desc->buflen++] = 1;
+ memset(desc->buf + desc->buflen, 0,
+ POLY1305_BLOCK_SIZE - desc->buflen);
+ poly1305_core_blocks(&desc->h, desc->r, desc->buf, 1, 0);
+ }
+
+ poly1305_core_emit(&desc->h, digest);
+
+ /* mac = (h + s) % (2^128) */
+ f = (f >> 32) + le32_to_cpu(digest[0]) + desc->s[0];
+ put_unaligned_le32(f, dst + 0);
+ f = (f >> 32) + le32_to_cpu(digest[1]) + desc->s[1];
+ put_unaligned_le32(f, dst + 4);
+ f = (f >> 32) + le32_to_cpu(digest[2]) + desc->s[2];
+ put_unaligned_le32(f, dst + 8);
+ f = (f >> 32) + le32_to_cpu(digest[3]) + desc->s[3];
+ put_unaligned_le32(f, dst + 12);
+
+ *desc = (struct poly1305_desc_ctx){};
+}
+EXPORT_SYMBOL_GPL(poly1305_final_generic);
+
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
+
+extern void poly1305_init(struct poly1305_desc_ctx *desc, const u8 *key)
+ __weak __alias(poly1305_init_generic);
+
+extern void poly1305_update(struct poly1305_desc_ctx *desc, const u8 *src,
+ unsigned int nbytes)
+ __weak __alias(poly1305_update_generic);
+
+extern void poly1305_final(struct poly1305_desc_ctx *desc, u8 *dst)
+ __weak __alias(poly1305_final_generic);
+
+#if !IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_POLY1305)
+EXPORT_SYMBOL_GPL(poly1305_init);
+EXPORT_SYMBOL_GPL(poly1305_update);
+EXPORT_SYMBOL_GPL(poly1305_final);
+#endif
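[Illustrative aside, not part of the patch: a hypothetical sketch of how a future arch-specific implementation could supersede the generic code. Its Kconfig entry would select CRYPTO_ARCH_HAVE_LIB_POLY1305 (and could raise CRYPTO_LIB_POLY1305_RSIZE if it keeps extra powers of r in the descriptor), so the weak symbols above are no longer exported, and it would provide strong definitions that override the __weak aliases. The file name is a placeholder, and the calls back into the *_generic routines stand in for real accelerated code.]

/* arch/<arch>/crypto/poly1305-glue.c (hypothetical) */
#include <linux/export.h>
#include <crypto/internal/poly1305.h>
#include <crypto/poly1305.h>

void poly1305_init(struct poly1305_desc_ctx *desc, const u8 *key)
{
	/* A real driver would do its arch-specific key setup here. */
	poly1305_init_generic(desc, key);
}
EXPORT_SYMBOL_GPL(poly1305_init);

void poly1305_update(struct poly1305_desc_ctx *desc, const u8 *src,
		     unsigned int nbytes)
{
	/* A real driver would process full blocks with SIMD here. */
	poly1305_update_generic(desc, src, nbytes);
}
EXPORT_SYMBOL_GPL(poly1305_update);

void poly1305_final(struct poly1305_desc_ctx *desc, u8 *digest)
{
	poly1305_final_generic(desc, digest);
}
EXPORT_SYMBOL_GPL(poly1305_final);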
Expose the existing generic Poly1305 code via an init/update/final library
interface so that callers are not required to go through the crypto API's
shash abstraction to access it. At the same time, make some preparations
so that the library implementation can be superseded by an accelerated
arch-specific version in the future.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
---
 crypto/Kconfig                     | 11 +++
 crypto/poly1305_generic.c          | 22 +----
 include/crypto/internal/poly1305.h |  7 ++
 include/crypto/poly1305.h          |  8 +-
 lib/crypto/poly1305.c              | 90 ++++++++++++++++++++
 5 files changed, 116 insertions(+), 22 deletions(-)

-- 
2.20.1
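[Illustrative aside, not part of the patch: for contrast with the new library interface, a rough sketch of what reaching Poly1305 through the crypto API's shash abstraction looks like today. Note that the in-kernel "poly1305" shash consumes the 32-byte key as the leading bytes of the data stream rather than via setkey(); the helper name is made up.]

#include <linux/err.h>
#include <crypto/hash.h>
#include <crypto/poly1305.h>

static int example_poly1305_via_shash(const u8 key[POLY1305_KEY_SIZE],
				      const u8 *data, unsigned int len,
				      u8 mac[POLY1305_DIGEST_SIZE])
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("poly1305", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		/* Key material is fed as the first 32 bytes of the stream. */
		err = crypto_shash_init(desc) ?:
		      crypto_shash_update(desc, key, POLY1305_KEY_SIZE) ?:
		      crypto_shash_update(desc, data, len) ?:
		      crypto_shash_final(desc, mac);
	}

	crypto_free_shash(tfm);
	return err;
}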