@@ -143,7 +143,7 @@ static int gcm_crypt(struct aead_request *req, struct skcipher_walk *walk,
{
u8 __aligned(8) iv[SM4_BLOCK_SIZE];
be128 __aligned(8) lengths;
- int err;
+ int err = 0;
memset(ghash, 0, SM4_BLOCK_SIZE);
@@ -158,34 +158,31 @@ static int gcm_crypt(struct aead_request *req, struct skcipher_walk *walk,
if (req->assoclen)
gcm_calculate_auth_mac(req, ghash);
- do {
+ while (walk->nbytes && walk->nbytes != walk->total) {
unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;
- const u8 *src = walk->src.virt.addr;
- u8 *dst = walk->dst.virt.addr;
-
- if (walk->nbytes == walk->total) {
- tail = 0;
-
- sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
- walk->nbytes, ghash,
- ctx->ghash_table,
- (const u8 *)&lengths);
- } else if (walk->nbytes - tail) {
- sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
- walk->nbytes - tail, ghash,
- ctx->ghash_table, NULL);
- }
+
+ sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, walk->dst.virt.addr,
+ walk->src.virt.addr, iv,
+ walk->nbytes - tail, ghash,
+ ctx->ghash_table, NULL);
kernel_neon_end();
err = skcipher_walk_done(walk, tail);
- if (err)
- return err;
- if (walk->nbytes)
- kernel_neon_begin();
- } while (walk->nbytes > 0);
- return 0;
+ kernel_neon_begin();
+ }
+
+ sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, walk->dst.virt.addr,
+ walk->src.virt.addr, iv, walk->nbytes, ghash,
+ ctx->ghash_table, (const u8 *)&lengths);
+
+ kernel_neon_end();
+
+ if (walk->nbytes)
+ err = skcipher_walk_done(walk, 0);
+
+ return err;
}
static int gcm_encrypt(struct aead_request *req)
When the total cryption length is zero, GCM cryption calling skcipher_walk_done() will cause an unexpected crash, so skip calling this function to avoid a possible crash when the GCM cryption length is equal to zero.

This patch also rewrites the skcipher walker loop and separates the cryption of the last chunk from the walker loop. In addition to following the usual convention of checking walk->nbytes, this makes the execution logic of the loop clearer and easier to understand.

Fixes: ae1b83c7d572 ("crypto: arm64/sm4 - add CE implementation for GCM mode")
Signed-off-by: Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
---
 arch/arm64/crypto/sm4-ce-gcm-glue.c | 43 ++++++++++++++---------------
 1 file changed, 20 insertions(+), 23 deletions(-)
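
For reference, the following is a small stand-alone C model of the reworked
walker loop, not the kernel code: the toy helpers crypt_chunk() and
walk_done() and the made-up 4096-byte chunk size stand in for
sm4_ce_pmull_gcm_crypt() and skcipher_walk_done(). It only sketches the
control flow of the patch: intermediate chunks are processed inside the loop,
the final chunk (which may be empty) is processed after the loop together
with the lengths block, and the done helper is only called when there is an
active chunk, so a zero-length request never reaches it.

/* Stand-alone model of the reworked gcm_crypt() walker loop. */
#include <stdio.h>

#define BLOCK_SIZE 16
#define CHUNK_MAX  4096	/* made-up per-chunk limit for this model */

/* Stand-in for the SM4-CE primitive: "crypts" nbytes, optionally folding
 * in the GCM lengths block on the final call. */
static void crypt_chunk(unsigned int nbytes, int final)
{
	printf("crypt %u byte(s)%s\n", nbytes, final ? " + lengths block" : "");
}

/* Stand-in for skcipher_walk_done(): consume the processed bytes, carry the
 * tail over into the next chunk, and report the size of that next chunk. */
static int walk_done(unsigned int *nbytes, unsigned int *remaining,
		     unsigned int tail)
{
	*remaining -= *nbytes - tail;
	*nbytes = *remaining < CHUNK_MAX ? *remaining : CHUNK_MAX;
	return 0;
}

static int gcm_crypt_model(unsigned int total)
{
	unsigned int remaining = total;
	unsigned int nbytes = remaining < CHUNK_MAX ? remaining : CHUNK_MAX;
	int err = 0;

	printf("request of %u byte(s):\n", total);

	/* Intermediate chunks: full blocks only, the tail is carried over. */
	while (nbytes && nbytes != remaining) {
		unsigned int tail = nbytes % BLOCK_SIZE;

		crypt_chunk(nbytes - tail, 0);

		err = walk_done(&nbytes, &remaining, tail);
		if (err)
			return err;
	}

	/* Final chunk: may be zero bytes, always folds in the lengths. */
	crypt_chunk(nbytes, 1);

	/* Only report completion when there was an active chunk, so a
	 * zero-length request never calls the done helper. */
	if (nbytes)
		err = walk_done(&nbytes, &remaining, 0);

	return err;
}

int main(void)
{
	gcm_crypt_model(0);	/* zero-length request: walk_done() never runs */
	gcm_crypt_model(5000);	/* multi-chunk request */
	return 0;
}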