crypto: arm64/sm4-ccm - Rewrite skcipher walker loop
author Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
Wed, 1 Feb 2023 12:32:07 +0000 (20:32 +0800)
committer Herbert Xu <herbert@gondor.apana.org.au>
Fri, 10 Feb 2023 09:20:19 +0000 (17:20 +0800)
The fact that errors in the skcipher walker API are indicated
not only by a non-zero return value, but also by walk->nbytes
being zero, causes the layout of the skcipher walker loop to
differ noticeably from the usual layout. This is not a problem
in itself, but it is likely to confuse readers and to make the
code harder to maintain.

This patch rewrites the skcipher walker loop and separates the
handling of the last chunk from the loop, avoiding incorrect
calls to the skcipher walker API. In addition to following the
usual convention of checking walk->nbytes, this makes the loop
logic clearer and easier to understand.
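
As a rough illustration, the rewritten control flow has the
following shape (a simplified sketch only: do_crypt() and
do_final() are hypothetical stand-ins for the SM4-CE helpers,
and the kernel_neon_begin()/kernel_neon_end() bracketing is
omitted for brevity):

	/*
	 * Intermediate chunks are processed block-aligned inside the
	 * loop; the final chunk (walk->nbytes == walk->total) is
	 * handled separately, so skcipher_walk_done() is never called
	 * after the walk has already finished.
	 */
	while (walk->nbytes && walk->nbytes != walk->total) {
		unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;

		do_crypt(walk->dst.virt.addr, walk->src.virt.addr,
			 walk->nbytes - tail);
		err = skcipher_walk_done(walk, tail);
	}

	if (walk->nbytes) {
		/* final chunk, possibly including a partial block */
		do_crypt(walk->dst.virt.addr, walk->src.virt.addr,
			 walk->nbytes);
		do_final(mac);
		err = skcipher_walk_done(walk, 0);
	} else {
		do_final(mac);
	}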

Signed-off-by: Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
arch/arm64/crypto/sm4-ce-ccm-glue.c

index f2cec7b52efcd15f9d1dd59194384524e1a0d6e1..5e7e17bbec81e158b6550862900ae7689cfcbe40 100644
@@ -166,7 +166,7 @@ static int ccm_crypt(struct aead_request *req, struct skcipher_walk *walk,
                                        unsigned int nbytes, u8 *mac))
 {
        u8 __aligned(8) ctr0[SM4_BLOCK_SIZE];
-       int err;
+       int err = 0;
 
        /* preserve the initial ctr0 for the TAG */
        memcpy(ctr0, walk->iv, SM4_BLOCK_SIZE);
@@ -177,33 +177,37 @@ static int ccm_crypt(struct aead_request *req, struct skcipher_walk *walk,
        if (req->assoclen)
                ccm_calculate_auth_mac(req, mac);
 
-       do {
+       while (walk->nbytes && walk->nbytes != walk->total) {
                unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;
-               const u8 *src = walk->src.virt.addr;
-               u8 *dst = walk->dst.virt.addr;
 
-               if (walk->nbytes == walk->total)
-                       tail = 0;
+               sm4_ce_ccm_crypt(rkey_enc, walk->dst.virt.addr,
+                                walk->src.virt.addr, walk->iv,
+                                walk->nbytes - tail, mac);
+
+               kernel_neon_end();
+
+               err = skcipher_walk_done(walk, tail);
+
+               kernel_neon_begin();
+       }
 
-               if (walk->nbytes - tail)
-                       sm4_ce_ccm_crypt(rkey_enc, dst, src, walk->iv,
-                                        walk->nbytes - tail, mac);
+       if (walk->nbytes) {
+               sm4_ce_ccm_crypt(rkey_enc, walk->dst.virt.addr,
+                                walk->src.virt.addr, walk->iv,
+                                walk->nbytes, mac);
 
-               if (walk->nbytes == walk->total)
-                       sm4_ce_ccm_final(rkey_enc, ctr0, mac);
+               sm4_ce_ccm_final(rkey_enc, ctr0, mac);
 
                kernel_neon_end();
 
-               if (walk->nbytes) {
-                       err = skcipher_walk_done(walk, tail);
-                       if (err)
-                               return err;
-                       if (walk->nbytes)
-                               kernel_neon_begin();
-               }
-       } while (walk->nbytes > 0);
+               err = skcipher_walk_done(walk, 0);
+       } else {
+               sm4_ce_ccm_final(rkey_enc, ctr0, mac);
 
-       return 0;
+               kernel_neon_end();
+       }
+
+       return err;
 }
 
 static int ccm_encrypt(struct aead_request *req)