crypto: x86/sm4 - Remove cfb(sm4)
author     Herbert Xu <herbert@gondor.apana.org.au>
           Sat, 16 Sep 2023 09:16:52 +0000 (17:16 +0800)
committer  Herbert Xu <herbert@gondor.apana.org.au>
           Fri, 8 Dec 2023 03:59:45 +0000 (11:59 +0800)
Remove the unused CFB implementation.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
arch/x86/crypto/Kconfig
arch/x86/crypto/sm4-aesni-avx-asm_64.S
arch/x86/crypto/sm4-aesni-avx2-asm_64.S
arch/x86/crypto/sm4-avx.h
arch/x86/crypto/sm4_aesni_avx2_glue.c
arch/x86/crypto/sm4_aesni_avx_glue.c
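
For reference, the CFB construction implemented by the removed helpers uses the block cipher only in the encrypt direction: each keystream block is the encryption of the previous ciphertext block (the IV for the first block), and the ciphertext feeds back as the next IV. The user-space sketch below mirrors the structure of the removed sm4_cfb_encrypt() and of the per-block fallback in sm4_avx_cfb_decrypt(); the block cipher is abstracted behind a hypothetical encrypt_block callback (sm4_crypt_block() played that role in the kernel code), and the partial final-block handling of the originals is omitted.

/* Minimal CFB sketch; encrypt_block_fn is a hypothetical stand-in for
 * sm4_crypt_block().  Whole blocks only, no partial tail. */
#include <stdint.h>
#include <stddef.h>
#include <string.h>

#define BLOCK_SIZE 16	/* SM4_BLOCK_SIZE */

typedef void (*encrypt_block_fn)(const void *key, uint8_t out[BLOCK_SIZE],
				 const uint8_t in[BLOCK_SIZE]);

/* CFB encrypt: C[i] = P[i] ^ E(key, C[i-1]), with C[-1] = IV. */
static void cfb_encrypt(encrypt_block_fn enc, const void *key, uint8_t *iv,
			uint8_t *dst, const uint8_t *src, size_t nblocks)
{
	uint8_t keystream[BLOCK_SIZE];

	while (nblocks--) {
		enc(key, keystream, iv);
		for (int i = 0; i < BLOCK_SIZE; i++)
			dst[i] = src[i] ^ keystream[i];
		memcpy(iv, dst, BLOCK_SIZE);	/* ciphertext feedback */
		src += BLOCK_SIZE;
		dst += BLOCK_SIZE;
	}
}

/* CFB decrypt: P[i] = C[i] ^ E(key, C[i-1]); same keystream, fed from
 * the ciphertext, so only the encrypt direction is ever needed. */
static void cfb_decrypt(encrypt_block_fn enc, const void *key, uint8_t *iv,
			uint8_t *dst, const uint8_t *src, size_t nblocks)
{
	uint8_t keystream[BLOCK_SIZE];
	uint8_t next_iv[BLOCK_SIZE];

	while (nblocks--) {
		enc(key, keystream, iv);
		memcpy(next_iv, src, BLOCK_SIZE);	/* dst may alias src */
		for (int i = 0; i < BLOCK_SIZE; i++)
			dst[i] = src[i] ^ keystream[i];
		memcpy(iv, next_iv, BLOCK_SIZE);
		src += BLOCK_SIZE;
		dst += BLOCK_SIZE;
	}
}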

diff --git a/arch/x86/crypto/Kconfig b/arch/x86/crypto/Kconfig
index 9bbfd01cfa2f13fde9124ca521fa76c33f7661de..c9e59589a1cee1f76177110c5e6f2b34810f1c9f 100644
@@ -189,7 +189,7 @@ config CRYPTO_SERPENT_AVX2_X86_64
          Processes 16 blocks in parallel.
 
 config CRYPTO_SM4_AESNI_AVX_X86_64
-       tristate "Ciphers: SM4 with modes: ECB, CBC, CFB, CTR (AES-NI/AVX)"
+       tristate "Ciphers: SM4 with modes: ECB, CBC, CTR (AES-NI/AVX)"
        depends on X86 && 64BIT
        select CRYPTO_SKCIPHER
        select CRYPTO_SIMD
@@ -197,7 +197,7 @@ config CRYPTO_SM4_AESNI_AVX_X86_64
        select CRYPTO_SM4
        help
          Length-preserving ciphers: SM4 cipher algorithms
-         (OSCCA GB/T 32907-2016) with ECB, CBC, CFB, and CTR modes
+         (OSCCA GB/T 32907-2016) with ECB, CBC, and CTR modes
 
          Architecture: x86_64 using:
          - AES-NI (AES New Instructions)
@@ -210,7 +210,7 @@ config CRYPTO_SM4_AESNI_AVX_X86_64
          If unsure, say N.
 
 config CRYPTO_SM4_AESNI_AVX2_X86_64
-       tristate "Ciphers: SM4 with modes: ECB, CBC, CFB, CTR (AES-NI/AVX2)"
+       tristate "Ciphers: SM4 with modes: ECB, CBC, CTR (AES-NI/AVX2)"
        depends on X86 && 64BIT
        select CRYPTO_SKCIPHER
        select CRYPTO_SIMD
@@ -219,7 +219,7 @@ config CRYPTO_SM4_AESNI_AVX2_X86_64
        select CRYPTO_SM4_AESNI_AVX_X86_64
        help
          Length-preserving ciphers: SM4 cipher algorithms
-         (OSCCA GB/T 32907-2016) with ECB, CBC, CFB, and CTR modes
+         (OSCCA GB/T 32907-2016) with ECB, CBC, and CTR modes
 
          Architecture: x86_64 using:
          - AES-NI (AES New Instructions)
diff --git a/arch/x86/crypto/sm4-aesni-avx-asm_64.S b/arch/x86/crypto/sm4-aesni-avx-asm_64.S
index e2668d2fe6ce181650807a8fa5abef864b76bd1b..2bf611eaa1911941e62cf09d1ed9501a9c11d7da 100644
@@ -534,55 +534,3 @@ SYM_TYPED_FUNC_START(sm4_aesni_avx_cbc_dec_blk8)
        FRAME_END
        RET;
 SYM_FUNC_END(sm4_aesni_avx_cbc_dec_blk8)
-
-/*
- * void sm4_aesni_avx_cfb_dec_blk8(const u32 *rk, u8 *dst,
- *                                 const u8 *src, u8 *iv)
- */
-SYM_TYPED_FUNC_START(sm4_aesni_avx_cfb_dec_blk8)
-       /* input:
-        *      %rdi: round key array, CTX
-        *      %rsi: dst (8 blocks)
-        *      %rdx: src (8 blocks)
-        *      %rcx: iv
-        */
-       FRAME_BEGIN
-
-       /* Load input */
-       vmovdqu (%rcx), RA0;
-       vmovdqu 0 * 16(%rdx), RA1;
-       vmovdqu 1 * 16(%rdx), RA2;
-       vmovdqu 2 * 16(%rdx), RA3;
-       vmovdqu 3 * 16(%rdx), RB0;
-       vmovdqu 4 * 16(%rdx), RB1;
-       vmovdqu 5 * 16(%rdx), RB2;
-       vmovdqu 6 * 16(%rdx), RB3;
-
-       /* Update IV */
-       vmovdqu 7 * 16(%rdx), RNOT;
-       vmovdqu RNOT, (%rcx);
-
-       call __sm4_crypt_blk8;
-
-       vpxor (0 * 16)(%rdx), RA0, RA0;
-       vpxor (1 * 16)(%rdx), RA1, RA1;
-       vpxor (2 * 16)(%rdx), RA2, RA2;
-       vpxor (3 * 16)(%rdx), RA3, RA3;
-       vpxor (4 * 16)(%rdx), RB0, RB0;
-       vpxor (5 * 16)(%rdx), RB1, RB1;
-       vpxor (6 * 16)(%rdx), RB2, RB2;
-       vpxor (7 * 16)(%rdx), RB3, RB3;
-
-       vmovdqu RA0, (0 * 16)(%rsi);
-       vmovdqu RA1, (1 * 16)(%rsi);
-       vmovdqu RA2, (2 * 16)(%rsi);
-       vmovdqu RA3, (3 * 16)(%rsi);
-       vmovdqu RB0, (4 * 16)(%rsi);
-       vmovdqu RB1, (5 * 16)(%rsi);
-       vmovdqu RB2, (6 * 16)(%rsi);
-       vmovdqu RB3, (7 * 16)(%rsi);
-
-       vzeroall;
-       FRAME_END
-       RET;
-SYM_FUNC_END(sm4_aesni_avx_cfb_dec_blk8)
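
The removed 8-way routine above relies on CFB decryption having no serial dependency: all keystream inputs (the IV followed by ciphertext blocks 0..6) are known up front, so one parallel pass through the cipher core suffices, and ciphertext block 7 becomes the next IV. A rough C equivalent of that pattern, reusing the definitions from the sketch after the file list and a hypothetical 8-block encrypt callback in place of __sm4_crypt_blk8:

/* Hypothetical 8-blocks-at-once encrypt primitive (the role played by
 * __sm4_crypt_blk8 in the removed assembly). */
typedef void (*encrypt_blk8_fn)(const void *key, uint8_t *out,
				const uint8_t *in);

static void cfb_decrypt_blk8(encrypt_blk8_fn enc8, const void *key,
			     uint8_t *iv, uint8_t *dst, const uint8_t *src)
{
	uint8_t keystream[8 * BLOCK_SIZE];

	/* Keystream inputs: IV, then ciphertext blocks 0..6. */
	memcpy(keystream, iv, BLOCK_SIZE);
	memcpy(keystream + BLOCK_SIZE, src, 7 * BLOCK_SIZE);

	/* Next IV is ciphertext block 7; save it before any in-place XOR. */
	memcpy(iv, src + 7 * BLOCK_SIZE, BLOCK_SIZE);

	/* One parallel pass over all eight keystream blocks. */
	enc8(key, keystream, keystream);

	for (size_t i = 0; i < 8 * BLOCK_SIZE; i++)
		dst[i] = src[i] ^ keystream[i];
}

CFB encryption cannot be bulk-processed the same way, since each ciphertext block must be produced before the next keystream block can be computed; that is why the removed glue code paired an accelerated decrypt path with a purely scalar sm4_cfb_encrypt().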
diff --git a/arch/x86/crypto/sm4-aesni-avx2-asm_64.S b/arch/x86/crypto/sm4-aesni-avx2-asm_64.S
index 98ede94592877c4bb9f2b40614eb34f2055246aa..9ff5ba0755916b071b57f75b2c1f2bde24cc319f 100644
@@ -439,58 +439,3 @@ SYM_TYPED_FUNC_START(sm4_aesni_avx2_cbc_dec_blk16)
        FRAME_END
        RET;
 SYM_FUNC_END(sm4_aesni_avx2_cbc_dec_blk16)
-
-/*
- * void sm4_aesni_avx2_cfb_dec_blk16(const u32 *rk, u8 *dst,
- *                                   const u8 *src, u8 *iv)
- */
-SYM_TYPED_FUNC_START(sm4_aesni_avx2_cfb_dec_blk16)
-       /* input:
-        *      %rdi: round key array, CTX
-        *      %rsi: dst (16 blocks)
-        *      %rdx: src (16 blocks)
-        *      %rcx: iv
-        */
-       FRAME_BEGIN
-
-       vzeroupper;
-
-       /* Load input */
-       vmovdqu (%rcx), RNOTx;
-       vinserti128 $1, (%rdx), RNOT, RA0;
-       vmovdqu (0 * 32 + 16)(%rdx), RA1;
-       vmovdqu (1 * 32 + 16)(%rdx), RA2;
-       vmovdqu (2 * 32 + 16)(%rdx), RA3;
-       vmovdqu (3 * 32 + 16)(%rdx), RB0;
-       vmovdqu (4 * 32 + 16)(%rdx), RB1;
-       vmovdqu (5 * 32 + 16)(%rdx), RB2;
-       vmovdqu (6 * 32 + 16)(%rdx), RB3;
-
-       /* Update IV */
-       vmovdqu (7 * 32 + 16)(%rdx), RNOTx;
-       vmovdqu RNOTx, (%rcx);
-
-       call __sm4_crypt_blk16;
-
-       vpxor (0 * 32)(%rdx), RA0, RA0;
-       vpxor (1 * 32)(%rdx), RA1, RA1;
-       vpxor (2 * 32)(%rdx), RA2, RA2;
-       vpxor (3 * 32)(%rdx), RA3, RA3;
-       vpxor (4 * 32)(%rdx), RB0, RB0;
-       vpxor (5 * 32)(%rdx), RB1, RB1;
-       vpxor (6 * 32)(%rdx), RB2, RB2;
-       vpxor (7 * 32)(%rdx), RB3, RB3;
-
-       vmovdqu RA0, (0 * 32)(%rsi);
-       vmovdqu RA1, (1 * 32)(%rsi);
-       vmovdqu RA2, (2 * 32)(%rsi);
-       vmovdqu RA3, (3 * 32)(%rsi);
-       vmovdqu RB0, (4 * 32)(%rsi);
-       vmovdqu RB1, (5 * 32)(%rsi);
-       vmovdqu RB2, (6 * 32)(%rsi);
-       vmovdqu RB3, (7 * 32)(%rsi);
-
-       vzeroall;
-       FRAME_END
-       RET;
-SYM_FUNC_END(sm4_aesni_avx2_cfb_dec_blk16)
diff --git a/arch/x86/crypto/sm4-avx.h b/arch/x86/crypto/sm4-avx.h
index 1bceab7516aa1e5eb45191ba815435af9563b51f..b5b5e67e40edec4e276531d6887bc314d26de27b 100644
@@ -14,10 +14,6 @@ int sm4_cbc_encrypt(struct skcipher_request *req);
 int sm4_avx_cbc_decrypt(struct skcipher_request *req,
                        unsigned int bsize, sm4_crypt_func func);
 
-int sm4_cfb_encrypt(struct skcipher_request *req);
-int sm4_avx_cfb_decrypt(struct skcipher_request *req,
-                       unsigned int bsize, sm4_crypt_func func);
-
 int sm4_avx_ctr_crypt(struct skcipher_request *req,
                        unsigned int bsize, sm4_crypt_func func);
 
diff --git a/arch/x86/crypto/sm4_aesni_avx2_glue.c b/arch/x86/crypto/sm4_aesni_avx2_glue.c
index 84bc718f49a3d7107e9962742c8feee649b37352..1148fd4cd57f8e2a4a543304ed38871df394e0b4 100644
@@ -23,8 +23,6 @@ asmlinkage void sm4_aesni_avx2_ctr_enc_blk16(const u32 *rk, u8 *dst,
                                        const u8 *src, u8 *iv);
 asmlinkage void sm4_aesni_avx2_cbc_dec_blk16(const u32 *rk, u8 *dst,
                                        const u8 *src, u8 *iv);
-asmlinkage void sm4_aesni_avx2_cfb_dec_blk16(const u32 *rk, u8 *dst,
-                                       const u8 *src, u8 *iv);
 
 static int sm4_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
                        unsigned int key_len)
@@ -41,12 +39,6 @@ static int cbc_decrypt(struct skcipher_request *req)
 }
 
 
-static int cfb_decrypt(struct skcipher_request *req)
-{
-       return sm4_avx_cfb_decrypt(req, SM4_CRYPT16_BLOCK_SIZE,
-                               sm4_aesni_avx2_cfb_dec_blk16);
-}
-
 static int ctr_crypt(struct skcipher_request *req)
 {
        return sm4_avx_ctr_crypt(req, SM4_CRYPT16_BLOCK_SIZE,
@@ -87,24 +79,6 @@ static struct skcipher_alg sm4_aesni_avx2_skciphers[] = {
                .setkey         = sm4_skcipher_setkey,
                .encrypt        = sm4_cbc_encrypt,
                .decrypt        = cbc_decrypt,
-       }, {
-               .base = {
-                       .cra_name               = "__cfb(sm4)",
-                       .cra_driver_name        = "__cfb-sm4-aesni-avx2",
-                       .cra_priority           = 500,
-                       .cra_flags              = CRYPTO_ALG_INTERNAL,
-                       .cra_blocksize          = 1,
-                       .cra_ctxsize            = sizeof(struct sm4_ctx),
-                       .cra_module             = THIS_MODULE,
-               },
-               .min_keysize    = SM4_KEY_SIZE,
-               .max_keysize    = SM4_KEY_SIZE,
-               .ivsize         = SM4_BLOCK_SIZE,
-               .chunksize      = SM4_BLOCK_SIZE,
-               .walksize       = 16 * SM4_BLOCK_SIZE,
-               .setkey         = sm4_skcipher_setkey,
-               .encrypt        = sm4_cfb_encrypt,
-               .decrypt        = cfb_decrypt,
        }, {
                .base = {
                        .cra_name               = "__ctr(sm4)",
diff --git a/arch/x86/crypto/sm4_aesni_avx_glue.c b/arch/x86/crypto/sm4_aesni_avx_glue.c
index 7800f77d68add94c57f752ec5fcf03afee9d51b3..85b4ca78b47b575a77b71a3e55c5ed4b889945b2 100644
@@ -27,8 +27,6 @@ asmlinkage void sm4_aesni_avx_ctr_enc_blk8(const u32 *rk, u8 *dst,
                                const u8 *src, u8 *iv);
 asmlinkage void sm4_aesni_avx_cbc_dec_blk8(const u32 *rk, u8 *dst,
                                const u8 *src, u8 *iv);
-asmlinkage void sm4_aesni_avx_cfb_dec_blk8(const u32 *rk, u8 *dst,
-                               const u8 *src, u8 *iv);
 
 static int sm4_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
                        unsigned int key_len)
@@ -188,116 +186,6 @@ static int cbc_decrypt(struct skcipher_request *req)
                                sm4_aesni_avx_cbc_dec_blk8);
 }
 
-int sm4_cfb_encrypt(struct skcipher_request *req)
-{
-       struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
-       struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
-       struct skcipher_walk walk;
-       unsigned int nbytes;
-       int err;
-
-       err = skcipher_walk_virt(&walk, req, false);
-
-       while ((nbytes = walk.nbytes) > 0) {
-               u8 keystream[SM4_BLOCK_SIZE];
-               const u8 *iv = walk.iv;
-               const u8 *src = walk.src.virt.addr;
-               u8 *dst = walk.dst.virt.addr;
-
-               while (nbytes >= SM4_BLOCK_SIZE) {
-                       sm4_crypt_block(ctx->rkey_enc, keystream, iv);
-                       crypto_xor_cpy(dst, src, keystream, SM4_BLOCK_SIZE);
-                       iv = dst;
-                       src += SM4_BLOCK_SIZE;
-                       dst += SM4_BLOCK_SIZE;
-                       nbytes -= SM4_BLOCK_SIZE;
-               }
-               if (iv != walk.iv)
-                       memcpy(walk.iv, iv, SM4_BLOCK_SIZE);
-
-               /* tail */
-               if (walk.nbytes == walk.total && nbytes > 0) {
-                       sm4_crypt_block(ctx->rkey_enc, keystream, walk.iv);
-                       crypto_xor_cpy(dst, src, keystream, nbytes);
-                       nbytes = 0;
-               }
-
-               err = skcipher_walk_done(&walk, nbytes);
-       }
-
-       return err;
-}
-EXPORT_SYMBOL_GPL(sm4_cfb_encrypt);
-
-int sm4_avx_cfb_decrypt(struct skcipher_request *req,
-                       unsigned int bsize, sm4_crypt_func func)
-{
-       struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
-       struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
-       struct skcipher_walk walk;
-       unsigned int nbytes;
-       int err;
-
-       err = skcipher_walk_virt(&walk, req, false);
-
-       while ((nbytes = walk.nbytes) > 0) {
-               const u8 *src = walk.src.virt.addr;
-               u8 *dst = walk.dst.virt.addr;
-
-               kernel_fpu_begin();
-
-               while (nbytes >= bsize) {
-                       func(ctx->rkey_enc, dst, src, walk.iv);
-                       dst += bsize;
-                       src += bsize;
-                       nbytes -= bsize;
-               }
-
-               while (nbytes >= SM4_BLOCK_SIZE) {
-                       u8 keystream[SM4_BLOCK_SIZE * 8];
-                       unsigned int nblocks = min(nbytes >> 4, 8u);
-
-                       memcpy(keystream, walk.iv, SM4_BLOCK_SIZE);
-                       if (nblocks > 1)
-                               memcpy(&keystream[SM4_BLOCK_SIZE], src,
-                                       (nblocks - 1) * SM4_BLOCK_SIZE);
-                       memcpy(walk.iv, src + (nblocks - 1) * SM4_BLOCK_SIZE,
-                               SM4_BLOCK_SIZE);
-
-                       sm4_aesni_avx_crypt8(ctx->rkey_enc, keystream,
-                                               keystream, nblocks);
-
-                       crypto_xor_cpy(dst, src, keystream,
-                                       nblocks * SM4_BLOCK_SIZE);
-                       dst += nblocks * SM4_BLOCK_SIZE;
-                       src += nblocks * SM4_BLOCK_SIZE;
-                       nbytes -= nblocks * SM4_BLOCK_SIZE;
-               }
-
-               kernel_fpu_end();
-
-               /* tail */
-               if (walk.nbytes == walk.total && nbytes > 0) {
-                       u8 keystream[SM4_BLOCK_SIZE];
-
-                       sm4_crypt_block(ctx->rkey_enc, keystream, walk.iv);
-                       crypto_xor_cpy(dst, src, keystream, nbytes);
-                       nbytes = 0;
-               }
-
-               err = skcipher_walk_done(&walk, nbytes);
-       }
-
-       return err;
-}
-EXPORT_SYMBOL_GPL(sm4_avx_cfb_decrypt);
-
-static int cfb_decrypt(struct skcipher_request *req)
-{
-       return sm4_avx_cfb_decrypt(req, SM4_CRYPT8_BLOCK_SIZE,
-                               sm4_aesni_avx_cfb_dec_blk8);
-}
-
 int sm4_avx_ctr_crypt(struct skcipher_request *req,
                        unsigned int bsize, sm4_crypt_func func)
 {
@@ -406,24 +294,6 @@ static struct skcipher_alg sm4_aesni_avx_skciphers[] = {
                .setkey         = sm4_skcipher_setkey,
                .encrypt        = sm4_cbc_encrypt,
                .decrypt        = cbc_decrypt,
-       }, {
-               .base = {
-                       .cra_name               = "__cfb(sm4)",
-                       .cra_driver_name        = "__cfb-sm4-aesni-avx",
-                       .cra_priority           = 400,
-                       .cra_flags              = CRYPTO_ALG_INTERNAL,
-                       .cra_blocksize          = 1,
-                       .cra_ctxsize            = sizeof(struct sm4_ctx),
-                       .cra_module             = THIS_MODULE,
-               },
-               .min_keysize    = SM4_KEY_SIZE,
-               .max_keysize    = SM4_KEY_SIZE,
-               .ivsize         = SM4_BLOCK_SIZE,
-               .chunksize      = SM4_BLOCK_SIZE,
-               .walksize       = 8 * SM4_BLOCK_SIZE,
-               .setkey         = sm4_skcipher_setkey,
-               .encrypt        = sm4_cfb_encrypt,
-               .decrypt        = cfb_decrypt,
        }, {
                .base = {
                        .cra_name               = "__ctr(sm4)",