@@ -374,30 +374,3 @@ SYM_FUNC_START(twofish_cbc_dec_8way)
FRAME_END
ret;
SYM_FUNC_END(twofish_cbc_dec_8way)
-
-SYM_FUNC_START(twofish_ctr_8way)
- /* input:
- * %rdi: ctx, CTX
- * %rsi: dst
- * %rdx: src
- * %rcx: iv (little endian, 128bit)
- */
- FRAME_BEGIN
-
- pushq %r12;
-
- movq %rsi, %r11;
- movq %rdx, %r12;
-
- load_ctr_8way(%rcx, .Lbswap128_mask, RA1, RB1, RC1, RD1, RA2, RB2, RC2,
- RD2, RX0, RX1, RY0);
-
- call __twofish_enc_blk8;
-
- store_ctr_8way(%r12, %r11, RC1, RD1, RA1, RB1, RC2, RD2, RA2, RB2);
-
- popq %r12;
-
- FRAME_END
- ret;
-SYM_FUNC_END(twofish_ctr_8way)
@@ -25,8 +25,6 @@ asmlinkage void twofish_ecb_enc_8way(const void *ctx, u8 *dst, const u8 *src);
asmlinkage void twofish_ecb_dec_8way(const void *ctx, u8 *dst, const u8 *src);
asmlinkage void twofish_cbc_dec_8way(const void *ctx, u8 *dst, const u8 *src);
-asmlinkage void twofish_ctr_8way(const void *ctx, u8 *dst, const u8 *src,
- le128 *iv);
static int twofish_setkey_skcipher(struct crypto_skcipher *tfm,
const u8 *key, unsigned int keylen)
@@ -55,22 +53,6 @@ static const struct common_glue_ctx twofish_enc = {
} }
};
-static const struct common_glue_ctx twofish_ctr = {
- .num_funcs = 3,
- .fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,
-
- .funcs = { {
- .num_blocks = TWOFISH_PARALLEL_BLOCKS,
- .fn_u = { .ctr = twofish_ctr_8way }
- }, {
- .num_blocks = 3,
- .fn_u = { .ctr = twofish_enc_blk_ctr_3way }
- }, {
- .num_blocks = 1,
- .fn_u = { .ctr = twofish_enc_blk_ctr }
- } }
-};
-
static const struct common_glue_ctx twofish_dec = {
.num_funcs = 3,
.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,
@@ -123,11 +105,6 @@ static int cbc_decrypt(struct skcipher_request *req)
return glue_cbc_decrypt_req_128bit(&twofish_dec_cbc, req);
}
-static int ctr_crypt(struct skcipher_request *req)
-{
- return glue_ctr_req_128bit(&twofish_ctr, req);
-}
-
static struct skcipher_alg twofish_algs[] = {
{
.base.cra_name = "__ecb(twofish)",
@@ -156,21 +133,6 @@ static struct skcipher_alg twofish_algs[] = {
.setkey = twofish_setkey_skcipher,
.encrypt = cbc_encrypt,
.decrypt = cbc_decrypt,
- }, {
- .base.cra_name = "__ctr(twofish)",
- .base.cra_driver_name = "__ctr-twofish-avx",
- .base.cra_priority = 400,
- .base.cra_flags = CRYPTO_ALG_INTERNAL,
- .base.cra_blocksize = 1,
- .base.cra_ctxsize = sizeof(struct twofish_ctx),
- .base.cra_module = THIS_MODULE,
- .min_keysize = TF_MIN_KEY_SIZE,
- .max_keysize = TF_MAX_KEY_SIZE,
- .ivsize = TF_BLOCK_SIZE,
- .chunksize = TF_BLOCK_SIZE,
- .setkey = twofish_setkey_skcipher,
- .encrypt = ctr_crypt,
- .decrypt = ctr_crypt,
},
};
@@ -52,46 +52,6 @@ void twofish_dec_blk_cbc_3way(const void *ctx, u8 *d, const u8 *s)
}
EXPORT_SYMBOL_GPL(twofish_dec_blk_cbc_3way);
-void twofish_enc_blk_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv)
-{
- be128 ctrblk;
- u128 *dst = (u128 *)d;
- const u128 *src = (const u128 *)s;
-
- if (dst != src)
- *dst = *src;
-
- le128_to_be128(&ctrblk, iv);
- le128_inc(iv);
-
- twofish_enc_blk(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
- u128_xor(dst, dst, (u128 *)&ctrblk);
-}
-EXPORT_SYMBOL_GPL(twofish_enc_blk_ctr);
-
-void twofish_enc_blk_ctr_3way(const void *ctx, u8 *d, const u8 *s, le128 *iv)
-{
- be128 ctrblks[3];
- u128 *dst = (u128 *)d;
- const u128 *src = (const u128 *)s;
-
- if (dst != src) {
- dst[0] = src[0];
- dst[1] = src[1];
- dst[2] = src[2];
- }
-
- le128_to_be128(&ctrblks[0], iv);
- le128_inc(iv);
- le128_to_be128(&ctrblks[1], iv);
- le128_inc(iv);
- le128_to_be128(&ctrblks[2], iv);
- le128_inc(iv);
-
- twofish_enc_blk_xor_3way(ctx, (u8 *)dst, (u8 *)ctrblks);
-}
-EXPORT_SYMBOL_GPL(twofish_enc_blk_ctr_3way);
-
static const struct common_glue_ctx twofish_enc = {
.num_funcs = 2,
.fpu_blocks_limit = -1,
@@ -105,19 +65,6 @@ static const struct common_glue_ctx twofish_enc = {
} }
};
-static const struct common_glue_ctx twofish_ctr = {
- .num_funcs = 2,
- .fpu_blocks_limit = -1,
-
- .funcs = { {
- .num_blocks = 3,
- .fn_u = { .ctr = twofish_enc_blk_ctr_3way }
- }, {
- .num_blocks = 1,
- .fn_u = { .ctr = twofish_enc_blk_ctr }
- } }
-};
-
static const struct common_glue_ctx twofish_dec = {
.num_funcs = 2,
.fpu_blocks_limit = -1,
@@ -164,11 +111,6 @@ static int cbc_decrypt(struct skcipher_request *req)
return glue_cbc_decrypt_req_128bit(&twofish_dec_cbc, req);
}
-static int ctr_crypt(struct skcipher_request *req)
-{
- return glue_ctr_req_128bit(&twofish_ctr, req);
-}
-
static struct skcipher_alg tf_skciphers[] = {
{
.base.cra_name = "ecb(twofish)",
@@ -195,20 +137,6 @@ static struct skcipher_alg tf_skciphers[] = {
.setkey = twofish_setkey_skcipher,
.encrypt = cbc_encrypt,
.decrypt = cbc_decrypt,
- }, {
- .base.cra_name = "ctr(twofish)",
- .base.cra_driver_name = "ctr-twofish-3way",
- .base.cra_priority = 300,
- .base.cra_blocksize = 1,
- .base.cra_ctxsize = sizeof(struct twofish_ctx),
- .base.cra_module = THIS_MODULE,
- .min_keysize = TF_MIN_KEY_SIZE,
- .max_keysize = TF_MAX_KEY_SIZE,
- .ivsize = TF_BLOCK_SIZE,
- .chunksize = TF_BLOCK_SIZE,
- .setkey = twofish_setkey_skcipher,
- .encrypt = ctr_crypt,
- .decrypt = ctr_crypt,
},
};
@@ -17,9 +17,5 @@ asmlinkage void twofish_dec_blk_3way(const void *ctx, u8 *dst, const u8 *src);
/* helpers from twofish_x86_64-3way module */
extern void twofish_dec_blk_cbc_3way(const void *ctx, u8 *dst, const u8 *src);
-extern void twofish_enc_blk_ctr(const void *ctx, u8 *dst, const u8 *src,
- le128 *iv);
-extern void twofish_enc_blk_ctr_3way(const void *ctx, u8 *dst, const u8 *src,
- le128 *iv);
#endif /* ASM_X86_TWOFISH_H */
Twofish in CTR mode is never used by the kernel directly, and is highly
unlikely to be relied upon by dm-crypt or algif_skcipher. So let's drop
the accelerated CTR mode implementation, and instead, rely on the CTR
template and the bare cipher.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
---
 arch/x86/crypto/twofish-avx-x86_64-asm_64.S | 27 --------
 arch/x86/crypto/twofish_avx_glue.c          | 38 -----------
 arch/x86/crypto/twofish_glue_3way.c         | 72 --------------------
 arch/x86/include/asm/crypto/twofish.h       |  4 --
 4 files changed, 141 deletions(-)
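
For reference, a minimal sketch (not part of this patch) of how a kernel
user would still obtain ctr(twofish) after this change: the crypto core
instantiates it from the generic CTR template wrapping the bare twofish
cipher, so existing lookups by algorithm name keep working. The key, IV
and buffer values below are arbitrary illustration data, and the function
name is made up for the example:

	/*
	 * Sketch only: "ctr(twofish)" is now served by the generic CTR
	 * template plus the twofish cipher, not the dropped
	 * ctr-twofish-avx / ctr-twofish-3way drivers.
	 */
	#include <crypto/skcipher.h>
	#include <linux/crypto.h>
	#include <linux/err.h>
	#include <linux/scatterlist.h>

	static int twofish_ctr_demo(void)
	{
		u8 key[32] = { 0 };	/* 256-bit Twofish key (example) */
		u8 iv[16] = { 0 };	/* 128-bit counter block (example) */
		u8 buf[64] = { 0 };	/* in-place plaintext/ciphertext */
		struct crypto_skcipher *tfm;
		struct skcipher_request *req;
		struct scatterlist sg;
		DECLARE_CRYPTO_WAIT(wait);
		int err;

		/* Resolved via the CTR template + bare twofish cipher. */
		tfm = crypto_alloc_skcipher("ctr(twofish)", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		err = crypto_skcipher_setkey(tfm, key, sizeof(key));
		if (err)
			goto out_free_tfm;

		req = skcipher_request_alloc(tfm, GFP_KERNEL);
		if (!req) {
			err = -ENOMEM;
			goto out_free_tfm;
		}

		sg_init_one(&sg, buf, sizeof(buf));
		skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
					      CRYPTO_TFM_REQ_MAY_SLEEP,
					      crypto_req_done, &wait);
		skcipher_request_set_crypt(req, &sg, &sg, sizeof(buf), iv);

		/* CTR encryption and decryption are the same operation. */
		err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

		skcipher_request_free(req);
	out_free_tfm:
		crypto_free_skcipher(tfm);
		return err;
	}

Note that the generic CTR template drives the underlying cipher one block
at a time, so throughput will be lower than the removed 8-way AVX path;
per the rationale above, that trade-off is acceptable because nothing in
the kernel uses Twofish in CTR mode directly.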