crypto: x86/serpent-avx2 - remove LRW algorithm
author     Eric Biggers <ebiggers@google.com>
           Tue, 20 Feb 2018 07:48:04 +0000 (23:48 -0800)
committer  Herbert Xu <herbert@gondor.apana.org.au>
           Fri, 2 Mar 2018 16:03:20 +0000 (00:03 +0800)
The LRW template now wraps an ECB mode algorithm rather than the block
cipher directly.  Therefore it is now redundant for crypto modules to
wrap their ECB code with generic LRW code themselves via lrw_crypt().

Remove the lrw-serpent-avx2 algorithm, which did this.  Users who request
lrw(serpent) and previously would have gotten lrw-serpent-avx2 will now
get lrw(ecb-serpent-avx2) instead, which is just as fast.
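
For reference, a minimal sketch (not part of this patch) of how a kernel
crypto API user obtains LRW-Serpent after this change; the function name
alloc_lrw_serpent() is hypothetical, and the generic LRW template
instantiates itself on top of the ECB driver, so on AVX2-capable CPUs the
request resolves to lrw(ecb-serpent-avx2):

  #include <crypto/skcipher.h>

  /*
   * Illustration only: allocate the composed lrw(serpent) transform.
   * Template resolution and instantiation happen inside the allocation.
   */
  static struct crypto_skcipher *alloc_lrw_serpent(void)
  {
          /* Returns an ERR_PTR() on failure; callers must check IS_ERR(). */
          return crypto_alloc_skcipher("lrw(serpent)", 0, 0);
  }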

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
arch/x86/crypto/serpent_avx2_glue.c
crypto/Kconfig

diff --git a/arch/x86/crypto/serpent_avx2_glue.c b/arch/x86/crypto/serpent_avx2_glue.c
index 870f6d8..2bd0f04 100644
--- a/arch/x86/crypto/serpent_avx2_glue.c
+++ b/arch/x86/crypto/serpent_avx2_glue.c
@@ -168,122 +168,6 @@ static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
        return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);
 }
 
-static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
-{
-       /* since reusing AVX functions, starts using FPU at 8 parallel blocks */
-       return glue_fpu_begin(SERPENT_BLOCK_SIZE, 8, NULL, fpu_enabled, nbytes);
-}
-
-static inline void serpent_fpu_end(bool fpu_enabled)
-{
-       glue_fpu_end(fpu_enabled);
-}
-
-struct crypt_priv {
-       struct serpent_ctx *ctx;
-       bool fpu_enabled;
-};
-
-static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
-{
-       const unsigned int bsize = SERPENT_BLOCK_SIZE;
-       struct crypt_priv *ctx = priv;
-       int i;
-
-       ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);
-
-       if (nbytes >= SERPENT_AVX2_PARALLEL_BLOCKS * bsize) {
-               serpent_ecb_enc_16way(ctx->ctx, srcdst, srcdst);
-               srcdst += bsize * SERPENT_AVX2_PARALLEL_BLOCKS;
-               nbytes -= bsize * SERPENT_AVX2_PARALLEL_BLOCKS;
-       }
-
-       while (nbytes >= SERPENT_PARALLEL_BLOCKS * bsize) {
-               serpent_ecb_enc_8way_avx(ctx->ctx, srcdst, srcdst);
-               srcdst += bsize * SERPENT_PARALLEL_BLOCKS;
-               nbytes -= bsize * SERPENT_PARALLEL_BLOCKS;
-       }
-
-       for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
-               __serpent_encrypt(ctx->ctx, srcdst, srcdst);
-}
-
-static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
-{
-       const unsigned int bsize = SERPENT_BLOCK_SIZE;
-       struct crypt_priv *ctx = priv;
-       int i;
-
-       ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);
-
-       if (nbytes >= SERPENT_AVX2_PARALLEL_BLOCKS * bsize) {
-               serpent_ecb_dec_16way(ctx->ctx, srcdst, srcdst);
-               srcdst += bsize * SERPENT_AVX2_PARALLEL_BLOCKS;
-               nbytes -= bsize * SERPENT_AVX2_PARALLEL_BLOCKS;
-       }
-
-       while (nbytes >= SERPENT_PARALLEL_BLOCKS * bsize) {
-               serpent_ecb_dec_8way_avx(ctx->ctx, srcdst, srcdst);
-               srcdst += bsize * SERPENT_PARALLEL_BLOCKS;
-               nbytes -= bsize * SERPENT_PARALLEL_BLOCKS;
-       }
-
-       for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
-               __serpent_decrypt(ctx->ctx, srcdst, srcdst);
-}
-
-static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
-                      struct scatterlist *src, unsigned int nbytes)
-{
-       struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-       be128 buf[SERPENT_AVX2_PARALLEL_BLOCKS];
-       struct crypt_priv crypt_ctx = {
-               .ctx = &ctx->serpent_ctx,
-               .fpu_enabled = false,
-       };
-       struct lrw_crypt_req req = {
-               .tbuf = buf,
-               .tbuflen = sizeof(buf),
-
-               .table_ctx = &ctx->lrw_table,
-               .crypt_ctx = &crypt_ctx,
-               .crypt_fn = encrypt_callback,
-       };
-       int ret;
-
-       desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
-       ret = lrw_crypt(desc, dst, src, nbytes, &req);
-       serpent_fpu_end(crypt_ctx.fpu_enabled);
-
-       return ret;
-}
-
-static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
-                      struct scatterlist *src, unsigned int nbytes)
-{
-       struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-       be128 buf[SERPENT_AVX2_PARALLEL_BLOCKS];
-       struct crypt_priv crypt_ctx = {
-               .ctx = &ctx->serpent_ctx,
-               .fpu_enabled = false,
-       };
-       struct lrw_crypt_req req = {
-               .tbuf = buf,
-               .tbuflen = sizeof(buf),
-
-               .table_ctx = &ctx->lrw_table,
-               .crypt_ctx = &crypt_ctx,
-               .crypt_fn = decrypt_callback,
-       };
-       int ret;
-
-       desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
-       ret = lrw_crypt(desc, dst, src, nbytes, &req);
-       serpent_fpu_end(crypt_ctx.fpu_enabled);
-
-       return ret;
-}
-
 static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
 {
@@ -304,7 +188,7 @@ static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                                     &ctx->tweak_ctx, &ctx->crypt_ctx);
 }
 
-static struct crypto_alg srp_algs[10] = { {
+static struct crypto_alg srp_algs[] = { {
        .cra_name               = "__ecb-serpent-avx2",
        .cra_driver_name        = "__driver-ecb-serpent-avx2",
        .cra_priority           = 0,
@@ -315,7 +199,6 @@ static struct crypto_alg srp_algs[10] = { {
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
-       .cra_list               = LIST_HEAD_INIT(srp_algs[0].cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = SERPENT_MIN_KEY_SIZE,
@@ -336,7 +219,6 @@ static struct crypto_alg srp_algs[10] = { {
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
-       .cra_list               = LIST_HEAD_INIT(srp_algs[1].cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = SERPENT_MIN_KEY_SIZE,
@@ -357,7 +239,6 @@ static struct crypto_alg srp_algs[10] = { {
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
-       .cra_list               = LIST_HEAD_INIT(srp_algs[2].cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = SERPENT_MIN_KEY_SIZE,
@@ -368,31 +249,6 @@ static struct crypto_alg srp_algs[10] = { {
                        .decrypt        = ctr_crypt,
                },
        },
-}, {
-       .cra_name               = "__lrw-serpent-avx2",
-       .cra_driver_name        = "__driver-lrw-serpent-avx2",
-       .cra_priority           = 0,
-       .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
-                                 CRYPTO_ALG_INTERNAL,
-       .cra_blocksize          = SERPENT_BLOCK_SIZE,
-       .cra_ctxsize            = sizeof(struct serpent_lrw_ctx),
-       .cra_alignmask          = 0,
-       .cra_type               = &crypto_blkcipher_type,
-       .cra_module             = THIS_MODULE,
-       .cra_list               = LIST_HEAD_INIT(srp_algs[3].cra_list),
-       .cra_exit               = lrw_serpent_exit_tfm,
-       .cra_u = {
-               .blkcipher = {
-                       .min_keysize    = SERPENT_MIN_KEY_SIZE +
-                                         SERPENT_BLOCK_SIZE,
-                       .max_keysize    = SERPENT_MAX_KEY_SIZE +
-                                         SERPENT_BLOCK_SIZE,
-                       .ivsize         = SERPENT_BLOCK_SIZE,
-                       .setkey         = lrw_serpent_setkey,
-                       .encrypt        = lrw_encrypt,
-                       .decrypt        = lrw_decrypt,
-               },
-       },
 }, {
        .cra_name               = "__xts-serpent-avx2",
        .cra_driver_name        = "__driver-xts-serpent-avx2",
@@ -404,7 +260,6 @@ static struct crypto_alg srp_algs[10] = { {
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
-       .cra_list               = LIST_HEAD_INIT(srp_algs[4].cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = SERPENT_MIN_KEY_SIZE * 2,
@@ -425,7 +280,6 @@ static struct crypto_alg srp_algs[10] = { {
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
-       .cra_list               = LIST_HEAD_INIT(srp_algs[5].cra_list),
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
@@ -447,7 +301,6 @@ static struct crypto_alg srp_algs[10] = { {
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
-       .cra_list               = LIST_HEAD_INIT(srp_algs[6].cra_list),
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
@@ -470,7 +323,6 @@ static struct crypto_alg srp_algs[10] = { {
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
-       .cra_list               = LIST_HEAD_INIT(srp_algs[7].cra_list),
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
@@ -484,31 +336,6 @@ static struct crypto_alg srp_algs[10] = { {
                        .geniv          = "chainiv",
                },
        },
-}, {
-       .cra_name               = "lrw(serpent)",
-       .cra_driver_name        = "lrw-serpent-avx2",
-       .cra_priority           = 600,
-       .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-       .cra_blocksize          = SERPENT_BLOCK_SIZE,
-       .cra_ctxsize            = sizeof(struct async_helper_ctx),
-       .cra_alignmask          = 0,
-       .cra_type               = &crypto_ablkcipher_type,
-       .cra_module             = THIS_MODULE,
-       .cra_list               = LIST_HEAD_INIT(srp_algs[8].cra_list),
-       .cra_init               = ablk_init,
-       .cra_exit               = ablk_exit,
-       .cra_u = {
-               .ablkcipher = {
-                       .min_keysize    = SERPENT_MIN_KEY_SIZE +
-                                         SERPENT_BLOCK_SIZE,
-                       .max_keysize    = SERPENT_MAX_KEY_SIZE +
-                                         SERPENT_BLOCK_SIZE,
-                       .ivsize         = SERPENT_BLOCK_SIZE,
-                       .setkey         = ablk_set_key,
-                       .encrypt        = ablk_encrypt,
-                       .decrypt        = ablk_decrypt,
-               },
-       },
 }, {
        .cra_name               = "xts(serpent)",
        .cra_driver_name        = "xts-serpent-avx2",
@@ -519,7 +346,6 @@ static struct crypto_alg srp_algs[10] = { {
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
-       .cra_list               = LIST_HEAD_INIT(srp_algs[9].cra_list),
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
diff --git a/crypto/Kconfig b/crypto/Kconfig
index f6d3eec..fc733a5 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -1488,7 +1488,6 @@ config CRYPTO_SERPENT_AVX2_X86_64
        select CRYPTO_GLUE_HELPER_X86
        select CRYPTO_SERPENT
        select CRYPTO_SERPENT_AVX_X86_64
-       select CRYPTO_LRW
        select CRYPTO_XTS
        help
          Serpent cipher algorithm, by Anderson, Biham & Knudsen.