Merge tag 'riscv/for-v5.5-rc1' of git://git.kernel.org/pub/scm/linux/kernel/git/riscv...
diff --git a/drivers/crypto/atmel-aes.c b/drivers/crypto/atmel-aes.c
index 026f193..9109250 100644
--- a/drivers/crypto/atmel-aes.c
+++ b/drivers/crypto/atmel-aes.c
@@ -36,6 +36,7 @@
 #include <crypto/gcm.h>
 #include <crypto/xts.h>
 #include <crypto/internal/aead.h>
+#include <crypto/internal/skcipher.h>
 #include <linux/platform_data/crypto-atmel.h>
 #include <dt-bindings/dma/at91.h>
 #include "atmel-aes-regs.h"
@@ -117,7 +118,7 @@ struct atmel_aes_ctx {
 struct atmel_aes_ctr_ctx {
        struct atmel_aes_base_ctx       base;
 
-       u32                     iv[AES_BLOCK_SIZE / sizeof(u32)];
+       __be32                  iv[AES_BLOCK_SIZE / sizeof(u32)];
        size_t                  offset;
        struct scatterlist      src[2];
        struct scatterlist      dst[2];
@@ -129,13 +130,13 @@ struct atmel_aes_gcm_ctx {
        struct scatterlist      src[2];
        struct scatterlist      dst[2];
 
-       u32                     j0[AES_BLOCK_SIZE / sizeof(u32)];
+       __be32                  j0[AES_BLOCK_SIZE / sizeof(u32)];
        u32                     tag[AES_BLOCK_SIZE / sizeof(u32)];
-       u32                     ghash[AES_BLOCK_SIZE / sizeof(u32)];
+       __be32                  ghash[AES_BLOCK_SIZE / sizeof(u32)];
        size_t                  textlen;
 
-       const u32               *ghash_in;
-       u32                     *ghash_out;
+       const __be32            *ghash_in;
+       __be32                  *ghash_out;
        atmel_aes_fn_t          ghash_resume;
 };
 
@@ -145,7 +146,7 @@ struct atmel_aes_xts_ctx {
        u32                     key2[AES_KEYSIZE_256 / sizeof(u32)];
 };
 
-#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
+#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
 struct atmel_aes_authenc_ctx {
        struct atmel_aes_base_ctx       base;
        struct atmel_sha_authenc_ctx    *auth;
@@ -154,10 +155,10 @@ struct atmel_aes_authenc_ctx {
 
 struct atmel_aes_reqctx {
        unsigned long           mode;
-       u32                     lastc[AES_BLOCK_SIZE / sizeof(u32)];
+       u8                      lastc[AES_BLOCK_SIZE];
 };
 
-#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
+#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
 struct atmel_aes_authenc_reqctx {
        struct atmel_aes_reqctx base;
 
@@ -388,13 +389,13 @@ static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
 }
 
 static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset,
-                                       u32 *value)
+                                       void *value)
 {
        atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
 }
 
 static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset,
-                                        const u32 *value)
+                                        const void *value)
 {
        atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
 }
@@ -486,13 +487,36 @@ static inline bool atmel_aes_is_encrypt(const struct atmel_aes_dev *dd)
        return (dd->flags & AES_FLAGS_ENCRYPT);
 }
 
-#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
+#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
 static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err);
 #endif
 
+static void atmel_aes_set_iv_as_last_ciphertext_block(struct atmel_aes_dev *dd)
+{
+       struct skcipher_request *req = skcipher_request_cast(dd->areq);
+       struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
+       struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
+       unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
+
+       if (req->cryptlen < ivsize)
+               return;
+
+       if (rctx->mode & AES_FLAGS_ENCRYPT) {
+               scatterwalk_map_and_copy(req->iv, req->dst,
+                                        req->cryptlen - ivsize, ivsize, 0);
+       } else {
+               if (req->src == req->dst)
+                       memcpy(req->iv, rctx->lastc, ivsize);
+               else
+                       scatterwalk_map_and_copy(req->iv, req->src,
+                                                req->cryptlen - ivsize,
+                                                ivsize, 0);
+       }
+}
+
 static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
 {
-#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
+#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
        if (dd->ctx->is_aead)
                atmel_aes_authenc_complete(dd, err);
 #endif
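
The new atmel_aes_set_iv_as_last_ciphertext_block() helper exists because skcipher users expect req->iv to hold the chaining value (the last ciphertext block for CBC-like modes) once a request completes, so a follow-up request can continue the stream. A minimal caller sketch against the generic skcipher API illustrates that expectation; the function name, buffer sizes and synchronous crypto_wait_req() usage are illustrative assumptions, not part of this driver:

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

/* Illustrative only: encrypt 32 bytes in place with cbc(aes); on completion
 * the IV buffer holds the last ciphertext block, ready to chain a second
 * request. */
static int example_cbc_chain(const u8 *key, unsigned int keylen,
                             u8 *buf, u8 iv[16])
{
        struct crypto_skcipher *tfm;
        struct skcipher_request *req;
        struct scatterlist sg;
        DECLARE_CRYPTO_WAIT(wait);
        int err;

        tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_skcipher_setkey(tfm, key, keylen);
        if (err)
                goto out_free_tfm;

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                err = -ENOMEM;
                goto out_free_tfm;
        }

        sg_init_one(&sg, buf, 32);
        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
                                      CRYPTO_TFM_REQ_MAY_SLEEP,
                                      crypto_req_done, &wait);
        skcipher_request_set_crypt(req, &sg, &sg, 32, iv);

        err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
        /* On success, iv[] now contains the last ciphertext block. */

        skcipher_request_free(req);
out_free_tfm:
        crypto_free_skcipher(tfm);
        return err;
}
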
@@ -500,26 +524,8 @@ static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
        clk_disable(dd->iclk);
        dd->flags &= ~AES_FLAGS_BUSY;
 
-       if (!dd->ctx->is_aead) {
-               struct ablkcipher_request *req =
-                       ablkcipher_request_cast(dd->areq);
-               struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
-               struct crypto_ablkcipher *ablkcipher =
-                       crypto_ablkcipher_reqtfm(req);
-               int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
-
-               if (rctx->mode & AES_FLAGS_ENCRYPT) {
-                       scatterwalk_map_and_copy(req->info, req->dst,
-                               req->nbytes - ivsize, ivsize, 0);
-               } else {
-                       if (req->src == req->dst) {
-                               memcpy(req->info, rctx->lastc, ivsize);
-                       } else {
-                               scatterwalk_map_and_copy(req->info, req->src,
-                                       req->nbytes - ivsize, ivsize, 0);
-                       }
-               }
-       }
+       if (!dd->ctx->is_aead)
+               atmel_aes_set_iv_as_last_ciphertext_block(dd);
 
        if (dd->is_async)
                dd->areq->complete(dd->areq, err);
@@ -530,7 +536,7 @@ static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
 }
 
 static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma,
-                                    const u32 *iv, const u32 *key, int keylen)
+                                    const __be32 *iv, const u32 *key, int keylen)
 {
        u32 valmr = 0;
 
@@ -561,7 +567,7 @@ static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma,
 }
 
 static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma,
-                                       const u32 *iv)
+                                       const __be32 *iv)
 
 {
        atmel_aes_write_ctrl_key(dd, use_dma, iv,
@@ -976,9 +982,9 @@ static int atmel_aes_transfer_complete(struct atmel_aes_dev *dd)
 
 static int atmel_aes_start(struct atmel_aes_dev *dd)
 {
-       struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
-       struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
-       bool use_dma = (req->nbytes >= ATMEL_AES_DMA_THRESHOLD ||
+       struct skcipher_request *req = skcipher_request_cast(dd->areq);
+       struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
+       bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD ||
                        dd->ctx->block_size != AES_BLOCK_SIZE);
        int err;
 
@@ -988,12 +994,13 @@ static int atmel_aes_start(struct atmel_aes_dev *dd)
        if (err)
                return atmel_aes_complete(dd, err);
 
-       atmel_aes_write_ctrl(dd, use_dma, req->info);
+       atmel_aes_write_ctrl(dd, use_dma, (void *)req->iv);
        if (use_dma)
-               return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes,
+               return atmel_aes_dma_start(dd, req->src, req->dst,
+                                          req->cryptlen,
                                           atmel_aes_transfer_complete);
 
-       return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes,
+       return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen,
                                   atmel_aes_transfer_complete);
 }
 
@@ -1006,7 +1013,7 @@ atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
 static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
 {
        struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
-       struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
+       struct skcipher_request *req = skcipher_request_cast(dd->areq);
        struct scatterlist *src, *dst;
        u32 ctr, blocks;
        size_t datalen;
@@ -1014,11 +1021,11 @@ static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
 
        /* Check for transfer completion. */
        ctx->offset += dd->total;
-       if (ctx->offset >= req->nbytes)
+       if (ctx->offset >= req->cryptlen)
                return atmel_aes_transfer_complete(dd);
 
        /* Compute data length. */
-       datalen = req->nbytes - ctx->offset;
+       datalen = req->cryptlen - ctx->offset;
        blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
        ctr = be32_to_cpu(ctx->iv[3]);
        if (dd->caps.has_ctr32) {
@@ -1071,8 +1078,8 @@ static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
 static int atmel_aes_ctr_start(struct atmel_aes_dev *dd)
 {
        struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
-       struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
-       struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
+       struct skcipher_request *req = skcipher_request_cast(dd->areq);
+       struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
        int err;
 
        atmel_aes_set_mode(dd, rctx);
@@ -1081,16 +1088,16 @@ static int atmel_aes_ctr_start(struct atmel_aes_dev *dd)
        if (err)
                return atmel_aes_complete(dd, err);
 
-       memcpy(ctx->iv, req->info, AES_BLOCK_SIZE);
+       memcpy(ctx->iv, req->iv, AES_BLOCK_SIZE);
        ctx->offset = 0;
        dd->total = 0;
        return atmel_aes_ctr_transfer(dd);
 }
 
-static int atmel_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
+static int atmel_aes_crypt(struct skcipher_request *req, unsigned long mode)
 {
-       struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
-       struct atmel_aes_base_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
+       struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
+       struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(skcipher);
        struct atmel_aes_reqctx *rctx;
        struct atmel_aes_dev *dd;
 
@@ -1121,28 +1128,30 @@ static int atmel_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
        if (!dd)
                return -ENODEV;
 
-       rctx = ablkcipher_request_ctx(req);
+       rctx = skcipher_request_ctx(req);
        rctx->mode = mode;
 
        if (!(mode & AES_FLAGS_ENCRYPT) && (req->src == req->dst)) {
-               int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
+               unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
 
-               scatterwalk_map_and_copy(rctx->lastc, req->src,
-                       (req->nbytes - ivsize), ivsize, 0);
+               if (req->cryptlen >= ivsize)
+                       scatterwalk_map_and_copy(rctx->lastc, req->src,
+                                                req->cryptlen - ivsize,
+                                                ivsize, 0);
        }
 
        return atmel_aes_handle_queue(dd, &req->base);
 }
 
-static int atmel_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int atmel_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
                           unsigned int keylen)
 {
-       struct atmel_aes_base_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+       struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(tfm);
 
        if (keylen != AES_KEYSIZE_128 &&
            keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256) {
-               crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+               crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
 
@@ -1152,297 +1161,279 @@ static int atmel_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
        return 0;
 }
 
-static int atmel_aes_ecb_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_ecb_encrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
 }
 
-static int atmel_aes_ecb_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_ecb_decrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_ECB);
 }
 
-static int atmel_aes_cbc_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_cbc_encrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
 }
 
-static int atmel_aes_cbc_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_cbc_decrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_CBC);
 }
 
-static int atmel_aes_ofb_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_ofb_encrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_OFB | AES_FLAGS_ENCRYPT);
 }
 
-static int atmel_aes_ofb_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_ofb_decrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_OFB);
 }
 
-static int atmel_aes_cfb_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb_encrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_CFB128 | AES_FLAGS_ENCRYPT);
 }
 
-static int atmel_aes_cfb_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb_decrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_CFB128);
 }
 
-static int atmel_aes_cfb64_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb64_encrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_CFB64 | AES_FLAGS_ENCRYPT);
 }
 
-static int atmel_aes_cfb64_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb64_decrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_CFB64);
 }
 
-static int atmel_aes_cfb32_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb32_encrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_CFB32 | AES_FLAGS_ENCRYPT);
 }
 
-static int atmel_aes_cfb32_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb32_decrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_CFB32);
 }
 
-static int atmel_aes_cfb16_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb16_encrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_CFB16 | AES_FLAGS_ENCRYPT);
 }
 
-static int atmel_aes_cfb16_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb16_decrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_CFB16);
 }
 
-static int atmel_aes_cfb8_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb8_encrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_CFB8 | AES_FLAGS_ENCRYPT);
 }
 
-static int atmel_aes_cfb8_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_cfb8_decrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_CFB8);
 }
 
-static int atmel_aes_ctr_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_ctr_encrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_CTR | AES_FLAGS_ENCRYPT);
 }
 
-static int atmel_aes_ctr_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_ctr_decrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_CTR);
 }
 
-static int atmel_aes_cra_init(struct crypto_tfm *tfm)
+static int atmel_aes_init_tfm(struct crypto_skcipher *tfm)
 {
-       struct atmel_aes_ctx *ctx = crypto_tfm_ctx(tfm);
+       struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-       tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
+       crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
        ctx->base.start = atmel_aes_start;
 
        return 0;
 }
 
-static int atmel_aes_ctr_cra_init(struct crypto_tfm *tfm)
+static int atmel_aes_ctr_init_tfm(struct crypto_skcipher *tfm)
 {
-       struct atmel_aes_ctx *ctx = crypto_tfm_ctx(tfm);
+       struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-       tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
+       crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
        ctx->base.start = atmel_aes_ctr_start;
 
        return 0;
 }
 
-static struct crypto_alg aes_algs[] = {
-{
-       .cra_name               = "ecb(aes)",
-       .cra_driver_name        = "atmel-ecb-aes",
-       .cra_priority           = ATMEL_AES_PRIORITY,
-       .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-       .cra_blocksize          = AES_BLOCK_SIZE,
-       .cra_ctxsize            = sizeof(struct atmel_aes_ctx),
-       .cra_alignmask          = 0xf,
-       .cra_type               = &crypto_ablkcipher_type,
-       .cra_module             = THIS_MODULE,
-       .cra_init               = atmel_aes_cra_init,
-       .cra_u.ablkcipher = {
-               .min_keysize    = AES_MIN_KEY_SIZE,
-               .max_keysize    = AES_MAX_KEY_SIZE,
-               .setkey         = atmel_aes_setkey,
-               .encrypt        = atmel_aes_ecb_encrypt,
-               .decrypt        = atmel_aes_ecb_decrypt,
-       }
+static struct skcipher_alg aes_algs[] = {
+{
+       .base.cra_name          = "ecb(aes)",
+       .base.cra_driver_name   = "atmel-ecb-aes",
+       .base.cra_priority      = ATMEL_AES_PRIORITY,
+       .base.cra_flags         = CRYPTO_ALG_ASYNC,
+       .base.cra_blocksize     = AES_BLOCK_SIZE,
+       .base.cra_ctxsize       = sizeof(struct atmel_aes_ctx),
+       .base.cra_alignmask     = 0xf,
+       .base.cra_module        = THIS_MODULE,
+
+       .init                   = atmel_aes_init_tfm,
+       .min_keysize            = AES_MIN_KEY_SIZE,
+       .max_keysize            = AES_MAX_KEY_SIZE,
+       .setkey                 = atmel_aes_setkey,
+       .encrypt                = atmel_aes_ecb_encrypt,
+       .decrypt                = atmel_aes_ecb_decrypt,
 },
 {
-       .cra_name               = "cbc(aes)",
-       .cra_driver_name        = "atmel-cbc-aes",
-       .cra_priority           = ATMEL_AES_PRIORITY,
-       .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-       .cra_blocksize          = AES_BLOCK_SIZE,
-       .cra_ctxsize            = sizeof(struct atmel_aes_ctx),
-       .cra_alignmask          = 0xf,
-       .cra_type               = &crypto_ablkcipher_type,
-       .cra_module             = THIS_MODULE,
-       .cra_init               = atmel_aes_cra_init,
-       .cra_u.ablkcipher = {
-               .min_keysize    = AES_MIN_KEY_SIZE,
-               .max_keysize    = AES_MAX_KEY_SIZE,
-               .ivsize         = AES_BLOCK_SIZE,
-               .setkey         = atmel_aes_setkey,
-               .encrypt        = atmel_aes_cbc_encrypt,
-               .decrypt        = atmel_aes_cbc_decrypt,
-       }
+       .base.cra_name          = "cbc(aes)",
+       .base.cra_driver_name   = "atmel-cbc-aes",
+       .base.cra_priority      = ATMEL_AES_PRIORITY,
+       .base.cra_flags         = CRYPTO_ALG_ASYNC,
+       .base.cra_blocksize     = AES_BLOCK_SIZE,
+       .base.cra_ctxsize       = sizeof(struct atmel_aes_ctx),
+       .base.cra_alignmask     = 0xf,
+       .base.cra_module        = THIS_MODULE,
+
+       .init                   = atmel_aes_init_tfm,
+       .min_keysize            = AES_MIN_KEY_SIZE,
+       .max_keysize            = AES_MAX_KEY_SIZE,
+       .setkey                 = atmel_aes_setkey,
+       .encrypt                = atmel_aes_cbc_encrypt,
+       .decrypt                = atmel_aes_cbc_decrypt,
+       .ivsize                 = AES_BLOCK_SIZE,
 },
 {
-       .cra_name               = "ofb(aes)",
-       .cra_driver_name        = "atmel-ofb-aes",
-       .cra_priority           = ATMEL_AES_PRIORITY,
-       .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-       .cra_blocksize          = AES_BLOCK_SIZE,
-       .cra_ctxsize            = sizeof(struct atmel_aes_ctx),
-       .cra_alignmask          = 0xf,
-       .cra_type               = &crypto_ablkcipher_type,
-       .cra_module             = THIS_MODULE,
-       .cra_init               = atmel_aes_cra_init,
-       .cra_u.ablkcipher = {
-               .min_keysize    = AES_MIN_KEY_SIZE,
-               .max_keysize    = AES_MAX_KEY_SIZE,
-               .ivsize         = AES_BLOCK_SIZE,
-               .setkey         = atmel_aes_setkey,
-               .encrypt        = atmel_aes_ofb_encrypt,
-               .decrypt        = atmel_aes_ofb_decrypt,
-       }
+       .base.cra_name          = "ofb(aes)",
+       .base.cra_driver_name   = "atmel-ofb-aes",
+       .base.cra_priority      = ATMEL_AES_PRIORITY,
+       .base.cra_flags         = CRYPTO_ALG_ASYNC,
+       .base.cra_blocksize     = AES_BLOCK_SIZE,
+       .base.cra_ctxsize       = sizeof(struct atmel_aes_ctx),
+       .base.cra_alignmask     = 0xf,
+       .base.cra_module        = THIS_MODULE,
+
+       .init                   = atmel_aes_init_tfm,
+       .min_keysize            = AES_MIN_KEY_SIZE,
+       .max_keysize            = AES_MAX_KEY_SIZE,
+       .setkey                 = atmel_aes_setkey,
+       .encrypt                = atmel_aes_ofb_encrypt,
+       .decrypt                = atmel_aes_ofb_decrypt,
+       .ivsize                 = AES_BLOCK_SIZE,
 },
 {
-       .cra_name               = "cfb(aes)",
-       .cra_driver_name        = "atmel-cfb-aes",
-       .cra_priority           = ATMEL_AES_PRIORITY,
-       .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-       .cra_blocksize          = AES_BLOCK_SIZE,
-       .cra_ctxsize            = sizeof(struct atmel_aes_ctx),
-       .cra_alignmask          = 0xf,
-       .cra_type               = &crypto_ablkcipher_type,
-       .cra_module             = THIS_MODULE,
-       .cra_init               = atmel_aes_cra_init,
-       .cra_u.ablkcipher = {
-               .min_keysize    = AES_MIN_KEY_SIZE,
-               .max_keysize    = AES_MAX_KEY_SIZE,
-               .ivsize         = AES_BLOCK_SIZE,
-               .setkey         = atmel_aes_setkey,
-               .encrypt        = atmel_aes_cfb_encrypt,
-               .decrypt        = atmel_aes_cfb_decrypt,
-       }
+       .base.cra_name          = "cfb(aes)",
+       .base.cra_driver_name   = "atmel-cfb-aes",
+       .base.cra_priority      = ATMEL_AES_PRIORITY,
+       .base.cra_flags         = CRYPTO_ALG_ASYNC,
+       .base.cra_blocksize     = AES_BLOCK_SIZE,
+       .base.cra_ctxsize       = sizeof(struct atmel_aes_ctx),
+       .base.cra_alignmask     = 0xf,
+       .base.cra_module        = THIS_MODULE,
+
+       .init                   = atmel_aes_init_tfm,
+       .min_keysize            = AES_MIN_KEY_SIZE,
+       .max_keysize            = AES_MAX_KEY_SIZE,
+       .setkey                 = atmel_aes_setkey,
+       .encrypt                = atmel_aes_cfb_encrypt,
+       .decrypt                = atmel_aes_cfb_decrypt,
+       .ivsize                 = AES_BLOCK_SIZE,
 },
 {
-       .cra_name               = "cfb32(aes)",
-       .cra_driver_name        = "atmel-cfb32-aes",
-       .cra_priority           = ATMEL_AES_PRIORITY,
-       .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-       .cra_blocksize          = CFB32_BLOCK_SIZE,
-       .cra_ctxsize            = sizeof(struct atmel_aes_ctx),
-       .cra_alignmask          = 0x3,
-       .cra_type               = &crypto_ablkcipher_type,
-       .cra_module             = THIS_MODULE,
-       .cra_init               = atmel_aes_cra_init,
-       .cra_u.ablkcipher = {
-               .min_keysize    = AES_MIN_KEY_SIZE,
-               .max_keysize    = AES_MAX_KEY_SIZE,
-               .ivsize         = AES_BLOCK_SIZE,
-               .setkey         = atmel_aes_setkey,
-               .encrypt        = atmel_aes_cfb32_encrypt,
-               .decrypt        = atmel_aes_cfb32_decrypt,
-       }
+       .base.cra_name          = "cfb32(aes)",
+       .base.cra_driver_name   = "atmel-cfb32-aes",
+       .base.cra_priority      = ATMEL_AES_PRIORITY,
+       .base.cra_flags         = CRYPTO_ALG_ASYNC,
+       .base.cra_blocksize     = CFB32_BLOCK_SIZE,
+       .base.cra_ctxsize       = sizeof(struct atmel_aes_ctx),
+       .base.cra_alignmask     = 0x3,
+       .base.cra_module        = THIS_MODULE,
+
+       .init                   = atmel_aes_init_tfm,
+       .min_keysize            = AES_MIN_KEY_SIZE,
+       .max_keysize            = AES_MAX_KEY_SIZE,
+       .setkey                 = atmel_aes_setkey,
+       .encrypt                = atmel_aes_cfb32_encrypt,
+       .decrypt                = atmel_aes_cfb32_decrypt,
+       .ivsize                 = AES_BLOCK_SIZE,
 },
 {
-       .cra_name               = "cfb16(aes)",
-       .cra_driver_name        = "atmel-cfb16-aes",
-       .cra_priority           = ATMEL_AES_PRIORITY,
-       .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-       .cra_blocksize          = CFB16_BLOCK_SIZE,
-       .cra_ctxsize            = sizeof(struct atmel_aes_ctx),
-       .cra_alignmask          = 0x1,
-       .cra_type               = &crypto_ablkcipher_type,
-       .cra_module             = THIS_MODULE,
-       .cra_init               = atmel_aes_cra_init,
-       .cra_u.ablkcipher = {
-               .min_keysize    = AES_MIN_KEY_SIZE,
-               .max_keysize    = AES_MAX_KEY_SIZE,
-               .ivsize         = AES_BLOCK_SIZE,
-               .setkey         = atmel_aes_setkey,
-               .encrypt        = atmel_aes_cfb16_encrypt,
-               .decrypt        = atmel_aes_cfb16_decrypt,
-       }
+       .base.cra_name          = "cfb16(aes)",
+       .base.cra_driver_name   = "atmel-cfb16-aes",
+       .base.cra_priority      = ATMEL_AES_PRIORITY,
+       .base.cra_flags         = CRYPTO_ALG_ASYNC,
+       .base.cra_blocksize     = CFB16_BLOCK_SIZE,
+       .base.cra_ctxsize       = sizeof(struct atmel_aes_ctx),
+       .base.cra_alignmask     = 0x1,
+       .base.cra_module        = THIS_MODULE,
+
+       .init                   = atmel_aes_init_tfm,
+       .min_keysize            = AES_MIN_KEY_SIZE,
+       .max_keysize            = AES_MAX_KEY_SIZE,
+       .setkey                 = atmel_aes_setkey,
+       .encrypt                = atmel_aes_cfb16_encrypt,
+       .decrypt                = atmel_aes_cfb16_decrypt,
+       .ivsize                 = AES_BLOCK_SIZE,
 },
 {
-       .cra_name               = "cfb8(aes)",
-       .cra_driver_name        = "atmel-cfb8-aes",
-       .cra_priority           = ATMEL_AES_PRIORITY,
-       .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-       .cra_blocksize          = CFB8_BLOCK_SIZE,
-       .cra_ctxsize            = sizeof(struct atmel_aes_ctx),
-       .cra_alignmask          = 0x0,
-       .cra_type               = &crypto_ablkcipher_type,
-       .cra_module             = THIS_MODULE,
-       .cra_init               = atmel_aes_cra_init,
-       .cra_u.ablkcipher = {
-               .min_keysize    = AES_MIN_KEY_SIZE,
-               .max_keysize    = AES_MAX_KEY_SIZE,
-               .ivsize         = AES_BLOCK_SIZE,
-               .setkey         = atmel_aes_setkey,
-               .encrypt        = atmel_aes_cfb8_encrypt,
-               .decrypt        = atmel_aes_cfb8_decrypt,
-       }
+       .base.cra_name          = "cfb8(aes)",
+       .base.cra_driver_name   = "atmel-cfb8-aes",
+       .base.cra_priority      = ATMEL_AES_PRIORITY,
+       .base.cra_flags         = CRYPTO_ALG_ASYNC,
+       .base.cra_blocksize     = CFB8_BLOCK_SIZE,
+       .base.cra_ctxsize       = sizeof(struct atmel_aes_ctx),
+       .base.cra_alignmask     = 0x0,
+       .base.cra_module        = THIS_MODULE,
+
+       .init                   = atmel_aes_init_tfm,
+       .min_keysize            = AES_MIN_KEY_SIZE,
+       .max_keysize            = AES_MAX_KEY_SIZE,
+       .setkey                 = atmel_aes_setkey,
+       .encrypt                = atmel_aes_cfb8_encrypt,
+       .decrypt                = atmel_aes_cfb8_decrypt,
+       .ivsize                 = AES_BLOCK_SIZE,
 },
 {
-       .cra_name               = "ctr(aes)",
-       .cra_driver_name        = "atmel-ctr-aes",
-       .cra_priority           = ATMEL_AES_PRIORITY,
-       .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-       .cra_blocksize          = 1,
-       .cra_ctxsize            = sizeof(struct atmel_aes_ctr_ctx),
-       .cra_alignmask          = 0xf,
-       .cra_type               = &crypto_ablkcipher_type,
-       .cra_module             = THIS_MODULE,
-       .cra_init               = atmel_aes_ctr_cra_init,
-       .cra_u.ablkcipher = {
-               .min_keysize    = AES_MIN_KEY_SIZE,
-               .max_keysize    = AES_MAX_KEY_SIZE,
-               .ivsize         = AES_BLOCK_SIZE,
-               .setkey         = atmel_aes_setkey,
-               .encrypt        = atmel_aes_ctr_encrypt,
-               .decrypt        = atmel_aes_ctr_decrypt,
-       }
+       .base.cra_name          = "ctr(aes)",
+       .base.cra_driver_name   = "atmel-ctr-aes",
+       .base.cra_priority      = ATMEL_AES_PRIORITY,
+       .base.cra_flags         = CRYPTO_ALG_ASYNC,
+       .base.cra_blocksize     = 1,
+       .base.cra_ctxsize       = sizeof(struct atmel_aes_ctr_ctx),
+       .base.cra_alignmask     = 0xf,
+       .base.cra_module        = THIS_MODULE,
+
+       .init                   = atmel_aes_ctr_init_tfm,
+       .min_keysize            = AES_MIN_KEY_SIZE,
+       .max_keysize            = AES_MAX_KEY_SIZE,
+       .setkey                 = atmel_aes_setkey,
+       .encrypt                = atmel_aes_ctr_encrypt,
+       .decrypt                = atmel_aes_ctr_decrypt,
+       .ivsize                 = AES_BLOCK_SIZE,
 },
 };
 
-static struct crypto_alg aes_cfb64_alg = {
-       .cra_name               = "cfb64(aes)",
-       .cra_driver_name        = "atmel-cfb64-aes",
-       .cra_priority           = ATMEL_AES_PRIORITY,
-       .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-       .cra_blocksize          = CFB64_BLOCK_SIZE,
-       .cra_ctxsize            = sizeof(struct atmel_aes_ctx),
-       .cra_alignmask          = 0x7,
-       .cra_type               = &crypto_ablkcipher_type,
-       .cra_module             = THIS_MODULE,
-       .cra_init               = atmel_aes_cra_init,
-       .cra_u.ablkcipher = {
-               .min_keysize    = AES_MIN_KEY_SIZE,
-               .max_keysize    = AES_MAX_KEY_SIZE,
-               .ivsize         = AES_BLOCK_SIZE,
-               .setkey         = atmel_aes_setkey,
-               .encrypt        = atmel_aes_cfb64_encrypt,
-               .decrypt        = atmel_aes_cfb64_decrypt,
-       }
+static struct skcipher_alg aes_cfb64_alg = {
+       .base.cra_name          = "cfb64(aes)",
+       .base.cra_driver_name   = "atmel-cfb64-aes",
+       .base.cra_priority      = ATMEL_AES_PRIORITY,
+       .base.cra_flags         = CRYPTO_ALG_ASYNC,
+       .base.cra_blocksize     = CFB64_BLOCK_SIZE,
+       .base.cra_ctxsize       = sizeof(struct atmel_aes_ctx),
+       .base.cra_alignmask     = 0x7,
+       .base.cra_module        = THIS_MODULE,
+
+       .init                   = atmel_aes_init_tfm,
+       .min_keysize            = AES_MIN_KEY_SIZE,
+       .max_keysize            = AES_MAX_KEY_SIZE,
+       .setkey                 = atmel_aes_setkey,
+       .encrypt                = atmel_aes_cfb64_encrypt,
+       .decrypt                = atmel_aes_cfb64_decrypt,
+       .ivsize                 = AES_BLOCK_SIZE,
 };
 
 
@@ -1450,7 +1441,7 @@ static struct crypto_alg aes_cfb64_alg = {
 
 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
                               const u32 *data, size_t datalen,
-                              const u32 *ghash_in, u32 *ghash_out,
+                              const __be32 *ghash_in, __be32 *ghash_out,
                               atmel_aes_fn_t resume);
 static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
 static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);
@@ -1471,7 +1462,7 @@ atmel_aes_gcm_ctx_cast(struct atmel_aes_base_ctx *ctx)
 
 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
                               const u32 *data, size_t datalen,
-                              const u32 *ghash_in, u32 *ghash_out,
+                              const __be32 *ghash_in, __be32 *ghash_out,
                               atmel_aes_fn_t resume)
 {
        struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
@@ -1558,7 +1549,7 @@ static int atmel_aes_gcm_start(struct atmel_aes_dev *dd)
 
        memcpy(data, iv, ivsize);
        memset(data + ivsize, 0, padlen + sizeof(u64));
-       ((u64 *)(data + datalen))[-1] = cpu_to_be64(ivsize * 8);
+       ((__be64 *)(data + datalen))[-1] = cpu_to_be64(ivsize * 8);
 
        return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen,
                                   NULL, ctx->j0, atmel_aes_gcm_process);
@@ -1591,7 +1582,7 @@ static int atmel_aes_gcm_length(struct atmel_aes_dev *dd)
 {
        struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
        struct aead_request *req = aead_request_cast(dd->areq);
-       u32 j0_lsw, *j0 = ctx->j0;
+       __be32 j0_lsw, *j0 = ctx->j0;
        size_t padlen;
 
        /* Write incr32(J0) into IV. */
@@ -1674,7 +1665,7 @@ static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd)
 {
        struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
        struct aead_request *req = aead_request_cast(dd->areq);
-       u64 *data = dd->buf;
+       __be64 *data = dd->buf;
 
        if (likely(dd->flags & AES_FLAGS_GTAGEN)) {
                if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) {
@@ -1857,8 +1848,8 @@ static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd);
 static int atmel_aes_xts_start(struct atmel_aes_dev *dd)
 {
        struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx);
-       struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
-       struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
+       struct skcipher_request *req = skcipher_request_cast(dd->areq);
+       struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
        unsigned long flags;
        int err;
 
@@ -1868,7 +1859,7 @@ static int atmel_aes_xts_start(struct atmel_aes_dev *dd)
        if (err)
                return atmel_aes_complete(dd, err);
 
-       /* Compute the tweak value from req->info with ecb(aes). */
+       /* Compute the tweak value from req->iv with ecb(aes). */
        flags = dd->flags;
        dd->flags &= ~AES_FLAGS_MODE_MASK;
        dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
@@ -1876,16 +1867,16 @@ static int atmel_aes_xts_start(struct atmel_aes_dev *dd)
                                 ctx->key2, ctx->base.keylen);
        dd->flags = flags;
 
-       atmel_aes_write_block(dd, AES_IDATAR(0), req->info);
+       atmel_aes_write_block(dd, AES_IDATAR(0), req->iv);
        return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data);
 }
 
 static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd)
 {
-       struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
-       bool use_dma = (req->nbytes >= ATMEL_AES_DMA_THRESHOLD);
+       struct skcipher_request *req = skcipher_request_cast(dd->areq);
+       bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD);
        u32 tweak[AES_BLOCK_SIZE / sizeof(u32)];
-       static const u32 one[AES_BLOCK_SIZE / sizeof(u32)] = {cpu_to_le32(1), };
+       static const __le32 one[AES_BLOCK_SIZE / sizeof(u32)] = {cpu_to_le32(1), };
        u8 *tweak_bytes = (u8 *)tweak;
        int i;
 
@@ -1908,20 +1899,21 @@ static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd)
        atmel_aes_write_block(dd, AES_TWR(0), tweak);
        atmel_aes_write_block(dd, AES_ALPHAR(0), one);
        if (use_dma)
-               return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes,
+               return atmel_aes_dma_start(dd, req->src, req->dst,
+                                          req->cryptlen,
                                           atmel_aes_transfer_complete);
 
-       return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes,
+       return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen,
                                   atmel_aes_transfer_complete);
 }
 
-static int atmel_aes_xts_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int atmel_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
                                unsigned int keylen)
 {
-       struct atmel_aes_xts_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+       struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err;
 
-       err = xts_check_key(crypto_ablkcipher_tfm(tfm), key, keylen);
+       err = xts_check_key(crypto_skcipher_tfm(tfm), key, keylen);
        if (err)
                return err;
 
@@ -1932,48 +1924,46 @@ static int atmel_aes_xts_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
        return 0;
 }
 
-static int atmel_aes_xts_encrypt(struct ablkcipher_request *req)
+static int atmel_aes_xts_encrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_XTS | AES_FLAGS_ENCRYPT);
 }
 
-static int atmel_aes_xts_decrypt(struct ablkcipher_request *req)
+static int atmel_aes_xts_decrypt(struct skcipher_request *req)
 {
        return atmel_aes_crypt(req, AES_FLAGS_XTS);
 }
 
-static int atmel_aes_xts_cra_init(struct crypto_tfm *tfm)
+static int atmel_aes_xts_init_tfm(struct crypto_skcipher *tfm)
 {
-       struct atmel_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
+       struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-       tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
+       crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
        ctx->base.start = atmel_aes_xts_start;
 
        return 0;
 }
 
-static struct crypto_alg aes_xts_alg = {
-       .cra_name               = "xts(aes)",
-       .cra_driver_name        = "atmel-xts-aes",
-       .cra_priority           = ATMEL_AES_PRIORITY,
-       .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
-       .cra_blocksize          = AES_BLOCK_SIZE,
-       .cra_ctxsize            = sizeof(struct atmel_aes_xts_ctx),
-       .cra_alignmask          = 0xf,
-       .cra_type               = &crypto_ablkcipher_type,
-       .cra_module             = THIS_MODULE,
-       .cra_init               = atmel_aes_xts_cra_init,
-       .cra_u.ablkcipher = {
-               .min_keysize    = 2 * AES_MIN_KEY_SIZE,
-               .max_keysize    = 2 * AES_MAX_KEY_SIZE,
-               .ivsize         = AES_BLOCK_SIZE,
-               .setkey         = atmel_aes_xts_setkey,
-               .encrypt        = atmel_aes_xts_encrypt,
-               .decrypt        = atmel_aes_xts_decrypt,
-       }
+static struct skcipher_alg aes_xts_alg = {
+       .base.cra_name          = "xts(aes)",
+       .base.cra_driver_name   = "atmel-xts-aes",
+       .base.cra_priority      = ATMEL_AES_PRIORITY,
+       .base.cra_flags         = CRYPTO_ALG_ASYNC,
+       .base.cra_blocksize     = AES_BLOCK_SIZE,
+       .base.cra_ctxsize       = sizeof(struct atmel_aes_xts_ctx),
+       .base.cra_alignmask     = 0xf,
+       .base.cra_module        = THIS_MODULE,
+
+       .min_keysize            = 2 * AES_MIN_KEY_SIZE,
+       .max_keysize            = 2 * AES_MAX_KEY_SIZE,
+       .ivsize                 = AES_BLOCK_SIZE,
+       .setkey                 = atmel_aes_xts_setkey,
+       .encrypt                = atmel_aes_xts_encrypt,
+       .decrypt                = atmel_aes_xts_decrypt,
+       .init                   = atmel_aes_xts_init_tfm,
 };
 
-#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
+#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
 /* authenc aead functions */
 
 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd);
@@ -2041,7 +2031,7 @@ static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
        struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
        bool enc = atmel_aes_is_encrypt(dd);
        struct scatterlist *src, *dst;
-       u32 iv[AES_BLOCK_SIZE / sizeof(u32)];
+       __be32 iv[AES_BLOCK_SIZE / sizeof(u32)];
        u32 emr;
 
        if (is_async)
@@ -2460,23 +2450,23 @@ static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd)
 {
        int i;
 
-#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
+#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
        if (dd->caps.has_authenc)
                for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++)
                        crypto_unregister_aead(&aes_authenc_algs[i]);
 #endif
 
        if (dd->caps.has_xts)
-               crypto_unregister_alg(&aes_xts_alg);
+               crypto_unregister_skcipher(&aes_xts_alg);
 
        if (dd->caps.has_gcm)
                crypto_unregister_aead(&aes_gcm_alg);
 
        if (dd->caps.has_cfb64)
-               crypto_unregister_alg(&aes_cfb64_alg);
+               crypto_unregister_skcipher(&aes_cfb64_alg);
 
        for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
-               crypto_unregister_alg(&aes_algs[i]);
+               crypto_unregister_skcipher(&aes_algs[i]);
 }
 
 static int atmel_aes_register_algs(struct atmel_aes_dev *dd)
@@ -2484,13 +2474,13 @@ static int atmel_aes_register_algs(struct atmel_aes_dev *dd)
        int err, i, j;
 
        for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
-               err = crypto_register_alg(&aes_algs[i]);
+               err = crypto_register_skcipher(&aes_algs[i]);
                if (err)
                        goto err_aes_algs;
        }
 
        if (dd->caps.has_cfb64) {
-               err = crypto_register_alg(&aes_cfb64_alg);
+               err = crypto_register_skcipher(&aes_cfb64_alg);
                if (err)
                        goto err_aes_cfb64_alg;
        }
@@ -2502,12 +2492,12 @@ static int atmel_aes_register_algs(struct atmel_aes_dev *dd)
        }
 
        if (dd->caps.has_xts) {
-               err = crypto_register_alg(&aes_xts_alg);
+               err = crypto_register_skcipher(&aes_xts_alg);
                if (err)
                        goto err_aes_xts_alg;
        }
 
-#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
+#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
        if (dd->caps.has_authenc) {
                for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++) {
                        err = crypto_register_aead(&aes_authenc_algs[i]);
@@ -2519,22 +2509,22 @@ static int atmel_aes_register_algs(struct atmel_aes_dev *dd)
 
        return 0;
 
-#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
+#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
        /* i = ARRAY_SIZE(aes_authenc_algs); */
 err_aes_authenc_alg:
        for (j = 0; j < i; j++)
                crypto_unregister_aead(&aes_authenc_algs[j]);
-       crypto_unregister_alg(&aes_xts_alg);
+       crypto_unregister_skcipher(&aes_xts_alg);
 #endif
 err_aes_xts_alg:
        crypto_unregister_aead(&aes_gcm_alg);
 err_aes_gcm_alg:
-       crypto_unregister_alg(&aes_cfb64_alg);
+       crypto_unregister_skcipher(&aes_cfb64_alg);
 err_aes_cfb64_alg:
        i = ARRAY_SIZE(aes_algs);
 err_aes_algs:
        for (j = 0; j < i; j++)
-               crypto_unregister_alg(&aes_algs[j]);
+               crypto_unregister_skcipher(&aes_algs[j]);
 
        return err;
 }
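
Registration stays per-algorithm here because cfb64, gcm, xts and authenc are gated on hardware capabilities probed at runtime. For the unconditional aes_algs[] array alone, the bulk helpers from the same skcipher API could express the same thing; a sketch under that assumption (the wrapper names are hypothetical):

/* Hypothetical wrappers: only valid for algorithms that are always present. */
static int atmel_aes_register_base_algs(void)
{
        return crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static void atmel_aes_unregister_base_algs(void)
{
        crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}
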
@@ -2709,7 +2699,7 @@ static int atmel_aes_probe(struct platform_device *pdev)
 
        atmel_aes_get_cap(aes_dd);
 
-#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
+#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
        if (aes_dd->caps.has_authenc && !atmel_sha_authenc_is_ready()) {
                err = -EPROBE_DEFER;
                goto iclk_unprepare;