1 // SPDX-License-Identifier: GPL-2.0
5 * Support for ATMEL AES HW acceleration.
7 * Copyright (c) 2012 Eukréa Electromatique - ATMEL
8 * Author: Nicolas Royer <nicolas@eukrea.com>
10 * Some ideas are from omap-aes.c driver.
14 #include <linux/kernel.h>
15 #include <linux/module.h>
16 #include <linux/slab.h>
17 #include <linux/err.h>
18 #include <linux/clk.h>
20 #include <linux/hw_random.h>
21 #include <linux/platform_device.h>
23 #include <linux/device.h>
24 #include <linux/dmaengine.h>
25 #include <linux/init.h>
26 #include <linux/errno.h>
27 #include <linux/interrupt.h>
28 #include <linux/irq.h>
29 #include <linux/scatterlist.h>
30 #include <linux/dma-mapping.h>
31 #include <linux/of_device.h>
32 #include <linux/delay.h>
33 #include <linux/crypto.h>
34 #include <crypto/scatterwalk.h>
35 #include <crypto/algapi.h>
36 #include <crypto/aes.h>
37 #include <crypto/gcm.h>
38 #include <crypto/xts.h>
39 #include <crypto/internal/aead.h>
40 #include <crypto/internal/skcipher.h>
41 #include "atmel-aes-regs.h"
42 #include "atmel-authenc.h"
44 #define ATMEL_AES_PRIORITY 300
46 #define ATMEL_AES_BUFFER_ORDER 2
47 #define ATMEL_AES_BUFFER_SIZE (PAGE_SIZE << ATMEL_AES_BUFFER_ORDER)
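/*
 * Editorial note: with the usual 4 KiB pages, ATMEL_AES_BUFFER_ORDER == 2
 * yields a 16 KiB bounce buffer; it is allocated in atmel_aes_buff_init()
 * below and its usable length rounded down to a multiple of AES_BLOCK_SIZE.
 */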
49 #define CFB8_BLOCK_SIZE 1
50 #define CFB16_BLOCK_SIZE 2
51 #define CFB32_BLOCK_SIZE 4
52 #define CFB64_BLOCK_SIZE 8
54 #define SIZE_IN_WORDS(x) ((x) >> 2)
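/*
 * Example (editorial, not part of the driver): SIZE_IN_WORDS() converts a
 * byte count into a count of 32-bit words, so SIZE_IN_WORDS(AES_BLOCK_SIZE)
 * is 16 >> 2 == 4, i.e. one AES block spans four IDATAR/ODATAR registers.
 */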
57 /* Reserve bits [18:16] [14:12] [1:0] for mode (same as for AES_MR) */
58 #define AES_FLAGS_ENCRYPT AES_MR_CYPHER_ENC
59 #define AES_FLAGS_GTAGEN AES_MR_GTAGEN
60 #define AES_FLAGS_OPMODE_MASK (AES_MR_OPMOD_MASK | AES_MR_CFBS_MASK)
61 #define AES_FLAGS_ECB AES_MR_OPMOD_ECB
62 #define AES_FLAGS_CBC AES_MR_OPMOD_CBC
63 #define AES_FLAGS_OFB AES_MR_OPMOD_OFB
64 #define AES_FLAGS_CFB128 (AES_MR_OPMOD_CFB | AES_MR_CFBS_128b)
65 #define AES_FLAGS_CFB64 (AES_MR_OPMOD_CFB | AES_MR_CFBS_64b)
66 #define AES_FLAGS_CFB32 (AES_MR_OPMOD_CFB | AES_MR_CFBS_32b)
67 #define AES_FLAGS_CFB16 (AES_MR_OPMOD_CFB | AES_MR_CFBS_16b)
68 #define AES_FLAGS_CFB8 (AES_MR_OPMOD_CFB | AES_MR_CFBS_8b)
69 #define AES_FLAGS_CTR AES_MR_OPMOD_CTR
70 #define AES_FLAGS_GCM AES_MR_OPMOD_GCM
71 #define AES_FLAGS_XTS AES_MR_OPMOD_XTS
73 #define AES_FLAGS_MODE_MASK (AES_FLAGS_OPMODE_MASK | \
77 #define AES_FLAGS_BUSY BIT(3)
78 #define AES_FLAGS_DUMP_REG BIT(4)
79 #define AES_FLAGS_OWN_SHA BIT(5)
81 #define AES_FLAGS_PERSISTENT AES_FLAGS_BUSY
83 #define ATMEL_AES_QUEUE_LENGTH 50
85 #define ATMEL_AES_DMA_THRESHOLD 256
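/*
 * Editorial note: requests of at least ATMEL_AES_DMA_THRESHOLD bytes are
 * handed to the DMA engine, shorter ones are fed to IDATAR0 by the CPU; see
 * for instance the test in atmel_aes_start():
 *
 *	bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD ||
 *			dd->ctx->block_size != AES_BLOCK_SIZE);
 */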
88 struct atmel_aes_caps {
100 typedef int (*atmel_aes_fn_t)(struct atmel_aes_dev *);
103 struct atmel_aes_base_ctx {
104 struct atmel_aes_dev *dd;
105 atmel_aes_fn_t start;
107 u32 key[AES_KEYSIZE_256 / sizeof(u32)];
112 struct atmel_aes_ctx {
113 struct atmel_aes_base_ctx base;
116 struct atmel_aes_ctr_ctx {
117 struct atmel_aes_base_ctx base;
119 __be32 iv[AES_BLOCK_SIZE / sizeof(u32)];
121 struct scatterlist src[2];
122 struct scatterlist dst[2];
126 struct atmel_aes_gcm_ctx {
127 struct atmel_aes_base_ctx base;
129 struct scatterlist src[2];
130 struct scatterlist dst[2];
132 __be32 j0[AES_BLOCK_SIZE / sizeof(u32)];
133 u32 tag[AES_BLOCK_SIZE / sizeof(u32)];
134 __be32 ghash[AES_BLOCK_SIZE / sizeof(u32)];
137 const __be32 *ghash_in;
139 atmel_aes_fn_t ghash_resume;
142 struct atmel_aes_xts_ctx {
143 struct atmel_aes_base_ctx base;
145 u32 key2[AES_KEYSIZE_256 / sizeof(u32)];
146 struct crypto_skcipher *fallback_tfm;
149 #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
150 struct atmel_aes_authenc_ctx {
151 struct atmel_aes_base_ctx base;
152 struct atmel_sha_authenc_ctx *auth;
156 struct atmel_aes_reqctx {
158 u8 lastc[AES_BLOCK_SIZE];
159 struct skcipher_request fallback_req;
162 #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
163 struct atmel_aes_authenc_reqctx {
164 struct atmel_aes_reqctx base;
166 struct scatterlist src[2];
167 struct scatterlist dst[2];
169 u32 digest[SHA512_DIGEST_SIZE / sizeof(u32)];
171 /* auth_req MUST be placed last. */
172 struct ahash_request auth_req;
176 struct atmel_aes_dma {
177 struct dma_chan *chan;
178 struct scatterlist *sg;
180 unsigned int remainder;
184 struct atmel_aes_dev {
185 struct list_head list;
186 unsigned long phys_base;
187 void __iomem *io_base;
189 struct crypto_async_request *areq;
190 struct atmel_aes_base_ctx *ctx;
193 atmel_aes_fn_t resume;
194 atmel_aes_fn_t cpu_transfer_complete;
203 struct crypto_queue queue;
205 struct tasklet_struct done_task;
206 struct tasklet_struct queue_task;
212 struct atmel_aes_dma src;
213 struct atmel_aes_dma dst;
217 struct scatterlist aligned_sg;
218 struct scatterlist *real_dst;
220 struct atmel_aes_caps caps;
225 struct atmel_aes_drv {
226 struct list_head dev_list;
230 static struct atmel_aes_drv atmel_aes = {
231 .dev_list = LIST_HEAD_INIT(atmel_aes.dev_list),
232 .lock = __SPIN_LOCK_UNLOCKED(atmel_aes.lock),
236 static const char *atmel_aes_reg_name(u32 offset, char *tmp, size_t sz)
265 snprintf(tmp, sz, "KEYWR[%u]", (offset - AES_KEYWR(0)) >> 2);
272 snprintf(tmp, sz, "IDATAR[%u]", (offset - AES_IDATAR(0)) >> 2);
279 snprintf(tmp, sz, "ODATAR[%u]", (offset - AES_ODATAR(0)) >> 2);
286 snprintf(tmp, sz, "IVR[%u]", (offset - AES_IVR(0)) >> 2);
299 snprintf(tmp, sz, "GHASHR[%u]", (offset - AES_GHASHR(0)) >> 2);
306 snprintf(tmp, sz, "TAGR[%u]", (offset - AES_TAGR(0)) >> 2);
316 snprintf(tmp, sz, "GCMHR[%u]", (offset - AES_GCMHR(0)) >> 2);
326 snprintf(tmp, sz, "TWR[%u]", (offset - AES_TWR(0)) >> 2);
333 snprintf(tmp, sz, "ALPHAR[%u]", (offset - AES_ALPHAR(0)) >> 2);
337 snprintf(tmp, sz, "0x%02x", offset);
343 #endif /* VERBOSE_DEBUG */
345 /* Shared functions */
347 static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset)
349 u32 value = readl_relaxed(dd->io_base + offset);
352 if (dd->flags & AES_FLAGS_DUMP_REG) {
355 dev_vdbg(dd->dev, "read 0x%08x from %s\n", value,
356 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
358 #endif /* VERBOSE_DEBUG */
363 static inline void atmel_aes_write(struct atmel_aes_dev *dd,
364 u32 offset, u32 value)
367 if (dd->flags & AES_FLAGS_DUMP_REG) {
370 dev_vdbg(dd->dev, "write 0x%08x into %s\n", value,
371 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
373 #endif /* VERBOSE_DEBUG */
375 writel_relaxed(value, dd->io_base + offset);
378 static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset,
379 u32 *value, int count)
381 for (; count--; value++, offset += 4)
382 *value = atmel_aes_read(dd, offset);
385 static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
386 const u32 *value, int count)
388 for (; count--; value++, offset += 4)
389 atmel_aes_write(dd, offset, *value);
392 static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset,
395 atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
398 static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset,
401 atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
404 static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd,
405 atmel_aes_fn_t resume)
407 u32 isr = atmel_aes_read(dd, AES_ISR);
409 if (unlikely(isr & AES_INT_DATARDY))
413 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
417 static inline size_t atmel_aes_padlen(size_t len, size_t block_size)
419 len &= block_size - 1;
420 return len ? block_size - len : 0;
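/*
 * Example (editorial): atmel_aes_padlen() returns how many bytes are needed
 * to round @len up to a multiple of @block_size (a power of two), e.g.
 * atmel_aes_padlen(20, AES_BLOCK_SIZE) == 12 and
 * atmel_aes_padlen(32, AES_BLOCK_SIZE) == 0.
 */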
423 static struct atmel_aes_dev *atmel_aes_dev_alloc(struct atmel_aes_base_ctx *ctx)
425 struct atmel_aes_dev *aes_dd;
427 spin_lock_bh(&atmel_aes.lock);
428 /* One AES IP per SoC. */
429 aes_dd = list_first_entry_or_null(&atmel_aes.dev_list,
430 struct atmel_aes_dev, list);
431 spin_unlock_bh(&atmel_aes.lock);
435 static int atmel_aes_hw_init(struct atmel_aes_dev *dd)
439 err = clk_enable(dd->iclk);
443 atmel_aes_write(dd, AES_CR, AES_CR_SWRST);
444 atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET);
449 static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd)
451 return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff;
454 static int atmel_aes_hw_version_init(struct atmel_aes_dev *dd)
458 err = atmel_aes_hw_init(dd);
462 dd->hw_version = atmel_aes_get_version(dd);
464 dev_info(dd->dev, "version: 0x%x\n", dd->hw_version);
466 clk_disable(dd->iclk);
470 static inline void atmel_aes_set_mode(struct atmel_aes_dev *dd,
471 const struct atmel_aes_reqctx *rctx)
473 /* Clear all but persistent flags and set request flags. */
474 dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode;
477 static inline bool atmel_aes_is_encrypt(const struct atmel_aes_dev *dd)
479 return (dd->flags & AES_FLAGS_ENCRYPT);
482 #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
483 static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err);
486 static void atmel_aes_set_iv_as_last_ciphertext_block(struct atmel_aes_dev *dd)
488 struct skcipher_request *req = skcipher_request_cast(dd->areq);
489 struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
490 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
491 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
493 if (req->cryptlen < ivsize)
496 if (rctx->mode & AES_FLAGS_ENCRYPT) {
497 scatterwalk_map_and_copy(req->iv, req->dst,
498 req->cryptlen - ivsize, ivsize, 0);
500 if (req->src == req->dst)
501 memcpy(req->iv, rctx->lastc, ivsize);
503 scatterwalk_map_and_copy(req->iv, req->src,
504 req->cryptlen - ivsize,
509 static inline struct atmel_aes_ctr_ctx *
510 atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
512 return container_of(ctx, struct atmel_aes_ctr_ctx, base);
515 static void atmel_aes_ctr_update_req_iv(struct atmel_aes_dev *dd)
517 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
518 struct skcipher_request *req = skcipher_request_cast(dd->areq);
519 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
520 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
524 * The CTR transfer works in fragments of at most 1 MByte of data
525 * because of the 16-bit CTR counter embedded in the IP. On entry,
526 * ctx->blocks contains the number of blocks of the last fragment
527 * processed; there is no need to explicitly cast it to u16.
529 for (i = 0; i < ctx->blocks; i++)
530 crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);
532 memcpy(req->iv, ctx->iv, ivsize);
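/*
 * Illustrative sketch (editorial): crypto_inc() performs a big-endian
 * increment with carry over the whole counter block, e.g.:
 *
 *	u8 ctr[AES_BLOCK_SIZE] = { [12] = 0xff, [13] = 0xff,
 *				   [14] = 0xff, [15] = 0xff };
 *	crypto_inc(ctr, AES_BLOCK_SIZE);	// ctr[11] -> 0x01, ctr[12..15] -> 0x00
 */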
535 static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
537 struct skcipher_request *req = skcipher_request_cast(dd->areq);
538 struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
540 #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
541 if (dd->ctx->is_aead)
542 atmel_aes_authenc_complete(dd, err);
545 clk_disable(dd->iclk);
546 dd->flags &= ~AES_FLAGS_BUSY;
548 if (!err && !dd->ctx->is_aead &&
549 (rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_ECB) {
550 if ((rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_CTR)
551 atmel_aes_set_iv_as_last_ciphertext_block(dd);
553 atmel_aes_ctr_update_req_iv(dd);
557 dd->areq->complete(dd->areq, err);
559 tasklet_schedule(&dd->queue_task);
564 static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma,
565 const __be32 *iv, const u32 *key, int keylen)
569 /* The MR register must be set before the IV registers. */
570 if (keylen == AES_KEYSIZE_128)
571 valmr |= AES_MR_KEYSIZE_128;
572 else if (keylen == AES_KEYSIZE_192)
573 valmr |= AES_MR_KEYSIZE_192;
575 valmr |= AES_MR_KEYSIZE_256;
577 valmr |= dd->flags & AES_FLAGS_MODE_MASK;
580 valmr |= AES_MR_SMOD_IDATAR0;
581 if (dd->caps.has_dualbuff)
582 valmr |= AES_MR_DUALBUFF;
584 valmr |= AES_MR_SMOD_AUTO;
587 atmel_aes_write(dd, AES_MR, valmr);
589 atmel_aes_write_n(dd, AES_KEYWR(0), key, SIZE_IN_WORDS(keylen));
591 if (iv && (valmr & AES_MR_OPMOD_MASK) != AES_MR_OPMOD_ECB)
592 atmel_aes_write_block(dd, AES_IVR(0), iv);
595 static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma,
599 atmel_aes_write_ctrl_key(dd, use_dma, iv,
600 dd->ctx->key, dd->ctx->keylen);
605 static int atmel_aes_cpu_transfer(struct atmel_aes_dev *dd)
611 atmel_aes_read_block(dd, AES_ODATAR(0), dd->data);
613 dd->datalen -= AES_BLOCK_SIZE;
615 if (dd->datalen < AES_BLOCK_SIZE)
618 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
620 isr = atmel_aes_read(dd, AES_ISR);
621 if (!(isr & AES_INT_DATARDY)) {
622 dd->resume = atmel_aes_cpu_transfer;
623 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
628 if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
633 return atmel_aes_complete(dd, err);
635 return dd->cpu_transfer_complete(dd);
638 static int atmel_aes_cpu_start(struct atmel_aes_dev *dd,
639 struct scatterlist *src,
640 struct scatterlist *dst,
642 atmel_aes_fn_t resume)
644 size_t padlen = atmel_aes_padlen(len, AES_BLOCK_SIZE);
646 if (unlikely(len == 0))
649 sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);
653 dd->cpu_transfer_complete = resume;
654 dd->datalen = len + padlen;
655 dd->data = (u32 *)dd->buf;
656 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
657 return atmel_aes_wait_for_data_ready(dd, atmel_aes_cpu_transfer);
663 static void atmel_aes_dma_callback(void *data);
665 static bool atmel_aes_check_aligned(struct atmel_aes_dev *dd,
666 struct scatterlist *sg,
668 struct atmel_aes_dma *dma)
672 if (!IS_ALIGNED(len, dd->ctx->block_size))
675 for (nents = 0; sg; sg = sg_next(sg), ++nents) {
676 if (!IS_ALIGNED(sg->offset, sizeof(u32)))
679 if (len <= sg->length) {
680 if (!IS_ALIGNED(len, dd->ctx->block_size))
683 dma->nents = nents+1;
684 dma->remainder = sg->length - len;
689 if (!IS_ALIGNED(sg->length, dd->ctx->block_size))
698 static inline void atmel_aes_restore_sg(const struct atmel_aes_dma *dma)
700 struct scatterlist *sg = dma->sg;
701 int nents = dma->nents;
706 while (--nents > 0 && sg)
712 sg->length += dma->remainder;
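/*
 * Editorial note: atmel_aes_check_aligned() may have shortened the last
 * scatterlist entry so that exactly @len bytes get mapped, recording the
 * trimmed byte count in dma->remainder; atmel_aes_restore_sg() walks back to
 * that entry and gives the trimmed bytes back, so the caller's scatterlist
 * is left as it was found.
 */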
715 static int atmel_aes_map(struct atmel_aes_dev *dd,
716 struct scatterlist *src,
717 struct scatterlist *dst,
720 bool src_aligned, dst_aligned;
728 src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src);
730 dst_aligned = src_aligned;
732 dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst);
733 if (!src_aligned || !dst_aligned) {
734 padlen = atmel_aes_padlen(len, dd->ctx->block_size);
736 if (dd->buflen < len + padlen)
740 sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);
741 dd->src.sg = &dd->aligned_sg;
743 dd->src.remainder = 0;
747 dd->dst.sg = &dd->aligned_sg;
749 dd->dst.remainder = 0;
752 sg_init_table(&dd->aligned_sg, 1);
753 sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen);
756 if (dd->src.sg == dd->dst.sg) {
757 dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
759 dd->dst.sg_len = dd->src.sg_len;
763 dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
768 dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents,
770 if (!dd->dst.sg_len) {
771 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
780 static void atmel_aes_unmap(struct atmel_aes_dev *dd)
782 if (dd->src.sg == dd->dst.sg) {
783 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
786 if (dd->src.sg != &dd->aligned_sg)
787 atmel_aes_restore_sg(&dd->src);
789 dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents,
792 if (dd->dst.sg != &dd->aligned_sg)
793 atmel_aes_restore_sg(&dd->dst);
795 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
798 if (dd->src.sg != &dd->aligned_sg)
799 atmel_aes_restore_sg(&dd->src);
802 if (dd->dst.sg == &dd->aligned_sg)
803 sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
807 static int atmel_aes_dma_transfer_start(struct atmel_aes_dev *dd,
808 enum dma_slave_buswidth addr_width,
809 enum dma_transfer_direction dir,
812 struct dma_async_tx_descriptor *desc;
813 struct dma_slave_config config;
814 dma_async_tx_callback callback;
815 struct atmel_aes_dma *dma;
818 memset(&config, 0, sizeof(config));
819 config.src_addr_width = addr_width;
820 config.dst_addr_width = addr_width;
821 config.src_maxburst = maxburst;
822 config.dst_maxburst = maxburst;
828 config.dst_addr = dd->phys_base + AES_IDATAR(0);
833 callback = atmel_aes_dma_callback;
834 config.src_addr = dd->phys_base + AES_ODATAR(0);
841 err = dmaengine_slave_config(dma->chan, &config);
845 desc = dmaengine_prep_slave_sg(dma->chan, dma->sg, dma->sg_len, dir,
846 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
850 desc->callback = callback;
851 desc->callback_param = dd;
852 dmaengine_submit(desc);
853 dma_async_issue_pending(dma->chan);
858 static int atmel_aes_dma_start(struct atmel_aes_dev *dd,
859 struct scatterlist *src,
860 struct scatterlist *dst,
862 atmel_aes_fn_t resume)
864 enum dma_slave_buswidth addr_width;
868 switch (dd->ctx->block_size) {
869 case CFB8_BLOCK_SIZE:
870 addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
874 case CFB16_BLOCK_SIZE:
875 addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
879 case CFB32_BLOCK_SIZE:
880 case CFB64_BLOCK_SIZE:
881 addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
886 addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
887 maxburst = dd->caps.max_burst_size;
895 err = atmel_aes_map(dd, src, dst, len);
901 /* Set output DMA transfer first */
902 err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_DEV_TO_MEM,
907 /* Then set input DMA transfer */
908 err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_MEM_TO_DEV,
911 goto output_transfer_stop;
915 output_transfer_stop:
916 dmaengine_terminate_sync(dd->dst.chan);
920 return atmel_aes_complete(dd, err);
923 static void atmel_aes_dma_callback(void *data)
925 struct atmel_aes_dev *dd = data;
929 (void)dd->resume(dd);
932 static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
933 struct crypto_async_request *new_areq)
935 struct crypto_async_request *areq, *backlog;
936 struct atmel_aes_base_ctx *ctx;
941 spin_lock_irqsave(&dd->lock, flags);
943 ret = crypto_enqueue_request(&dd->queue, new_areq);
944 if (dd->flags & AES_FLAGS_BUSY) {
945 spin_unlock_irqrestore(&dd->lock, flags);
948 backlog = crypto_get_backlog(&dd->queue);
949 areq = crypto_dequeue_request(&dd->queue);
951 dd->flags |= AES_FLAGS_BUSY;
952 spin_unlock_irqrestore(&dd->lock, flags);
958 backlog->complete(backlog, -EINPROGRESS);
960 ctx = crypto_tfm_ctx(areq->tfm);
963 start_async = (areq != new_areq);
964 dd->is_async = start_async;
966 /* WARNING: ctx->start() MAY change dd->is_async. */
967 err = ctx->start(dd);
968 return (start_async) ? ret : err;
972 /* AES async block ciphers */
974 static int atmel_aes_transfer_complete(struct atmel_aes_dev *dd)
976 return atmel_aes_complete(dd, 0);
979 static int atmel_aes_start(struct atmel_aes_dev *dd)
981 struct skcipher_request *req = skcipher_request_cast(dd->areq);
982 struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
983 bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD ||
984 dd->ctx->block_size != AES_BLOCK_SIZE);
987 atmel_aes_set_mode(dd, rctx);
989 err = atmel_aes_hw_init(dd);
991 return atmel_aes_complete(dd, err);
993 atmel_aes_write_ctrl(dd, use_dma, (void *)req->iv);
995 return atmel_aes_dma_start(dd, req->src, req->dst,
997 atmel_aes_transfer_complete);
999 return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen,
1000 atmel_aes_transfer_complete);
1003 static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
1005 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
1006 struct skcipher_request *req = skcipher_request_cast(dd->areq);
1007 struct scatterlist *src, *dst;
1011 bool use_dma, fragmented = false;
1013 /* Check for transfer completion. */
1014 ctx->offset += dd->total;
1015 if (ctx->offset >= req->cryptlen)
1016 return atmel_aes_transfer_complete(dd);
1018 /* Compute data length. */
1019 datalen = req->cryptlen - ctx->offset;
1020 ctx->blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
1021 ctr = be32_to_cpu(ctx->iv[3]);
1023 /* Check 16-bit counter overflow. */
1024 start = ctr & 0xffff;
1025 end = start + ctx->blocks - 1;
1027 if (ctx->blocks >> 16 || end < start) {
1029 datalen = AES_BLOCK_SIZE * (0x10000 - start);
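/*
 * Worked example (editorial): with start == 0xfff0 and ctx->blocks == 0x20,
 * the 16-bit end wraps to 0x000f (< start), so this fragment is clipped to
 * 0x10000 - 0xfff0 == 16 blocks (256 bytes); the remaining blocks are
 * processed on the next call, after the IV has been bumped in software.
 */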
1033 use_dma = (datalen >= ATMEL_AES_DMA_THRESHOLD);
1035 /* Jump to offset. */
1036 src = scatterwalk_ffwd(ctx->src, req->src, ctx->offset);
1037 dst = ((req->src == req->dst) ? src :
1038 scatterwalk_ffwd(ctx->dst, req->dst, ctx->offset));
1040 /* Configure hardware. */
1041 atmel_aes_write_ctrl(dd, use_dma, ctx->iv);
1042 if (unlikely(fragmented)) {
1044 * Increment the counter manually to cope with the hardware
1045 * counter overflow.
1047 ctx->iv[3] = cpu_to_be32(ctr);
1048 crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);
1052 return atmel_aes_dma_start(dd, src, dst, datalen,
1053 atmel_aes_ctr_transfer);
1055 return atmel_aes_cpu_start(dd, src, dst, datalen,
1056 atmel_aes_ctr_transfer);
1059 static int atmel_aes_ctr_start(struct atmel_aes_dev *dd)
1061 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
1062 struct skcipher_request *req = skcipher_request_cast(dd->areq);
1063 struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
1066 atmel_aes_set_mode(dd, rctx);
1068 err = atmel_aes_hw_init(dd);
1070 return atmel_aes_complete(dd, err);
1072 memcpy(ctx->iv, req->iv, AES_BLOCK_SIZE);
1075 return atmel_aes_ctr_transfer(dd);
1078 static int atmel_aes_xts_fallback(struct skcipher_request *req, bool enc)
1080 struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
1081 struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(
1082 crypto_skcipher_reqtfm(req));
1084 skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
1085 skcipher_request_set_callback(&rctx->fallback_req, req->base.flags,
1086 req->base.complete, req->base.data);
1087 skcipher_request_set_crypt(&rctx->fallback_req, req->src, req->dst,
1088 req->cryptlen, req->iv);
1090 return enc ? crypto_skcipher_encrypt(&rctx->fallback_req) :
1091 crypto_skcipher_decrypt(&rctx->fallback_req);
1094 static int atmel_aes_crypt(struct skcipher_request *req, unsigned long mode)
1096 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1097 struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(skcipher);
1098 struct atmel_aes_reqctx *rctx;
1099 u32 opmode = mode & AES_FLAGS_OPMODE_MASK;
1101 if (opmode == AES_FLAGS_XTS) {
1102 if (req->cryptlen < XTS_BLOCK_SIZE)
1105 if (!IS_ALIGNED(req->cryptlen, XTS_BLOCK_SIZE))
1106 return atmel_aes_xts_fallback(req,
1107 mode & AES_FLAGS_ENCRYPT);
1111 * ECB, CBC, CFB, OFB and CTR modes require the plaintext and ciphertext
1112 * to have a positive integer length.
1114 if (!req->cryptlen && opmode != AES_FLAGS_XTS)
1117 if ((opmode == AES_FLAGS_ECB || opmode == AES_FLAGS_CBC) &&
1118 !IS_ALIGNED(req->cryptlen, crypto_skcipher_blocksize(skcipher)))
1121 switch (mode & AES_FLAGS_OPMODE_MASK) {
1122 case AES_FLAGS_CFB8:
1123 ctx->block_size = CFB8_BLOCK_SIZE;
1126 case AES_FLAGS_CFB16:
1127 ctx->block_size = CFB16_BLOCK_SIZE;
1130 case AES_FLAGS_CFB32:
1131 ctx->block_size = CFB32_BLOCK_SIZE;
1134 case AES_FLAGS_CFB64:
1135 ctx->block_size = CFB64_BLOCK_SIZE;
1139 ctx->block_size = AES_BLOCK_SIZE;
1142 ctx->is_aead = false;
1144 rctx = skcipher_request_ctx(req);
1147 if (opmode != AES_FLAGS_ECB &&
1148 !(mode & AES_FLAGS_ENCRYPT) && req->src == req->dst) {
1149 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
1151 if (req->cryptlen >= ivsize)
1152 scatterwalk_map_and_copy(rctx->lastc, req->src,
1153 req->cryptlen - ivsize,
1157 return atmel_aes_handle_queue(ctx->dd, &req->base);
1160 static int atmel_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
1161 unsigned int keylen)
1163 struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(tfm);
1165 if (keylen != AES_KEYSIZE_128 &&
1166 keylen != AES_KEYSIZE_192 &&
1167 keylen != AES_KEYSIZE_256)
1170 memcpy(ctx->key, key, keylen);
1171 ctx->keylen = keylen;
1176 static int atmel_aes_ecb_encrypt(struct skcipher_request *req)
1178 return atmel_aes_crypt(req, AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
1181 static int atmel_aes_ecb_decrypt(struct skcipher_request *req)
1183 return atmel_aes_crypt(req, AES_FLAGS_ECB);
1186 static int atmel_aes_cbc_encrypt(struct skcipher_request *req)
1188 return atmel_aes_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
1191 static int atmel_aes_cbc_decrypt(struct skcipher_request *req)
1193 return atmel_aes_crypt(req, AES_FLAGS_CBC);
1196 static int atmel_aes_ofb_encrypt(struct skcipher_request *req)
1198 return atmel_aes_crypt(req, AES_FLAGS_OFB | AES_FLAGS_ENCRYPT);
1201 static int atmel_aes_ofb_decrypt(struct skcipher_request *req)
1203 return atmel_aes_crypt(req, AES_FLAGS_OFB);
1206 static int atmel_aes_cfb_encrypt(struct skcipher_request *req)
1208 return atmel_aes_crypt(req, AES_FLAGS_CFB128 | AES_FLAGS_ENCRYPT);
1211 static int atmel_aes_cfb_decrypt(struct skcipher_request *req)
1213 return atmel_aes_crypt(req, AES_FLAGS_CFB128);
1216 static int atmel_aes_cfb64_encrypt(struct skcipher_request *req)
1218 return atmel_aes_crypt(req, AES_FLAGS_CFB64 | AES_FLAGS_ENCRYPT);
1221 static int atmel_aes_cfb64_decrypt(struct skcipher_request *req)
1223 return atmel_aes_crypt(req, AES_FLAGS_CFB64);
1226 static int atmel_aes_cfb32_encrypt(struct skcipher_request *req)
1228 return atmel_aes_crypt(req, AES_FLAGS_CFB32 | AES_FLAGS_ENCRYPT);
1231 static int atmel_aes_cfb32_decrypt(struct skcipher_request *req)
1233 return atmel_aes_crypt(req, AES_FLAGS_CFB32);
1236 static int atmel_aes_cfb16_encrypt(struct skcipher_request *req)
1238 return atmel_aes_crypt(req, AES_FLAGS_CFB16 | AES_FLAGS_ENCRYPT);
1241 static int atmel_aes_cfb16_decrypt(struct skcipher_request *req)
1243 return atmel_aes_crypt(req, AES_FLAGS_CFB16);
1246 static int atmel_aes_cfb8_encrypt(struct skcipher_request *req)
1248 return atmel_aes_crypt(req, AES_FLAGS_CFB8 | AES_FLAGS_ENCRYPT);
1251 static int atmel_aes_cfb8_decrypt(struct skcipher_request *req)
1253 return atmel_aes_crypt(req, AES_FLAGS_CFB8);
1256 static int atmel_aes_ctr_encrypt(struct skcipher_request *req)
1258 return atmel_aes_crypt(req, AES_FLAGS_CTR | AES_FLAGS_ENCRYPT);
1261 static int atmel_aes_ctr_decrypt(struct skcipher_request *req)
1263 return atmel_aes_crypt(req, AES_FLAGS_CTR);
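/*
 * Usage example (editorial, not part of the driver): these handlers are
 * reached through the generic Crypto API, e.g. from some caller's code:
 *
 *	struct crypto_skcipher *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	...
 *	crypto_free_skcipher(tfm);
 *
 * The crypto core selects this driver's "atmel-cbc-aes" implementation when
 * it has the highest cra_priority (ATMEL_AES_PRIORITY above).
 */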
1266 static int atmel_aes_init_tfm(struct crypto_skcipher *tfm)
1268 struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
1269 struct atmel_aes_dev *dd;
1271 dd = atmel_aes_dev_alloc(&ctx->base);
1275 crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
1277 ctx->base.dd->ctx = &ctx->base;
1278 ctx->base.start = atmel_aes_start;
1283 static int atmel_aes_ctr_init_tfm(struct crypto_skcipher *tfm)
1285 struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
1286 struct atmel_aes_dev *dd;
1288 dd = atmel_aes_dev_alloc(&ctx->base);
1292 crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
1294 ctx->base.dd->ctx = &ctx->base;
1295 ctx->base.start = atmel_aes_ctr_start;
1300 static struct skcipher_alg aes_algs[] = {
1302 .base.cra_name = "ecb(aes)",
1303 .base.cra_driver_name = "atmel-ecb-aes",
1304 .base.cra_blocksize = AES_BLOCK_SIZE,
1305 .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
1307 .init = atmel_aes_init_tfm,
1308 .min_keysize = AES_MIN_KEY_SIZE,
1309 .max_keysize = AES_MAX_KEY_SIZE,
1310 .setkey = atmel_aes_setkey,
1311 .encrypt = atmel_aes_ecb_encrypt,
1312 .decrypt = atmel_aes_ecb_decrypt,
1315 .base.cra_name = "cbc(aes)",
1316 .base.cra_driver_name = "atmel-cbc-aes",
1317 .base.cra_blocksize = AES_BLOCK_SIZE,
1318 .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
1320 .init = atmel_aes_init_tfm,
1321 .min_keysize = AES_MIN_KEY_SIZE,
1322 .max_keysize = AES_MAX_KEY_SIZE,
1323 .setkey = atmel_aes_setkey,
1324 .encrypt = atmel_aes_cbc_encrypt,
1325 .decrypt = atmel_aes_cbc_decrypt,
1326 .ivsize = AES_BLOCK_SIZE,
1329 .base.cra_name = "ofb(aes)",
1330 .base.cra_driver_name = "atmel-ofb-aes",
1331 .base.cra_blocksize = 1,
1332 .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
1334 .init = atmel_aes_init_tfm,
1335 .min_keysize = AES_MIN_KEY_SIZE,
1336 .max_keysize = AES_MAX_KEY_SIZE,
1337 .setkey = atmel_aes_setkey,
1338 .encrypt = atmel_aes_ofb_encrypt,
1339 .decrypt = atmel_aes_ofb_decrypt,
1340 .ivsize = AES_BLOCK_SIZE,
1343 .base.cra_name = "cfb(aes)",
1344 .base.cra_driver_name = "atmel-cfb-aes",
1345 .base.cra_blocksize = AES_BLOCK_SIZE,
1346 .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
1348 .init = atmel_aes_init_tfm,
1349 .min_keysize = AES_MIN_KEY_SIZE,
1350 .max_keysize = AES_MAX_KEY_SIZE,
1351 .setkey = atmel_aes_setkey,
1352 .encrypt = atmel_aes_cfb_encrypt,
1353 .decrypt = atmel_aes_cfb_decrypt,
1354 .ivsize = AES_BLOCK_SIZE,
1357 .base.cra_name = "cfb32(aes)",
1358 .base.cra_driver_name = "atmel-cfb32-aes",
1359 .base.cra_blocksize = CFB32_BLOCK_SIZE,
1360 .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
1362 .init = atmel_aes_init_tfm,
1363 .min_keysize = AES_MIN_KEY_SIZE,
1364 .max_keysize = AES_MAX_KEY_SIZE,
1365 .setkey = atmel_aes_setkey,
1366 .encrypt = atmel_aes_cfb32_encrypt,
1367 .decrypt = atmel_aes_cfb32_decrypt,
1368 .ivsize = AES_BLOCK_SIZE,
1371 .base.cra_name = "cfb16(aes)",
1372 .base.cra_driver_name = "atmel-cfb16-aes",
1373 .base.cra_blocksize = CFB16_BLOCK_SIZE,
1374 .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
1376 .init = atmel_aes_init_tfm,
1377 .min_keysize = AES_MIN_KEY_SIZE,
1378 .max_keysize = AES_MAX_KEY_SIZE,
1379 .setkey = atmel_aes_setkey,
1380 .encrypt = atmel_aes_cfb16_encrypt,
1381 .decrypt = atmel_aes_cfb16_decrypt,
1382 .ivsize = AES_BLOCK_SIZE,
1385 .base.cra_name = "cfb8(aes)",
1386 .base.cra_driver_name = "atmel-cfb8-aes",
1387 .base.cra_blocksize = CFB8_BLOCK_SIZE,
1388 .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
1390 .init = atmel_aes_init_tfm,
1391 .min_keysize = AES_MIN_KEY_SIZE,
1392 .max_keysize = AES_MAX_KEY_SIZE,
1393 .setkey = atmel_aes_setkey,
1394 .encrypt = atmel_aes_cfb8_encrypt,
1395 .decrypt = atmel_aes_cfb8_decrypt,
1396 .ivsize = AES_BLOCK_SIZE,
1399 .base.cra_name = "ctr(aes)",
1400 .base.cra_driver_name = "atmel-ctr-aes",
1401 .base.cra_blocksize = 1,
1402 .base.cra_ctxsize = sizeof(struct atmel_aes_ctr_ctx),
1404 .init = atmel_aes_ctr_init_tfm,
1405 .min_keysize = AES_MIN_KEY_SIZE,
1406 .max_keysize = AES_MAX_KEY_SIZE,
1407 .setkey = atmel_aes_setkey,
1408 .encrypt = atmel_aes_ctr_encrypt,
1409 .decrypt = atmel_aes_ctr_decrypt,
1410 .ivsize = AES_BLOCK_SIZE,
1414 static struct skcipher_alg aes_cfb64_alg = {
1415 .base.cra_name = "cfb64(aes)",
1416 .base.cra_driver_name = "atmel-cfb64-aes",
1417 .base.cra_blocksize = CFB64_BLOCK_SIZE,
1418 .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
1420 .init = atmel_aes_init_tfm,
1421 .min_keysize = AES_MIN_KEY_SIZE,
1422 .max_keysize = AES_MAX_KEY_SIZE,
1423 .setkey = atmel_aes_setkey,
1424 .encrypt = atmel_aes_cfb64_encrypt,
1425 .decrypt = atmel_aes_cfb64_decrypt,
1426 .ivsize = AES_BLOCK_SIZE,
1430 /* gcm aead functions */
1432 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
1433 const u32 *data, size_t datalen,
1434 const __be32 *ghash_in, __be32 *ghash_out,
1435 atmel_aes_fn_t resume);
1436 static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
1437 static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);
1439 static int atmel_aes_gcm_start(struct atmel_aes_dev *dd);
1440 static int atmel_aes_gcm_process(struct atmel_aes_dev *dd);
1441 static int atmel_aes_gcm_length(struct atmel_aes_dev *dd);
1442 static int atmel_aes_gcm_data(struct atmel_aes_dev *dd);
1443 static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd);
1444 static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd);
1445 static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd);
1447 static inline struct atmel_aes_gcm_ctx *
1448 atmel_aes_gcm_ctx_cast(struct atmel_aes_base_ctx *ctx)
1450 return container_of(ctx, struct atmel_aes_gcm_ctx, base);
1453 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
1454 const u32 *data, size_t datalen,
1455 const __be32 *ghash_in, __be32 *ghash_out,
1456 atmel_aes_fn_t resume)
1458 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
1460 dd->data = (u32 *)data;
1461 dd->datalen = datalen;
1462 ctx->ghash_in = ghash_in;
1463 ctx->ghash_out = ghash_out;
1464 ctx->ghash_resume = resume;
1466 atmel_aes_write_ctrl(dd, false, NULL);
1467 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_ghash_init);
1470 static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd)
1472 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
1474 /* Set the data length. */
1475 atmel_aes_write(dd, AES_AADLENR, dd->total);
1476 atmel_aes_write(dd, AES_CLENR, 0);
1478 /* If needed, overwrite the GCM Intermediate Hash Word Registers */
1480 atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in);
1482 return atmel_aes_gcm_ghash_finalize(dd);
1485 static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd)
1487 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
1490 /* Write data into the Input Data Registers. */
1491 while (dd->datalen > 0) {
1492 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
1494 dd->datalen -= AES_BLOCK_SIZE;
1496 isr = atmel_aes_read(dd, AES_ISR);
1497 if (!(isr & AES_INT_DATARDY)) {
1498 dd->resume = atmel_aes_gcm_ghash_finalize;
1499 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
1500 return -EINPROGRESS;
1504 /* Read the computed hash from GHASHRx. */
1505 atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out);
1507 return ctx->ghash_resume(dd);
1511 static int atmel_aes_gcm_start(struct atmel_aes_dev *dd)
1513 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
1514 struct aead_request *req = aead_request_cast(dd->areq);
1515 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1516 struct atmel_aes_reqctx *rctx = aead_request_ctx(req);
1517 size_t ivsize = crypto_aead_ivsize(tfm);
1518 size_t datalen, padlen;
1519 const void *iv = req->iv;
1523 atmel_aes_set_mode(dd, rctx);
1525 err = atmel_aes_hw_init(dd);
1527 return atmel_aes_complete(dd, err);
1529 if (likely(ivsize == GCM_AES_IV_SIZE)) {
1530 memcpy(ctx->j0, iv, ivsize);
1531 ctx->j0[3] = cpu_to_be32(1);
1532 return atmel_aes_gcm_process(dd);
1535 padlen = atmel_aes_padlen(ivsize, AES_BLOCK_SIZE);
1536 datalen = ivsize + padlen + AES_BLOCK_SIZE;
1537 if (datalen > dd->buflen)
1538 return atmel_aes_complete(dd, -EINVAL);
1540 memcpy(data, iv, ivsize);
1541 memset(data + ivsize, 0, padlen + sizeof(u64));
1542 ((__be64 *)(data + datalen))[-1] = cpu_to_be64(ivsize * 8);
1544 return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen,
1545 NULL, ctx->j0, atmel_aes_gcm_process);
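/*
 * Editorial note: this follows the J0 derivation of NIST SP 800-38D. For a
 * 96-bit IV (fast path above), J0 = IV || 0^31 || 1; for any other IV
 * length, J0 = GHASH_H(IV || 0^(s+64) || [len(IV)]_64), which is exactly the
 * padded buffer built here and fed to atmel_aes_gcm_ghash().
 */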
1548 static int atmel_aes_gcm_process(struct atmel_aes_dev *dd)
1550 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
1551 struct aead_request *req = aead_request_cast(dd->areq);
1552 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1553 bool enc = atmel_aes_is_encrypt(dd);
1556 /* Compute text length. */
1557 authsize = crypto_aead_authsize(tfm);
1558 ctx->textlen = req->cryptlen - (enc ? 0 : authsize);
1561 * According to the tcrypt test suite, the GCM Automatic Tag Generation
1562 * fails when both the message and its associated data are empty.
1564 if (likely(req->assoclen != 0 || ctx->textlen != 0))
1565 dd->flags |= AES_FLAGS_GTAGEN;
1567 atmel_aes_write_ctrl(dd, false, NULL);
1568 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_length);
1571 static int atmel_aes_gcm_length(struct atmel_aes_dev *dd)
1573 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
1574 struct aead_request *req = aead_request_cast(dd->areq);
1575 __be32 j0_lsw, *j0 = ctx->j0;
1578 /* Write incr32(J0) into IV. */
1580 be32_add_cpu(&j0[3], 1);
1581 atmel_aes_write_block(dd, AES_IVR(0), j0);
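/*
 * Editorial note: per GCM, encryption starts from counter block incr32(J0),
 * i.e. J0 with its last 32-bit word incremented by one, which is what
 * be32_add_cpu(&j0[3], 1) computes before loading the IV registers.
 */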
1584 /* Set aad and text lengths. */
1585 atmel_aes_write(dd, AES_AADLENR, req->assoclen);
1586 atmel_aes_write(dd, AES_CLENR, ctx->textlen);
1588 /* Check whether AAD is present. */
1589 if (unlikely(req->assoclen == 0)) {
1591 return atmel_aes_gcm_data(dd);
1594 /* Copy assoc data and add padding. */
1595 padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE);
1596 if (unlikely(req->assoclen + padlen > dd->buflen))
1597 return atmel_aes_complete(dd, -EINVAL);
1598 sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen);
1600 /* Write assoc data into the Input Data register. */
1601 dd->data = (u32 *)dd->buf;
1602 dd->datalen = req->assoclen + padlen;
1603 return atmel_aes_gcm_data(dd);
1606 static int atmel_aes_gcm_data(struct atmel_aes_dev *dd)
1608 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
1609 struct aead_request *req = aead_request_cast(dd->areq);
1610 bool use_dma = (ctx->textlen >= ATMEL_AES_DMA_THRESHOLD);
1611 struct scatterlist *src, *dst;
1614 /* Write AAD first. */
1615 while (dd->datalen > 0) {
1616 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
1618 dd->datalen -= AES_BLOCK_SIZE;
1620 isr = atmel_aes_read(dd, AES_ISR);
1621 if (!(isr & AES_INT_DATARDY)) {
1622 dd->resume = atmel_aes_gcm_data;
1623 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
1624 return -EINPROGRESS;
1629 if (unlikely(ctx->textlen == 0))
1630 return atmel_aes_gcm_tag_init(dd);
1632 /* Prepare src and dst scatter lists to transfer cipher/plain texts */
1633 src = scatterwalk_ffwd(ctx->src, req->src, req->assoclen);
1634 dst = ((req->src == req->dst) ? src :
1635 scatterwalk_ffwd(ctx->dst, req->dst, req->assoclen));
1638 /* Update the Mode Register for DMA transfers. */
1639 mr = atmel_aes_read(dd, AES_MR);
1640 mr &= ~(AES_MR_SMOD_MASK | AES_MR_DUALBUFF);
1641 mr |= AES_MR_SMOD_IDATAR0;
1642 if (dd->caps.has_dualbuff)
1643 mr |= AES_MR_DUALBUFF;
1644 atmel_aes_write(dd, AES_MR, mr);
1646 return atmel_aes_dma_start(dd, src, dst, ctx->textlen,
1647 atmel_aes_gcm_tag_init);
1650 return atmel_aes_cpu_start(dd, src, dst, ctx->textlen,
1651 atmel_aes_gcm_tag_init);
1654 static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd)
1656 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
1657 struct aead_request *req = aead_request_cast(dd->areq);
1658 __be64 *data = dd->buf;
1660 if (likely(dd->flags & AES_FLAGS_GTAGEN)) {
1661 if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) {
1662 dd->resume = atmel_aes_gcm_tag_init;
1663 atmel_aes_write(dd, AES_IER, AES_INT_TAGRDY);
1664 return -EINPROGRESS;
1667 return atmel_aes_gcm_finalize(dd);
1670 /* Read the GCM Intermediate Hash Word Registers. */
1671 atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash);
1673 data[0] = cpu_to_be64(req->assoclen * 8);
1674 data[1] = cpu_to_be64(ctx->textlen * 8);
1676 return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE,
1677 ctx->ghash, ctx->ghash, atmel_aes_gcm_tag);
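/*
 * Editorial note: when the hardware tag generation is not usable, the final
 * GHASH block is len(A) || len(C), both as big-endian 64-bit bit counts, as
 * required by the GCM specification; atmel_aes_gcm_tag() then encrypts the
 * resulting hash with CTR(J0) to produce the tag.
 */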
1680 static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd)
1682 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
1683 unsigned long flags;
1686 * Change mode to CTR to complete the tag generation.
1687 * Use J0 as Initialization Vector.
1690 dd->flags &= ~(AES_FLAGS_OPMODE_MASK | AES_FLAGS_GTAGEN);
1691 dd->flags |= AES_FLAGS_CTR;
1692 atmel_aes_write_ctrl(dd, false, ctx->j0);
1695 atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash);
1696 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_finalize);
1699 static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd)
1701 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
1702 struct aead_request *req = aead_request_cast(dd->areq);
1703 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1704 bool enc = atmel_aes_is_encrypt(dd);
1705 u32 offset, authsize, itag[4], *otag = ctx->tag;
1708 /* Read the computed tag. */
1709 if (likely(dd->flags & AES_FLAGS_GTAGEN))
1710 atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag);
1712 atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag);
1714 offset = req->assoclen + ctx->textlen;
1715 authsize = crypto_aead_authsize(tfm);
1717 scatterwalk_map_and_copy(otag, req->dst, offset, authsize, 1);
1720 scatterwalk_map_and_copy(itag, req->src, offset, authsize, 0);
1721 err = crypto_memneq(itag, otag, authsize) ? -EBADMSG : 0;
1724 return atmel_aes_complete(dd, err);
1727 static int atmel_aes_gcm_crypt(struct aead_request *req,
1730 struct atmel_aes_base_ctx *ctx;
1731 struct atmel_aes_reqctx *rctx;
1733 ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
1734 ctx->block_size = AES_BLOCK_SIZE;
1735 ctx->is_aead = true;
1737 rctx = aead_request_ctx(req);
1738 rctx->mode = AES_FLAGS_GCM | mode;
1740 return atmel_aes_handle_queue(ctx->dd, &req->base);
1743 static int atmel_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
1744 unsigned int keylen)
1746 struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);
1748 if (keylen != AES_KEYSIZE_256 &&
1749 keylen != AES_KEYSIZE_192 &&
1750 keylen != AES_KEYSIZE_128)
1753 memcpy(ctx->key, key, keylen);
1754 ctx->keylen = keylen;
1759 static int atmel_aes_gcm_setauthsize(struct crypto_aead *tfm,
1760 unsigned int authsize)
1762 return crypto_gcm_check_authsize(authsize);
1765 static int atmel_aes_gcm_encrypt(struct aead_request *req)
1767 return atmel_aes_gcm_crypt(req, AES_FLAGS_ENCRYPT);
1770 static int atmel_aes_gcm_decrypt(struct aead_request *req)
1772 return atmel_aes_gcm_crypt(req, 0);
1775 static int atmel_aes_gcm_init(struct crypto_aead *tfm)
1777 struct atmel_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);
1778 struct atmel_aes_dev *dd;
1780 dd = atmel_aes_dev_alloc(&ctx->base);
1784 crypto_aead_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
1786 ctx->base.dd->ctx = &ctx->base;
1787 ctx->base.start = atmel_aes_gcm_start;
1792 static struct aead_alg aes_gcm_alg = {
1793 .setkey = atmel_aes_gcm_setkey,
1794 .setauthsize = atmel_aes_gcm_setauthsize,
1795 .encrypt = atmel_aes_gcm_encrypt,
1796 .decrypt = atmel_aes_gcm_decrypt,
1797 .init = atmel_aes_gcm_init,
1798 .ivsize = GCM_AES_IV_SIZE,
1799 .maxauthsize = AES_BLOCK_SIZE,
1802 .cra_name = "gcm(aes)",
1803 .cra_driver_name = "atmel-gcm-aes",
1805 .cra_ctxsize = sizeof(struct atmel_aes_gcm_ctx),
1812 static inline struct atmel_aes_xts_ctx *
1813 atmel_aes_xts_ctx_cast(struct atmel_aes_base_ctx *ctx)
1815 return container_of(ctx, struct atmel_aes_xts_ctx, base);
1818 static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd);
1820 static int atmel_aes_xts_start(struct atmel_aes_dev *dd)
1822 struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx);
1823 struct skcipher_request *req = skcipher_request_cast(dd->areq);
1824 struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
1825 unsigned long flags;
1828 atmel_aes_set_mode(dd, rctx);
1830 err = atmel_aes_hw_init(dd);
1832 return atmel_aes_complete(dd, err);
1834 /* Compute the tweak value from req->iv with ecb(aes). */
1836 dd->flags &= ~AES_FLAGS_MODE_MASK;
1837 dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
1838 atmel_aes_write_ctrl_key(dd, false, NULL,
1839 ctx->key2, ctx->base.keylen);
1842 atmel_aes_write_block(dd, AES_IDATAR(0), req->iv);
1843 return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data);
1846 static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd)
1848 struct skcipher_request *req = skcipher_request_cast(dd->areq);
1849 bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD);
1850 u32 tweak[AES_BLOCK_SIZE / sizeof(u32)];
1851 static const __le32 one[AES_BLOCK_SIZE / sizeof(u32)] = {cpu_to_le32(1), };
1852 u8 *tweak_bytes = (u8 *)tweak;
1855 /* Read the computed ciphered tweak value. */
1856 atmel_aes_read_block(dd, AES_ODATAR(0), tweak);
1859 * the order of the ciphered tweak bytes needs to be reversed before
1860 * writing them into the TWRx registers.
1862 for (i = 0; i < AES_BLOCK_SIZE/2; ++i)
1863 swap(tweak_bytes[i], tweak_bytes[AES_BLOCK_SIZE - 1 - i]);
1865 /* Process the data. */
1866 atmel_aes_write_ctrl(dd, use_dma, NULL);
1867 atmel_aes_write_block(dd, AES_TWR(0), tweak);
1868 atmel_aes_write_block(dd, AES_ALPHAR(0), one);
1870 return atmel_aes_dma_start(dd, req->src, req->dst,
1872 atmel_aes_transfer_complete);
1874 return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen,
1875 atmel_aes_transfer_complete);
1878 static int atmel_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
1879 unsigned int keylen)
1881 struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
1884 err = xts_check_key(crypto_skcipher_tfm(tfm), key, keylen);
1888 crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
1889 crypto_skcipher_set_flags(ctx->fallback_tfm, tfm->base.crt_flags &
1890 CRYPTO_TFM_REQ_MASK);
1891 err = crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
1895 memcpy(ctx->base.key, key, keylen/2);
1896 memcpy(ctx->key2, key + keylen/2, keylen/2);
1897 ctx->base.keylen = keylen/2;
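/*
 * Example (editorial): an XTS key is two AES keys of equal size
 * concatenated; with keylen == 64, bytes 0-31 become the data key
 * (ctx->base.key) and bytes 32-63 the tweak key (ctx->key2), so both the
 * tweak encryption and the data pass run AES-256.
 */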
1902 static int atmel_aes_xts_encrypt(struct skcipher_request *req)
1904 return atmel_aes_crypt(req, AES_FLAGS_XTS | AES_FLAGS_ENCRYPT);
1907 static int atmel_aes_xts_decrypt(struct skcipher_request *req)
1909 return atmel_aes_crypt(req, AES_FLAGS_XTS);
1912 static int atmel_aes_xts_init_tfm(struct crypto_skcipher *tfm)
1914 struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
1915 struct atmel_aes_dev *dd;
1916 const char *tfm_name = crypto_tfm_alg_name(&tfm->base);
1918 dd = atmel_aes_dev_alloc(&ctx->base);
1922 ctx->fallback_tfm = crypto_alloc_skcipher(tfm_name, 0,
1923 CRYPTO_ALG_NEED_FALLBACK);
1924 if (IS_ERR(ctx->fallback_tfm))
1925 return PTR_ERR(ctx->fallback_tfm);
1927 crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx) +
1928 crypto_skcipher_reqsize(ctx->fallback_tfm));
1930 ctx->base.dd->ctx = &ctx->base;
1931 ctx->base.start = atmel_aes_xts_start;
1936 static void atmel_aes_xts_exit_tfm(struct crypto_skcipher *tfm)
1938 struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
1940 crypto_free_skcipher(ctx->fallback_tfm);
1943 static struct skcipher_alg aes_xts_alg = {
1944 .base.cra_name = "xts(aes)",
1945 .base.cra_driver_name = "atmel-xts-aes",
1946 .base.cra_blocksize = AES_BLOCK_SIZE,
1947 .base.cra_ctxsize = sizeof(struct atmel_aes_xts_ctx),
1948 .base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
1950 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1951 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1952 .ivsize = AES_BLOCK_SIZE,
1953 .setkey = atmel_aes_xts_setkey,
1954 .encrypt = atmel_aes_xts_encrypt,
1955 .decrypt = atmel_aes_xts_decrypt,
1956 .init = atmel_aes_xts_init_tfm,
1957 .exit = atmel_aes_xts_exit_tfm,
1960 #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
1961 /* authenc aead functions */
1963 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd);
1964 static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
1966 static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
1968 static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd);
1969 static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
1972 static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err)
1974 struct aead_request *req = aead_request_cast(dd->areq);
1975 struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
1977 if (err && (dd->flags & AES_FLAGS_OWN_SHA))
1978 atmel_sha_authenc_abort(&rctx->auth_req);
1979 dd->flags &= ~AES_FLAGS_OWN_SHA;
1982 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd)
1984 struct aead_request *req = aead_request_cast(dd->areq);
1985 struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
1986 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1987 struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
1990 atmel_aes_set_mode(dd, &rctx->base);
1992 err = atmel_aes_hw_init(dd);
1994 return atmel_aes_complete(dd, err);
1996 return atmel_sha_authenc_schedule(&rctx->auth_req, ctx->auth,
1997 atmel_aes_authenc_init, dd);
2000 static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
2003 struct aead_request *req = aead_request_cast(dd->areq);
2004 struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
2007 dd->is_async = true;
2009 return atmel_aes_complete(dd, err);
2011 /* If we get here, we've got ownership of the SHA device. */
2012 dd->flags |= AES_FLAGS_OWN_SHA;
2014 /* Configure the SHA device. */
2015 return atmel_sha_authenc_init(&rctx->auth_req,
2016 req->src, req->assoclen,
2018 atmel_aes_authenc_transfer, dd);
2021 static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
2024 struct aead_request *req = aead_request_cast(dd->areq);
2025 struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
2026 bool enc = atmel_aes_is_encrypt(dd);
2027 struct scatterlist *src, *dst;
2028 __be32 iv[AES_BLOCK_SIZE / sizeof(u32)];
2032 dd->is_async = true;
2034 return atmel_aes_complete(dd, err);
2036 /* Prepare src and dst scatter-lists to transfer cipher/plain texts. */
2037 src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen);
2040 if (req->src != req->dst)
2041 dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen);
2043 /* Configure the AES device. */
2044 memcpy(iv, req->iv, sizeof(iv));
2047 * Here we always set the 2nd parameter of atmel_aes_write_ctrl() to
2048 * 'true' even if the data transfer is actually performed by the CPU (so
2049 * not by the DMA) because we must force the AES_MR_SMOD bitfield to the
2050 * value AES_MR_SMOD_IDATAR0. Indeed, both AES_MR_SMOD and SHA_MR_SMOD
2051 * must be set to *_MR_SMOD_IDATAR0.
2053 atmel_aes_write_ctrl(dd, true, iv);
2054 emr = AES_EMR_PLIPEN;
2056 emr |= AES_EMR_PLIPD;
2057 atmel_aes_write(dd, AES_EMR, emr);
2059 /* Transfer data. */
2060 return atmel_aes_dma_start(dd, src, dst, rctx->textlen,
2061 atmel_aes_authenc_digest);
2064 static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd)
2066 struct aead_request *req = aead_request_cast(dd->areq);
2067 struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
2069 /* atmel_sha_authenc_final() releases the SHA device. */
2070 dd->flags &= ~AES_FLAGS_OWN_SHA;
2071 return atmel_sha_authenc_final(&rctx->auth_req,
2072 rctx->digest, sizeof(rctx->digest),
2073 atmel_aes_authenc_final, dd);
2076 static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
2079 struct aead_request *req = aead_request_cast(dd->areq);
2080 struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
2081 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2082 bool enc = atmel_aes_is_encrypt(dd);
2083 u32 idigest[SHA512_DIGEST_SIZE / sizeof(u32)], *odigest = rctx->digest;
2087 dd->is_async = true;
2091 offs = req->assoclen + rctx->textlen;
2092 authsize = crypto_aead_authsize(tfm);
2094 scatterwalk_map_and_copy(odigest, req->dst, offs, authsize, 1);
2096 scatterwalk_map_and_copy(idigest, req->src, offs, authsize, 0);
2097 if (crypto_memneq(idigest, odigest, authsize))
2102 return atmel_aes_complete(dd, err);
2105 static int atmel_aes_authenc_setkey(struct crypto_aead *tfm, const u8 *key,
2106 unsigned int keylen)
2108 struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
2109 struct crypto_authenc_keys keys;
2112 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
2115 if (keys.enckeylen > sizeof(ctx->base.key))
2118 /* Save auth key. */
2119 err = atmel_sha_authenc_setkey(ctx->auth,
2120 keys.authkey, keys.authkeylen,
2121 crypto_aead_get_flags(tfm));
2123 memzero_explicit(&keys, sizeof(keys));
2128 ctx->base.keylen = keys.enckeylen;
2129 memcpy(ctx->base.key, keys.enckey, keys.enckeylen);
2131 memzero_explicit(&keys, sizeof(keys));
2135 memzero_explicit(&keys, sizeof(keys));
2139 static int atmel_aes_authenc_init_tfm(struct crypto_aead *tfm,
2140 unsigned long auth_mode)
2142 struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
2143 unsigned int auth_reqsize = atmel_sha_authenc_get_reqsize();
2144 struct atmel_aes_dev *dd;
2146 dd = atmel_aes_dev_alloc(&ctx->base);
2150 ctx->auth = atmel_sha_authenc_spawn(auth_mode);
2151 if (IS_ERR(ctx->auth))
2152 return PTR_ERR(ctx->auth);
2154 crypto_aead_set_reqsize(tfm, (sizeof(struct atmel_aes_authenc_reqctx) +
2157 ctx->base.dd->ctx = &ctx->base;
2158 ctx->base.start = atmel_aes_authenc_start;
2163 static int atmel_aes_authenc_hmac_sha1_init_tfm(struct crypto_aead *tfm)
2165 return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA1);
2168 static int atmel_aes_authenc_hmac_sha224_init_tfm(struct crypto_aead *tfm)
2170 return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA224);
2173 static int atmel_aes_authenc_hmac_sha256_init_tfm(struct crypto_aead *tfm)
2175 return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA256);
2178 static int atmel_aes_authenc_hmac_sha384_init_tfm(struct crypto_aead *tfm)
2180 return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA384);
2183 static int atmel_aes_authenc_hmac_sha512_init_tfm(struct crypto_aead *tfm)
2185 return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA512);
2188 static void atmel_aes_authenc_exit_tfm(struct crypto_aead *tfm)
2190 struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
2192 atmel_sha_authenc_free(ctx->auth);
2195 static int atmel_aes_authenc_crypt(struct aead_request *req,
2198 struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
2199 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2200 struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);
2201 u32 authsize = crypto_aead_authsize(tfm);
2202 bool enc = (mode & AES_FLAGS_ENCRYPT);
2204 /* Compute text length. */
2205 if (!enc && req->cryptlen < authsize)
2207 rctx->textlen = req->cryptlen - (enc ? 0 : authsize);
2210 * Empty messages are not supported yet:
2211 * the SHA auto-padding can be used only on non-empty messages.
2212 * Hence a special case would need to be implemented for empty messages.
2214 if (!rctx->textlen && !req->assoclen)
2217 rctx->base.mode = mode;
2218 ctx->block_size = AES_BLOCK_SIZE;
2219 ctx->is_aead = true;
2221 return atmel_aes_handle_queue(ctx->dd, &req->base);
2224 static int atmel_aes_authenc_cbc_aes_encrypt(struct aead_request *req)
2226 return atmel_aes_authenc_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
2229 static int atmel_aes_authenc_cbc_aes_decrypt(struct aead_request *req)
2231 return atmel_aes_authenc_crypt(req, AES_FLAGS_CBC);
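/*
 * Usage example (editorial): these transforms register as AEADs, e.g.
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("authenc(hmac(sha256),cbc(aes))", 0, 0);
 *
 * with the key blob laid out as expected by crypto_authenc_extractkeys(),
 * i.e. an rtattr-prefixed authenc key (see crypto/authenc.c).
 */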
2234 static struct aead_alg aes_authenc_algs[] = {
2236 .setkey = atmel_aes_authenc_setkey,
2237 .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
2238 .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
2239 .init = atmel_aes_authenc_hmac_sha1_init_tfm,
2240 .exit = atmel_aes_authenc_exit_tfm,
2241 .ivsize = AES_BLOCK_SIZE,
2242 .maxauthsize = SHA1_DIGEST_SIZE,
2245 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2246 .cra_driver_name = "atmel-authenc-hmac-sha1-cbc-aes",
2247 .cra_blocksize = AES_BLOCK_SIZE,
2248 .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
2252 .setkey = atmel_aes_authenc_setkey,
2253 .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
2254 .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
2255 .init = atmel_aes_authenc_hmac_sha224_init_tfm,
2256 .exit = atmel_aes_authenc_exit_tfm,
2257 .ivsize = AES_BLOCK_SIZE,
2258 .maxauthsize = SHA224_DIGEST_SIZE,
2261 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2262 .cra_driver_name = "atmel-authenc-hmac-sha224-cbc-aes",
2263 .cra_blocksize = AES_BLOCK_SIZE,
2264 .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
2268 .setkey = atmel_aes_authenc_setkey,
2269 .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
2270 .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
2271 .init = atmel_aes_authenc_hmac_sha256_init_tfm,
2272 .exit = atmel_aes_authenc_exit_tfm,
2273 .ivsize = AES_BLOCK_SIZE,
2274 .maxauthsize = SHA256_DIGEST_SIZE,
2277 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2278 .cra_driver_name = "atmel-authenc-hmac-sha256-cbc-aes",
2279 .cra_blocksize = AES_BLOCK_SIZE,
2280 .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
2284 .setkey = atmel_aes_authenc_setkey,
2285 .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
2286 .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
2287 .init = atmel_aes_authenc_hmac_sha384_init_tfm,
2288 .exit = atmel_aes_authenc_exit_tfm,
2289 .ivsize = AES_BLOCK_SIZE,
2290 .maxauthsize = SHA384_DIGEST_SIZE,
2293 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2294 .cra_driver_name = "atmel-authenc-hmac-sha384-cbc-aes",
2295 .cra_blocksize = AES_BLOCK_SIZE,
2296 .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
2300 .setkey = atmel_aes_authenc_setkey,
2301 .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
2302 .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
2303 .init = atmel_aes_authenc_hmac_sha512_init_tfm,
2304 .exit = atmel_aes_authenc_exit_tfm,
2305 .ivsize = AES_BLOCK_SIZE,
2306 .maxauthsize = SHA512_DIGEST_SIZE,
2309 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2310 .cra_driver_name = "atmel-authenc-hmac-sha512-cbc-aes",
2311 .cra_blocksize = AES_BLOCK_SIZE,
2312 .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
2316 #endif /* CONFIG_CRYPTO_DEV_ATMEL_AUTHENC */
2318 /* Probe functions */
2320 static int atmel_aes_buff_init(struct atmel_aes_dev *dd)
2322 dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER);
2323 dd->buflen = ATMEL_AES_BUFFER_SIZE;
2324 dd->buflen &= ~(AES_BLOCK_SIZE - 1);
2327 dev_err(dd->dev, "unable to alloc pages.\n");
2334 static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd)
2336 free_page((unsigned long)dd->buf);
static int atmel_aes_dma_init(struct atmel_aes_dev *dd)
{
	int ret;

	/* Try to grab 2 DMA channels */
	dd->src.chan = dma_request_chan(dd->dev, "tx");
	if (IS_ERR(dd->src.chan)) {
		ret = PTR_ERR(dd->src.chan);
		goto err_dma_in;
	}

	dd->dst.chan = dma_request_chan(dd->dev, "rx");
	if (IS_ERR(dd->dst.chan)) {
		ret = PTR_ERR(dd->dst.chan);
		goto err_dma_out;
	}

	return 0;

err_dma_out:
	dma_release_channel(dd->src.chan);
err_dma_in:
	dev_err(dd->dev, "no DMA channel available\n");
	return ret;
}

static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd)
{
	dma_release_channel(dd->dst.chan);
	dma_release_channel(dd->src.chan);
}

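/*
 * Note: the "tx" and "rx" names passed to dma_request_chan() above are
 * resolved against the dma-names property of the device node (or a
 * platform DMA slave map), so the platform description must provide both
 * a tx and an rx channel for this controller.
 */
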
static void atmel_aes_queue_task(unsigned long data)
{
	struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;

	atmel_aes_handle_queue(dd, NULL);
}

static void atmel_aes_done_task(unsigned long data)
{
	struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;

	dd->is_async = true;
	(void)dd->resume(dd);
}

static irqreturn_t atmel_aes_irq(int irq, void *dev_id)
{
	struct atmel_aes_dev *aes_dd = dev_id;
	u32 reg;

	reg = atmel_aes_read(aes_dd, AES_ISR);
	if (reg & atmel_aes_read(aes_dd, AES_IMR)) {
		atmel_aes_write(aes_dd, AES_IDR, reg);
		if (AES_FLAGS_BUSY & aes_dd->flags)
			tasklet_schedule(&aes_dd->done_task);
		else
			dev_warn(aes_dd->dev, "AES interrupt when no active requests.\n");
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}

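/*
 * Interrupt flow: the handler reads the pending sources (AES_ISR), keeps
 * only those currently enabled (AES_IMR), disables them through AES_IDR to
 * acknowledge, and defers the actual completion work to done_task. An
 * interrupt with no request in flight is only logged.
 */
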
static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd)
{
	int i;

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (dd->caps.has_authenc)
		for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++)
			crypto_unregister_aead(&aes_authenc_algs[i]);
#endif

	if (dd->caps.has_xts)
		crypto_unregister_skcipher(&aes_xts_alg);

	if (dd->caps.has_gcm)
		crypto_unregister_aead(&aes_gcm_alg);

	if (dd->caps.has_cfb64)
		crypto_unregister_skcipher(&aes_cfb64_alg);

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
		crypto_unregister_skcipher(&aes_algs[i]);
}

static void atmel_aes_crypto_alg_init(struct crypto_alg *alg)
{
	alg->cra_flags |= CRYPTO_ALG_ASYNC;
	alg->cra_alignmask = 0xf;
	alg->cra_priority = ATMEL_AES_PRIORITY;
	alg->cra_module = THIS_MODULE;
}

static int atmel_aes_register_algs(struct atmel_aes_dev *dd)
{
	int err, i, j;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		atmel_aes_crypto_alg_init(&aes_algs[i].base);

		err = crypto_register_skcipher(&aes_algs[i]);
		if (err)
			goto err_aes_algs;
	}

	if (dd->caps.has_cfb64) {
		atmel_aes_crypto_alg_init(&aes_cfb64_alg.base);

		err = crypto_register_skcipher(&aes_cfb64_alg);
		if (err)
			goto err_aes_cfb64_alg;
	}

	if (dd->caps.has_gcm) {
		atmel_aes_crypto_alg_init(&aes_gcm_alg.base);

		err = crypto_register_aead(&aes_gcm_alg);
		if (err)
			goto err_aes_gcm_alg;
	}

	if (dd->caps.has_xts) {
		atmel_aes_crypto_alg_init(&aes_xts_alg.base);

		err = crypto_register_skcipher(&aes_xts_alg);
		if (err)
			goto err_aes_xts_alg;
	}

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (dd->caps.has_authenc) {
		for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++) {
			atmel_aes_crypto_alg_init(&aes_authenc_algs[i].base);

			err = crypto_register_aead(&aes_authenc_algs[i]);
			if (err)
				goto err_aes_authenc_alg;
		}
	}
#endif

	return 0;

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	/* i = ARRAY_SIZE(aes_authenc_algs); */
err_aes_authenc_alg:
	for (j = 0; j < i; j++)
		crypto_unregister_aead(&aes_authenc_algs[j]);
	crypto_unregister_skcipher(&aes_xts_alg);
#endif
err_aes_xts_alg:
	crypto_unregister_aead(&aes_gcm_alg);
err_aes_gcm_alg:
	crypto_unregister_skcipher(&aes_cfb64_alg);
err_aes_cfb64_alg:
	i = ARRAY_SIZE(aes_algs);
err_aes_algs:
	for (j = 0; j < i; j++)
		crypto_unregister_skcipher(&aes_algs[j]);

	return err;
}

static void atmel_aes_get_cap(struct atmel_aes_dev *dd)
{
	dd->caps.has_dualbuff = 0;
	dd->caps.has_cfb64 = 0;
	dd->caps.has_gcm = 0;
	dd->caps.has_xts = 0;
	dd->caps.has_authenc = 0;
	dd->caps.max_burst_size = 1;

	/* keep only major version number */
	switch (dd->hw_version & 0xff0) {
	case 0x500:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.has_gcm = 1;
		dd->caps.has_xts = 1;
		dd->caps.has_authenc = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x200:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.has_gcm = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x130:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x120:
		break;
	default:
		dev_warn(dd->dev,
			 "Unmanaged aes version, set minimum capabilities\n");
		break;
	}
}

#if defined(CONFIG_OF)
static const struct of_device_id atmel_aes_dt_ids[] = {
	{ .compatible = "atmel,at91sam9g46-aes" },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, atmel_aes_dt_ids);
#endif

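/*
 * A matching device tree node might look like the sketch below; the unit
 * address, interrupt and DMA specifiers are board specific and shown only
 * as placeholders, while the "aes_clk" and "tx"/"rx" names must match what
 * this driver requests:
 *
 *	aes: crypto@f8038000 {
 *		compatible = "atmel,at91sam9g46-aes";
 *		reg = <0xf8038000 0x100>;
 *		interrupts = <...>;
 *		clocks = <&aes_clk>;
 *		clock-names = "aes_clk";
 *		dmas = <...>, <...>;
 *		dma-names = "tx", "rx";
 *	};
 */
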
static int atmel_aes_probe(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;
	struct device *dev = &pdev->dev;
	struct resource *aes_res;
	int err;

	aes_dd = devm_kzalloc(&pdev->dev, sizeof(*aes_dd), GFP_KERNEL);
	if (!aes_dd)
		return -ENOMEM;

	aes_dd->dev = dev;

	platform_set_drvdata(pdev, aes_dd);

	INIT_LIST_HEAD(&aes_dd->list);
	spin_lock_init(&aes_dd->lock);

	tasklet_init(&aes_dd->done_task, atmel_aes_done_task,
		     (unsigned long)aes_dd);
	tasklet_init(&aes_dd->queue_task, atmel_aes_queue_task,
		     (unsigned long)aes_dd);

	crypto_init_queue(&aes_dd->queue, ATMEL_AES_QUEUE_LENGTH);

	/* Get the base address */
	aes_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!aes_res) {
		dev_err(dev, "no MEM resource info\n");
		err = -ENODEV;
		goto err_tasklet_kill;
	}
	aes_dd->phys_base = aes_res->start;

	/* Get the IRQ */
	aes_dd->irq = platform_get_irq(pdev, 0);
	if (aes_dd->irq < 0) {
		err = aes_dd->irq;
		goto err_tasklet_kill;
	}

	err = devm_request_irq(&pdev->dev, aes_dd->irq, atmel_aes_irq,
			       IRQF_SHARED, "atmel-aes", aes_dd);
	if (err) {
		dev_err(dev, "unable to request aes irq.\n");
		goto err_tasklet_kill;
	}

	/* Initializing the clock */
	aes_dd->iclk = devm_clk_get(&pdev->dev, "aes_clk");
	if (IS_ERR(aes_dd->iclk)) {
		dev_err(dev, "clock initialization failed.\n");
		err = PTR_ERR(aes_dd->iclk);
		goto err_tasklet_kill;
	}

	aes_dd->io_base = devm_ioremap_resource(&pdev->dev, aes_res);
	if (IS_ERR(aes_dd->io_base)) {
		dev_err(dev, "can't ioremap\n");
		err = PTR_ERR(aes_dd->io_base);
		goto err_tasklet_kill;
	}

	err = clk_prepare(aes_dd->iclk);
	if (err)
		goto err_tasklet_kill;

	err = atmel_aes_hw_version_init(aes_dd);
	if (err)
		goto err_iclk_unprepare;

	atmel_aes_get_cap(aes_dd);

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (aes_dd->caps.has_authenc && !atmel_sha_authenc_is_ready()) {
		err = -EPROBE_DEFER;
		goto err_iclk_unprepare;
	}
#endif

	err = atmel_aes_buff_init(aes_dd);
	if (err)
		goto err_iclk_unprepare;

	err = atmel_aes_dma_init(aes_dd);
	if (err)
		goto err_buff_cleanup;

	spin_lock(&atmel_aes.lock);
	list_add_tail(&aes_dd->list, &atmel_aes.dev_list);
	spin_unlock(&atmel_aes.lock);

	err = atmel_aes_register_algs(aes_dd);
	if (err)
		goto err_algs;

	dev_info(dev, "Atmel AES - Using %s, %s for DMA transfers\n",
		 dma_chan_name(aes_dd->src.chan),
		 dma_chan_name(aes_dd->dst.chan));

	return 0;

err_algs:
	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);
	atmel_aes_dma_cleanup(aes_dd);
err_buff_cleanup:
	atmel_aes_buff_cleanup(aes_dd);
err_iclk_unprepare:
	clk_unprepare(aes_dd->iclk);
err_tasklet_kill:
	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);

	return err;
}

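/*
 * Ordering note: clk_prepare() is done before atmel_aes_hw_version_init()
 * because reading the version register presumably requires the peripheral
 * clock; likewise the device is only added to the global dev_list once its
 * buffers and DMA channels exist, so the algorithm registration paths never
 * see a half-initialized device.
 */
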
static int atmel_aes_remove(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;

	aes_dd = platform_get_drvdata(pdev);
	if (!aes_dd)
		return -ENODEV;

	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);

	atmel_aes_unregister_algs(aes_dd);

	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);

	atmel_aes_dma_cleanup(aes_dd);
	atmel_aes_buff_cleanup(aes_dd);

	clk_unprepare(aes_dd->iclk);

	return 0;
}

static struct platform_driver atmel_aes_driver = {
	.probe		= atmel_aes_probe,
	.remove		= atmel_aes_remove,
	.driver		= {
		.name	= "atmel_aes",
		.of_match_table = of_match_ptr(atmel_aes_dt_ids),
	},
};

module_platform_driver(atmel_aes_driver);

MODULE_DESCRIPTION("Atmel AES hw acceleration support.");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Nicolas Royer - Eukréa Electromatique");