// SPDX-License-Identifier: GPL-2.0+
/*
 * Freescale FSL CAAM support for crypto API over QI backend.
 * Based on caamalg.c
 *
 * Copyright 2013-2016 Freescale Semiconductor, Inc.
 * Copyright 2016-2018 NXP
 */

#include "compat.h"
#include "ctrl.h"
#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "error.h"
#include "sg_sw_qm.h"
#include "key_gen.h"
#include "qi.h"
#include "jr.h"
#include "caamalg_desc.h"

#define CAAM_CRA_PRIORITY	2000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE	(AES_MAX_KEY_SIZE + \
				 SHA512_DIGEST_SIZE * 2)

#define DESC_MAX_USED_BYTES	(DESC_QI_AEAD_GIVENC_LEN + \
				 CAAM_MAX_KEY_SIZE)
#define DESC_MAX_USED_LEN	(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)

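/*
 * The shared descriptor buffers in struct caam_ctx below are sized for the
 * worst case: the AEAD givencrypt descriptor plus a maximally-sized inlined
 * key, converted from bytes to 32-bit CAAM command words.
 */
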
struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	struct device *jrdev;
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t key_dma;
	enum dma_data_direction dir;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
	struct device *qidev;
	spinlock_t lock;	/* Protects multiple init of driver context */
	struct caam_drv_ctx *drv_ctx[NUM_OP];
};

static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 typeof(*alg), aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 ctx1_iv_off = 0;
	u32 *nonce = NULL;
	unsigned int data_len[2];
	u32 inl_mask;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

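	/*
	 * For rfc3686 the nonce sits at the very end of the key material
	 * laid out in ctx->key, i.e. right after the (padded) split auth
	 * key and the AES key, which is where aead_setkey() copies the
	 * trailing CTR_RFC3686_NONCE_SIZE bytes of the supplied key.
	 */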
	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/* aead_encrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	cnstr_shdsc_aead_encap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
			       ivsize, ctx->authsize, is_rfc3686, nonce,
			       ctx1_iv_off, true, ctrlpriv->era);

skip_enc:
	/* aead_decrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	cnstr_shdsc_aead_decap(ctx->sh_desc_dec, &ctx->cdata, &ctx->adata,
			       ivsize, ctx->authsize, alg->caam.geniv,
			       is_rfc3686, nonce, ctx1_iv_off, true,
			       ctrlpriv->era);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/* aead_givencrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	cnstr_shdsc_aead_givencap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
				  ivsize, ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, true, ctrlpriv->era);

skip_givenc:
	return 0;
}

static int aead_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

static int aead_setkey(struct crypto_aead *aead, const u8 *key,
		       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	dev_err(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
		keys.authkeylen + keys.enckeylen, keys.enckeylen,
		keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	ret = gen_split_key(jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;

	ret = aead_set_sh_desc(aead);
	if (ret)
		goto badkey;

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			goto badkey;
		}
	}

	memzero_explicit(&keys, sizeof(keys));
	return ret;
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

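/*
 * Layout of ctx->key after a successful aead_setkey() (sketch):
 *
 *	+--------------------------+------------------------+
 *	| auth key (split / plain, | encryption key         |
 *	| padded to keylen_pad)    | (+ nonce for rfc3686)  |
 *	+--------------------------+------------------------+
 *	0                          adata.keylen_pad
 *
 * On era >= 6 the plain auth key is stored and DKP turns it into a split
 * key in place; on older eras gen_split_key() produces the split key
 * directly.
 */
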
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_gcm_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
			      ctx->authsize, true);

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_gcm_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
			      ctx->authsize, true);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}

static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	ret = gcm_set_sh_desc(aead);
	if (ret)
		return ret;

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			return ret;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			return ret;
		}
	}

	return 0;
}

static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	ctx->cdata.key_virt = ctx->key;

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_rfc4106_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
				  ctx->authsize, true);

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_rfc4106_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
				  ctx->authsize, true);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);

	ret = rfc4106_set_sh_desc(aead);
	if (ret)
		return ret;

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			return ret;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			return ret;
		}
	}

	return 0;
}

static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	ctx->cdata.key_virt = ctx->key;

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_rfc4543_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
				  ctx->authsize, true);

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_rfc4543_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
				  ctx->authsize, true);

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}

static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);

	ret = rfc4543_set_sh_desc(aead);
	if (ret)
		return ret;

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			return ret;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			return ret;
		}
	}

	return 0;
}

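/*
 * For both rfc4106 and rfc4543 the full key material (including the 4-byte
 * salt) is copied into ctx->key, while cdata.keylen excludes the salt; the
 * descriptor construction code is expected to pick up the salt from the
 * bytes that follow the AES key proper in this buffer.
 */
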
static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	int ret = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher encrypt, decrypt shared descriptors */
	cnstr_shdsc_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
				   is_rfc3686, ctx1_iv_off);
	cnstr_shdsc_skcipher_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
				   is_rfc3686, ctx1_iv_off);

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			goto badkey;
		}
	}

	return ret;
badkey:
	crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		dev_err(jrdev, "key size mismatch\n");
		goto badkey;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts skcipher encrypt, decrypt shared descriptors */
	cnstr_shdsc_xts_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata);
	cnstr_shdsc_xts_skcipher_decap(ctx->sh_desc_dec, &ctx->cdata);

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			goto badkey;
		}
	}

	return ret;
badkey:
	crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @qm_sg_bytes: length of dma mapped h/w link table
 * @qm_sg_dma: bus physical mapped address of h/w link table
 * @assoclen: associated data length, in CAAM endianness
 * @assoclen_dma: bus physical mapped address of req->assoclen
 * @drv_req: driver-specific request structure
 * @sgt: the h/w link table, followed by IV
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int qm_sg_bytes;
	dma_addr_t qm_sg_dma;
	unsigned int assoclen;
	dma_addr_t assoclen_dma;
	struct caam_drv_req drv_req;
	struct qm_sg_entry sgt[0];
};

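/*
 * A qi_cache entry therefore holds, in order: the edesc header, the h/w
 * link table (sgt[]) and, right after it, a DMA-able copy of the IV;
 * the *_edesc_alloc() routines check that all of this fits within
 * CAAM_QI_MEMCACHE_SIZE before building the table.
 */
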
/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @qm_sg_bytes: length of dma mapped h/w link table
 * @qm_sg_dma: bus physical mapped address of h/w link table
 * @drv_req: driver-specific request structure
 * @sgt: the h/w link table, followed by IV
 */
struct skcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int qm_sg_bytes;
	dma_addr_t qm_sg_dma;
	struct caam_drv_req drv_req;
	struct qm_sg_entry sgt[0];
};

static struct caam_drv_ctx *get_drv_ctx(struct caam_ctx *ctx,
					enum optype type)
{
	/*
	 * This function is called on the fast path with values of 'type'
	 * known at compile time. Invalid arguments are not expected and
	 * thus no checks are made.
	 */
	struct caam_drv_ctx *drv_ctx = ctx->drv_ctx[type];
	u32 *desc;

	if (unlikely(!drv_ctx)) {
		spin_lock(&ctx->lock);

		/* Read again to check if some other core init drv_ctx */
		drv_ctx = ctx->drv_ctx[type];
		if (!drv_ctx) {
			int cpu;

			if (type == ENCRYPT)
				desc = ctx->sh_desc_enc;
			else /* (type == DECRYPT) */
				desc = ctx->sh_desc_dec;

			cpu = smp_processor_id();
			drv_ctx = caam_drv_ctx_init(ctx->qidev, &cpu, desc);
			if (likely(!IS_ERR_OR_NULL(drv_ctx)))
				drv_ctx->op_type = type;

			ctx->drv_ctx[type] = drv_ctx;
		}

		spin_unlock(&ctx->lock);
	}

	return drv_ctx;
}

static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents, dma_addr_t iv_dma, int ivsize,
		       dma_addr_t qm_sg_dma, int qm_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (qm_sg_bytes)
		dma_unmap_single(dev, qm_sg_dma, qm_sg_bytes, DMA_TO_DEVICE);
}

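/*
 * Note the symmetry with the mapping code below: in-place requests
 * (src == dst) are mapped once as DMA_BIDIRECTIONAL, while distinct
 * buffers use DMA_TO_DEVICE for the source and DMA_FROM_DEVICE for the
 * destination, and must be unmapped the same way.
 */
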
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	int ivsize = crypto_aead_ivsize(aead);

	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize, edesc->qm_sg_dma, edesc->qm_sg_bytes);
	dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
}

static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize, edesc->qm_sg_dma, edesc->qm_sg_bytes);
}

static void aead_done(struct caam_drv_req *drv_req, u32 status)
{
	struct device *qidev;
	struct aead_edesc *edesc;
	struct aead_request *aead_req = drv_req->app_ctx;
	struct crypto_aead *aead = crypto_aead_reqtfm(aead_req);
	struct caam_ctx *caam_ctx = crypto_aead_ctx(aead);
	int ecode = 0;

	qidev = caam_ctx->qidev;

	if (unlikely(status)) {
		u32 ssrc = status & JRSTA_SSRC_MASK;
		u8 err_id = status & JRSTA_CCBERR_ERRID_MASK;

		caam_jr_strstatus(qidev, status);
		/*
		 * verify hw auth check passed else return -EBADMSG
		 */
		if (ssrc == JRSTA_SSRC_CCB_ERROR &&
		    err_id == JRSTA_CCBERR_ERRID_ICVCHK)
			ecode = -EBADMSG;
		else
			ecode = -EIO;
	}

	edesc = container_of(drv_req, typeof(*edesc), drv_req);
	aead_unmap(qidev, edesc, aead_req);

	aead_request_complete(aead_req, ecode);
	qi_cache_free(edesc);
}

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 typeof(*alg), aead);
	struct device *qidev = ctx->qidev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		      GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	dma_addr_t qm_sg_dma, iv_dma = 0;
	int ivsize = 0;
	unsigned int authsize = ctx->authsize;
	int qm_sg_index = 0, qm_sg_ents = 0, qm_sg_bytes;
	int in_len, out_len;
	struct qm_sg_entry *sg_table, *fd_sgt;
	struct caam_drv_ctx *drv_ctx;

	drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
	if (unlikely(IS_ERR_OR_NULL(drv_ctx)))
		return (struct aead_edesc *)drv_ctx;

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = qi_cache_alloc(GFP_DMA | flags);
	if (unlikely(!edesc)) {
		dev_err(qidev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	if (likely(req->src == req->dst)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			qi_cache_free(edesc);
			return ERR_PTR(src_nents);
		}

		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			qi_cache_free(edesc);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			qi_cache_free(edesc);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
							(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			qi_cache_free(edesc);
			return ERR_PTR(dst_nents);
		}

		if (src_nents) {
			mapped_src_nents = dma_map_sg(qidev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(qidev, "unable to map source\n");
				qi_cache_free(edesc);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(qidev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(qidev, "unable to map destination\n");
			dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
			qi_cache_free(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	if ((alg->caam.rfc3686 && encrypt) || !alg->caam.geniv)
		ivsize = crypto_aead_ivsize(aead);

	/*
	 * Create S/G table: req->assoclen, [IV,] req->src [, req->dst].
	 * Input is not contiguous.
	 */
	qm_sg_ents = 1 + !!ivsize + mapped_src_nents +
		     (mapped_dst_nents > 1 ? mapped_dst_nents : 0);
	sg_table = &edesc->sgt[0];
	qm_sg_bytes = qm_sg_ents * sizeof(*sg_table);
	if (unlikely(offsetof(struct aead_edesc, sgt) + qm_sg_bytes + ivsize >
		     CAAM_QI_MEMCACHE_SIZE)) {
		dev_err(qidev, "No space for %d S/G entries and/or %dB IV\n",
			qm_sg_ents, ivsize);
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	if (ivsize) {
		u8 *iv = (u8 *)(sg_table + qm_sg_ents);

		/* Make sure IV is located in a DMAable area */
		memcpy(iv, req->iv, ivsize);

		iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE);
		if (dma_mapping_error(qidev, iv_dma)) {
			dev_err(qidev, "unable to map IV\n");
			caam_unmap(qidev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, 0, 0);
			qi_cache_free(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->drv_req.app_ctx = req;
	edesc->drv_req.cbk = aead_done;
	edesc->drv_req.drv_ctx = drv_ctx;

	edesc->assoclen = cpu_to_caam32(req->assoclen);
	edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4,
					     DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, edesc->assoclen_dma)) {
		dev_err(qidev, "unable to map assoclen\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0);
	qm_sg_index++;
	if (ivsize) {
		dma_to_qm_sg_one(sg_table + qm_sg_index, iv_dma, ivsize, 0);
		qm_sg_index++;
	}
	sg_to_qm_sg_last(req->src, mapped_src_nents, sg_table + qm_sg_index, 0);
	qm_sg_index += mapped_src_nents;

	if (mapped_dst_nents > 1)
		sg_to_qm_sg_last(req->dst, mapped_dst_nents, sg_table +
				 qm_sg_index, 0);

	qm_sg_dma = dma_map_single(qidev, sg_table, qm_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, qm_sg_dma)) {
		dev_err(qidev, "unable to map S/G table\n");
		dma_unmap_single(qidev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->qm_sg_dma = qm_sg_dma;
	edesc->qm_sg_bytes = qm_sg_bytes;

	out_len = req->assoclen + req->cryptlen +
		  (encrypt ? ctx->authsize : (-ctx->authsize));
	in_len = 4 + ivsize + req->assoclen + req->cryptlen;

	fd_sgt = &edesc->drv_req.fd_sgt[0];
	dma_to_qm_sg_one_last_ext(&fd_sgt[1], qm_sg_dma, in_len, 0);

	if (req->dst == req->src) {
		if (mapped_src_nents == 1)
			dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->src),
					 out_len, 0);
		else
			dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma +
					     (1 + !!ivsize) * sizeof(*sg_table),
					     out_len, 0);
	} else if (mapped_dst_nents == 1) {
		dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst), out_len,
				 0);
	} else {
		dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma + sizeof(*sg_table) *
				     qm_sg_index, out_len, 0);
	}

	return edesc;
}

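/*
 * Resulting QI S/G table layout for an AEAD request (sketch):
 *
 *	sgt[0]             : req->assoclen (4 bytes, CAAM endianness)
 *	sgt[1]             : IV (only if ivsize != 0)
 *	sgt[..]            : source segments
 *	sgt[qm_sg_index..] : destination segments (only if dst != src and
 *	                     more than one mapped segment)
 *
 * fd_sgt[1] points at this table as the compound frame input; fd_sgt[0]
 * describes the output, either directly or through the same table.
 */
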
static inline int aead_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int ret;

	if (unlikely(caam_congested))
		return -EAGAIN;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, encrypt);
	if (IS_ERR_OR_NULL(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(ctx->qidev, edesc, req);
		qi_cache_free(edesc);
	}

	return ret;
}

static int aead_encrypt(struct aead_request *req)
{
	return aead_crypt(req, true);
}

static int aead_decrypt(struct aead_request *req)
{
	return aead_crypt(req, false);
}

static int ipsec_gcm_encrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return aead_crypt(req, true);
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return aead_crypt(req, false);
}

static void skcipher_done(struct caam_drv_req *drv_req, u32 status)
{
	struct skcipher_edesc *edesc;
	struct skcipher_request *req = drv_req->app_ctx;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *caam_ctx = crypto_skcipher_ctx(skcipher);
	struct device *qidev = caam_ctx->qidev;
	int ivsize = crypto_skcipher_ivsize(skcipher);

#ifdef DEBUG
	dev_err(qidev, "%s %d: status 0x%x\n", __func__, __LINE__, status);
#endif

	edesc = container_of(drv_req, typeof(*edesc), drv_req);

	if (status)
		caam_jr_strstatus(qidev, status);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
#endif

	skcipher_unmap(qidev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block. This is used e.g. by the CTS mode.
	 */
	if (edesc->drv_req.drv_ctx->op_type == ENCRYPT)
		scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen -
					 ivsize, ivsize, 0);

	qi_cache_free(edesc);
	skcipher_request_complete(req, status);
}

static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *qidev = ctx->qidev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		      GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, qm_sg_ents, qm_sg_bytes;
	struct qm_sg_entry *sg_table, *fd_sgt;
	struct caam_drv_ctx *drv_ctx;

	drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
	if (unlikely(IS_ERR_OR_NULL(drv_ctx)))
		return (struct skcipher_edesc *)drv_ctx;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (unlikely(req->src != req->dst)) {
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}

		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(qidev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(qidev, "unable to map destination\n");
			dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	}

	qm_sg_ents = 1 + mapped_src_nents;
	dst_sg_idx = qm_sg_ents;

	qm_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	qm_sg_bytes = qm_sg_ents * sizeof(struct qm_sg_entry);
	if (unlikely(offsetof(struct skcipher_edesc, sgt) + qm_sg_bytes +
		     ivsize > CAAM_QI_MEMCACHE_SIZE)) {
		dev_err(qidev, "No space for %d S/G entries and/or %dB IV\n",
			qm_sg_ents, ivsize);
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	/* allocate space for base edesc, link tables and IV */
	edesc = qi_cache_alloc(GFP_DMA | flags);
	if (unlikely(!edesc)) {
		dev_err(qidev, "could not allocate extended descriptor\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	/* Make sure IV is located in a DMAable area */
	sg_table = &edesc->sgt[0];
	iv = (u8 *)(sg_table + qm_sg_ents);
	memcpy(iv, req->iv, ivsize);

	iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, iv_dma)) {
		dev_err(qidev, "unable to map IV\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->qm_sg_bytes = qm_sg_bytes;
	edesc->drv_req.app_ctx = req;
	edesc->drv_req.cbk = skcipher_done;
	edesc->drv_req.drv_ctx = drv_ctx;

	dma_to_qm_sg_one(sg_table, iv_dma, ivsize, 0);
	sg_to_qm_sg_last(req->src, mapped_src_nents, sg_table + 1, 0);

	if (mapped_dst_nents > 1)
		sg_to_qm_sg_last(req->dst, mapped_dst_nents, sg_table +
				 dst_sg_idx, 0);

	edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes,
					  DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, edesc->qm_sg_dma)) {
		dev_err(qidev, "unable to map S/G table\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	fd_sgt = &edesc->drv_req.fd_sgt[0];

	dma_to_qm_sg_one_last_ext(&fd_sgt[1], edesc->qm_sg_dma,
				  ivsize + req->cryptlen, 0);

	if (req->src == req->dst) {
		dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma +
				     sizeof(*sg_table), req->cryptlen, 0);
	} else if (mapped_dst_nents > 1) {
		dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + dst_sg_idx *
				     sizeof(*sg_table), req->cryptlen, 0);
	} else {
		dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst),
				 req->cryptlen, 0);
	}

	return edesc;
}

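/*
 * For skciphers the compound-frame input entry always goes through the
 * S/G table ({IV, source segments}), while the output points either back
 * into the same table (in-place operation), at the destination part of
 * the table, or straight at a single-segment destination buffer.
 */
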
static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int ret;

	if (unlikely(caam_congested))
		return -EAGAIN;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block.
	 */
	if (!encrypt)
		scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen -
					 ivsize, ivsize, 0);

	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		skcipher_unmap(ctx->qidev, edesc, req);
		qi_cache_free(edesc);
	}

	return ret;
}

static int skcipher_encrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, true);
}

static int skcipher_decrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, false);
}

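/*
 * Usage from another kernel module goes through the generic crypto API;
 * a minimal sketch (error handling trimmed, buffer assumed to be a single
 * linear chunk) of driving one of the skciphers registered below:
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	u8 iv[AES_BLOCK_SIZE];
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, buflen);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, buflen, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 *
 * The request is asynchronous: skcipher_encrypt() typically returns
 * -EINPROGRESS and skcipher_done() completes it later.
 */
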
static struct caam_skcipher_alg driver_algs[] = {
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "cbc-3des-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "ctr-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
					OP_ALG_AAI_CTR_MOD128,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "rfc3686(ctr(aes))",
				.cra_driver_name = "rfc3686-ctr-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.rfc3686 = true,
		},
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "xts(aes)",
				.cra_driver_name = "xts-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = xts_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};

static struct caam_aead_alg driver_aeads[] = {
	{
		.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = rfc4106_setkey,
			.setauthsize = rfc4106_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc4543(gcm(aes))",
				.cra_driver_name = "rfc4543-gcm-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = rfc4543_setkey,
			.setauthsize = rfc4543_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* Galois Counter Mode */
	{
		.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = gcm_setkey,
			.setauthsize = gcm_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = 12,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* single-pass ipsec_esp descriptor */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des))",
				.cra_driver_name = "authenc-hmac-md5-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha1-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha1-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha224-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha256-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha384-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-sha512-cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
};

static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	struct caam_drv_private *priv;

	/*
	 * distribute tfms across job rings to ensure in-order
	 * crypto request processing per tfm
	 */
	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	ctx->key_dma = dma_map_single(ctx->jrdev, ctx->key, sizeof(ctx->key),
				      ctx->dir);
	if (dma_mapping_error(ctx->jrdev, ctx->key_dma)) {
		dev_err(ctx->jrdev, "unable to map key\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	ctx->qidev = priv->qidev;

	spin_lock_init(&ctx->lock);
	ctx->drv_ctx[ENCRYPT] = NULL;
	ctx->drv_ctx[DECRYPT] = NULL;

	return 0;
}

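/*
 * The whole key buffer is mapped up front and stays mapped for the
 * lifetime of the tfm; the setkey paths only dma_sync it. DKP (era >= 6)
 * makes CAAM rewrite the auth key in place as a split key, which is why
 * the mapping must then be DMA_BIDIRECTIONAL instead of DMA_TO_DEVICE.
 */
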
static int caam_cra_init(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct caam_skcipher_alg *caam_alg =
		container_of(alg, typeof(*caam_alg), skcipher);

	return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
				false);
}

static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg = container_of(alg, typeof(*caam_alg),
						      aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam,
				alg->setkey == aead_setkey);
}

static void caam_exit_common(struct caam_ctx *ctx)
{
	caam_drv_ctx_rel(ctx->drv_ctx[ENCRYPT]);
	caam_drv_ctx_rel(ctx->drv_ctx[DECRYPT]);

	dma_unmap_single(ctx->jrdev, ctx->key_dma, sizeof(ctx->key), ctx->dir);

	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	caam_exit_common(crypto_skcipher_ctx(tfm));
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}

static void __exit caam_qi_algapi_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;

		if (t_alg->registered)
			crypto_unregister_skcipher(&t_alg->skcipher);
	}
}

static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
{
	struct skcipher_alg *alg = &t_alg->skcipher;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_cra_init;
	alg->exit = caam_cra_exit;
}

static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}

static int __init caam_qi_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	of_node_put(dev_node);
	if (!pdev)
		return -ENODEV;

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv || !priv->qi_present)
		return -ENODEV;

	if (caam_dpaa2) {
		dev_info(ctrldev, "caam/qi frontend driver not suitable for DPAA 2.x, aborting...\n");
		return -ENODEV;
	}

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	if (priv->era < 10) {
		u32 cha_vid, cha_inst;

		cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

		cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
			   CHA_ID_LS_DES_SHIFT;
		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
	} else {
		u32 aesa, mdha;

		aesa = rd_reg32(&priv->ctrl->vreg.aesa);
		mdha = rd_reg32(&priv->ctrl->vreg.mdha);

		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;

		des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
		aes_inst = aesa & CHA_VER_NUM_MASK;
		md_inst = mdha & CHA_VER_NUM_MASK;
	}

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
		if (err) {
			dev_warn(priv->qidev, "%s alg registration failed\n",
				 t_alg->skcipher.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if (aes_vid == CHA_VER_VID_AES_LP && alg_aai == OP_ALG_AAI_GCM)
			continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		dev_info(priv->qidev, "algorithms registered in /proc/crypto\n");

	return err;
}

module_init(caam_qi_algapi_init);
module_exit(caam_qi_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Support for crypto API using CAAM-QI backend");
MODULE_AUTHOR("Freescale Semiconductor");