1 // SPDX-License-Identifier: GPL-2.0+
3 * caam - Freescale FSL CAAM support for crypto API
5 * Copyright 2008-2011 Freescale Semiconductor, Inc.
6 * Copyright 2016-2019 NXP
8 * Based on talitos crypto API driver.
10 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
12 * --------------- ---------------
13 * | JobDesc #1 |-------------------->| ShareDesc |
14 * | *(packet 1) | | (PDB) |
15 * --------------- |------------->| (hashKey) |
17 * . | |-------->| (operation) |
18 * --------------- | | ---------------
19 * | JobDesc #2 |------| |
25 * | JobDesc #3 |------------
29 * The SharedDesc never changes for a connection unless rekeyed, but
30 * each packet will likely be in a different place. So all we need
31 * to know to process the packet is where the input is, where the
32 * output goes, and what context we want to process with. Context is
33 * in the SharedDesc, packet references in the JobDesc.
35 * So, a job desc looks like:
37 * ---------------------
39 * | ShareDesc Pointer |
46 * ---------------------
53 #include "desc_constr.h"
56 #include "sg_sw_sec4.h"
58 #include "caamalg_desc.h"
59 #include <crypto/engine.h>
64 #define CAAM_CRA_PRIORITY 3000
65 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */
66 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
67 CTR_RFC3686_NONCE_SIZE + \
68 SHA512_DIGEST_SIZE * 2)
70 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
71 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
73 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
76 #define CHACHAPOLY_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)
78 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN)
79 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
/*
 * Per-algorithm template parameters shared by the aead/skcipher registration
 * wrappers below (alg->caam.rfc3686 and alg->caam.geniv are read later).
 * NOTE(review): numbered excerpt — the member lines are elided from this view.
 */
81 struct caam_alg_entry {
/*
 * AEAD algorithm wrapper: the generic crypto API descriptor plus the CAAM
 * template data (recovered via container_of() in aead_set_sh_desc()).
 * NOTE(review): numbered excerpt — the aead_alg member line is elided here.
 */
89 struct caam_aead_alg {
91 struct caam_alg_entry caam;
/*
 * skcipher algorithm wrapper: generic skcipher_alg plus CAAM template data
 * (recovered via container_of() in skcipher_setkey()).
 */
95 struct caam_skcipher_alg {
96 struct skcipher_alg skcipher;
97 struct caam_alg_entry caam;
102 * per-session context
/* crypto-engine bookkeeping must stay first — the engine layer expects it */
105 struct crypto_engine_ctx enginectx;
/* prebuilt encrypt/decrypt shared descriptors + their DMA-mapped addresses */
106 u32 sh_desc_enc[DESC_MAX_USED_LEN];
107 u32 sh_desc_dec[DESC_MAX_USED_LEN];
/* split auth key followed by the cipher key (see aead_setkey()) */
108 u8 key[CAAM_MAX_KEY_SIZE];
109 dma_addr_t sh_desc_enc_dma;
110 dma_addr_t sh_desc_dec_dma;
/* direction used for dma_sync_single_for_device() on descs/keys */
112 enum dma_data_direction dir;
113 struct device *jrdev;
/* adata = authentication (class 2) key info, cdata = cipher (class 1) */
114 struct alginfo adata;
115 struct alginfo cdata;
116 unsigned int authsize;
/* Per-request context: stashes the extended descriptor so the completion
 * callback (skcipher_crypt_done) can retrieve it. */
119 struct caam_skcipher_req_ctx {
120 struct skcipher_edesc *edesc;
/* Per-request context for AEAD, mirroring caam_skcipher_req_ctx. */
123 struct caam_aead_req_ctx {
124 struct aead_edesc *edesc;
/*
 * aead_null_set_sh_desc() - build encrypt/decrypt shared descriptors for
 * authentication-only (NULL-cipher) AEADs and DMA-sync them to the device.
 * The split auth key is inlined when it fits the 64-word buffer, otherwise
 * referenced by DMA pointer.
 * NOTE(review): numbered excerpt — braces, else-arms and the final return
 * are elided from this view.
 */
127 static int aead_null_set_sh_desc(struct crypto_aead *aead)
129 struct caam_ctx *ctx = crypto_aead_ctx(aead);
130 struct device *jrdev = ctx->jrdev;
131 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
/* space left for the key after job + shared descriptor overhead */
133 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
134 ctx->adata.keylen_pad;
137 * Job Descriptor and Shared Descriptors
138 * must all fit into the 64-word Descriptor h/w Buffer
140 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
141 ctx->adata.key_inline = true;
142 ctx->adata.key_virt = ctx->key;
144 ctx->adata.key_inline = false;
145 ctx->adata.key_dma = ctx->key_dma;
148 /* aead_encrypt shared descriptor */
149 desc = ctx->sh_desc_enc;
150 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
152 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
153 desc_bytes(desc), ctx->dir);
156 * Job Descriptor and Shared Descriptors
157 * must all fit into the 64-word Descriptor h/w Buffer
159 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
160 ctx->adata.key_inline = true;
161 ctx->adata.key_virt = ctx->key;
163 ctx->adata.key_inline = false;
164 ctx->adata.key_dma = ctx->key_dma;
167 /* aead_decrypt shared descriptor */
168 desc = ctx->sh_desc_dec;
169 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
171 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
172 desc_bytes(desc), ctx->dir);
/*
 * aead_set_sh_desc() - (re)build the encrypt, decrypt and — for geniv
 * algorithms — givencrypt shared descriptors for authenc-class AEADs, then
 * DMA-sync each to the device. desc_inline_query() decides, per descriptor,
 * whether auth/cipher keys are inlined or referenced by pointer so the total
 * fits the 64-word descriptor buffer. Falls back to
 * aead_null_set_sh_desc() when there is no cipher key.
 * NOTE(review): numbered excerpt — local declarations (ctx1_iv_off, inl_mask),
 * braces and returns are elided from this view.
 */
177 static int aead_set_sh_desc(struct crypto_aead *aead)
179 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
180 struct caam_aead_alg, aead);
181 unsigned int ivsize = crypto_aead_ivsize(aead);
182 struct caam_ctx *ctx = crypto_aead_ctx(aead);
183 struct device *jrdev = ctx->jrdev;
184 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
186 u32 *desc, *nonce = NULL;
188 unsigned int data_len[2];
189 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
190 OP_ALG_AAI_CTR_MOD128);
191 const bool is_rfc3686 = alg->caam.rfc3686;
196 /* NULL encryption / decryption */
197 if (!ctx->cdata.keylen)
198 return aead_null_set_sh_desc(aead);
201 * AES-CTR needs to load IV in CONTEXT1 reg
202 * at an offset of 128bits (16bytes)
203 * CONTEXT1[255:128] = IV
210 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
213 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
/* nonce lives at the tail of the cipher key material in ctx->key */
214 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
215 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
219 * In case |user key| > |derived key|, using DKP<imm,imm>
220 * would result in invalid opcodes (last bytes of user key) in
221 * the resulting descriptor. Use DKP<ptr,imm> instead => both
222 * virtual and dma key addresses are needed.
224 ctx->adata.key_virt = ctx->key;
225 ctx->adata.key_dma = ctx->key_dma;
/* cipher key follows the padded split auth key in the same buffer */
227 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
228 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
230 data_len[0] = ctx->adata.keylen_pad;
231 data_len[1] = ctx->cdata.keylen;
237 * Job Descriptor and Shared Descriptors
238 * must all fit into the 64-word Descriptor h/w Buffer
240 if (desc_inline_query(DESC_AEAD_ENC_LEN +
241 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
242 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
243 ARRAY_SIZE(data_len)) < 0)
/* bit 0 = auth key inlinable, bit 1 = cipher key inlinable */
246 ctx->adata.key_inline = !!(inl_mask & 1);
247 ctx->cdata.key_inline = !!(inl_mask & 2);
249 /* aead_encrypt shared descriptor */
250 desc = ctx->sh_desc_enc;
251 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
252 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
253 false, ctrlpriv->era);
254 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
255 desc_bytes(desc), ctx->dir);
259 * Job Descriptor and Shared Descriptors
260 * must all fit into the 64-word Descriptor h/w Buffer
262 if (desc_inline_query(DESC_AEAD_DEC_LEN +
263 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
264 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
265 ARRAY_SIZE(data_len)) < 0)
268 ctx->adata.key_inline = !!(inl_mask & 1);
269 ctx->cdata.key_inline = !!(inl_mask & 2);
271 /* aead_decrypt shared descriptor */
272 desc = ctx->sh_desc_dec;
273 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
274 ctx->authsize, alg->caam.geniv, is_rfc3686,
275 nonce, ctx1_iv_off, false, ctrlpriv->era);
276 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
277 desc_bytes(desc), ctx->dir);
/* non-geniv algorithms are done here; geniv also builds givencrypt below */
279 if (!alg->caam.geniv)
283 * Job Descriptor and Shared Descriptors
284 * must all fit into the 64-word Descriptor h/w Buffer
286 if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
287 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
288 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
289 ARRAY_SIZE(data_len)) < 0)
292 ctx->adata.key_inline = !!(inl_mask & 1);
293 ctx->cdata.key_inline = !!(inl_mask & 2);
295 /* aead_givencrypt shared descriptor */
296 desc = ctx->sh_desc_enc;
297 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
298 ctx->authsize, is_rfc3686, nonce,
299 ctx1_iv_off, false, ctrlpriv->era);
300 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
301 desc_bytes(desc), ctx->dir);
/*
 * aead_setauthsize() - crypto API .setauthsize hook: record the new ICV
 * length and rebuild the shared descriptors that embed it.
 */
307 static int aead_setauthsize(struct crypto_aead *authenc,
308 unsigned int authsize)
310 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
312 ctx->authsize = authsize;
313 aead_set_sh_desc(authenc);
/*
 * gcm_set_sh_desc() - build AES-GCM encrypt/decrypt shared descriptors and
 * DMA-sync them. No-op until both key and authsize have been set. Key is
 * inlined when it fits the 64-word buffer, else referenced by pointer.
 * NOTE(review): numbered excerpt — else-arms, braces and returns elided.
 */
318 static int gcm_set_sh_desc(struct crypto_aead *aead)
320 struct caam_ctx *ctx = crypto_aead_ctx(aead);
321 struct device *jrdev = ctx->jrdev;
322 unsigned int ivsize = crypto_aead_ivsize(aead);
324 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
/* need both key and ICV size before descriptors can be constructed */
327 if (!ctx->cdata.keylen || !ctx->authsize)
331 * AES GCM encrypt shared descriptor
332 * Job Descriptor and Shared Descriptor
333 * must fit into the 64-word Descriptor h/w Buffer
335 if (rem_bytes >= DESC_GCM_ENC_LEN) {
336 ctx->cdata.key_inline = true;
337 ctx->cdata.key_virt = ctx->key;
339 ctx->cdata.key_inline = false;
340 ctx->cdata.key_dma = ctx->key_dma;
343 desc = ctx->sh_desc_enc;
344 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
345 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
346 desc_bytes(desc), ctx->dir);
349 * Job Descriptor and Shared Descriptors
350 * must all fit into the 64-word Descriptor h/w Buffer
352 if (rem_bytes >= DESC_GCM_DEC_LEN) {
353 ctx->cdata.key_inline = true;
354 ctx->cdata.key_virt = ctx->key;
356 ctx->cdata.key_inline = false;
357 ctx->cdata.key_dma = ctx->key_dma;
360 desc = ctx->sh_desc_dec;
361 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
362 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
363 desc_bytes(desc), ctx->dir);
/*
 * gcm_setauthsize() - validate the GCM tag length via
 * crypto_gcm_check_authsize(), store it and rebuild the shared descriptors.
 * NOTE(review): numbered excerpt — err declaration/check and return elided.
 */
368 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
370 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
373 err = crypto_gcm_check_authsize(authsize);
377 ctx->authsize = authsize;
378 gcm_set_sh_desc(authenc);
/*
 * rfc4106_set_sh_desc() - build RFC4106 (GCM for IPsec ESP) encrypt/decrypt
 * shared descriptors, mirroring gcm_set_sh_desc() with RFC4106 templates.
 * NOTE(review): numbered excerpt — else-arms, braces and returns elided.
 */
383 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
385 struct caam_ctx *ctx = crypto_aead_ctx(aead);
386 struct device *jrdev = ctx->jrdev;
387 unsigned int ivsize = crypto_aead_ivsize(aead);
389 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
392 if (!ctx->cdata.keylen || !ctx->authsize)
396 * RFC4106 encrypt shared descriptor
397 * Job Descriptor and Shared Descriptor
398 * must fit into the 64-word Descriptor h/w Buffer
400 if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
401 ctx->cdata.key_inline = true;
402 ctx->cdata.key_virt = ctx->key;
404 ctx->cdata.key_inline = false;
405 ctx->cdata.key_dma = ctx->key_dma;
408 desc = ctx->sh_desc_enc;
409 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
411 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
412 desc_bytes(desc), ctx->dir);
415 * Job Descriptor and Shared Descriptors
416 * must all fit into the 64-word Descriptor h/w Buffer
418 if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
419 ctx->cdata.key_inline = true;
420 ctx->cdata.key_virt = ctx->key;
422 ctx->cdata.key_inline = false;
423 ctx->cdata.key_dma = ctx->key_dma;
426 desc = ctx->sh_desc_dec;
427 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
429 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
430 desc_bytes(desc), ctx->dir);
/*
 * rfc4106_setauthsize() - validate the RFC4106 tag length, store it and
 * rebuild the shared descriptors.
 * NOTE(review): numbered excerpt — err declaration/check and return elided.
 */
435 static int rfc4106_setauthsize(struct crypto_aead *authenc,
436 unsigned int authsize)
438 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
441 err = crypto_rfc4106_check_authsize(authsize);
445 ctx->authsize = authsize;
446 rfc4106_set_sh_desc(authenc);
/*
 * rfc4543_set_sh_desc() - build RFC4543 (GMAC) encrypt/decrypt shared
 * descriptors, same structure as the GCM/RFC4106 variants above.
 * NOTE(review): numbered excerpt — else-arms, braces and returns elided.
 */
451 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
453 struct caam_ctx *ctx = crypto_aead_ctx(aead);
454 struct device *jrdev = ctx->jrdev;
455 unsigned int ivsize = crypto_aead_ivsize(aead);
457 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
460 if (!ctx->cdata.keylen || !ctx->authsize)
464 * RFC4543 encrypt shared descriptor
465 * Job Descriptor and Shared Descriptor
466 * must fit into the 64-word Descriptor h/w Buffer
468 if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
469 ctx->cdata.key_inline = true;
470 ctx->cdata.key_virt = ctx->key;
472 ctx->cdata.key_inline = false;
473 ctx->cdata.key_dma = ctx->key_dma;
476 desc = ctx->sh_desc_enc;
477 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
479 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
480 desc_bytes(desc), ctx->dir);
483 * Job Descriptor and Shared Descriptors
484 * must all fit into the 64-word Descriptor h/w Buffer
486 if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
487 ctx->cdata.key_inline = true;
488 ctx->cdata.key_virt = ctx->key;
490 ctx->cdata.key_inline = false;
491 ctx->cdata.key_dma = ctx->key_dma;
494 desc = ctx->sh_desc_dec;
495 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
497 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
498 desc_bytes(desc), ctx->dir);
/*
 * rfc4543_setauthsize() - store the tag length and rebuild descriptors.
 * NOTE(review): the validation lines visible in the siblings are elided
 * here; presumably a fixed-size check exists in the omitted lines — confirm.
 */
503 static int rfc4543_setauthsize(struct crypto_aead *authenc,
504 unsigned int authsize)
506 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
511 ctx->authsize = authsize;
512 rfc4543_set_sh_desc(authenc);
/*
 * chachapoly_set_sh_desc() - build ChaCha20-Poly1305 encrypt/decrypt shared
 * descriptors (cnstr_shdsc_chachapoly() encrypt flag: true/false) and
 * DMA-sync them. Requires key and authsize to be set first.
 */
517 static int chachapoly_set_sh_desc(struct crypto_aead *aead)
519 struct caam_ctx *ctx = crypto_aead_ctx(aead);
520 struct device *jrdev = ctx->jrdev;
521 unsigned int ivsize = crypto_aead_ivsize(aead);
524 if (!ctx->cdata.keylen || !ctx->authsize)
527 desc = ctx->sh_desc_enc;
528 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
529 ctx->authsize, true, false);
530 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
531 desc_bytes(desc), ctx->dir);
533 desc = ctx->sh_desc_dec;
534 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
535 ctx->authsize, false, false);
536 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
537 desc_bytes(desc), ctx->dir);
/*
 * chachapoly_setauthsize() - Poly1305 produces a fixed 16-byte tag, so only
 * POLY1305_DIGEST_SIZE is accepted before rebuilding the descriptors.
 */
542 static int chachapoly_setauthsize(struct crypto_aead *aead,
543 unsigned int authsize)
545 struct caam_ctx *ctx = crypto_aead_ctx(aead);
547 if (authsize != POLY1305_DIGEST_SIZE)
550 ctx->authsize = authsize;
551 return chachapoly_set_sh_desc(aead);
/*
 * chachapoly_setkey() - accept {ChaCha20 key, optional salt}; the salt length
 * is whatever of the 12-byte ChaCha-Poly IV the transform's ivsize does not
 * cover (IPsec variant). Stores key material and rebuilds the descriptors.
 */
554 static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
557 struct caam_ctx *ctx = crypto_aead_ctx(aead);
558 unsigned int ivsize = crypto_aead_ivsize(aead);
559 unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;
/* key blob must be exactly key + salt */
561 if (keylen != CHACHA_KEY_SIZE + saltlen)
564 ctx->cdata.key_virt = key;
565 ctx->cdata.keylen = keylen - saltlen;
567 return chachapoly_set_sh_desc(aead);
/*
 * aead_setkey() - split the authenc key blob into auth + cipher parts.
 * On Era >= 6 hardware, key derivation is done in-descriptor (DKP), so the
 * plain auth key is stored; otherwise gen_split_key() derives the split key
 * on the job ring first. The cipher key is appended after the padded auth
 * key in ctx->key, and the combined buffer is DMA-synced.
 * NOTE(review): numbered excerpt — braces, error paths and returns elided.
 */
570 static int aead_setkey(struct crypto_aead *aead,
571 const u8 *key, unsigned int keylen)
573 struct caam_ctx *ctx = crypto_aead_ctx(aead);
574 struct device *jrdev = ctx->jrdev;
575 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
576 struct crypto_authenc_keys keys;
579 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
582 dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
583 keys.authkeylen + keys.enckeylen, keys.enckeylen,
585 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
586 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
589 * If DKP is supported, use it in the shared descriptor to generate
/* Era >= 6: hardware derives the split key (DKP) */
592 if (ctrlpriv->era >= 6) {
593 ctx->adata.keylen = keys.authkeylen;
594 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
/* combined auth + cipher key must fit the context buffer */
597 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
600 memcpy(ctx->key, keys.authkey, keys.authkeylen);
601 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
603 dma_sync_single_for_device(jrdev, ctx->key_dma,
604 ctx->adata.keylen_pad +
605 keys.enckeylen, ctx->dir);
/* older hardware: derive the split auth key in software via the job ring */
609 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
610 keys.authkeylen, CAAM_MAX_KEY_SIZE -
616 /* postpend encryption key to auth split key */
617 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
618 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
619 keys.enckeylen, ctx->dir);
621 print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ",
622 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
623 ctx->adata.keylen_pad + keys.enckeylen, 1);
626 ctx->cdata.keylen = keys.enckeylen;
/* wipe extracted key material from the stack before returning */
627 memzero_explicit(&keys, sizeof(keys));
628 return aead_set_sh_desc(aead);
630 memzero_explicit(&keys, sizeof(keys));
/*
 * des3_aead_setkey() - 3DES variant: verify the cipher part against weak-key
 * rules first, then delegate to aead_setkey(). Key material is wiped from
 * the stack on all paths.
 */
634 static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
637 struct crypto_authenc_keys keys;
640 err = crypto_authenc_extractkeys(&keys, key, keylen);
644 err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?:
645 aead_setkey(aead, key, keylen);
647 memzero_explicit(&keys, sizeof(keys));
/*
 * gcm_setkey() - validate the AES key length, copy it into the DMA-mapped
 * context buffer, sync it to the device, and rebuild GCM descriptors.
 */
651 static int gcm_setkey(struct crypto_aead *aead,
652 const u8 *key, unsigned int keylen)
654 struct caam_ctx *ctx = crypto_aead_ctx(aead);
655 struct device *jrdev = ctx->jrdev;
658 err = aes_check_keylen(keylen);
662 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
663 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
665 memcpy(ctx->key, key, keylen);
666 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
667 ctx->cdata.keylen = keylen;
669 return gcm_set_sh_desc(aead);
/*
 * rfc4106_setkey() - key blob is {AES key, 4-byte salt}; validate the AES
 * part, store the whole blob (salt included) and record only the AES length
 * in cdata.keylen before rebuilding descriptors.
 */
672 static int rfc4106_setkey(struct crypto_aead *aead,
673 const u8 *key, unsigned int keylen)
675 struct caam_ctx *ctx = crypto_aead_ctx(aead);
676 struct device *jrdev = ctx->jrdev;
679 err = aes_check_keylen(keylen - 4);
683 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
684 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
686 memcpy(ctx->key, key, keylen);
689 * The last four bytes of the key material are used as the salt value
690 * in the nonce. Update the AES key length.
692 ctx->cdata.keylen = keylen - 4;
693 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
695 return rfc4106_set_sh_desc(aead);
/*
 * rfc4543_setkey() - identical handling to rfc4106_setkey(): {AES key,
 * 4-byte salt} blob, AES length validated and recorded minus the salt.
 */
698 static int rfc4543_setkey(struct crypto_aead *aead,
699 const u8 *key, unsigned int keylen)
701 struct caam_ctx *ctx = crypto_aead_ctx(aead);
702 struct device *jrdev = ctx->jrdev;
705 err = aes_check_keylen(keylen - 4);
709 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
710 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
712 memcpy(ctx->key, key, keylen);
715 * The last four bytes of the key material are used as the salt value
716 * in the nonce. Update the AES key length.
718 ctx->cdata.keylen = keylen - 4;
719 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
721 return rfc4543_set_sh_desc(aead);
/*
 * skcipher_setkey() - common skcipher key handler: stash the key (always
 * inlined in the descriptor), rebuild encrypt/decrypt shared descriptors and
 * DMA-sync them. ctx1_iv_off selects where the IV lands in CONTEXT1 (non-zero
 * for CTR/RFC3686 — see the per-mode wrappers below).
 */
724 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
725 unsigned int keylen, const u32 ctx1_iv_off)
727 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
728 struct caam_skcipher_alg *alg =
729 container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
731 struct device *jrdev = ctx->jrdev;
732 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
734 const bool is_rfc3686 = alg->caam.rfc3686;
736 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
737 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
739 ctx->cdata.keylen = keylen;
740 ctx->cdata.key_virt = key;
741 ctx->cdata.key_inline = true;
743 /* skcipher_encrypt shared descriptor */
744 desc = ctx->sh_desc_enc;
745 cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
747 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
748 desc_bytes(desc), ctx->dir);
750 /* skcipher_decrypt shared descriptor */
751 desc = ctx->sh_desc_dec;
752 cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
754 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
755 desc_bytes(desc), ctx->dir);
/* AES (CBC/ECB) wrapper: validate key length, IV at CONTEXT1 offset 0. */
760 static int aes_skcipher_setkey(struct crypto_skcipher *skcipher,
761 const u8 *key, unsigned int keylen)
765 err = aes_check_keylen(keylen);
769 return skcipher_setkey(skcipher, key, keylen, 0);
/*
 * RFC3686 (CTR-AES for IPsec) wrapper: the key blob carries a trailing
 * 4-byte nonce; strip it from the AES key length and place the IV after
 * the nonce in CONTEXT1 (offset 16 + nonce size).
 */
772 static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher,
773 const u8 *key, unsigned int keylen)
780 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
781 * | *key = {KEY, NONCE}
783 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
784 keylen -= CTR_RFC3686_NONCE_SIZE;
786 err = aes_check_keylen(keylen);
790 return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
/*
 * Plain CTR-AES wrapper: IV loads at CONTEXT1 offset 16 (128 bits in) so
 * the counter occupies the upper half of the register.
 * NOTE(review): the ctx1_iv_off assignment line is elided in this excerpt.
 */
793 static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher,
794 const u8 *key, unsigned int keylen)
800 * AES-CTR needs to load IV in CONTEXT1 reg
801 * at an offset of 128bits (16bytes)
802 * CONTEXT1[255:128] = IV
806 err = aes_check_keylen(keylen);
810 return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
/* ARC4 wrapper: no key-length restriction beyond the core's, offset 0. */
813 static int arc4_skcipher_setkey(struct crypto_skcipher *skcipher,
814 const u8 *key, unsigned int keylen)
816 return skcipher_setkey(skcipher, key, keylen, 0);
/* DES wrapper: reject weak keys via verify_skcipher_des_key() first. */
819 static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
820 const u8 *key, unsigned int keylen)
822 return verify_skcipher_des_key(skcipher, key) ?:
823 skcipher_setkey(skcipher, key, keylen, 0);
/* 3DES wrapper: enforce 3DES keying rules via verify_skcipher_des3_key(). */
826 static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
827 const u8 *key, unsigned int keylen)
829 return verify_skcipher_des3_key(skcipher, key) ?:
830 skcipher_setkey(skcipher, key, keylen, 0);
/*
 * xts_skcipher_setkey() - XTS needs a double-length key (2x AES-128 or
 * 2x AES-256); build and sync the XTS-specific encrypt/decrypt descriptors.
 * Does not go through skcipher_setkey() since XTS uses its own templates.
 */
833 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
836 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
837 struct device *jrdev = ctx->jrdev;
840 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
841 dev_err(jrdev, "key size mismatch\n");
845 ctx->cdata.keylen = keylen;
846 ctx->cdata.key_virt = key;
847 ctx->cdata.key_inline = true;
849 /* xts_skcipher_encrypt shared descriptor */
850 desc = ctx->sh_desc_enc;
851 cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
852 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
853 desc_bytes(desc), ctx->dir);
855 /* xts_skcipher_decrypt shared descriptor */
856 desc = ctx->sh_desc_dec;
857 cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
858 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
859 desc_bytes(desc), ctx->dir);
/* NOTE(review): kernel-doc below already documents each member; some struct
 * field lines (src_nents, dst_nents, sec4_sg_bytes, bklog, hw_desc) are
 * elided in this excerpt. */
865 * aead_edesc - s/w-extended aead descriptor
866 * @src_nents: number of segments in input s/w scatterlist
867 * @dst_nents: number of segments in output s/w scatterlist
868 * @mapped_src_nents: number of segments in input h/w link table
869 * @mapped_dst_nents: number of segments in output h/w link table
870 * @sec4_sg_bytes: length of dma mapped sec4_sg space
871 * @bklog: stored to determine if the request needs backlog
872 * @sec4_sg_dma: bus physical mapped address of h/w link table
873 * @sec4_sg: pointer to h/w link table
874 * @hw_desc: the h/w job descriptor followed by any referenced link tables
879 int mapped_src_nents;
880 int mapped_dst_nents;
883 dma_addr_t sec4_sg_dma;
884 struct sec4_sg_entry *sec4_sg;
/* NOTE(review): as with aead_edesc, several field lines (src_nents,
 * dst_nents, iv_dma, sec4_sg_bytes, bklog, hw_desc) are elided here. */
889 * skcipher_edesc - s/w-extended skcipher descriptor
890 * @src_nents: number of segments in input s/w scatterlist
891 * @dst_nents: number of segments in output s/w scatterlist
892 * @mapped_src_nents: number of segments in input h/w link table
893 * @mapped_dst_nents: number of segments in output h/w link table
894 * @iv_dma: dma address of iv for checking continuity and link table
895 * @sec4_sg_bytes: length of dma mapped sec4_sg space
896 * @bklog: stored to determine if the request needs backlog
897 * @sec4_sg_dma: bus physical mapped address of h/w link table
898 * @sec4_sg: pointer to h/w link table
899 * @hw_desc: the h/w job descriptor followed by any referenced link tables
902 struct skcipher_edesc {
905 int mapped_src_nents;
906 int mapped_dst_nents;
910 dma_addr_t sec4_sg_dma;
911 struct sec4_sg_entry *sec4_sg;
/*
 * caam_unmap() - common DMA teardown: unmap src/dst scatterlists
 * (bidirectional when src == dst), the IV buffer, and the sec4 link table.
 * NOTE(review): numbered excerpt — parameter lines, guards and braces elided.
 */
915 static void caam_unmap(struct device *dev, struct scatterlist *src,
916 struct scatterlist *dst, int src_nents,
918 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
923 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
925 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
927 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
931 dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
933 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
/* aead_unmap() - AEAD flavor of caam_unmap(); AEAD has no separate IV
 * mapping, hence iv_dma = 0 / ivsize = 0. */
937 static void aead_unmap(struct device *dev,
938 struct aead_edesc *edesc,
939 struct aead_request *req)
941 caam_unmap(dev, req->src, req->dst,
942 edesc->src_nents, edesc->dst_nents, 0, 0,
943 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
/* skcipher_unmap() - skcipher flavor of caam_unmap(); also releases the
 * DMA-mapped IV recorded in the extended descriptor. */
946 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
947 struct skcipher_request *req)
949 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
950 int ivsize = crypto_skcipher_ivsize(skcipher);
952 caam_unmap(dev, req->src, req->dst,
953 edesc->src_nents, edesc->dst_nents,
954 edesc->iv_dma, ivsize,
955 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
/*
 * aead_crypt_done() - job ring completion callback for AEAD requests: map
 * the CAAM status to an errno, unmap DMA resources, then complete the
 * request either directly or through the crypto engine, depending on
 * whether the request was backlogged.
 * NOTE(review): numbered excerpt — some declarations/braces elided.
 */
958 static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err,
961 struct aead_request *req = context;
962 struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
963 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
964 struct aead_edesc *edesc;
968 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
971 has_bklog = edesc->bklog;
/* translate hardware status word into a kernel error code */
974 ecode = caam_jr_strstatus(jrdev, err);
976 aead_unmap(jrdev, edesc, req);
981 * If no backlog flag, the completion of the request is done
982 * by CAAM, not crypto engine.
985 aead_request_complete(req, ecode);
987 crypto_finalize_aead_request(jrp->engine, req, ecode);
/*
 * skcipher_crypt_done() - job ring completion callback for skciphers:
 * translate status, unmap DMA, copy the output IV (stored after the sec4
 * link table) back into req->iv as the crypto API requires, then complete
 * via the request path or the crypto engine.
 * NOTE(review): numbered excerpt — some declarations/braces elided.
 */
990 static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
993 struct skcipher_request *req = context;
994 struct skcipher_edesc *edesc;
995 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
996 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
997 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
998 int ivsize = crypto_skcipher_ivsize(skcipher);
1002 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1004 edesc = rctx->edesc;
1005 has_bklog = edesc->bklog;
1007 ecode = caam_jr_strstatus(jrdev, err);
1009 skcipher_unmap(jrdev, edesc, req);
1012 * The crypto API expects us to set the IV (req->iv) to the last
1013 * ciphertext block (CBC mode) or last counter (CTR mode).
1014 * This is used e.g. by the CTS mode.
/* only propagate the IV on success */
1016 if (ivsize && !ecode) {
1017 memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
1020 print_hex_dump_debug("dstiv @" __stringify(__LINE__)": ",
1021 DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
1025 caam_dump_sg("dst @" __stringify(__LINE__)": ",
1026 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
1027 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
1032 * If no backlog flag, the completion of the request is done
1033 * by CAAM, not crypto engine.
1036 skcipher_request_complete(req, ecode);
1038 crypto_finalize_skcipher_request(jrp->engine, req, ecode);
1042 * Fill in aead job descriptor
/*
 * init_aead_job() - fill in the AEAD job descriptor: point it at the
 * prebuilt shared descriptor, then append SEQ IN/OUT pointers referencing
 * either the scatterlists directly or the sec4 link table. Output length
 * adds the ICV on encrypt and subtracts it on decrypt.
 * NOTE(review): numbered excerpt — braces, else-arms and some locals elided.
 */
1044 static void init_aead_job(struct aead_request *req,
1045 struct aead_edesc *edesc,
1046 bool all_contig, bool encrypt)
1048 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1049 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1050 int authsize = ctx->authsize;
1051 u32 *desc = edesc->hw_desc;
1052 u32 out_options, in_options;
1053 dma_addr_t dst_dma, src_dma;
1054 int len, sec4_sg_index = 0;
1058 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1059 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1061 len = desc_len(sh_desc);
1062 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
/* contiguous single-segment source can be referenced directly */
1065 src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
1069 src_dma = edesc->sec4_sg_dma;
1070 sec4_sg_index += edesc->mapped_src_nents;
1071 in_options = LDST_SGF;
1074 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
/* in-place operation: output shares the input mapping */
1078 out_options = in_options;
1080 if (unlikely(req->src != req->dst)) {
1081 if (!edesc->mapped_dst_nents) {
1084 } else if (edesc->mapped_dst_nents == 1) {
1085 dst_dma = sg_dma_address(req->dst);
1088 dst_dma = edesc->sec4_sg_dma +
1090 sizeof(struct sec4_sg_entry);
1091 out_options = LDST_SGF;
1096 append_seq_out_ptr(desc, dst_dma,
1097 req->assoclen + req->cryptlen + authsize,
1100 append_seq_out_ptr(desc, dst_dma,
1101 req->assoclen + req->cryptlen - authsize,
/*
 * init_gcm_job() - GCM job setup on top of init_aead_job(): load assoclen
 * into REG3 and FIFO-load the IV as immediate data; the zero-length
 * plaintext+AAD case needs the LAST1 flag on the IV load.
 */
1105 static void init_gcm_job(struct aead_request *req,
1106 struct aead_edesc *edesc,
1107 bool all_contig, bool encrypt)
1109 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1110 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1111 unsigned int ivsize = crypto_aead_ivsize(aead);
1112 u32 *desc = edesc->hw_desc;
1113 bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
1116 init_aead_job(req, edesc, all_contig, encrypt);
1117 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1119 /* BUG This should not be specific to generic GCM. */
1121 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
1122 last = FIFOLD_TYPE_LAST1;
1125 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
1126 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
/* salt (4 bytes stored after the AES key) precedes the per-request IV */
1129 append_data(desc, ctx->key + ctx->cdata.keylen, 4);
1131 append_data(desc, req->iv, ivsize);
1132 /* End of blank commands */
/*
 * init_chachapoly_job() - ChaCha20-Poly1305 job setup: for the IPsec
 * variant (ivsize != 12) the nonce+IV land at an offset in CONTEXT1 and
 * the IV embedded in the AAD must be skipped; for RFC7539 the full 12-byte
 * nonce is loaded in one shot.
 * NOTE(review): numbered excerpt — ctx_iv_off setup lines elided.
 */
1135 static void init_chachapoly_job(struct aead_request *req,
1136 struct aead_edesc *edesc, bool all_contig,
1139 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1140 unsigned int ivsize = crypto_aead_ivsize(aead);
1141 unsigned int assoclen = req->assoclen;
1142 u32 *desc = edesc->hw_desc;
1145 init_aead_job(req, edesc, all_contig, encrypt);
1147 if (ivsize != CHACHAPOLY_IV_SIZE) {
1148 /* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
1152 * The associated data comes already with the IV but we need
1153 * to skip it when we authenticate or encrypt...
1158 append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);
1161 * For IPsec load the IV further in the same register.
1162 * For RFC7539 simply load the 12 bytes nonce in a single operation
1164 append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
1165 LDST_SRCDST_BYTE_CONTEXT |
1166 ctx_iv_off << LDST_OFFSET_SHIFT);
/*
 * init_authenc_job() - authenc job setup: compute the CONTEXT1 IV offset
 * (0 normally, 16 for CTR, 16+nonce for RFC3686), record assoclen in REG3
 * (Era < 3) or DPOVRD, and load the IV as immediate data except for geniv
 * decrypt where the descriptor fetches it itself.
 * NOTE(review): numbered excerpt — ivoffset declaration/branches elided.
 */
1169 static void init_authenc_job(struct aead_request *req,
1170 struct aead_edesc *edesc,
1171 bool all_contig, bool encrypt)
1173 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1174 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
1175 struct caam_aead_alg, aead);
1176 unsigned int ivsize = crypto_aead_ivsize(aead);
1177 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1178 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
1179 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
1180 OP_ALG_AAI_CTR_MOD128);
1181 const bool is_rfc3686 = alg->caam.rfc3686;
1182 u32 *desc = edesc->hw_desc;
1186 * AES-CTR needs to load IV in CONTEXT1 reg
1187 * at an offset of 128bits (16bytes)
1188 * CONTEXT1[255:128] = IV
1195 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1198 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
1200 init_aead_job(req, edesc, all_contig, encrypt);
1203 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
1204 * having DPOVRD as destination.
1206 if (ctrlpriv->era < 3)
1207 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1209 append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);
1211 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
1212 append_load_as_imm(desc, req->iv, ivsize,
1214 LDST_SRCDST_BYTE_CONTEXT |
1215 (ivoffset << LDST_OFFSET_SHIFT));
1219 * Fill in skcipher job descriptor
/*
 * init_skcipher_job() - fill in the skcipher job descriptor: attach the
 * shared descriptor, then build SEQ IN/OUT pointers. When an IV is present
 * the input always goes through the sec4 link table (IV entry first); the
 * in-place case reuses the input table shifted past the IV entry.
 * NOTE(review): numbered excerpt — braces and some else keywords elided.
 */
1221 static void init_skcipher_job(struct skcipher_request *req,
1222 struct skcipher_edesc *edesc,
1225 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1226 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1227 struct device *jrdev = ctx->jrdev;
1228 int ivsize = crypto_skcipher_ivsize(skcipher);
1229 u32 *desc = edesc->hw_desc;
1231 u32 in_options = 0, out_options = 0;
1232 dma_addr_t src_dma, dst_dma, ptr;
1233 int len, sec4_sg_index = 0;
1235 print_hex_dump_debug("presciv@"__stringify(__LINE__)": ",
1236 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
1237 dev_dbg(jrdev, "asked=%d, cryptlen%d\n",
1238 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
1240 caam_dump_sg("src @" __stringify(__LINE__)": ",
1241 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1242 edesc->src_nents > 1 ? 100 : req->cryptlen, 1);
1244 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1245 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1247 len = desc_len(sh_desc);
1248 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
/* IV (if any) occupies the first link-table entry, hence the +!!ivsize */
1250 if (ivsize || edesc->mapped_src_nents > 1) {
1251 src_dma = edesc->sec4_sg_dma;
1252 sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
1253 in_options = LDST_SGF;
1255 src_dma = sg_dma_address(req->src);
1258 append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);
1260 if (likely(req->src == req->dst)) {
/* in-place: reuse the input table, skipping the IV entry */
1261 dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
1262 out_options = in_options;
1263 } else if (!ivsize && edesc->mapped_dst_nents == 1) {
1264 dst_dma = sg_dma_address(req->dst);
1266 dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
1267 sizeof(struct sec4_sg_entry);
1268 out_options = LDST_SGF;
1271 append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
/*
 * aead_edesc_alloc() - allocate and DMA-map the AEAD extended descriptor.
 * Counts src/dst S/G entries for assoclen + cryptlen (+/- authsize per
 * direction when src != dst), DMA-maps them, sizes a padded SEC4 H/W S/G
 * table, and allocates edesc + job-descriptor space + S/G table in one
 * kzalloc.  Returns the edesc or ERR_PTR() on failure; on error all
 * mappings made so far are undone.
 * NOTE(review): elided extraction — intermediate lines (braces, else arms,
 * some cleanup statements) are missing from view; code left byte-identical.
 */
1275  * allocate and map the aead extended descriptor
1277 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1278 int desc_bytes, bool *all_contig_ptr,
1281 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1282 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1283 struct device *jrdev = ctx->jrdev;
1284 struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
/* GFP_KERNEL only when the caller may sleep; otherwise atomic. */
1285 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1286 GFP_KERNEL : GFP_ATOMIC;
1287 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1288 int src_len, dst_len = 0;
1289 struct aead_edesc *edesc;
1290 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
1291 unsigned int authsize = ctx->authsize;
/*
 * Out-of-place: dst carries the ICV on encrypt (src_len + authsize) and
 * loses it on decrypt (src_len - authsize).
 */
1293 if (unlikely(req->dst != req->src)) {
1294 src_len = req->assoclen + req->cryptlen;
1295 dst_len = src_len + (encrypt ? authsize : (-authsize));
1297 src_nents = sg_nents_for_len(req->src, src_len);
1298 if (unlikely(src_nents < 0)) {
1299 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1301 return ERR_PTR(src_nents);
1304 dst_nents = sg_nents_for_len(req->dst, dst_len);
1305 if (unlikely(dst_nents < 0)) {
1306 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1308 return ERR_PTR(dst_nents);
/* In-place: src must also cover the ICV written back on encrypt. */
1311 src_len = req->assoclen + req->cryptlen +
1312 (encrypt ? authsize : 0);
1314 src_nents = sg_nents_for_len(req->src, src_len);
1315 if (unlikely(src_nents < 0)) {
1316 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1318 return ERR_PTR(src_nents);
/* In-place mapping is bidirectional (elided: DMA_BIDIRECTIONAL arg). */
1322 if (likely(req->src == req->dst)) {
1323 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1325 if (unlikely(!mapped_src_nents)) {
1326 dev_err(jrdev, "unable to map source\n");
1327 return ERR_PTR(-ENOMEM);
1330 /* Cover also the case of null (zero length) input data */
1332 mapped_src_nents = dma_map_sg(jrdev, req->src,
1333 src_nents, DMA_TO_DEVICE);
1334 if (unlikely(!mapped_src_nents)) {
1335 dev_err(jrdev, "unable to map source\n");
1336 return ERR_PTR(-ENOMEM);
1339 mapped_src_nents = 0;
1342 /* Cover also the case of null (zero length) output data */
1344 mapped_dst_nents = dma_map_sg(jrdev, req->dst,
1347 if (unlikely(!mapped_dst_nents)) {
1348 dev_err(jrdev, "unable to map destination\n");
/* Unwind the already-mapped source before bailing out. */
1349 dma_unmap_sg(jrdev, req->src, src_nents,
1351 return ERR_PTR(-ENOMEM);
1354 mapped_dst_nents = 0;
1359  * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
1360  * the end of the table by allocating more S/G entries.
1362 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
1363 if (mapped_dst_nents > 1)
1364 sec4_sg_len += pad_sg_nents(mapped_dst_nents);
1366 sec4_sg_len = pad_sg_nents(sec4_sg_len);
1368 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1370 /* allocate space for base edesc and hw desc commands, link tables */
1371 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1374 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1376 return ERR_PTR(-ENOMEM);
1379 edesc->src_nents = src_nents;
1380 edesc->dst_nents = dst_nents;
1381 edesc->mapped_src_nents = mapped_src_nents;
1382 edesc->mapped_dst_nents = mapped_dst_nents;
/* S/G table lives right after the fixed edesc + hw desc area. */
1383 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
1386 rctx->edesc = edesc;
/* "all contiguous" when a single mapped src entry suffices. */
1388 *all_contig_ptr = !(mapped_src_nents > 1);
1391 if (mapped_src_nents > 1) {
1392 sg_to_sec4_sg_last(req->src, src_len,
1393 edesc->sec4_sg + sec4_sg_index, 0);
1394 sec4_sg_index += mapped_src_nents;
1396 if (mapped_dst_nents > 1) {
1397 sg_to_sec4_sg_last(req->dst, dst_len,
1398 edesc->sec4_sg + sec4_sg_index, 0);
/* Map the just-built S/G table for the hardware (elided: only if needed). */
1404 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1405 sec4_sg_bytes, DMA_TO_DEVICE);
1406 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1407 dev_err(jrdev, "unable to map S/G table\n");
1408 aead_unmap(jrdev, edesc, req);
1410 return ERR_PTR(-ENOMEM);
1413 edesc->sec4_sg_bytes = sec4_sg_bytes;
/*
 * aead_enqueue_req() - submit a prepared AEAD job descriptor.
 * Backloggable requests go through the crypto-engine queue; all others are
 * enqueued directly on the job ring.  On synchronous failure (anything but
 * -EINPROGRESS/-EBUSY) the edesc is unmapped; freeing is elided from view.
 */
1418 static int aead_enqueue_req(struct device *jrdev, struct aead_request *req)
1420 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
1421 struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1422 struct aead_edesc *edesc = rctx->edesc;
1423 u32 *desc = edesc->hw_desc;
1427  * Only the backlog request are sent to crypto-engine since the others
1428  * can be handled by CAAM, if free, especially since JR has up to 1024
1429  * entries (more than the 10 entries from crypto-engine).
1431 if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
1432 ret = crypto_transfer_aead_request_to_engine(jrpriv->engine,
1435 ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req);
1437 if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
1438 aead_unmap(jrdev, edesc, req);
/*
 * chachapoly_crypt() - common encrypt/decrypt path for chacha20-poly1305.
 * Allocates the extended descriptor, builds the job with
 * init_chachapoly_job() and hands it to aead_enqueue_req().
 */
1445 static inline int chachapoly_crypt(struct aead_request *req, bool encrypt)
1447 struct aead_edesc *edesc;
1448 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1449 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1450 struct device *jrdev = ctx->jrdev;
1454 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
/* Propagate allocation/mapping failure (elided: IS_ERR check). */
1457 return PTR_ERR(edesc);
1459 desc = edesc->hw_desc;
1461 init_chachapoly_job(req, edesc, all_contig, encrypt);
1462 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1463 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1466 return aead_enqueue_req(jrdev, req);
/* Thin wrapper: chacha20-poly1305 encrypt via the common crypt path. */
1469 static int chachapoly_encrypt(struct aead_request *req)
1471 return chachapoly_crypt(req, true);
/* Thin wrapper: chacha20-poly1305 decrypt via the common crypt path. */
1474 static int chachapoly_decrypt(struct aead_request *req)
1476 return chachapoly_crypt(req, false);
/*
 * aead_crypt() - common encrypt/decrypt path for authenc-style AEADs.
 * Allocates the extended descriptor, builds the job with
 * init_authenc_job() and submits it via aead_enqueue_req().
 */
1479 static inline int aead_crypt(struct aead_request *req, bool encrypt)
1481 struct aead_edesc *edesc;
1482 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1483 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1484 struct device *jrdev = ctx->jrdev;
1487 /* allocate extended descriptor */
1488 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1489 &all_contig, encrypt);
/* Propagate allocation/mapping failure (elided: IS_ERR check). */
1491 return PTR_ERR(edesc);
1493 /* Create and submit job descriptor */
1494 init_authenc_job(req, edesc, all_contig, encrypt);
1496 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1497 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1498 desc_bytes(edesc->hw_desc), 1);
1500 return aead_enqueue_req(jrdev, req);
/* Thin wrapper: authenc AEAD encrypt via the common crypt path. */
1503 static int aead_encrypt(struct aead_request *req)
1505 return aead_crypt(req, true);
/* Thin wrapper: authenc AEAD decrypt via the common crypt path. */
1508 static int aead_decrypt(struct aead_request *req)
1510 return aead_crypt(req, false);
/*
 * aead_do_one_req() - crypto-engine callback: run one backlogged AEAD
 * request on the job ring.  Marks the edesc as backlogged so completion
 * handling knows to finalize the engine request; unmaps on sync failure.
 */
1513 static int aead_do_one_req(struct crypto_engine *engine, void *areq)
1515 struct aead_request *req = aead_request_cast(areq);
1516 struct caam_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
1517 struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1518 u32 *desc = rctx->edesc->hw_desc;
/* Flag for the completion path: came through the crypto-engine. */
1521 rctx->edesc->bklog = true;
1523 ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req);
1525 if (ret != -EINPROGRESS) {
1526 aead_unmap(ctx->jrdev, rctx->edesc, req);
/*
 * gcm_crypt() - common encrypt/decrypt path for AES-GCM.
 * Allocates the extended descriptor, builds the job with init_gcm_job()
 * and submits it via aead_enqueue_req().
 */
1535 static inline int gcm_crypt(struct aead_request *req, bool encrypt)
1537 struct aead_edesc *edesc;
1538 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1539 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1540 struct device *jrdev = ctx->jrdev;
1543 /* allocate extended descriptor */
1544 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig,
/* Propagate allocation/mapping failure (elided: IS_ERR check). */
1547 return PTR_ERR(edesc);
1549 /* Create and submit job descriptor */
1550 init_gcm_job(req, edesc, all_contig, encrypt);
1552 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1553 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1554 desc_bytes(edesc->hw_desc), 1);
1556 return aead_enqueue_req(jrdev, req);
/* Thin wrapper: AES-GCM encrypt via the common crypt path. */
1559 static int gcm_encrypt(struct aead_request *req)
1561 return gcm_crypt(req, true);
/* Thin wrapper: AES-GCM decrypt via the common crypt path. */
1564 static int gcm_decrypt(struct aead_request *req)
1566 return gcm_crypt(req, false);
/* rfc4106/rfc4543: validate IPsec assoclen, then do a plain GCM encrypt. */
1569 static int ipsec_gcm_encrypt(struct aead_request *req)
1571 return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req);
/* rfc4106/rfc4543: validate IPsec assoclen, then do a plain GCM decrypt. */
1574 static int ipsec_gcm_decrypt(struct aead_request *req)
1576 return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req);
/*
 * skcipher_edesc_alloc() - allocate and DMA-map the skcipher extended
 * descriptor.  Counts and maps src/dst S/G lists for cryptlen, copies the
 * IV into a DMA-able area inside the edesc allocation, builds the combined
 * input/output SEC4 H/W S/G table ([IV, src][dst, IV]) with 4-entry
 * padding, and maps it.  Returns the edesc or ERR_PTR() on failure.
 * NOTE(review): elided extraction — intermediate lines (braces, else arms,
 * kfree-on-error calls) are missing from view; code left byte-identical.
 */
1580  * allocate and map the skcipher extended descriptor for skcipher
1582 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1585 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1586 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1587 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1588 struct device *jrdev = ctx->jrdev;
/* GFP_KERNEL only when the caller may sleep; otherwise atomic. */
1589 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1590 GFP_KERNEL : GFP_ATOMIC;
1591 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1592 struct skcipher_edesc *edesc;
1593 dma_addr_t iv_dma = 0;
1595 int ivsize = crypto_skcipher_ivsize(skcipher);
1596 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
1598 src_nents = sg_nents_for_len(req->src, req->cryptlen);
1599 if (unlikely(src_nents < 0)) {
1600 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1602 return ERR_PTR(src_nents);
1605 if (req->dst != req->src) {
1606 dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
1607 if (unlikely(dst_nents < 0)) {
1608 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1610 return ERR_PTR(dst_nents);
/* In-place mapping is bidirectional (elided: DMA_BIDIRECTIONAL arg). */
1614 if (likely(req->src == req->dst)) {
1615 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1617 if (unlikely(!mapped_src_nents)) {
1618 dev_err(jrdev, "unable to map source\n");
1619 return ERR_PTR(-ENOMEM);
1622 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1624 if (unlikely(!mapped_src_nents)) {
1625 dev_err(jrdev, "unable to map source\n");
1626 return ERR_PTR(-ENOMEM);
1628 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1630 if (unlikely(!mapped_dst_nents)) {
1631 dev_err(jrdev, "unable to map destination\n");
/* Unwind the already-mapped source before bailing out. */
1632 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1633 return ERR_PTR(-ENOMEM);
1637 if (!ivsize && mapped_src_nents == 1)
1638 sec4_sg_ents = 0; // no need for an input hw s/g table
1640 sec4_sg_ents = mapped_src_nents + !!ivsize;
1641 dst_sg_idx = sec4_sg_ents;
1644  * Input, output HW S/G tables: [IV, src][dst, IV]
1645  * IV entries point to the same buffer
1646  * If src == dst, S/G entries are reused (S/G tables overlap)
1648  * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
1649  * the end of the table by allocating more S/G entries. Logic:
1651  * pad output S/G, if needed
1652  * else if (input S/G) ...
1653  * pad input S/G, if needed
1655 if (ivsize || mapped_dst_nents > 1) {
1656 if (req->src == req->dst)
1657 sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
1659 sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
1662 sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
1665 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1668  * allocate space for base edesc and hw desc commands, link tables, IV
1670 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
1673 dev_err(jrdev, "could not allocate extended descriptor\n");
1674 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1676 return ERR_PTR(-ENOMEM);
1679 edesc->src_nents = src_nents;
1680 edesc->dst_nents = dst_nents;
1681 edesc->mapped_src_nents = mapped_src_nents;
1682 edesc->mapped_dst_nents = mapped_dst_nents;
1683 edesc->sec4_sg_bytes = sec4_sg_bytes;
/* S/G table placed after the hw descriptor inside the same allocation. */
1684 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1686 rctx->edesc = edesc;
1688 /* Make sure IV is located in a DMAable area */
1690 iv = (u8 *)edesc->sec4_sg + sec4_sg_bytes;
1691 memcpy(iv, req->iv, ivsize);
/* Bidirectional: hardware reads the IV in and writes the next IV back. */
1693 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
1694 if (dma_mapping_error(jrdev, iv_dma)) {
1695 dev_err(jrdev, "unable to map IV\n");
1696 caam_unmap(jrdev, req->src, req->dst, src_nents,
1697 dst_nents, 0, 0, 0, 0);
1699 return ERR_PTR(-ENOMEM);
/* Input table: IV entry first, then the src segments. */
1702 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1705 sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
1708 if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
1709 sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
/* Output table tail: IV entry so the updated IV lands back in the buffer. */
1713 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
1714 mapped_dst_nents, iv_dma, ivsize, 0);
1716 if (ivsize || mapped_dst_nents > 1)
1717 sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
1718 mapped_dst_nents - 1 + !!ivsize);
1720 if (sec4_sg_bytes) {
1721 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1724 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1725 dev_err(jrdev, "unable to map S/G table\n");
1726 caam_unmap(jrdev, req->src, req->dst, src_nents,
1727 dst_nents, iv_dma, ivsize, 0, 0);
1729 return ERR_PTR(-ENOMEM);
1733 edesc->iv_dma = iv_dma;
1735 print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
1736 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
/*
 * skcipher_do_one_req() - crypto-engine callback: run one backlogged
 * skcipher request on the job ring.  Marks the edesc as backlogged so the
 * completion path finalizes the engine request; unmaps on sync failure.
 */
1742 static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
1744 struct skcipher_request *req = skcipher_request_cast(areq);
1745 struct caam_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
1746 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1747 u32 *desc = rctx->edesc->hw_desc;
/* Flag for the completion path: came through the crypto-engine. */
1750 rctx->edesc->bklog = true;
1752 ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req);
1754 if (ret != -EINPROGRESS) {
1755 skcipher_unmap(ctx->jrdev, rctx->edesc, req);
/*
 * skcipher_crypt() - common encrypt/decrypt path for skciphers.
 * Allocates the extended descriptor, builds the job descriptor, then
 * either routes backloggable requests through the crypto-engine or
 * enqueues directly on the job ring; unmaps on synchronous failure.
 */
1764 static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
1766 struct skcipher_edesc *edesc;
1767 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1768 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1769 struct device *jrdev = ctx->jrdev;
1770 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
1777 /* allocate extended descriptor */
1778 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
/* Propagate allocation/mapping failure (elided: IS_ERR check). */
1780 return PTR_ERR(edesc);
1782 /* Create and submit job descriptor*/
1783 init_skcipher_job(req, edesc, encrypt);
1785 print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
1786 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1787 desc_bytes(edesc->hw_desc), 1);
1789 desc = edesc->hw_desc;
1791  * Only the backlog request are sent to crypto-engine since the others
1792  * can be handled by CAAM, if free, especially since JR has up to 1024
1793  * entries (more than the 10 entries from crypto-engine).
1795 if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
1796 ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine,
1799 ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);
1801 if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
1802 skcipher_unmap(jrdev, edesc, req);
/* Thin wrapper: skcipher encrypt via the common crypt path. */
1809 static int skcipher_encrypt(struct skcipher_request *req)
1811 return skcipher_crypt(req, true);
/* Thin wrapper: skcipher decrypt via the common crypt path. */
1814 static int skcipher_decrypt(struct skcipher_request *req)
1816 return skcipher_crypt(req, false);
/*
 * driver_algs - skcipher algorithm templates registered by this driver.
 * Each entry pairs the generic skcipher_alg definition (name, key sizes,
 * IV size, entry points) with the CAAM class-1 OPERATION command fields
 * (.caam.class1_alg_type) used to build the shared descriptors.
 * NOTE(review): elided extraction — struct nesting braces are partly
 * missing from view; entries left byte-identical, comments only added.
 */
1819 static struct caam_skcipher_alg driver_algs[] = {
/* cbc(aes) */
1823 .cra_name = "cbc(aes)",
1824 .cra_driver_name = "cbc-aes-caam",
1825 .cra_blocksize = AES_BLOCK_SIZE,
1827 .setkey = aes_skcipher_setkey,
1828 .encrypt = skcipher_encrypt,
1829 .decrypt = skcipher_decrypt,
1830 .min_keysize = AES_MIN_KEY_SIZE,
1831 .max_keysize = AES_MAX_KEY_SIZE,
1832 .ivsize = AES_BLOCK_SIZE,
1834 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
/* cbc(des3_ede) */
1839 .cra_name = "cbc(des3_ede)",
1840 .cra_driver_name = "cbc-3des-caam",
1841 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1843 .setkey = des3_skcipher_setkey,
1844 .encrypt = skcipher_encrypt,
1845 .decrypt = skcipher_decrypt,
1846 .min_keysize = DES3_EDE_KEY_SIZE,
1847 .max_keysize = DES3_EDE_KEY_SIZE,
1848 .ivsize = DES3_EDE_BLOCK_SIZE,
1850 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
/* cbc(des) */
1855 .cra_name = "cbc(des)",
1856 .cra_driver_name = "cbc-des-caam",
1857 .cra_blocksize = DES_BLOCK_SIZE,
1859 .setkey = des_skcipher_setkey,
1860 .encrypt = skcipher_encrypt,
1861 .decrypt = skcipher_decrypt,
1862 .min_keysize = DES_KEY_SIZE,
1863 .max_keysize = DES_KEY_SIZE,
1864 .ivsize = DES_BLOCK_SIZE,
1866 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
/* ctr(aes) — stream mode, so chunksize is set but no cra_blocksize here */
1871 .cra_name = "ctr(aes)",
1872 .cra_driver_name = "ctr-aes-caam",
1875 .setkey = ctr_skcipher_setkey,
1876 .encrypt = skcipher_encrypt,
1877 .decrypt = skcipher_decrypt,
1878 .min_keysize = AES_MIN_KEY_SIZE,
1879 .max_keysize = AES_MAX_KEY_SIZE,
1880 .ivsize = AES_BLOCK_SIZE,
1881 .chunksize = AES_BLOCK_SIZE,
1883 .caam.class1_alg_type = OP_ALG_ALGSEL_AES |
1884 OP_ALG_AAI_CTR_MOD128,
/* rfc3686(ctr(aes)) — key carries an extra 4-byte nonce */
1889 .cra_name = "rfc3686(ctr(aes))",
1890 .cra_driver_name = "rfc3686-ctr-aes-caam",
1893 .setkey = rfc3686_skcipher_setkey,
1894 .encrypt = skcipher_encrypt,
1895 .decrypt = skcipher_decrypt,
1896 .min_keysize = AES_MIN_KEY_SIZE +
1897 CTR_RFC3686_NONCE_SIZE,
1898 .max_keysize = AES_MAX_KEY_SIZE +
1899 CTR_RFC3686_NONCE_SIZE,
1900 .ivsize = CTR_RFC3686_IV_SIZE,
1901 .chunksize = AES_BLOCK_SIZE,
1904 .class1_alg_type = OP_ALG_ALGSEL_AES |
1905 OP_ALG_AAI_CTR_MOD128,
/* xts(aes) — two keys, hence 2x key sizes */
1912 .cra_name = "xts(aes)",
1913 .cra_driver_name = "xts-aes-caam",
1914 .cra_blocksize = AES_BLOCK_SIZE,
1916 .setkey = xts_skcipher_setkey,
1917 .encrypt = skcipher_encrypt,
1918 .decrypt = skcipher_decrypt,
1919 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1920 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1921 .ivsize = AES_BLOCK_SIZE,
1923 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
/* ecb(des) — no IV */
1928 .cra_name = "ecb(des)",
1929 .cra_driver_name = "ecb-des-caam",
1930 .cra_blocksize = DES_BLOCK_SIZE,
1932 .setkey = des_skcipher_setkey,
1933 .encrypt = skcipher_encrypt,
1934 .decrypt = skcipher_decrypt,
1935 .min_keysize = DES_KEY_SIZE,
1936 .max_keysize = DES_KEY_SIZE,
1938 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
/* ecb(aes) — no IV */
1943 .cra_name = "ecb(aes)",
1944 .cra_driver_name = "ecb-aes-caam",
1945 .cra_blocksize = AES_BLOCK_SIZE,
1947 .setkey = aes_skcipher_setkey,
1948 .encrypt = skcipher_encrypt,
1949 .decrypt = skcipher_decrypt,
1950 .min_keysize = AES_MIN_KEY_SIZE,
1951 .max_keysize = AES_MAX_KEY_SIZE,
1953 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
/* ecb(des3_ede) — no IV */
1958 .cra_name = "ecb(des3_ede)",
1959 .cra_driver_name = "ecb-des3-caam",
1960 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1962 .setkey = des3_skcipher_setkey,
1963 .encrypt = skcipher_encrypt,
1964 .decrypt = skcipher_decrypt,
1965 .min_keysize = DES3_EDE_KEY_SIZE,
1966 .max_keysize = DES3_EDE_KEY_SIZE,
1968 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
/* ecb(arc4) — legacy stream cipher offload */
1973 .cra_name = "ecb(arc4)",
1974 .cra_driver_name = "ecb-arc4-caam",
1975 .cra_blocksize = ARC4_BLOCK_SIZE,
1977 .setkey = arc4_skcipher_setkey,
1978 .encrypt = skcipher_encrypt,
1979 .decrypt = skcipher_decrypt,
1980 .min_keysize = ARC4_MIN_KEY_SIZE,
1981 .max_keysize = ARC4_MAX_KEY_SIZE,
1983 .caam.class1_alg_type = OP_ALG_ALGSEL_ARC4 | OP_ALG_AAI_ECB,
1987 static struct caam_aead_alg driver_aeads[] = {
1991 .cra_name = "rfc4106(gcm(aes))",
1992 .cra_driver_name = "rfc4106-gcm-aes-caam",
1995 .setkey = rfc4106_setkey,
1996 .setauthsize = rfc4106_setauthsize,
1997 .encrypt = ipsec_gcm_encrypt,
1998 .decrypt = ipsec_gcm_decrypt,
1999 .ivsize = GCM_RFC4106_IV_SIZE,
2000 .maxauthsize = AES_BLOCK_SIZE,
2003 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2010 .cra_name = "rfc4543(gcm(aes))",
2011 .cra_driver_name = "rfc4543-gcm-aes-caam",
2014 .setkey = rfc4543_setkey,
2015 .setauthsize = rfc4543_setauthsize,
2016 .encrypt = ipsec_gcm_encrypt,
2017 .decrypt = ipsec_gcm_decrypt,
2018 .ivsize = GCM_RFC4543_IV_SIZE,
2019 .maxauthsize = AES_BLOCK_SIZE,
2022 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2026 /* Galois Counter Mode */
2030 .cra_name = "gcm(aes)",
2031 .cra_driver_name = "gcm-aes-caam",
2034 .setkey = gcm_setkey,
2035 .setauthsize = gcm_setauthsize,
2036 .encrypt = gcm_encrypt,
2037 .decrypt = gcm_decrypt,
2038 .ivsize = GCM_AES_IV_SIZE,
2039 .maxauthsize = AES_BLOCK_SIZE,
2042 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2046 /* single-pass ipsec_esp descriptor */
2050 .cra_name = "authenc(hmac(md5),"
2051 "ecb(cipher_null))",
2052 .cra_driver_name = "authenc-hmac-md5-"
2053 "ecb-cipher_null-caam",
2054 .cra_blocksize = NULL_BLOCK_SIZE,
2056 .setkey = aead_setkey,
2057 .setauthsize = aead_setauthsize,
2058 .encrypt = aead_encrypt,
2059 .decrypt = aead_decrypt,
2060 .ivsize = NULL_IV_SIZE,
2061 .maxauthsize = MD5_DIGEST_SIZE,
2064 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2065 OP_ALG_AAI_HMAC_PRECOMP,
2071 .cra_name = "authenc(hmac(sha1),"
2072 "ecb(cipher_null))",
2073 .cra_driver_name = "authenc-hmac-sha1-"
2074 "ecb-cipher_null-caam",
2075 .cra_blocksize = NULL_BLOCK_SIZE,
2077 .setkey = aead_setkey,
2078 .setauthsize = aead_setauthsize,
2079 .encrypt = aead_encrypt,
2080 .decrypt = aead_decrypt,
2081 .ivsize = NULL_IV_SIZE,
2082 .maxauthsize = SHA1_DIGEST_SIZE,
2085 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2086 OP_ALG_AAI_HMAC_PRECOMP,
2092 .cra_name = "authenc(hmac(sha224),"
2093 "ecb(cipher_null))",
2094 .cra_driver_name = "authenc-hmac-sha224-"
2095 "ecb-cipher_null-caam",
2096 .cra_blocksize = NULL_BLOCK_SIZE,
2098 .setkey = aead_setkey,
2099 .setauthsize = aead_setauthsize,
2100 .encrypt = aead_encrypt,
2101 .decrypt = aead_decrypt,
2102 .ivsize = NULL_IV_SIZE,
2103 .maxauthsize = SHA224_DIGEST_SIZE,
2106 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2107 OP_ALG_AAI_HMAC_PRECOMP,
2113 .cra_name = "authenc(hmac(sha256),"
2114 "ecb(cipher_null))",
2115 .cra_driver_name = "authenc-hmac-sha256-"
2116 "ecb-cipher_null-caam",
2117 .cra_blocksize = NULL_BLOCK_SIZE,
2119 .setkey = aead_setkey,
2120 .setauthsize = aead_setauthsize,
2121 .encrypt = aead_encrypt,
2122 .decrypt = aead_decrypt,
2123 .ivsize = NULL_IV_SIZE,
2124 .maxauthsize = SHA256_DIGEST_SIZE,
2127 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2128 OP_ALG_AAI_HMAC_PRECOMP,
2134 .cra_name = "authenc(hmac(sha384),"
2135 "ecb(cipher_null))",
2136 .cra_driver_name = "authenc-hmac-sha384-"
2137 "ecb-cipher_null-caam",
2138 .cra_blocksize = NULL_BLOCK_SIZE,
2140 .setkey = aead_setkey,
2141 .setauthsize = aead_setauthsize,
2142 .encrypt = aead_encrypt,
2143 .decrypt = aead_decrypt,
2144 .ivsize = NULL_IV_SIZE,
2145 .maxauthsize = SHA384_DIGEST_SIZE,
2148 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2149 OP_ALG_AAI_HMAC_PRECOMP,
2155 .cra_name = "authenc(hmac(sha512),"
2156 "ecb(cipher_null))",
2157 .cra_driver_name = "authenc-hmac-sha512-"
2158 "ecb-cipher_null-caam",
2159 .cra_blocksize = NULL_BLOCK_SIZE,
2161 .setkey = aead_setkey,
2162 .setauthsize = aead_setauthsize,
2163 .encrypt = aead_encrypt,
2164 .decrypt = aead_decrypt,
2165 .ivsize = NULL_IV_SIZE,
2166 .maxauthsize = SHA512_DIGEST_SIZE,
2169 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2170 OP_ALG_AAI_HMAC_PRECOMP,
2176 .cra_name = "authenc(hmac(md5),cbc(aes))",
2177 .cra_driver_name = "authenc-hmac-md5-"
2179 .cra_blocksize = AES_BLOCK_SIZE,
2181 .setkey = aead_setkey,
2182 .setauthsize = aead_setauthsize,
2183 .encrypt = aead_encrypt,
2184 .decrypt = aead_decrypt,
2185 .ivsize = AES_BLOCK_SIZE,
2186 .maxauthsize = MD5_DIGEST_SIZE,
2189 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2190 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2191 OP_ALG_AAI_HMAC_PRECOMP,
2197 .cra_name = "echainiv(authenc(hmac(md5),"
2199 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2201 .cra_blocksize = AES_BLOCK_SIZE,
2203 .setkey = aead_setkey,
2204 .setauthsize = aead_setauthsize,
2205 .encrypt = aead_encrypt,
2206 .decrypt = aead_decrypt,
2207 .ivsize = AES_BLOCK_SIZE,
2208 .maxauthsize = MD5_DIGEST_SIZE,
2211 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2212 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2213 OP_ALG_AAI_HMAC_PRECOMP,
2220 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2221 .cra_driver_name = "authenc-hmac-sha1-"
2223 .cra_blocksize = AES_BLOCK_SIZE,
2225 .setkey = aead_setkey,
2226 .setauthsize = aead_setauthsize,
2227 .encrypt = aead_encrypt,
2228 .decrypt = aead_decrypt,
2229 .ivsize = AES_BLOCK_SIZE,
2230 .maxauthsize = SHA1_DIGEST_SIZE,
2233 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2234 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2235 OP_ALG_AAI_HMAC_PRECOMP,
2241 .cra_name = "echainiv(authenc(hmac(sha1),"
2243 .cra_driver_name = "echainiv-authenc-"
2244 "hmac-sha1-cbc-aes-caam",
2245 .cra_blocksize = AES_BLOCK_SIZE,
2247 .setkey = aead_setkey,
2248 .setauthsize = aead_setauthsize,
2249 .encrypt = aead_encrypt,
2250 .decrypt = aead_decrypt,
2251 .ivsize = AES_BLOCK_SIZE,
2252 .maxauthsize = SHA1_DIGEST_SIZE,
2255 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2256 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2257 OP_ALG_AAI_HMAC_PRECOMP,
2264 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2265 .cra_driver_name = "authenc-hmac-sha224-"
2267 .cra_blocksize = AES_BLOCK_SIZE,
2269 .setkey = aead_setkey,
2270 .setauthsize = aead_setauthsize,
2271 .encrypt = aead_encrypt,
2272 .decrypt = aead_decrypt,
2273 .ivsize = AES_BLOCK_SIZE,
2274 .maxauthsize = SHA224_DIGEST_SIZE,
2277 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2278 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2279 OP_ALG_AAI_HMAC_PRECOMP,
2285 .cra_name = "echainiv(authenc(hmac(sha224),"
2287 .cra_driver_name = "echainiv-authenc-"
2288 "hmac-sha224-cbc-aes-caam",
2289 .cra_blocksize = AES_BLOCK_SIZE,
2291 .setkey = aead_setkey,
2292 .setauthsize = aead_setauthsize,
2293 .encrypt = aead_encrypt,
2294 .decrypt = aead_decrypt,
2295 .ivsize = AES_BLOCK_SIZE,
2296 .maxauthsize = SHA224_DIGEST_SIZE,
2299 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2300 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2301 OP_ALG_AAI_HMAC_PRECOMP,
2308 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2309 .cra_driver_name = "authenc-hmac-sha256-"
2311 .cra_blocksize = AES_BLOCK_SIZE,
2313 .setkey = aead_setkey,
2314 .setauthsize = aead_setauthsize,
2315 .encrypt = aead_encrypt,
2316 .decrypt = aead_decrypt,
2317 .ivsize = AES_BLOCK_SIZE,
2318 .maxauthsize = SHA256_DIGEST_SIZE,
2321 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2322 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2323 OP_ALG_AAI_HMAC_PRECOMP,
2329 .cra_name = "echainiv(authenc(hmac(sha256),"
2331 .cra_driver_name = "echainiv-authenc-"
2332 "hmac-sha256-cbc-aes-caam",
2333 .cra_blocksize = AES_BLOCK_SIZE,
2335 .setkey = aead_setkey,
2336 .setauthsize = aead_setauthsize,
2337 .encrypt = aead_encrypt,
2338 .decrypt = aead_decrypt,
2339 .ivsize = AES_BLOCK_SIZE,
2340 .maxauthsize = SHA256_DIGEST_SIZE,
2343 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2344 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2345 OP_ALG_AAI_HMAC_PRECOMP,
2352 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2353 .cra_driver_name = "authenc-hmac-sha384-"
2355 .cra_blocksize = AES_BLOCK_SIZE,
2357 .setkey = aead_setkey,
2358 .setauthsize = aead_setauthsize,
2359 .encrypt = aead_encrypt,
2360 .decrypt = aead_decrypt,
2361 .ivsize = AES_BLOCK_SIZE,
2362 .maxauthsize = SHA384_DIGEST_SIZE,
2365 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2366 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2367 OP_ALG_AAI_HMAC_PRECOMP,
2373 .cra_name = "echainiv(authenc(hmac(sha384),"
2375 .cra_driver_name = "echainiv-authenc-"
2376 "hmac-sha384-cbc-aes-caam",
2377 .cra_blocksize = AES_BLOCK_SIZE,
2379 .setkey = aead_setkey,
2380 .setauthsize = aead_setauthsize,
2381 .encrypt = aead_encrypt,
2382 .decrypt = aead_decrypt,
2383 .ivsize = AES_BLOCK_SIZE,
2384 .maxauthsize = SHA384_DIGEST_SIZE,
2387 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2388 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2389 OP_ALG_AAI_HMAC_PRECOMP,
2396 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2397 .cra_driver_name = "authenc-hmac-sha512-"
2399 .cra_blocksize = AES_BLOCK_SIZE,
2401 .setkey = aead_setkey,
2402 .setauthsize = aead_setauthsize,
2403 .encrypt = aead_encrypt,
2404 .decrypt = aead_decrypt,
2405 .ivsize = AES_BLOCK_SIZE,
2406 .maxauthsize = SHA512_DIGEST_SIZE,
2409 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2410 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2411 OP_ALG_AAI_HMAC_PRECOMP,
2417 .cra_name = "echainiv(authenc(hmac(sha512),"
2419 .cra_driver_name = "echainiv-authenc-"
2420 "hmac-sha512-cbc-aes-caam",
2421 .cra_blocksize = AES_BLOCK_SIZE,
2423 .setkey = aead_setkey,
2424 .setauthsize = aead_setauthsize,
2425 .encrypt = aead_encrypt,
2426 .decrypt = aead_decrypt,
2427 .ivsize = AES_BLOCK_SIZE,
2428 .maxauthsize = SHA512_DIGEST_SIZE,
2431 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2432 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2433 OP_ALG_AAI_HMAC_PRECOMP,
2440 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2441 .cra_driver_name = "authenc-hmac-md5-"
2442 "cbc-des3_ede-caam",
2443 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2445 .setkey = des3_aead_setkey,
2446 .setauthsize = aead_setauthsize,
2447 .encrypt = aead_encrypt,
2448 .decrypt = aead_decrypt,
2449 .ivsize = DES3_EDE_BLOCK_SIZE,
2450 .maxauthsize = MD5_DIGEST_SIZE,
2453 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2454 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2455 OP_ALG_AAI_HMAC_PRECOMP,
2461 .cra_name = "echainiv(authenc(hmac(md5),"
2463 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2464 "cbc-des3_ede-caam",
2465 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2467 .setkey = des3_aead_setkey,
2468 .setauthsize = aead_setauthsize,
2469 .encrypt = aead_encrypt,
2470 .decrypt = aead_decrypt,
2471 .ivsize = DES3_EDE_BLOCK_SIZE,
2472 .maxauthsize = MD5_DIGEST_SIZE,
2475 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2476 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2477 OP_ALG_AAI_HMAC_PRECOMP,
2484 .cra_name = "authenc(hmac(sha1),"
2486 .cra_driver_name = "authenc-hmac-sha1-"
2487 "cbc-des3_ede-caam",
2488 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2490 .setkey = des3_aead_setkey,
2491 .setauthsize = aead_setauthsize,
2492 .encrypt = aead_encrypt,
2493 .decrypt = aead_decrypt,
2494 .ivsize = DES3_EDE_BLOCK_SIZE,
2495 .maxauthsize = SHA1_DIGEST_SIZE,
2498 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2499 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2500 OP_ALG_AAI_HMAC_PRECOMP,
2506 .cra_name = "echainiv(authenc(hmac(sha1),"
2508 .cra_driver_name = "echainiv-authenc-"
2510 "cbc-des3_ede-caam",
2511 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2513 .setkey = des3_aead_setkey,
2514 .setauthsize = aead_setauthsize,
2515 .encrypt = aead_encrypt,
2516 .decrypt = aead_decrypt,
2517 .ivsize = DES3_EDE_BLOCK_SIZE,
2518 .maxauthsize = SHA1_DIGEST_SIZE,
2521 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2522 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2523 OP_ALG_AAI_HMAC_PRECOMP,
2530 .cra_name = "authenc(hmac(sha224),"
2532 .cra_driver_name = "authenc-hmac-sha224-"
2533 "cbc-des3_ede-caam",
2534 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2536 .setkey = des3_aead_setkey,
2537 .setauthsize = aead_setauthsize,
2538 .encrypt = aead_encrypt,
2539 .decrypt = aead_decrypt,
2540 .ivsize = DES3_EDE_BLOCK_SIZE,
2541 .maxauthsize = SHA224_DIGEST_SIZE,
2544 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2545 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2546 OP_ALG_AAI_HMAC_PRECOMP,
2552 .cra_name = "echainiv(authenc(hmac(sha224),"
2554 .cra_driver_name = "echainiv-authenc-"
2556 "cbc-des3_ede-caam",
2557 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2559 .setkey = des3_aead_setkey,
2560 .setauthsize = aead_setauthsize,
2561 .encrypt = aead_encrypt,
2562 .decrypt = aead_decrypt,
2563 .ivsize = DES3_EDE_BLOCK_SIZE,
2564 .maxauthsize = SHA224_DIGEST_SIZE,
2567 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2568 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2569 OP_ALG_AAI_HMAC_PRECOMP,
2576 .cra_name = "authenc(hmac(sha256),"
2578 .cra_driver_name = "authenc-hmac-sha256-"
2579 "cbc-des3_ede-caam",
2580 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2582 .setkey = des3_aead_setkey,
2583 .setauthsize = aead_setauthsize,
2584 .encrypt = aead_encrypt,
2585 .decrypt = aead_decrypt,
2586 .ivsize = DES3_EDE_BLOCK_SIZE,
2587 .maxauthsize = SHA256_DIGEST_SIZE,
2590 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2591 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2592 OP_ALG_AAI_HMAC_PRECOMP,
2598 .cra_name = "echainiv(authenc(hmac(sha256),"
2600 .cra_driver_name = "echainiv-authenc-"
2602 "cbc-des3_ede-caam",
2603 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2605 .setkey = des3_aead_setkey,
2606 .setauthsize = aead_setauthsize,
2607 .encrypt = aead_encrypt,
2608 .decrypt = aead_decrypt,
2609 .ivsize = DES3_EDE_BLOCK_SIZE,
2610 .maxauthsize = SHA256_DIGEST_SIZE,
2613 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2614 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2615 OP_ALG_AAI_HMAC_PRECOMP,
2622 .cra_name = "authenc(hmac(sha384),"
2624 .cra_driver_name = "authenc-hmac-sha384-"
2625 "cbc-des3_ede-caam",
2626 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2628 .setkey = des3_aead_setkey,
2629 .setauthsize = aead_setauthsize,
2630 .encrypt = aead_encrypt,
2631 .decrypt = aead_decrypt,
2632 .ivsize = DES3_EDE_BLOCK_SIZE,
2633 .maxauthsize = SHA384_DIGEST_SIZE,
2636 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2637 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2638 OP_ALG_AAI_HMAC_PRECOMP,
2644 .cra_name = "echainiv(authenc(hmac(sha384),"
2646 .cra_driver_name = "echainiv-authenc-"
2648 "cbc-des3_ede-caam",
2649 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2651 .setkey = des3_aead_setkey,
2652 .setauthsize = aead_setauthsize,
2653 .encrypt = aead_encrypt,
2654 .decrypt = aead_decrypt,
2655 .ivsize = DES3_EDE_BLOCK_SIZE,
2656 .maxauthsize = SHA384_DIGEST_SIZE,
2659 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2660 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2661 OP_ALG_AAI_HMAC_PRECOMP,
2668 .cra_name = "authenc(hmac(sha512),"
2670 .cra_driver_name = "authenc-hmac-sha512-"
2671 "cbc-des3_ede-caam",
2672 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2674 .setkey = des3_aead_setkey,
2675 .setauthsize = aead_setauthsize,
2676 .encrypt = aead_encrypt,
2677 .decrypt = aead_decrypt,
2678 .ivsize = DES3_EDE_BLOCK_SIZE,
2679 .maxauthsize = SHA512_DIGEST_SIZE,
2682 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2683 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2684 OP_ALG_AAI_HMAC_PRECOMP,
2690 .cra_name = "echainiv(authenc(hmac(sha512),"
2692 .cra_driver_name = "echainiv-authenc-"
2694 "cbc-des3_ede-caam",
2695 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2697 .setkey = des3_aead_setkey,
2698 .setauthsize = aead_setauthsize,
2699 .encrypt = aead_encrypt,
2700 .decrypt = aead_decrypt,
2701 .ivsize = DES3_EDE_BLOCK_SIZE,
2702 .maxauthsize = SHA512_DIGEST_SIZE,
2705 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2706 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2707 OP_ALG_AAI_HMAC_PRECOMP,
2714 .cra_name = "authenc(hmac(md5),cbc(des))",
2715 .cra_driver_name = "authenc-hmac-md5-"
2717 .cra_blocksize = DES_BLOCK_SIZE,
2719 .setkey = aead_setkey,
2720 .setauthsize = aead_setauthsize,
2721 .encrypt = aead_encrypt,
2722 .decrypt = aead_decrypt,
2723 .ivsize = DES_BLOCK_SIZE,
2724 .maxauthsize = MD5_DIGEST_SIZE,
2727 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2728 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2729 OP_ALG_AAI_HMAC_PRECOMP,
2735 .cra_name = "echainiv(authenc(hmac(md5),"
2737 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2739 .cra_blocksize = DES_BLOCK_SIZE,
2741 .setkey = aead_setkey,
2742 .setauthsize = aead_setauthsize,
2743 .encrypt = aead_encrypt,
2744 .decrypt = aead_decrypt,
2745 .ivsize = DES_BLOCK_SIZE,
2746 .maxauthsize = MD5_DIGEST_SIZE,
2749 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2750 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2751 OP_ALG_AAI_HMAC_PRECOMP,
2758 .cra_name = "authenc(hmac(sha1),cbc(des))",
2759 .cra_driver_name = "authenc-hmac-sha1-"
2761 .cra_blocksize = DES_BLOCK_SIZE,
2763 .setkey = aead_setkey,
2764 .setauthsize = aead_setauthsize,
2765 .encrypt = aead_encrypt,
2766 .decrypt = aead_decrypt,
2767 .ivsize = DES_BLOCK_SIZE,
2768 .maxauthsize = SHA1_DIGEST_SIZE,
2771 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2772 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2773 OP_ALG_AAI_HMAC_PRECOMP,
2779 .cra_name = "echainiv(authenc(hmac(sha1),"
2781 .cra_driver_name = "echainiv-authenc-"
2782 "hmac-sha1-cbc-des-caam",
2783 .cra_blocksize = DES_BLOCK_SIZE,
2785 .setkey = aead_setkey,
2786 .setauthsize = aead_setauthsize,
2787 .encrypt = aead_encrypt,
2788 .decrypt = aead_decrypt,
2789 .ivsize = DES_BLOCK_SIZE,
2790 .maxauthsize = SHA1_DIGEST_SIZE,
2793 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2794 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2795 OP_ALG_AAI_HMAC_PRECOMP,
2802 .cra_name = "authenc(hmac(sha224),cbc(des))",
2803 .cra_driver_name = "authenc-hmac-sha224-"
2805 .cra_blocksize = DES_BLOCK_SIZE,
2807 .setkey = aead_setkey,
2808 .setauthsize = aead_setauthsize,
2809 .encrypt = aead_encrypt,
2810 .decrypt = aead_decrypt,
2811 .ivsize = DES_BLOCK_SIZE,
2812 .maxauthsize = SHA224_DIGEST_SIZE,
2815 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2816 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2817 OP_ALG_AAI_HMAC_PRECOMP,
2823 .cra_name = "echainiv(authenc(hmac(sha224),"
2825 .cra_driver_name = "echainiv-authenc-"
2826 "hmac-sha224-cbc-des-caam",
2827 .cra_blocksize = DES_BLOCK_SIZE,
2829 .setkey = aead_setkey,
2830 .setauthsize = aead_setauthsize,
2831 .encrypt = aead_encrypt,
2832 .decrypt = aead_decrypt,
2833 .ivsize = DES_BLOCK_SIZE,
2834 .maxauthsize = SHA224_DIGEST_SIZE,
2837 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2838 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2839 OP_ALG_AAI_HMAC_PRECOMP,
2846 .cra_name = "authenc(hmac(sha256),cbc(des))",
2847 .cra_driver_name = "authenc-hmac-sha256-"
2849 .cra_blocksize = DES_BLOCK_SIZE,
2851 .setkey = aead_setkey,
2852 .setauthsize = aead_setauthsize,
2853 .encrypt = aead_encrypt,
2854 .decrypt = aead_decrypt,
2855 .ivsize = DES_BLOCK_SIZE,
2856 .maxauthsize = SHA256_DIGEST_SIZE,
2859 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2860 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2861 OP_ALG_AAI_HMAC_PRECOMP,
2867 .cra_name = "echainiv(authenc(hmac(sha256),"
2869 .cra_driver_name = "echainiv-authenc-"
2870 "hmac-sha256-cbc-des-caam",
2871 .cra_blocksize = DES_BLOCK_SIZE,
2873 .setkey = aead_setkey,
2874 .setauthsize = aead_setauthsize,
2875 .encrypt = aead_encrypt,
2876 .decrypt = aead_decrypt,
2877 .ivsize = DES_BLOCK_SIZE,
2878 .maxauthsize = SHA256_DIGEST_SIZE,
2881 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2882 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2883 OP_ALG_AAI_HMAC_PRECOMP,
2890 .cra_name = "authenc(hmac(sha384),cbc(des))",
2891 .cra_driver_name = "authenc-hmac-sha384-"
2893 .cra_blocksize = DES_BLOCK_SIZE,
2895 .setkey = aead_setkey,
2896 .setauthsize = aead_setauthsize,
2897 .encrypt = aead_encrypt,
2898 .decrypt = aead_decrypt,
2899 .ivsize = DES_BLOCK_SIZE,
2900 .maxauthsize = SHA384_DIGEST_SIZE,
2903 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2904 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2905 OP_ALG_AAI_HMAC_PRECOMP,
2911 .cra_name = "echainiv(authenc(hmac(sha384),"
2913 .cra_driver_name = "echainiv-authenc-"
2914 "hmac-sha384-cbc-des-caam",
2915 .cra_blocksize = DES_BLOCK_SIZE,
2917 .setkey = aead_setkey,
2918 .setauthsize = aead_setauthsize,
2919 .encrypt = aead_encrypt,
2920 .decrypt = aead_decrypt,
2921 .ivsize = DES_BLOCK_SIZE,
2922 .maxauthsize = SHA384_DIGEST_SIZE,
2925 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2926 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2927 OP_ALG_AAI_HMAC_PRECOMP,
2934 .cra_name = "authenc(hmac(sha512),cbc(des))",
2935 .cra_driver_name = "authenc-hmac-sha512-"
2937 .cra_blocksize = DES_BLOCK_SIZE,
2939 .setkey = aead_setkey,
2940 .setauthsize = aead_setauthsize,
2941 .encrypt = aead_encrypt,
2942 .decrypt = aead_decrypt,
2943 .ivsize = DES_BLOCK_SIZE,
2944 .maxauthsize = SHA512_DIGEST_SIZE,
2947 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2948 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2949 OP_ALG_AAI_HMAC_PRECOMP,
2955 .cra_name = "echainiv(authenc(hmac(sha512),"
2957 .cra_driver_name = "echainiv-authenc-"
2958 "hmac-sha512-cbc-des-caam",
2959 .cra_blocksize = DES_BLOCK_SIZE,
2961 .setkey = aead_setkey,
2962 .setauthsize = aead_setauthsize,
2963 .encrypt = aead_encrypt,
2964 .decrypt = aead_decrypt,
2965 .ivsize = DES_BLOCK_SIZE,
2966 .maxauthsize = SHA512_DIGEST_SIZE,
2969 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2970 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2971 OP_ALG_AAI_HMAC_PRECOMP,
2978 .cra_name = "authenc(hmac(md5),"
2979 "rfc3686(ctr(aes)))",
2980 .cra_driver_name = "authenc-hmac-md5-"
2981 "rfc3686-ctr-aes-caam",
2984 .setkey = aead_setkey,
2985 .setauthsize = aead_setauthsize,
2986 .encrypt = aead_encrypt,
2987 .decrypt = aead_decrypt,
2988 .ivsize = CTR_RFC3686_IV_SIZE,
2989 .maxauthsize = MD5_DIGEST_SIZE,
2992 .class1_alg_type = OP_ALG_ALGSEL_AES |
2993 OP_ALG_AAI_CTR_MOD128,
2994 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2995 OP_ALG_AAI_HMAC_PRECOMP,
3002 .cra_name = "seqiv(authenc("
3003 "hmac(md5),rfc3686(ctr(aes))))",
3004 .cra_driver_name = "seqiv-authenc-hmac-md5-"
3005 "rfc3686-ctr-aes-caam",
3008 .setkey = aead_setkey,
3009 .setauthsize = aead_setauthsize,
3010 .encrypt = aead_encrypt,
3011 .decrypt = aead_decrypt,
3012 .ivsize = CTR_RFC3686_IV_SIZE,
3013 .maxauthsize = MD5_DIGEST_SIZE,
3016 .class1_alg_type = OP_ALG_ALGSEL_AES |
3017 OP_ALG_AAI_CTR_MOD128,
3018 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3019 OP_ALG_AAI_HMAC_PRECOMP,
3027 .cra_name = "authenc(hmac(sha1),"
3028 "rfc3686(ctr(aes)))",
3029 .cra_driver_name = "authenc-hmac-sha1-"
3030 "rfc3686-ctr-aes-caam",
3033 .setkey = aead_setkey,
3034 .setauthsize = aead_setauthsize,
3035 .encrypt = aead_encrypt,
3036 .decrypt = aead_decrypt,
3037 .ivsize = CTR_RFC3686_IV_SIZE,
3038 .maxauthsize = SHA1_DIGEST_SIZE,
3041 .class1_alg_type = OP_ALG_ALGSEL_AES |
3042 OP_ALG_AAI_CTR_MOD128,
3043 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3044 OP_ALG_AAI_HMAC_PRECOMP,
3051 .cra_name = "seqiv(authenc("
3052 "hmac(sha1),rfc3686(ctr(aes))))",
3053 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
3054 "rfc3686-ctr-aes-caam",
3057 .setkey = aead_setkey,
3058 .setauthsize = aead_setauthsize,
3059 .encrypt = aead_encrypt,
3060 .decrypt = aead_decrypt,
3061 .ivsize = CTR_RFC3686_IV_SIZE,
3062 .maxauthsize = SHA1_DIGEST_SIZE,
3065 .class1_alg_type = OP_ALG_ALGSEL_AES |
3066 OP_ALG_AAI_CTR_MOD128,
3067 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3068 OP_ALG_AAI_HMAC_PRECOMP,
3076 .cra_name = "authenc(hmac(sha224),"
3077 "rfc3686(ctr(aes)))",
3078 .cra_driver_name = "authenc-hmac-sha224-"
3079 "rfc3686-ctr-aes-caam",
3082 .setkey = aead_setkey,
3083 .setauthsize = aead_setauthsize,
3084 .encrypt = aead_encrypt,
3085 .decrypt = aead_decrypt,
3086 .ivsize = CTR_RFC3686_IV_SIZE,
3087 .maxauthsize = SHA224_DIGEST_SIZE,
3090 .class1_alg_type = OP_ALG_ALGSEL_AES |
3091 OP_ALG_AAI_CTR_MOD128,
3092 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3093 OP_ALG_AAI_HMAC_PRECOMP,
3100 .cra_name = "seqiv(authenc("
3101 "hmac(sha224),rfc3686(ctr(aes))))",
3102 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
3103 "rfc3686-ctr-aes-caam",
3106 .setkey = aead_setkey,
3107 .setauthsize = aead_setauthsize,
3108 .encrypt = aead_encrypt,
3109 .decrypt = aead_decrypt,
3110 .ivsize = CTR_RFC3686_IV_SIZE,
3111 .maxauthsize = SHA224_DIGEST_SIZE,
3114 .class1_alg_type = OP_ALG_ALGSEL_AES |
3115 OP_ALG_AAI_CTR_MOD128,
3116 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3117 OP_ALG_AAI_HMAC_PRECOMP,
3125 .cra_name = "authenc(hmac(sha256),"
3126 "rfc3686(ctr(aes)))",
3127 .cra_driver_name = "authenc-hmac-sha256-"
3128 "rfc3686-ctr-aes-caam",
3131 .setkey = aead_setkey,
3132 .setauthsize = aead_setauthsize,
3133 .encrypt = aead_encrypt,
3134 .decrypt = aead_decrypt,
3135 .ivsize = CTR_RFC3686_IV_SIZE,
3136 .maxauthsize = SHA256_DIGEST_SIZE,
3139 .class1_alg_type = OP_ALG_ALGSEL_AES |
3140 OP_ALG_AAI_CTR_MOD128,
3141 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3142 OP_ALG_AAI_HMAC_PRECOMP,
3149 .cra_name = "seqiv(authenc(hmac(sha256),"
3150 "rfc3686(ctr(aes))))",
3151 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3152 "rfc3686-ctr-aes-caam",
3155 .setkey = aead_setkey,
3156 .setauthsize = aead_setauthsize,
3157 .encrypt = aead_encrypt,
3158 .decrypt = aead_decrypt,
3159 .ivsize = CTR_RFC3686_IV_SIZE,
3160 .maxauthsize = SHA256_DIGEST_SIZE,
3163 .class1_alg_type = OP_ALG_ALGSEL_AES |
3164 OP_ALG_AAI_CTR_MOD128,
3165 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3166 OP_ALG_AAI_HMAC_PRECOMP,
3174 .cra_name = "authenc(hmac(sha384),"
3175 "rfc3686(ctr(aes)))",
3176 .cra_driver_name = "authenc-hmac-sha384-"
3177 "rfc3686-ctr-aes-caam",
3180 .setkey = aead_setkey,
3181 .setauthsize = aead_setauthsize,
3182 .encrypt = aead_encrypt,
3183 .decrypt = aead_decrypt,
3184 .ivsize = CTR_RFC3686_IV_SIZE,
3185 .maxauthsize = SHA384_DIGEST_SIZE,
3188 .class1_alg_type = OP_ALG_ALGSEL_AES |
3189 OP_ALG_AAI_CTR_MOD128,
3190 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3191 OP_ALG_AAI_HMAC_PRECOMP,
3198 .cra_name = "seqiv(authenc(hmac(sha384),"
3199 "rfc3686(ctr(aes))))",
3200 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3201 "rfc3686-ctr-aes-caam",
3204 .setkey = aead_setkey,
3205 .setauthsize = aead_setauthsize,
3206 .encrypt = aead_encrypt,
3207 .decrypt = aead_decrypt,
3208 .ivsize = CTR_RFC3686_IV_SIZE,
3209 .maxauthsize = SHA384_DIGEST_SIZE,
3212 .class1_alg_type = OP_ALG_ALGSEL_AES |
3213 OP_ALG_AAI_CTR_MOD128,
3214 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3215 OP_ALG_AAI_HMAC_PRECOMP,
3223 .cra_name = "authenc(hmac(sha512),"
3224 "rfc3686(ctr(aes)))",
3225 .cra_driver_name = "authenc-hmac-sha512-"
3226 "rfc3686-ctr-aes-caam",
3229 .setkey = aead_setkey,
3230 .setauthsize = aead_setauthsize,
3231 .encrypt = aead_encrypt,
3232 .decrypt = aead_decrypt,
3233 .ivsize = CTR_RFC3686_IV_SIZE,
3234 .maxauthsize = SHA512_DIGEST_SIZE,
3237 .class1_alg_type = OP_ALG_ALGSEL_AES |
3238 OP_ALG_AAI_CTR_MOD128,
3239 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3240 OP_ALG_AAI_HMAC_PRECOMP,
3247 .cra_name = "seqiv(authenc(hmac(sha512),"
3248 "rfc3686(ctr(aes))))",
3249 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3250 "rfc3686-ctr-aes-caam",
3253 .setkey = aead_setkey,
3254 .setauthsize = aead_setauthsize,
3255 .encrypt = aead_encrypt,
3256 .decrypt = aead_decrypt,
3257 .ivsize = CTR_RFC3686_IV_SIZE,
3258 .maxauthsize = SHA512_DIGEST_SIZE,
3261 .class1_alg_type = OP_ALG_ALGSEL_AES |
3262 OP_ALG_AAI_CTR_MOD128,
3263 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3264 OP_ALG_AAI_HMAC_PRECOMP,
3272 .cra_name = "rfc7539(chacha20,poly1305)",
3273 .cra_driver_name = "rfc7539-chacha20-poly1305-"
3277 .setkey = chachapoly_setkey,
3278 .setauthsize = chachapoly_setauthsize,
3279 .encrypt = chachapoly_encrypt,
3280 .decrypt = chachapoly_decrypt,
3281 .ivsize = CHACHAPOLY_IV_SIZE,
3282 .maxauthsize = POLY1305_DIGEST_SIZE,
3285 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3287 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3295 .cra_name = "rfc7539esp(chacha20,poly1305)",
3296 .cra_driver_name = "rfc7539esp-chacha20-"
3300 .setkey = chachapoly_setkey,
3301 .setauthsize = chachapoly_setauthsize,
3302 .encrypt = chachapoly_encrypt,
3303 .decrypt = chachapoly_decrypt,
3305 .maxauthsize = POLY1305_DIGEST_SIZE,
3308 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3310 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3317 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
3320 dma_addr_t dma_addr;
3321 struct caam_drv_private *priv;
3322 const size_t sh_desc_enc_offset = offsetof(struct caam_ctx,
3325 ctx->jrdev = caam_jr_alloc();
3326 if (IS_ERR(ctx->jrdev)) {
3327 pr_err("Job Ring Device allocation for transform failed\n");
3328 return PTR_ERR(ctx->jrdev);
3331 priv = dev_get_drvdata(ctx->jrdev->parent);
3332 if (priv->era >= 6 && uses_dkp)
3333 ctx->dir = DMA_BIDIRECTIONAL;
3335 ctx->dir = DMA_TO_DEVICE;
3337 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3338 offsetof(struct caam_ctx,
3341 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3342 if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3343 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3344 caam_jr_free(ctx->jrdev);
3348 ctx->sh_desc_enc_dma = dma_addr;
3349 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3352 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) -
3355 /* copy descriptor header template value */
3356 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3357 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
3362 static int caam_cra_init(struct crypto_skcipher *tfm)
3364 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3365 struct caam_skcipher_alg *caam_alg =
3366 container_of(alg, typeof(*caam_alg), skcipher);
3367 struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
3369 crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
3371 ctx->enginectx.op.do_one_request = skcipher_do_one_req;
3373 return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
3377 static int caam_aead_init(struct crypto_aead *tfm)
3379 struct aead_alg *alg = crypto_aead_alg(tfm);
3380 struct caam_aead_alg *caam_alg =
3381 container_of(alg, struct caam_aead_alg, aead);
3382 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3384 crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx));
3386 ctx->enginectx.op.do_one_request = aead_do_one_req;
3388 return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
3391 static void caam_exit_common(struct caam_ctx *ctx)
3393 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3394 offsetof(struct caam_ctx, sh_desc_enc_dma) -
3395 offsetof(struct caam_ctx, sh_desc_enc),
3396 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3397 caam_jr_free(ctx->jrdev);
/* crypto API .exit hook for skcipher transforms */
static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	caam_exit_common(crypto_skcipher_ctx(tfm));
}
/* crypto API .exit hook for AEAD transforms */
static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}
3410 void caam_algapi_exit(void)
3414 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3415 struct caam_aead_alg *t_alg = driver_aeads + i;
3417 if (t_alg->registered)
3418 crypto_unregister_aead(&t_alg->aead);
3421 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3422 struct caam_skcipher_alg *t_alg = driver_algs + i;
3424 if (t_alg->registered)
3425 crypto_unregister_skcipher(&t_alg->skcipher);
3429 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
3431 struct skcipher_alg *alg = &t_alg->skcipher;
3433 alg->base.cra_module = THIS_MODULE;
3434 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3435 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3436 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3438 alg->init = caam_cra_init;
3439 alg->exit = caam_cra_exit;
3442 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3444 struct aead_alg *alg = &t_alg->aead;
3446 alg->base.cra_module = THIS_MODULE;
3447 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3448 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3449 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3451 alg->init = caam_aead_init;
3452 alg->exit = caam_aead_exit;
3455 int caam_algapi_init(struct device *ctrldev)
3457 struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
3459 u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
3461 unsigned int md_limit = SHA512_DIGEST_SIZE;
3462 bool registered = false, gcm_support;
3465 * Register crypto algorithms the device supports.
3466 * First, detect presence and attributes of DES, AES, and MD blocks.
3468 if (priv->era < 10) {
3469 u32 cha_vid, cha_inst, aes_rn;
3471 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3472 aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
3473 md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3475 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3476 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
3477 CHA_ID_LS_DES_SHIFT;
3478 aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
3479 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3480 arc4_inst = (cha_inst & CHA_ID_LS_ARC4_MASK) >>
3481 CHA_ID_LS_ARC4_SHIFT;
3485 aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) &
3487 gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
3491 aesa = rd_reg32(&priv->ctrl->vreg.aesa);
3492 mdha = rd_reg32(&priv->ctrl->vreg.mdha);
3494 aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3495 md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3497 des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
3498 aes_inst = aesa & CHA_VER_NUM_MASK;
3499 md_inst = mdha & CHA_VER_NUM_MASK;
3500 ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
3501 ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;
3502 arc4_inst = rd_reg32(&priv->ctrl->vreg.afha) & CHA_VER_NUM_MASK;
3504 gcm_support = aesa & CHA_VER_MISC_AES_GCM;
3507 /* If MD is present, limit digest size based on LP256 */
3508 if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
3509 md_limit = SHA256_DIGEST_SIZE;
3511 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3512 struct caam_skcipher_alg *t_alg = driver_algs + i;
3513 u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;
3515 /* Skip DES algorithms if not supported by device */
3517 ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3518 (alg_sel == OP_ALG_ALGSEL_DES)))
3521 /* Skip AES algorithms if not supported by device */
3522 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3525 /* Skip ARC4 algorithms if not supported by device */
3526 if (!arc4_inst && alg_sel == OP_ALG_ALGSEL_ARC4)
3530 * Check support for AES modes not available
3533 if (aes_vid == CHA_VER_VID_AES_LP &&
3534 (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
3538 caam_skcipher_alg_init(t_alg);
3540 err = crypto_register_skcipher(&t_alg->skcipher);
3542 pr_warn("%s alg registration failed\n",
3543 t_alg->skcipher.base.cra_driver_name);
3547 t_alg->registered = true;
3551 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3552 struct caam_aead_alg *t_alg = driver_aeads + i;
3553 u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3555 u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3557 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3559 /* Skip DES algorithms if not supported by device */
3561 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3562 (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3565 /* Skip AES algorithms if not supported by device */
3566 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3569 /* Skip CHACHA20 algorithms if not supported by device */
3570 if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
3573 /* Skip POLY1305 algorithms if not supported by device */
3574 if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
3577 /* Skip GCM algorithms if not supported by device */
3578 if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
3579 alg_aai == OP_ALG_AAI_GCM && !gcm_support)
3583 * Skip algorithms requiring message digests
3584 * if MD or MD size is not supported by device.
3586 if (is_mdha(c2_alg_sel) &&
3587 (!md_inst || t_alg->aead.maxauthsize > md_limit))
3590 caam_aead_alg_init(t_alg);
3592 err = crypto_register_aead(&t_alg->aead);
3594 pr_warn("%s alg registration failed\n",
3595 t_alg->aead.base.cra_driver_name);
3599 t_alg->registered = true;
3604 pr_info("caam algorithms registered in /proc/crypto\n");