1 // SPDX-License-Identifier: GPL-2.0+
3 * caam - Freescale FSL CAAM support for crypto API
5 * Copyright 2008-2011 Freescale Semiconductor, Inc.
6 * Copyright 2016-2018 NXP
8 * Based on talitos crypto API driver.
10 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
12 * --------------- ---------------
13 * | JobDesc #1 |-------------------->| ShareDesc |
14 * | *(packet 1) | | (PDB) |
15 * --------------- |------------->| (hashKey) |
17 * . | |-------->| (operation) |
18 * --------------- | | ---------------
19 * | JobDesc #2 |------| |
25 * | JobDesc #3 |------------
29 * The SharedDesc never changes for a connection unless rekeyed, but
30 * each packet will likely be in a different place. So all we need
31 * to know to process the packet is where the input is, where the
32 * output goes, and what context we want to process with. Context is
33 * in the SharedDesc, packet references in the JobDesc.
35 * So, a job desc looks like:
37 * ---------------------
39 * | ShareDesc Pointer |
46 * ---------------------
53 #include "desc_constr.h"
56 #include "sg_sw_sec4.h"
58 #include "caamalg_desc.h"
/* crypto API registration priority for all CAAM-backed algorithms */
63 #define CAAM_CRA_PRIORITY 3000
64 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */
65 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
66 CTR_RFC3686_NONCE_SIZE + \
67 SHA512_DIGEST_SIZE * 2)
/*
 * Per-mode job-descriptor I/O budgets: bytes of the 64-word descriptor
 * buffer consumed by the job descriptor's header/seq-in/seq-out commands,
 * used below to decide whether keys can be inlined into shared descriptors.
 * NOTE(review): continuation lines of the GCM/AUTHENC definitions are not
 * visible in this extract.
 */
69 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
70 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
72 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
/* maximum shared-descriptor space left after job descriptor I/O commands */
75 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
76 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
79 /* for print_hex_dumps with line references */
80 #define debug(format, arg...) printk(format, arg)
/*
 * no-op variant — presumably the two definitions sit in an
 * #ifdef DEBUG / #else pair whose guard lines are not visible here;
 * TODO confirm against the full file.
 */
82 #define debug(format, arg...)
/*
 * Per-algorithm template data shared by the AEAD and skcipher wrappers
 * below (fields such as rfc3686/geniv are referenced later in this file).
 * NOTE(review): struct bodies are only partially visible in this extract.
 */
85 struct caam_alg_entry {
/* crypto API aead_alg plus the CAAM-specific template entry */
92 struct caam_aead_alg {
94 struct caam_alg_entry caam;
/* crypto API skcipher_alg plus the CAAM-specific template entry */
98 struct caam_skcipher_alg {
99 struct skcipher_alg skcipher;
100 struct caam_alg_entry caam;
105 * per-session context
/* encrypt/decrypt shared descriptors, kept DMA-mapped for the session */
108 u32 sh_desc_enc[DESC_MAX_USED_LEN];
109 u32 sh_desc_dec[DESC_MAX_USED_LEN];
/* split auth key followed by the cipher key (see aead_setkey) */
110 u8 key[CAAM_MAX_KEY_SIZE];
111 dma_addr_t sh_desc_enc_dma;
112 dma_addr_t sh_desc_dec_dma;
/* DMA sync direction used for descriptor/key maintenance */
114 enum dma_data_direction dir;
115 struct device *jrdev;
/* authentication (adata) and cipher (cdata) algorithm parameters */
116 struct alginfo adata;
117 struct alginfo cdata;
118 unsigned int authsize;
/*
 * aead_null_set_sh_desc() - build encrypt/decrypt shared descriptors for
 * the authentication-only ("null encryption") AEAD session.
 *
 * The auth key is inlined into the shared descriptor when it fits in the
 * 64-word descriptor buffer alongside the job descriptor (rem_bytes check);
 * otherwise it is referenced by DMA address.  Each constructed descriptor
 * is then synced to the device.
 * NOTE(review): some interior lines (braces, declarations, return) are not
 * visible in this extract; code kept byte-identical.
 */
121 static int aead_null_set_sh_desc(struct crypto_aead *aead)
123 struct caam_ctx *ctx = crypto_aead_ctx(aead);
124 struct device *jrdev = ctx->jrdev;
125 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
/* descriptor-buffer space left after job I/O commands and padded key */
127 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
128 ctx->adata.keylen_pad;
131 * Job Descriptor and Shared Descriptors
132 * must all fit into the 64-word Descriptor h/w Buffer
134 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
135 ctx->adata.key_inline = true;
136 ctx->adata.key_virt = ctx->key;
138 ctx->adata.key_inline = false;
139 ctx->adata.key_dma = ctx->key_dma;
142 /* aead_encrypt shared descriptor */
143 desc = ctx->sh_desc_enc;
144 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
146 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
147 desc_bytes(desc), ctx->dir);
150 * Job Descriptor and Shared Descriptors
151 * must all fit into the 64-word Descriptor h/w Buffer
153 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
154 ctx->adata.key_inline = true;
155 ctx->adata.key_virt = ctx->key;
157 ctx->adata.key_inline = false;
158 ctx->adata.key_dma = ctx->key_dma;
161 /* aead_decrypt shared descriptor */
162 desc = ctx->sh_desc_dec;
163 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
165 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
166 desc_bytes(desc), ctx->dir);
/*
 * aead_set_sh_desc() - (re)build shared descriptors for an authenc-style
 * AEAD session: encrypt, decrypt and — when the template requests IV
 * generation (alg->caam.geniv) — givencrypt.
 *
 * For each descriptor, desc_inline_query() decides independently whether
 * the auth key (bit 0 of inl_mask) and cipher key (bit 1) can be inlined
 * into the 64-word descriptor buffer; otherwise DMA references are used.
 * CTR/RFC3686 sessions carry the nonce at the end of the key material and
 * load the IV at a nonzero CONTEXT1 offset.
 * NOTE(review): interior lines (braces, some declarations/else branches,
 * return paths) are missing from this extract; code kept byte-identical.
 */
171 static int aead_set_sh_desc(struct crypto_aead *aead)
173 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
174 struct caam_aead_alg, aead);
175 unsigned int ivsize = crypto_aead_ivsize(aead);
176 struct caam_ctx *ctx = crypto_aead_ctx(aead);
177 struct device *jrdev = ctx->jrdev;
178 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
180 u32 *desc, *nonce = NULL;
182 unsigned int data_len[2];
183 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
184 OP_ALG_AAI_CTR_MOD128);
185 const bool is_rfc3686 = alg->caam.rfc3686;
190 /* NULL encryption / decryption */
191 if (!ctx->cdata.keylen)
192 return aead_null_set_sh_desc(aead);
195 * AES-CTR needs to load IV in CONTEXT1 reg
196 * at an offset of 128bits (16bytes)
197 * CONTEXT1[255:128] = IV
204 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
/* RFC3686 nonce sits at the very end of the stored key material */
207 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
208 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
209 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
/* data_len[0] = auth key (padded), data_len[1] = cipher key */
212 data_len[0] = ctx->adata.keylen_pad;
213 data_len[1] = ctx->cdata.keylen;
219 * Job Descriptor and Shared Descriptors
220 * must all fit into the 64-word Descriptor h/w Buffer
222 if (desc_inline_query(DESC_AEAD_ENC_LEN +
223 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
224 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
225 ARRAY_SIZE(data_len)) < 0)
229 ctx->adata.key_virt = ctx->key;
231 ctx->adata.key_dma = ctx->key_dma;
234 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
236 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
238 ctx->adata.key_inline = !!(inl_mask & 1);
239 ctx->cdata.key_inline = !!(inl_mask & 2);
241 /* aead_encrypt shared descriptor */
242 desc = ctx->sh_desc_enc;
243 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
244 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
245 false, ctrlpriv->era);
246 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
247 desc_bytes(desc), ctx->dir);
251 * Job Descriptor and Shared Descriptors
252 * must all fit into the 64-word Descriptor h/w Buffer
254 if (desc_inline_query(DESC_AEAD_DEC_LEN +
255 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
256 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
257 ARRAY_SIZE(data_len)) < 0)
261 ctx->adata.key_virt = ctx->key;
263 ctx->adata.key_dma = ctx->key_dma;
266 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
268 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
270 ctx->adata.key_inline = !!(inl_mask & 1);
271 ctx->cdata.key_inline = !!(inl_mask & 2);
273 /* aead_decrypt shared descriptor */
274 desc = ctx->sh_desc_dec;
275 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
276 ctx->authsize, alg->caam.geniv, is_rfc3686,
277 nonce, ctx1_iv_off, false, ctrlpriv->era);
278 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
279 desc_bytes(desc), ctx->dir);
/* non-geniv templates are done after enc+dec descriptors */
281 if (!alg->caam.geniv)
285 * Job Descriptor and Shared Descriptors
286 * must all fit into the 64-word Descriptor h/w Buffer
288 if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
289 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
290 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
291 ARRAY_SIZE(data_len)) < 0)
295 ctx->adata.key_virt = ctx->key;
297 ctx->adata.key_dma = ctx->key_dma;
300 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
302 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
304 ctx->adata.key_inline = !!(inl_mask & 1);
305 ctx->cdata.key_inline = !!(inl_mask & 2);
/* givencrypt descriptor replaces the plain encrypt one (sh_desc_enc) */
307 /* aead_givencrypt shared descriptor */
308 desc = ctx->sh_desc_enc;
309 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
310 ctx->authsize, is_rfc3686, nonce,
311 ctx1_iv_off, false, ctrlpriv->era);
312 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
313 desc_bytes(desc), ctx->dir);
/*
 * aead_setauthsize() - record the requested ICV/tag length and rebuild
 * the authenc shared descriptors, which embed the authsize.
 */
319 static int aead_setauthsize(struct crypto_aead *authenc,
320 unsigned int authsize)
322 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
324 ctx->authsize = authsize;
325 aead_set_sh_desc(authenc);
/*
 * gcm_set_sh_desc() - build AES-GCM encrypt/decrypt shared descriptors.
 *
 * Skips work until both the key and authsize are known.  The cipher key
 * is inlined when the descriptor fits in the 64-word buffer, otherwise
 * referenced by DMA address; each descriptor is synced to the device.
 * NOTE(review): interior lines (braces, declarations, return) are missing
 * from this extract; code kept byte-identical.
 */
330 static int gcm_set_sh_desc(struct crypto_aead *aead)
332 struct caam_ctx *ctx = crypto_aead_ctx(aead);
333 struct device *jrdev = ctx->jrdev;
334 unsigned int ivsize = crypto_aead_ivsize(aead);
336 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
/* defer until both setkey and setauthsize have run */
339 if (!ctx->cdata.keylen || !ctx->authsize)
343 * AES GCM encrypt shared descriptor
344 * Job Descriptor and Shared Descriptor
345 * must fit into the 64-word Descriptor h/w Buffer
347 if (rem_bytes >= DESC_GCM_ENC_LEN) {
348 ctx->cdata.key_inline = true;
349 ctx->cdata.key_virt = ctx->key;
351 ctx->cdata.key_inline = false;
352 ctx->cdata.key_dma = ctx->key_dma;
355 desc = ctx->sh_desc_enc;
356 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
357 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
358 desc_bytes(desc), ctx->dir);
361 * Job Descriptor and Shared Descriptors
362 * must all fit into the 64-word Descriptor h/w Buffer
364 if (rem_bytes >= DESC_GCM_DEC_LEN) {
365 ctx->cdata.key_inline = true;
366 ctx->cdata.key_virt = ctx->key;
368 ctx->cdata.key_inline = false;
369 ctx->cdata.key_dma = ctx->key_dma;
372 desc = ctx->sh_desc_dec;
373 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
374 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
375 desc_bytes(desc), ctx->dir);
/*
 * gcm_setauthsize() - record the GCM tag length and rebuild the shared
 * descriptors, which embed the authsize.
 */
380 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
382 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
384 ctx->authsize = authsize;
385 gcm_set_sh_desc(authenc);
/*
 * rfc4106_set_sh_desc() - build RFC4106 (GCM for IPsec ESP) encrypt and
 * decrypt shared descriptors, mirroring gcm_set_sh_desc(): key inlined
 * when the descriptor fits, else DMA-referenced, then synced to device.
 * NOTE(review): interior lines are missing from this extract; code kept
 * byte-identical.
 */
390 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
392 struct caam_ctx *ctx = crypto_aead_ctx(aead);
393 struct device *jrdev = ctx->jrdev;
394 unsigned int ivsize = crypto_aead_ivsize(aead);
396 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
/* defer until both setkey and setauthsize have run */
399 if (!ctx->cdata.keylen || !ctx->authsize)
403 * RFC4106 encrypt shared descriptor
404 * Job Descriptor and Shared Descriptor
405 * must fit into the 64-word Descriptor h/w Buffer
407 if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
408 ctx->cdata.key_inline = true;
409 ctx->cdata.key_virt = ctx->key;
411 ctx->cdata.key_inline = false;
412 ctx->cdata.key_dma = ctx->key_dma;
415 desc = ctx->sh_desc_enc;
416 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
418 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
419 desc_bytes(desc), ctx->dir);
422 * Job Descriptor and Shared Descriptors
423 * must all fit into the 64-word Descriptor h/w Buffer
425 if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
426 ctx->cdata.key_inline = true;
427 ctx->cdata.key_virt = ctx->key;
429 ctx->cdata.key_inline = false;
430 ctx->cdata.key_dma = ctx->key_dma;
433 desc = ctx->sh_desc_dec;
434 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
436 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
437 desc_bytes(desc), ctx->dir);
/*
 * rfc4106_setauthsize() - record the tag length and rebuild the RFC4106
 * shared descriptors, which embed the authsize.
 */
442 static int rfc4106_setauthsize(struct crypto_aead *authenc,
443 unsigned int authsize)
445 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
447 ctx->authsize = authsize;
448 rfc4106_set_sh_desc(authenc);
/*
 * rfc4543_set_sh_desc() - build RFC4543 (GMAC) encrypt and decrypt shared
 * descriptors; same inline-vs-DMA key decision and device sync as the
 * other GCM-family helpers above.
 * NOTE(review): interior lines are missing from this extract; code kept
 * byte-identical.
 */
453 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
455 struct caam_ctx *ctx = crypto_aead_ctx(aead);
456 struct device *jrdev = ctx->jrdev;
457 unsigned int ivsize = crypto_aead_ivsize(aead);
459 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
/* defer until both setkey and setauthsize have run */
462 if (!ctx->cdata.keylen || !ctx->authsize)
466 * RFC4543 encrypt shared descriptor
467 * Job Descriptor and Shared Descriptor
468 * must fit into the 64-word Descriptor h/w Buffer
470 if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
471 ctx->cdata.key_inline = true;
472 ctx->cdata.key_virt = ctx->key;
474 ctx->cdata.key_inline = false;
475 ctx->cdata.key_dma = ctx->key_dma;
478 desc = ctx->sh_desc_enc;
479 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
481 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
482 desc_bytes(desc), ctx->dir);
485 * Job Descriptor and Shared Descriptors
486 * must all fit into the 64-word Descriptor h/w Buffer
488 if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
489 ctx->cdata.key_inline = true;
490 ctx->cdata.key_virt = ctx->key;
492 ctx->cdata.key_inline = false;
493 ctx->cdata.key_dma = ctx->key_dma;
496 desc = ctx->sh_desc_dec;
497 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
499 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
500 desc_bytes(desc), ctx->dir);
/*
 * rfc4543_setauthsize() - record the tag length and rebuild the RFC4543
 * shared descriptors, which embed the authsize.
 */
505 static int rfc4543_setauthsize(struct crypto_aead *authenc,
506 unsigned int authsize)
508 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
510 ctx->authsize = authsize;
511 rfc4543_set_sh_desc(authenc);
/*
 * aead_setkey() - install an authenc key pair (auth key + cipher key).
 *
 * Splits the combined key with crypto_authenc_extractkeys().  On CAAM
 * era >= 6 the Derived Key Protocol (DKP) is used, so the plain auth key
 * is stored directly; on older eras gen_split_key() derives the split
 * MDHA key first.  In both cases the cipher key is appended after the
 * padded auth key in ctx->key, the buffer is synced to the device, and
 * the shared descriptors are rebuilt.  The stack copy of the extracted
 * keys is wiped with memzero_explicit() on every path.
 * NOTE(review): interior lines (braces, 'goto badkey'-style error paths,
 * returns) are missing from this extract; code kept byte-identical.
 */
516 static int aead_setkey(struct crypto_aead *aead,
517 const u8 *key, unsigned int keylen)
519 struct caam_ctx *ctx = crypto_aead_ctx(aead);
520 struct device *jrdev = ctx->jrdev;
521 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
522 struct crypto_authenc_keys keys;
525 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
529 printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
530 keys.authkeylen + keys.enckeylen, keys.enckeylen,
532 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
533 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
537 * If DKP is supported, use it in the shared descriptor to generate
540 if (ctrlpriv->era >= 6) {
541 ctx->adata.keylen = keys.authkeylen;
542 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
/* reject combined key material that would overflow ctx->key */
545 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
548 memcpy(ctx->key, keys.authkey, keys.authkeylen);
549 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
551 dma_sync_single_for_device(jrdev, ctx->key_dma,
552 ctx->adata.keylen_pad +
553 keys.enckeylen, ctx->dir);
/* pre-era-6 path: derive the split auth key on the job ring */
557 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
558 keys.authkeylen, CAAM_MAX_KEY_SIZE -
564 /* postpend encryption key to auth split key */
565 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
566 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
567 keys.enckeylen, ctx->dir);
569 print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
570 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
571 ctx->adata.keylen_pad + keys.enckeylen, 1);
575 ctx->cdata.keylen = keys.enckeylen;
576 memzero_explicit(&keys, sizeof(keys));
577 return aead_set_sh_desc(aead);
/* error path: flag the bad key and wipe the extracted key copy */
579 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
580 memzero_explicit(&keys, sizeof(keys));
/*
 * gcm_setkey() - copy the AES-GCM key into the session context, sync it
 * to the device, and rebuild the shared descriptors.
 */
584 static int gcm_setkey(struct crypto_aead *aead,
585 const u8 *key, unsigned int keylen)
587 struct caam_ctx *ctx = crypto_aead_ctx(aead);
588 struct device *jrdev = ctx->jrdev;
591 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
592 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
595 memcpy(ctx->key, key, keylen);
596 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
597 ctx->cdata.keylen = keylen;
599 return gcm_set_sh_desc(aead);
/*
 * rfc4106_setkey() - install an RFC4106 key: the trailing 4 bytes are the
 * nonce salt, so only keylen - 4 bytes count as the AES key.  The full
 * material is copied into ctx->key; only the AES-key portion is synced
 * to the device before the descriptors are rebuilt.
 * NOTE(review): interior lines (brace, key-length validation) are missing
 * from this extract; code kept byte-identical.
 */
602 static int rfc4106_setkey(struct crypto_aead *aead,
603 const u8 *key, unsigned int keylen)
605 struct caam_ctx *ctx = crypto_aead_ctx(aead);
606 struct device *jrdev = ctx->jrdev;
612 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
613 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
616 memcpy(ctx->key, key, keylen);
619 * The last four bytes of the key material are used as the salt value
620 * in the nonce. Update the AES key length.
622 ctx->cdata.keylen = keylen - 4;
623 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
625 return rfc4106_set_sh_desc(aead);
/*
 * rfc4543_setkey() - install an RFC4543 key; identical structure to
 * rfc4106_setkey(): last 4 bytes are the nonce salt, keylen - 4 is the
 * AES key length synced to the device.
 * NOTE(review): interior lines (brace, key-length validation) are missing
 * from this extract; code kept byte-identical.
 */
628 static int rfc4543_setkey(struct crypto_aead *aead,
629 const u8 *key, unsigned int keylen)
631 struct caam_ctx *ctx = crypto_aead_ctx(aead);
632 struct device *jrdev = ctx->jrdev;
638 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
639 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
642 memcpy(ctx->key, key, keylen);
645 * The last four bytes of the key material are used as the salt value
646 * in the nonce. Update the AES key length.
648 ctx->cdata.keylen = keylen - 4;
649 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
651 return rfc4543_set_sh_desc(aead);
/*
 * skcipher_setkey() - install a symmetric-cipher key and build the
 * encrypt/decrypt shared descriptors.
 *
 * For RFC3686 (CTR with nonce) the nonce is carved off the end of the
 * supplied key (keylen shrinks by CTR_RFC3686_NONCE_SIZE) and the IV is
 * loaded at CONTEXT1 offset 16 + nonce size.  The key is always inlined
 * into the shared descriptors, which are then synced to the device.
 * NOTE(review): interior lines (braces, ctr_mode branch, some trailing
 * call arguments, return) are missing from this extract; code kept
 * byte-identical.
 */
654 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
657 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
658 struct caam_skcipher_alg *alg =
659 container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
661 struct device *jrdev = ctx->jrdev;
662 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
665 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
666 OP_ALG_AAI_CTR_MOD128);
667 const bool is_rfc3686 = alg->caam.rfc3686;
670 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
671 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
674 * AES-CTR needs to load IV in CONTEXT1 reg
675 * at an offset of 128bits (16bytes)
676 * CONTEXT1[255:128] = IV
683 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
684 * | *key = {KEY, NONCE}
687 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
/* nonce is not part of the cipher key proper */
688 keylen -= CTR_RFC3686_NONCE_SIZE;
691 ctx->cdata.keylen = keylen;
692 ctx->cdata.key_virt = key;
693 ctx->cdata.key_inline = true;
695 /* skcipher_encrypt shared descriptor */
696 desc = ctx->sh_desc_enc;
697 cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
699 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
700 desc_bytes(desc), ctx->dir);
702 /* skcipher_decrypt shared descriptor */
703 desc = ctx->sh_desc_dec;
704 cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
706 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
707 desc_bytes(desc), ctx->dir);
/*
 * xts_skcipher_setkey() - install an AES-XTS key (two concatenated AES
 * keys, so only 2 * AES_MIN_KEY_SIZE or 2 * AES_MAX_KEY_SIZE are valid)
 * and build the XTS encrypt/decrypt shared descriptors with the key
 * inlined, syncing each to the device.
 * NOTE(review): interior lines (braces, error return) are missing from
 * this extract; code kept byte-identical.
 */
712 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
715 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
716 struct device *jrdev = ctx->jrdev;
719 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
720 crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
721 dev_err(jrdev, "key size mismatch\n");
725 ctx->cdata.keylen = keylen;
726 ctx->cdata.key_virt = key;
727 ctx->cdata.key_inline = true;
729 /* xts_skcipher_encrypt shared descriptor */
730 desc = ctx->sh_desc_enc;
731 cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
732 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
733 desc_bytes(desc), ctx->dir);
735 /* xts_skcipher_decrypt shared descriptor */
736 desc = ctx->sh_desc_dec;
737 cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
738 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
739 desc_bytes(desc), ctx->dir);
/*
 * Software-extended descriptors: per-request bookkeeping allocated in
 * *_edesc_alloc() and freed after the job completes.  Field lists below
 * are only partially visible in this extract.
 */
745 * aead_edesc - s/w-extended aead descriptor
746 * @src_nents: number of segments in input s/w scatterlist
747 * @dst_nents: number of segments in output s/w scatterlist
748 * @sec4_sg_bytes: length of dma mapped sec4_sg space
749 * @sec4_sg_dma: bus physical mapped address of h/w link table
750 * @sec4_sg: pointer to h/w link table
751 * @hw_desc: the h/w job descriptor followed by any referenced link tables
757 dma_addr_t sec4_sg_dma;
758 struct sec4_sg_entry *sec4_sg;
763 * skcipher_edesc - s/w-extended skcipher descriptor
764 * @src_nents: number of segments in input s/w scatterlist
765 * @dst_nents: number of segments in output s/w scatterlist
766 * @iv_dma: dma address of iv for checking continuity and link table
767 * @sec4_sg_bytes: length of dma mapped sec4_sg space
768 * @sec4_sg_dma: bus physical mapped address of h/w link table
769 * @sec4_sg: pointer to h/w link table
770 * @hw_desc: the h/w job descriptor followed by any referenced link tables
773 struct skcipher_edesc {
778 dma_addr_t sec4_sg_dma;
779 struct sec4_sg_entry *sec4_sg;
/*
 * caam_unmap() - common DMA teardown for a completed request: unmap the
 * src/dst scatterlists (bidirectionally when they alias, separately
 * otherwise), the IV buffer, and the sec4 S/G link table.
 * NOTE(review): the guarding if/else lines around these unmaps are not
 * visible in this extract; code kept byte-identical.
 */
783 static void caam_unmap(struct device *dev, struct scatterlist *src,
784 struct scatterlist *dst, int src_nents,
786 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
791 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
792 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
794 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
798 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
800 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
/*
 * aead_unmap() - release all DMA mappings of an AEAD request; AEADs carry
 * no separately-mapped IV, hence iv_dma/ivsize of 0.
 */
804 static void aead_unmap(struct device *dev,
805 struct aead_edesc *edesc,
806 struct aead_request *req)
808 caam_unmap(dev, req->src, req->dst,
809 edesc->src_nents, edesc->dst_nents, 0, 0,
810 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
/*
 * skcipher_unmap() - release all DMA mappings of an skcipher request,
 * including the separately-mapped IV buffer.
 */
813 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
814 struct skcipher_request *req)
816 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
817 int ivsize = crypto_skcipher_ivsize(skcipher);
819 caam_unmap(dev, req->src, req->dst,
820 edesc->src_nents, edesc->dst_nents,
821 edesc->iv_dma, ivsize,
822 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
/*
 * aead_encrypt_done() - job ring completion callback for AEAD encryption:
 * translate the hardware status, unmap the request, and complete it.
 * NOTE(review): interior lines (brace, err-check condition, kfree of the
 * edesc) are missing from this extract; code kept byte-identical.
 */
825 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
828 struct aead_request *req = context;
829 struct aead_edesc *edesc;
832 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
/* recover the edesc from the embedded hw job descriptor */
835 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
838 caam_jr_strstatus(jrdev, err);
840 aead_unmap(jrdev, edesc, req);
844 aead_request_complete(req, err);
/*
 * aead_decrypt_done() - job ring completion callback for AEAD decryption.
 * Like aead_encrypt_done(), but maps a hardware ICV-check failure onto
 * -EBADMSG so callers see a standard authentication error.
 * NOTE(review): interior lines are missing from this extract; code kept
 * byte-identical.
 */
847 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
850 struct aead_request *req = context;
851 struct aead_edesc *edesc;
854 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
857 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
860 caam_jr_strstatus(jrdev, err);
862 aead_unmap(jrdev, edesc, req);
865 * verify hw auth check passed else return -EBADMSG
867 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
872 aead_request_complete(req, err);
/*
 * skcipher_encrypt_done() - job ring completion callback for skcipher
 * encryption: unmap the request, copy the last ciphertext block back
 * into req->iv (crypto API chaining contract, used e.g. by CTS), and
 * complete the request.
 * NOTE(review): interior lines (brace, err check, kfree) are missing from
 * this extract; code kept byte-identical.
 */
875 static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
878 struct skcipher_request *req = context;
879 struct skcipher_edesc *edesc;
880 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
881 int ivsize = crypto_skcipher_ivsize(skcipher);
884 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
887 edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
890 caam_jr_strstatus(jrdev, err);
893 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
894 DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
895 edesc->src_nents > 1 ? 100 : ivsize, 1);
897 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
898 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
899 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
901 skcipher_unmap(jrdev, edesc, req);
904 * The crypto API expects us to set the IV (req->iv) to the last
905 * ciphertext block. This is used e.g. by the CTS mode.
907 scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen - ivsize,
912 skcipher_request_complete(req, err);
/*
 * skcipher_decrypt_done() - job ring completion callback for skcipher
 * decryption: unmap the request and complete it.  Unlike the encrypt
 * path, no last-block IV copy-back is visible here.
 * NOTE(review): interior lines (brace, err check, kfree) are missing from
 * this extract; code kept byte-identical.
 */
915 static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
918 struct skcipher_request *req = context;
919 struct skcipher_edesc *edesc;
921 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
922 int ivsize = crypto_skcipher_ivsize(skcipher);
924 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
927 edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
929 caam_jr_strstatus(jrdev, err);
932 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
933 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
935 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
936 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
937 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
939 skcipher_unmap(jrdev, edesc, req);
942 skcipher_request_complete(req, err);
946 * Fill in aead job descriptor
/*
 * init_aead_job() - build the per-request job descriptor: header pointing
 * at the session's shared descriptor (encrypt or decrypt), then SEQ IN
 * and SEQ OUT pointers.  Contiguous single-segment buffers are referenced
 * directly; multi-segment ones go through the sec4 S/G table (LDST_SGF).
 * Encrypt output includes the authsize tag; decrypt output excludes it.
 * NOTE(review): interior lines (braces, else branches, in_options
 * initialization) are missing from this extract; code kept byte-identical.
 */
948 static void init_aead_job(struct aead_request *req,
949 struct aead_edesc *edesc,
950 bool all_contig, bool encrypt)
952 struct crypto_aead *aead = crypto_aead_reqtfm(req);
953 struct caam_ctx *ctx = crypto_aead_ctx(aead);
954 int authsize = ctx->authsize;
955 u32 *desc = edesc->hw_desc;
956 u32 out_options, in_options;
957 dma_addr_t dst_dma, src_dma;
958 int len, sec4_sg_index = 0;
962 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
963 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
965 len = desc_len(sh_desc);
966 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
969 src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
972 src_dma = edesc->sec4_sg_dma;
973 sec4_sg_index += edesc->src_nents;
974 in_options = LDST_SGF;
977 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
/* in-place operation: output shares the input mapping and options */
981 out_options = in_options;
983 if (unlikely(req->src != req->dst)) {
984 if (edesc->dst_nents == 1) {
985 dst_dma = sg_dma_address(req->dst);
/* dst entries follow the src entries in the shared S/G table */
987 dst_dma = edesc->sec4_sg_dma +
989 sizeof(struct sec4_sg_entry);
990 out_options = LDST_SGF;
995 append_seq_out_ptr(desc, dst_dma,
996 req->assoclen + req->cryptlen + authsize,
999 append_seq_out_ptr(desc, dst_dma,
1000 req->assoclen + req->cryptlen - authsize,
/*
 * init_gcm_job() - extend the generic AEAD job descriptor for GCM:
 * load assoclen into REG3, then FIFO-load the IV as an immediate.
 * For zero-length generic-GCM encrypt input the IV load is flagged
 * LAST1 so the CCB flushes.  The in-source comment already flags the
 * generic-GCM-only condition as questionable; left untouched here.
 * NOTE(review): interior lines (brace, else path appending salt vs. IV)
 * are missing from this extract; code kept byte-identical.
 */
1004 static void init_gcm_job(struct aead_request *req,
1005 struct aead_edesc *edesc,
1006 bool all_contig, bool encrypt)
1008 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1009 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1010 unsigned int ivsize = crypto_aead_ivsize(aead);
1011 u32 *desc = edesc->hw_desc;
1012 bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
1015 init_aead_job(req, edesc, all_contig, encrypt);
1016 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1018 /* BUG This should not be specific to generic GCM. */
1020 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
1021 last = FIFOLD_TYPE_LAST1;
1024 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
1025 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
/* salt stored after the AES key (rfc4106/4543), then the request IV */
1028 append_data(desc, ctx->key + ctx->cdata.keylen, 4);
1030 append_data(desc, req->iv, ivsize);
1031 /* End of blank commands */
/*
 * init_authenc_job() - extend the generic AEAD job descriptor for
 * authenc algorithms: publish assoclen (REG3 pre-era-3, DPOVRD after),
 * and load the IV into CONTEXT1 — at offset 16 + nonce size for
 * RFC3686/CTR — when the template requires it.
 * NOTE(review): interior lines (brace, ivoffset default/branch) are
 * missing from this extract; code kept byte-identical.
 */
1034 static void init_authenc_job(struct aead_request *req,
1035 struct aead_edesc *edesc,
1036 bool all_contig, bool encrypt)
1038 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1039 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
1040 struct caam_aead_alg, aead);
1041 unsigned int ivsize = crypto_aead_ivsize(aead);
1042 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1043 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
1044 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
1045 OP_ALG_AAI_CTR_MOD128);
1046 const bool is_rfc3686 = alg->caam.rfc3686;
1047 u32 *desc = edesc->hw_desc;
1051 * AES-CTR needs to load IV in CONTEXT1 reg
1052 * at an offset of 128bits (16bytes)
1053 * CONTEXT1[255:128] = IV
1060 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1063 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
1065 init_aead_job(req, edesc, all_contig, encrypt);
1068 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
1069 * having DPOVRD as destination.
1071 if (ctrlpriv->era < 3)
1072 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1074 append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);
1076 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
1077 append_load_as_imm(desc, req->iv, ivsize,
1079 LDST_SRCDST_BYTE_CONTEXT |
1080 (ivoffset << LDST_OFFSET_SHIFT));
1084 * Fill in skcipher job descriptor
/*
 * init_skcipher_job() - build the per-request skcipher job descriptor.
 * Input always goes through the sec4 S/G table (IV entry first, hence
 * cryptlen + ivsize).  In-place requests reuse the table past the IV
 * entry for output; out-of-place output is either a direct single-segment
 * address or the table region after the src entries.
 * NOTE(review): interior lines (braces, encrypt parameter in the
 * signature, in_options) are missing from this extract; code kept
 * byte-identical.
 */
1086 static void init_skcipher_job(struct skcipher_request *req,
1087 struct skcipher_edesc *edesc,
1090 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1091 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1092 int ivsize = crypto_skcipher_ivsize(skcipher);
1093 u32 *desc = edesc->hw_desc;
1095 u32 out_options = 0;
1096 dma_addr_t dst_dma, ptr;
1100 print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
1101 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
1102 pr_err("asked=%d, cryptlen%d\n",
1103 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
1105 caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__)": ",
1106 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1107 edesc->src_nents > 1 ? 100 : req->cryptlen, 1);
1109 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1110 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1112 len = desc_len(sh_desc);
1113 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1115 append_seq_in_ptr(desc, edesc->sec4_sg_dma, req->cryptlen + ivsize,
1118 if (likely(req->src == req->dst)) {
/* skip the IV entry at the head of the S/G table */
1119 dst_dma = edesc->sec4_sg_dma + sizeof(struct sec4_sg_entry);
1120 out_options = LDST_SGF;
1122 if (edesc->dst_nents == 1) {
1123 dst_dma = sg_dma_address(req->dst);
1125 dst_dma = edesc->sec4_sg_dma + (edesc->src_nents + 1) *
1126 sizeof(struct sec4_sg_entry);
1127 out_options = LDST_SGF;
1130 append_seq_out_ptr(desc, dst_dma, req->cryptlen, out_options);
1134 * allocate and map the aead extended descriptor
/*
 * aead_edesc_alloc() - allocate the s/w-extended descriptor for an AEAD
 * request and DMA-map everything it needs.
 *
 * Counts src/dst scatterlist segments (out-of-place dst length differs by
 * +authsize on encrypt, -authsize on decrypt), maps the scatterlists
 * (bidirectional when src == dst), allocates the edesc plus hw descriptor
 * and sec4 S/G space in one kzalloc, fills the S/G table for any
 * multi-segment side, and maps the table.  Returns ERR_PTR on any
 * failure, unwinding prior mappings.  *all_contig_ptr reports whether the
 * source is a single mapped segment.
 * NOTE(review): interior lines (braces, gfp flags in kzalloc, zero-length
 * src handling details) are missing from this extract; code kept
 * byte-identical.
 */
1136 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1137 int desc_bytes, bool *all_contig_ptr,
1140 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1141 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1142 struct device *jrdev = ctx->jrdev;
1143 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1144 GFP_KERNEL : GFP_ATOMIC;
1145 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1146 struct aead_edesc *edesc;
1147 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
1148 unsigned int authsize = ctx->authsize;
1150 if (unlikely(req->dst != req->src)) {
1151 src_nents = sg_nents_for_len(req->src, req->assoclen +
1153 if (unlikely(src_nents < 0)) {
1154 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1155 req->assoclen + req->cryptlen);
1156 return ERR_PTR(src_nents);
/* dst must also hold the appended (encrypt) / consumed (decrypt) tag */
1159 dst_nents = sg_nents_for_len(req->dst, req->assoclen +
1161 (encrypt ? authsize :
1163 if (unlikely(dst_nents < 0)) {
1164 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1165 req->assoclen + req->cryptlen +
1166 (encrypt ? authsize : (-authsize)));
1167 return ERR_PTR(dst_nents);
1170 src_nents = sg_nents_for_len(req->src, req->assoclen +
1172 (encrypt ? authsize : 0));
1173 if (unlikely(src_nents < 0)) {
1174 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1175 req->assoclen + req->cryptlen +
1176 (encrypt ? authsize : 0));
1177 return ERR_PTR(src_nents);
1181 if (likely(req->src == req->dst)) {
1182 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1184 if (unlikely(!mapped_src_nents)) {
1185 dev_err(jrdev, "unable to map source\n");
1186 return ERR_PTR(-ENOMEM);
1189 /* Cover also the case of null (zero length) input data */
1191 mapped_src_nents = dma_map_sg(jrdev, req->src,
1192 src_nents, DMA_TO_DEVICE);
1193 if (unlikely(!mapped_src_nents)) {
1194 dev_err(jrdev, "unable to map source\n");
1195 return ERR_PTR(-ENOMEM);
1198 mapped_src_nents = 0;
1201 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1203 if (unlikely(!mapped_dst_nents)) {
1204 dev_err(jrdev, "unable to map destination\n");
1205 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1206 return ERR_PTR(-ENOMEM);
/* S/G table only needed for sides with more than one mapped segment */
1210 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
1211 sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1212 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1214 /* allocate space for base edesc and hw desc commands, link tables */
1215 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1218 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1220 return ERR_PTR(-ENOMEM);
1223 edesc->src_nents = src_nents;
1224 edesc->dst_nents = dst_nents;
1225 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
1227 *all_contig_ptr = !(mapped_src_nents > 1);
1230 if (mapped_src_nents > 1) {
1231 sg_to_sec4_sg_last(req->src, mapped_src_nents,
1232 edesc->sec4_sg + sec4_sg_index, 0);
1233 sec4_sg_index += mapped_src_nents;
1235 if (mapped_dst_nents > 1) {
1236 sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1237 edesc->sec4_sg + sec4_sg_index, 0);
1243 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1244 sec4_sg_bytes, DMA_TO_DEVICE);
1245 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1246 dev_err(jrdev, "unable to map S/G table\n");
1247 aead_unmap(jrdev, edesc, req);
1249 return ERR_PTR(-ENOMEM);
1252 edesc->sec4_sg_bytes = sec4_sg_bytes;
/*
 * gcm_encrypt() - allocate the extended descriptor, build the GCM job
 * descriptor, and enqueue it on the job ring; on submission failure the
 * request is unmapped here rather than in the completion callback.
 * NOTE(review): interior lines (brace, variable declarations, ret
 * handling, kfree, return) are missing from this extract; code kept
 * byte-identical.
 */
1257 static int gcm_encrypt(struct aead_request *req)
1259 struct aead_edesc *edesc;
1260 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1261 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1262 struct device *jrdev = ctx->jrdev;
1267 /* allocate extended descriptor */
1268 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
1270 return PTR_ERR(edesc);
1272 /* Create and submit job descriptor */
1273 init_gcm_job(req, edesc, all_contig, true);
1275 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1276 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1277 desc_bytes(edesc->hw_desc), 1);
1280 desc = edesc->hw_desc;
1281 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1285 aead_unmap(jrdev, edesc, req);
1292 static int ipsec_gcm_encrypt(struct aead_request *req)
1294 if (req->assoclen < 8)
1297 return gcm_encrypt(req);
1300 static int aead_encrypt(struct aead_request *req)
1302 struct aead_edesc *edesc;
1303 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1304 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1305 struct device *jrdev = ctx->jrdev;
1310 /* allocate extended descriptor */
1311 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1314 return PTR_ERR(edesc);
1316 /* Create and submit job descriptor */
1317 init_authenc_job(req, edesc, all_contig, true);
1319 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1320 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1321 desc_bytes(edesc->hw_desc), 1);
1324 desc = edesc->hw_desc;
1325 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1329 aead_unmap(jrdev, edesc, req);
1336 static int gcm_decrypt(struct aead_request *req)
1338 struct aead_edesc *edesc;
1339 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1340 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1341 struct device *jrdev = ctx->jrdev;
1346 /* allocate extended descriptor */
1347 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
1349 return PTR_ERR(edesc);
1351 /* Create and submit job descriptor*/
1352 init_gcm_job(req, edesc, all_contig, false);
1354 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1355 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1356 desc_bytes(edesc->hw_desc), 1);
1359 desc = edesc->hw_desc;
1360 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1364 aead_unmap(jrdev, edesc, req);
1371 static int ipsec_gcm_decrypt(struct aead_request *req)
1373 if (req->assoclen < 8)
1376 return gcm_decrypt(req);
1379 static int aead_decrypt(struct aead_request *req)
1381 struct aead_edesc *edesc;
1382 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1383 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1384 struct device *jrdev = ctx->jrdev;
1389 caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
1390 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1391 req->assoclen + req->cryptlen, 1);
1393 /* allocate extended descriptor */
1394 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1395 &all_contig, false);
1397 return PTR_ERR(edesc);
1399 /* Create and submit job descriptor*/
1400 init_authenc_job(req, edesc, all_contig, false);
1402 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1403 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1404 desc_bytes(edesc->hw_desc), 1);
1407 desc = edesc->hw_desc;
1408 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1412 aead_unmap(jrdev, edesc, req);
1420 * allocate and map the skcipher extended descriptor for skcipher
1422 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1425 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1426 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1427 struct device *jrdev = ctx->jrdev;
1428 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1429 GFP_KERNEL : GFP_ATOMIC;
1430 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1431 struct skcipher_edesc *edesc;
1434 int ivsize = crypto_skcipher_ivsize(skcipher);
1435 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
1437 src_nents = sg_nents_for_len(req->src, req->cryptlen);
1438 if (unlikely(src_nents < 0)) {
1439 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1441 return ERR_PTR(src_nents);
1444 if (req->dst != req->src) {
1445 dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
1446 if (unlikely(dst_nents < 0)) {
1447 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1449 return ERR_PTR(dst_nents);
1453 if (likely(req->src == req->dst)) {
1454 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1456 if (unlikely(!mapped_src_nents)) {
1457 dev_err(jrdev, "unable to map source\n");
1458 return ERR_PTR(-ENOMEM);
1461 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1463 if (unlikely(!mapped_src_nents)) {
1464 dev_err(jrdev, "unable to map source\n");
1465 return ERR_PTR(-ENOMEM);
1468 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1470 if (unlikely(!mapped_dst_nents)) {
1471 dev_err(jrdev, "unable to map destination\n");
1472 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1473 return ERR_PTR(-ENOMEM);
1477 sec4_sg_ents = 1 + mapped_src_nents;
1478 dst_sg_idx = sec4_sg_ents;
1479 sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1480 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1483 * allocate space for base edesc and hw desc commands, link tables, IV
1485 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
1488 dev_err(jrdev, "could not allocate extended descriptor\n");
1489 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1491 return ERR_PTR(-ENOMEM);
1494 edesc->src_nents = src_nents;
1495 edesc->dst_nents = dst_nents;
1496 edesc->sec4_sg_bytes = sec4_sg_bytes;
1497 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1500 /* Make sure IV is located in a DMAable area */
1501 iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
1502 memcpy(iv, req->iv, ivsize);
1504 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE);
1505 if (dma_mapping_error(jrdev, iv_dma)) {
1506 dev_err(jrdev, "unable to map IV\n");
1507 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1510 return ERR_PTR(-ENOMEM);
1513 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1514 sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg + 1, 0);
1516 if (mapped_dst_nents > 1) {
1517 sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1518 edesc->sec4_sg + dst_sg_idx, 0);
1521 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1522 sec4_sg_bytes, DMA_TO_DEVICE);
1523 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1524 dev_err(jrdev, "unable to map S/G table\n");
1525 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1526 iv_dma, ivsize, 0, 0);
1528 return ERR_PTR(-ENOMEM);
1531 edesc->iv_dma = iv_dma;
1534 print_hex_dump(KERN_ERR, "skcipher sec4_sg@" __stringify(__LINE__)": ",
1535 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1542 static int skcipher_encrypt(struct skcipher_request *req)
1544 struct skcipher_edesc *edesc;
1545 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1546 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1547 struct device *jrdev = ctx->jrdev;
1551 /* allocate extended descriptor */
1552 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1554 return PTR_ERR(edesc);
1556 /* Create and submit job descriptor*/
1557 init_skcipher_job(req, edesc, true);
1559 print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
1560 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1561 desc_bytes(edesc->hw_desc), 1);
1563 desc = edesc->hw_desc;
1564 ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req);
1569 skcipher_unmap(jrdev, edesc, req);
1576 static int skcipher_decrypt(struct skcipher_request *req)
1578 struct skcipher_edesc *edesc;
1579 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1580 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1581 int ivsize = crypto_skcipher_ivsize(skcipher);
1582 struct device *jrdev = ctx->jrdev;
1586 /* allocate extended descriptor */
1587 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1589 return PTR_ERR(edesc);
1592 * The crypto API expects us to set the IV (req->iv) to the last
1595 scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen - ivsize,
1598 /* Create and submit job descriptor*/
1599 init_skcipher_job(req, edesc, false);
1600 desc = edesc->hw_desc;
1602 print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
1603 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1604 desc_bytes(edesc->hw_desc), 1);
1607 ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req);
1611 skcipher_unmap(jrdev, edesc, req);
1618 static struct caam_skcipher_alg driver_algs[] = {
1622 .cra_name = "cbc(aes)",
1623 .cra_driver_name = "cbc-aes-caam",
1624 .cra_blocksize = AES_BLOCK_SIZE,
1626 .setkey = skcipher_setkey,
1627 .encrypt = skcipher_encrypt,
1628 .decrypt = skcipher_decrypt,
1629 .min_keysize = AES_MIN_KEY_SIZE,
1630 .max_keysize = AES_MAX_KEY_SIZE,
1631 .ivsize = AES_BLOCK_SIZE,
1633 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1638 .cra_name = "cbc(des3_ede)",
1639 .cra_driver_name = "cbc-3des-caam",
1640 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1642 .setkey = skcipher_setkey,
1643 .encrypt = skcipher_encrypt,
1644 .decrypt = skcipher_decrypt,
1645 .min_keysize = DES3_EDE_KEY_SIZE,
1646 .max_keysize = DES3_EDE_KEY_SIZE,
1647 .ivsize = DES3_EDE_BLOCK_SIZE,
1649 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1654 .cra_name = "cbc(des)",
1655 .cra_driver_name = "cbc-des-caam",
1656 .cra_blocksize = DES_BLOCK_SIZE,
1658 .setkey = skcipher_setkey,
1659 .encrypt = skcipher_encrypt,
1660 .decrypt = skcipher_decrypt,
1661 .min_keysize = DES_KEY_SIZE,
1662 .max_keysize = DES_KEY_SIZE,
1663 .ivsize = DES_BLOCK_SIZE,
1665 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
1670 .cra_name = "ctr(aes)",
1671 .cra_driver_name = "ctr-aes-caam",
1674 .setkey = skcipher_setkey,
1675 .encrypt = skcipher_encrypt,
1676 .decrypt = skcipher_decrypt,
1677 .min_keysize = AES_MIN_KEY_SIZE,
1678 .max_keysize = AES_MAX_KEY_SIZE,
1679 .ivsize = AES_BLOCK_SIZE,
1680 .chunksize = AES_BLOCK_SIZE,
1682 .caam.class1_alg_type = OP_ALG_ALGSEL_AES |
1683 OP_ALG_AAI_CTR_MOD128,
1688 .cra_name = "rfc3686(ctr(aes))",
1689 .cra_driver_name = "rfc3686-ctr-aes-caam",
1692 .setkey = skcipher_setkey,
1693 .encrypt = skcipher_encrypt,
1694 .decrypt = skcipher_decrypt,
1695 .min_keysize = AES_MIN_KEY_SIZE +
1696 CTR_RFC3686_NONCE_SIZE,
1697 .max_keysize = AES_MAX_KEY_SIZE +
1698 CTR_RFC3686_NONCE_SIZE,
1699 .ivsize = CTR_RFC3686_IV_SIZE,
1700 .chunksize = AES_BLOCK_SIZE,
1703 .class1_alg_type = OP_ALG_ALGSEL_AES |
1704 OP_ALG_AAI_CTR_MOD128,
1711 .cra_name = "xts(aes)",
1712 .cra_driver_name = "xts-aes-caam",
1713 .cra_blocksize = AES_BLOCK_SIZE,
1715 .setkey = xts_skcipher_setkey,
1716 .encrypt = skcipher_encrypt,
1717 .decrypt = skcipher_decrypt,
1718 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1719 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1720 .ivsize = AES_BLOCK_SIZE,
1722 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
1726 static struct caam_aead_alg driver_aeads[] = {
1730 .cra_name = "rfc4106(gcm(aes))",
1731 .cra_driver_name = "rfc4106-gcm-aes-caam",
1734 .setkey = rfc4106_setkey,
1735 .setauthsize = rfc4106_setauthsize,
1736 .encrypt = ipsec_gcm_encrypt,
1737 .decrypt = ipsec_gcm_decrypt,
1738 .ivsize = GCM_RFC4106_IV_SIZE,
1739 .maxauthsize = AES_BLOCK_SIZE,
1742 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1748 .cra_name = "rfc4543(gcm(aes))",
1749 .cra_driver_name = "rfc4543-gcm-aes-caam",
1752 .setkey = rfc4543_setkey,
1753 .setauthsize = rfc4543_setauthsize,
1754 .encrypt = ipsec_gcm_encrypt,
1755 .decrypt = ipsec_gcm_decrypt,
1756 .ivsize = GCM_RFC4543_IV_SIZE,
1757 .maxauthsize = AES_BLOCK_SIZE,
1760 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1763 /* Galois Counter Mode */
1767 .cra_name = "gcm(aes)",
1768 .cra_driver_name = "gcm-aes-caam",
1771 .setkey = gcm_setkey,
1772 .setauthsize = gcm_setauthsize,
1773 .encrypt = gcm_encrypt,
1774 .decrypt = gcm_decrypt,
1775 .ivsize = GCM_AES_IV_SIZE,
1776 .maxauthsize = AES_BLOCK_SIZE,
1779 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1782 /* single-pass ipsec_esp descriptor */
1786 .cra_name = "authenc(hmac(md5),"
1787 "ecb(cipher_null))",
1788 .cra_driver_name = "authenc-hmac-md5-"
1789 "ecb-cipher_null-caam",
1790 .cra_blocksize = NULL_BLOCK_SIZE,
1792 .setkey = aead_setkey,
1793 .setauthsize = aead_setauthsize,
1794 .encrypt = aead_encrypt,
1795 .decrypt = aead_decrypt,
1796 .ivsize = NULL_IV_SIZE,
1797 .maxauthsize = MD5_DIGEST_SIZE,
1800 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
1801 OP_ALG_AAI_HMAC_PRECOMP,
1807 .cra_name = "authenc(hmac(sha1),"
1808 "ecb(cipher_null))",
1809 .cra_driver_name = "authenc-hmac-sha1-"
1810 "ecb-cipher_null-caam",
1811 .cra_blocksize = NULL_BLOCK_SIZE,
1813 .setkey = aead_setkey,
1814 .setauthsize = aead_setauthsize,
1815 .encrypt = aead_encrypt,
1816 .decrypt = aead_decrypt,
1817 .ivsize = NULL_IV_SIZE,
1818 .maxauthsize = SHA1_DIGEST_SIZE,
1821 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1822 OP_ALG_AAI_HMAC_PRECOMP,
1828 .cra_name = "authenc(hmac(sha224),"
1829 "ecb(cipher_null))",
1830 .cra_driver_name = "authenc-hmac-sha224-"
1831 "ecb-cipher_null-caam",
1832 .cra_blocksize = NULL_BLOCK_SIZE,
1834 .setkey = aead_setkey,
1835 .setauthsize = aead_setauthsize,
1836 .encrypt = aead_encrypt,
1837 .decrypt = aead_decrypt,
1838 .ivsize = NULL_IV_SIZE,
1839 .maxauthsize = SHA224_DIGEST_SIZE,
1842 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
1843 OP_ALG_AAI_HMAC_PRECOMP,
1849 .cra_name = "authenc(hmac(sha256),"
1850 "ecb(cipher_null))",
1851 .cra_driver_name = "authenc-hmac-sha256-"
1852 "ecb-cipher_null-caam",
1853 .cra_blocksize = NULL_BLOCK_SIZE,
1855 .setkey = aead_setkey,
1856 .setauthsize = aead_setauthsize,
1857 .encrypt = aead_encrypt,
1858 .decrypt = aead_decrypt,
1859 .ivsize = NULL_IV_SIZE,
1860 .maxauthsize = SHA256_DIGEST_SIZE,
1863 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
1864 OP_ALG_AAI_HMAC_PRECOMP,
1870 .cra_name = "authenc(hmac(sha384),"
1871 "ecb(cipher_null))",
1872 .cra_driver_name = "authenc-hmac-sha384-"
1873 "ecb-cipher_null-caam",
1874 .cra_blocksize = NULL_BLOCK_SIZE,
1876 .setkey = aead_setkey,
1877 .setauthsize = aead_setauthsize,
1878 .encrypt = aead_encrypt,
1879 .decrypt = aead_decrypt,
1880 .ivsize = NULL_IV_SIZE,
1881 .maxauthsize = SHA384_DIGEST_SIZE,
1884 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
1885 OP_ALG_AAI_HMAC_PRECOMP,
1891 .cra_name = "authenc(hmac(sha512),"
1892 "ecb(cipher_null))",
1893 .cra_driver_name = "authenc-hmac-sha512-"
1894 "ecb-cipher_null-caam",
1895 .cra_blocksize = NULL_BLOCK_SIZE,
1897 .setkey = aead_setkey,
1898 .setauthsize = aead_setauthsize,
1899 .encrypt = aead_encrypt,
1900 .decrypt = aead_decrypt,
1901 .ivsize = NULL_IV_SIZE,
1902 .maxauthsize = SHA512_DIGEST_SIZE,
1905 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
1906 OP_ALG_AAI_HMAC_PRECOMP,
1912 .cra_name = "authenc(hmac(md5),cbc(aes))",
1913 .cra_driver_name = "authenc-hmac-md5-"
1915 .cra_blocksize = AES_BLOCK_SIZE,
1917 .setkey = aead_setkey,
1918 .setauthsize = aead_setauthsize,
1919 .encrypt = aead_encrypt,
1920 .decrypt = aead_decrypt,
1921 .ivsize = AES_BLOCK_SIZE,
1922 .maxauthsize = MD5_DIGEST_SIZE,
1925 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1926 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
1927 OP_ALG_AAI_HMAC_PRECOMP,
1933 .cra_name = "echainiv(authenc(hmac(md5),"
1935 .cra_driver_name = "echainiv-authenc-hmac-md5-"
1937 .cra_blocksize = AES_BLOCK_SIZE,
1939 .setkey = aead_setkey,
1940 .setauthsize = aead_setauthsize,
1941 .encrypt = aead_encrypt,
1942 .decrypt = aead_decrypt,
1943 .ivsize = AES_BLOCK_SIZE,
1944 .maxauthsize = MD5_DIGEST_SIZE,
1947 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1948 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
1949 OP_ALG_AAI_HMAC_PRECOMP,
1956 .cra_name = "authenc(hmac(sha1),cbc(aes))",
1957 .cra_driver_name = "authenc-hmac-sha1-"
1959 .cra_blocksize = AES_BLOCK_SIZE,
1961 .setkey = aead_setkey,
1962 .setauthsize = aead_setauthsize,
1963 .encrypt = aead_encrypt,
1964 .decrypt = aead_decrypt,
1965 .ivsize = AES_BLOCK_SIZE,
1966 .maxauthsize = SHA1_DIGEST_SIZE,
1969 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1970 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1971 OP_ALG_AAI_HMAC_PRECOMP,
1977 .cra_name = "echainiv(authenc(hmac(sha1),"
1979 .cra_driver_name = "echainiv-authenc-"
1980 "hmac-sha1-cbc-aes-caam",
1981 .cra_blocksize = AES_BLOCK_SIZE,
1983 .setkey = aead_setkey,
1984 .setauthsize = aead_setauthsize,
1985 .encrypt = aead_encrypt,
1986 .decrypt = aead_decrypt,
1987 .ivsize = AES_BLOCK_SIZE,
1988 .maxauthsize = SHA1_DIGEST_SIZE,
1991 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1992 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1993 OP_ALG_AAI_HMAC_PRECOMP,
2000 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2001 .cra_driver_name = "authenc-hmac-sha224-"
2003 .cra_blocksize = AES_BLOCK_SIZE,
2005 .setkey = aead_setkey,
2006 .setauthsize = aead_setauthsize,
2007 .encrypt = aead_encrypt,
2008 .decrypt = aead_decrypt,
2009 .ivsize = AES_BLOCK_SIZE,
2010 .maxauthsize = SHA224_DIGEST_SIZE,
2013 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2014 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2015 OP_ALG_AAI_HMAC_PRECOMP,
2021 .cra_name = "echainiv(authenc(hmac(sha224),"
2023 .cra_driver_name = "echainiv-authenc-"
2024 "hmac-sha224-cbc-aes-caam",
2025 .cra_blocksize = AES_BLOCK_SIZE,
2027 .setkey = aead_setkey,
2028 .setauthsize = aead_setauthsize,
2029 .encrypt = aead_encrypt,
2030 .decrypt = aead_decrypt,
2031 .ivsize = AES_BLOCK_SIZE,
2032 .maxauthsize = SHA224_DIGEST_SIZE,
2035 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2036 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2037 OP_ALG_AAI_HMAC_PRECOMP,
2044 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2045 .cra_driver_name = "authenc-hmac-sha256-"
2047 .cra_blocksize = AES_BLOCK_SIZE,
2049 .setkey = aead_setkey,
2050 .setauthsize = aead_setauthsize,
2051 .encrypt = aead_encrypt,
2052 .decrypt = aead_decrypt,
2053 .ivsize = AES_BLOCK_SIZE,
2054 .maxauthsize = SHA256_DIGEST_SIZE,
2057 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2058 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2059 OP_ALG_AAI_HMAC_PRECOMP,
2065 .cra_name = "echainiv(authenc(hmac(sha256),"
2067 .cra_driver_name = "echainiv-authenc-"
2068 "hmac-sha256-cbc-aes-caam",
2069 .cra_blocksize = AES_BLOCK_SIZE,
2071 .setkey = aead_setkey,
2072 .setauthsize = aead_setauthsize,
2073 .encrypt = aead_encrypt,
2074 .decrypt = aead_decrypt,
2075 .ivsize = AES_BLOCK_SIZE,
2076 .maxauthsize = SHA256_DIGEST_SIZE,
2079 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2080 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2081 OP_ALG_AAI_HMAC_PRECOMP,
2088 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2089 .cra_driver_name = "authenc-hmac-sha384-"
2091 .cra_blocksize = AES_BLOCK_SIZE,
2093 .setkey = aead_setkey,
2094 .setauthsize = aead_setauthsize,
2095 .encrypt = aead_encrypt,
2096 .decrypt = aead_decrypt,
2097 .ivsize = AES_BLOCK_SIZE,
2098 .maxauthsize = SHA384_DIGEST_SIZE,
2101 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2102 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2103 OP_ALG_AAI_HMAC_PRECOMP,
2109 .cra_name = "echainiv(authenc(hmac(sha384),"
2111 .cra_driver_name = "echainiv-authenc-"
2112 "hmac-sha384-cbc-aes-caam",
2113 .cra_blocksize = AES_BLOCK_SIZE,
2115 .setkey = aead_setkey,
2116 .setauthsize = aead_setauthsize,
2117 .encrypt = aead_encrypt,
2118 .decrypt = aead_decrypt,
2119 .ivsize = AES_BLOCK_SIZE,
2120 .maxauthsize = SHA384_DIGEST_SIZE,
2123 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2124 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2125 OP_ALG_AAI_HMAC_PRECOMP,
2132 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2133 .cra_driver_name = "authenc-hmac-sha512-"
2135 .cra_blocksize = AES_BLOCK_SIZE,
2137 .setkey = aead_setkey,
2138 .setauthsize = aead_setauthsize,
2139 .encrypt = aead_encrypt,
2140 .decrypt = aead_decrypt,
2141 .ivsize = AES_BLOCK_SIZE,
2142 .maxauthsize = SHA512_DIGEST_SIZE,
2145 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2146 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2147 OP_ALG_AAI_HMAC_PRECOMP,
2153 .cra_name = "echainiv(authenc(hmac(sha512),"
2155 .cra_driver_name = "echainiv-authenc-"
2156 "hmac-sha512-cbc-aes-caam",
2157 .cra_blocksize = AES_BLOCK_SIZE,
2159 .setkey = aead_setkey,
2160 .setauthsize = aead_setauthsize,
2161 .encrypt = aead_encrypt,
2162 .decrypt = aead_decrypt,
2163 .ivsize = AES_BLOCK_SIZE,
2164 .maxauthsize = SHA512_DIGEST_SIZE,
2167 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2168 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2169 OP_ALG_AAI_HMAC_PRECOMP,
2176 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2177 .cra_driver_name = "authenc-hmac-md5-"
2178 "cbc-des3_ede-caam",
2179 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2181 .setkey = aead_setkey,
2182 .setauthsize = aead_setauthsize,
2183 .encrypt = aead_encrypt,
2184 .decrypt = aead_decrypt,
2185 .ivsize = DES3_EDE_BLOCK_SIZE,
2186 .maxauthsize = MD5_DIGEST_SIZE,
2189 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2190 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2191 OP_ALG_AAI_HMAC_PRECOMP,
2197 .cra_name = "echainiv(authenc(hmac(md5),"
2199 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2200 "cbc-des3_ede-caam",
2201 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2203 .setkey = aead_setkey,
2204 .setauthsize = aead_setauthsize,
2205 .encrypt = aead_encrypt,
2206 .decrypt = aead_decrypt,
2207 .ivsize = DES3_EDE_BLOCK_SIZE,
2208 .maxauthsize = MD5_DIGEST_SIZE,
2211 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2212 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2213 OP_ALG_AAI_HMAC_PRECOMP,
2220 .cra_name = "authenc(hmac(sha1),"
2222 .cra_driver_name = "authenc-hmac-sha1-"
2223 "cbc-des3_ede-caam",
2224 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2226 .setkey = aead_setkey,
2227 .setauthsize = aead_setauthsize,
2228 .encrypt = aead_encrypt,
2229 .decrypt = aead_decrypt,
2230 .ivsize = DES3_EDE_BLOCK_SIZE,
2231 .maxauthsize = SHA1_DIGEST_SIZE,
2234 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2235 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2236 OP_ALG_AAI_HMAC_PRECOMP,
2242 .cra_name = "echainiv(authenc(hmac(sha1),"
2244 .cra_driver_name = "echainiv-authenc-"
2246 "cbc-des3_ede-caam",
2247 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2249 .setkey = aead_setkey,
2250 .setauthsize = aead_setauthsize,
2251 .encrypt = aead_encrypt,
2252 .decrypt = aead_decrypt,
2253 .ivsize = DES3_EDE_BLOCK_SIZE,
2254 .maxauthsize = SHA1_DIGEST_SIZE,
2257 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2258 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2259 OP_ALG_AAI_HMAC_PRECOMP,
2266 .cra_name = "authenc(hmac(sha224),"
2268 .cra_driver_name = "authenc-hmac-sha224-"
2269 "cbc-des3_ede-caam",
2270 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2272 .setkey = aead_setkey,
2273 .setauthsize = aead_setauthsize,
2274 .encrypt = aead_encrypt,
2275 .decrypt = aead_decrypt,
2276 .ivsize = DES3_EDE_BLOCK_SIZE,
2277 .maxauthsize = SHA224_DIGEST_SIZE,
2280 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2281 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2282 OP_ALG_AAI_HMAC_PRECOMP,
2288 .cra_name = "echainiv(authenc(hmac(sha224),"
2290 .cra_driver_name = "echainiv-authenc-"
2292 "cbc-des3_ede-caam",
2293 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2295 .setkey = aead_setkey,
2296 .setauthsize = aead_setauthsize,
2297 .encrypt = aead_encrypt,
2298 .decrypt = aead_decrypt,
2299 .ivsize = DES3_EDE_BLOCK_SIZE,
2300 .maxauthsize = SHA224_DIGEST_SIZE,
2303 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2304 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2305 OP_ALG_AAI_HMAC_PRECOMP,
2312 .cra_name = "authenc(hmac(sha256),"
2314 .cra_driver_name = "authenc-hmac-sha256-"
2315 "cbc-des3_ede-caam",
2316 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2318 .setkey = aead_setkey,
2319 .setauthsize = aead_setauthsize,
2320 .encrypt = aead_encrypt,
2321 .decrypt = aead_decrypt,
2322 .ivsize = DES3_EDE_BLOCK_SIZE,
2323 .maxauthsize = SHA256_DIGEST_SIZE,
2326 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2327 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2328 OP_ALG_AAI_HMAC_PRECOMP,
2334 .cra_name = "echainiv(authenc(hmac(sha256),"
2336 .cra_driver_name = "echainiv-authenc-"
2338 "cbc-des3_ede-caam",
2339 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2341 .setkey = aead_setkey,
2342 .setauthsize = aead_setauthsize,
2343 .encrypt = aead_encrypt,
2344 .decrypt = aead_decrypt,
2345 .ivsize = DES3_EDE_BLOCK_SIZE,
2346 .maxauthsize = SHA256_DIGEST_SIZE,
2349 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2350 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2351 OP_ALG_AAI_HMAC_PRECOMP,
2358 .cra_name = "authenc(hmac(sha384),"
2360 .cra_driver_name = "authenc-hmac-sha384-"
2361 "cbc-des3_ede-caam",
2362 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2364 .setkey = aead_setkey,
2365 .setauthsize = aead_setauthsize,
2366 .encrypt = aead_encrypt,
2367 .decrypt = aead_decrypt,
2368 .ivsize = DES3_EDE_BLOCK_SIZE,
2369 .maxauthsize = SHA384_DIGEST_SIZE,
2372 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2373 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2374 OP_ALG_AAI_HMAC_PRECOMP,
2380 .cra_name = "echainiv(authenc(hmac(sha384),"
2382 .cra_driver_name = "echainiv-authenc-"
2384 "cbc-des3_ede-caam",
2385 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2387 .setkey = aead_setkey,
2388 .setauthsize = aead_setauthsize,
2389 .encrypt = aead_encrypt,
2390 .decrypt = aead_decrypt,
2391 .ivsize = DES3_EDE_BLOCK_SIZE,
2392 .maxauthsize = SHA384_DIGEST_SIZE,
2395 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2396 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2397 OP_ALG_AAI_HMAC_PRECOMP,
2404 .cra_name = "authenc(hmac(sha512),"
2406 .cra_driver_name = "authenc-hmac-sha512-"
2407 "cbc-des3_ede-caam",
2408 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2410 .setkey = aead_setkey,
2411 .setauthsize = aead_setauthsize,
2412 .encrypt = aead_encrypt,
2413 .decrypt = aead_decrypt,
2414 .ivsize = DES3_EDE_BLOCK_SIZE,
2415 .maxauthsize = SHA512_DIGEST_SIZE,
2418 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2419 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2420 OP_ALG_AAI_HMAC_PRECOMP,
2426 .cra_name = "echainiv(authenc(hmac(sha512),"
2428 .cra_driver_name = "echainiv-authenc-"
2430 "cbc-des3_ede-caam",
2431 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2433 .setkey = aead_setkey,
2434 .setauthsize = aead_setauthsize,
2435 .encrypt = aead_encrypt,
2436 .decrypt = aead_decrypt,
2437 .ivsize = DES3_EDE_BLOCK_SIZE,
2438 .maxauthsize = SHA512_DIGEST_SIZE,
2441 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2442 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2443 OP_ALG_AAI_HMAC_PRECOMP,
2450 .cra_name = "authenc(hmac(md5),cbc(des))",
2451 .cra_driver_name = "authenc-hmac-md5-"
2453 .cra_blocksize = DES_BLOCK_SIZE,
2455 .setkey = aead_setkey,
2456 .setauthsize = aead_setauthsize,
2457 .encrypt = aead_encrypt,
2458 .decrypt = aead_decrypt,
2459 .ivsize = DES_BLOCK_SIZE,
2460 .maxauthsize = MD5_DIGEST_SIZE,
2463 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2464 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2465 OP_ALG_AAI_HMAC_PRECOMP,
2471 .cra_name = "echainiv(authenc(hmac(md5),"
2473 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2475 .cra_blocksize = DES_BLOCK_SIZE,
2477 .setkey = aead_setkey,
2478 .setauthsize = aead_setauthsize,
2479 .encrypt = aead_encrypt,
2480 .decrypt = aead_decrypt,
2481 .ivsize = DES_BLOCK_SIZE,
2482 .maxauthsize = MD5_DIGEST_SIZE,
2485 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2486 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2487 OP_ALG_AAI_HMAC_PRECOMP,
2494 .cra_name = "authenc(hmac(sha1),cbc(des))",
2495 .cra_driver_name = "authenc-hmac-sha1-"
2497 .cra_blocksize = DES_BLOCK_SIZE,
2499 .setkey = aead_setkey,
2500 .setauthsize = aead_setauthsize,
2501 .encrypt = aead_encrypt,
2502 .decrypt = aead_decrypt,
2503 .ivsize = DES_BLOCK_SIZE,
2504 .maxauthsize = SHA1_DIGEST_SIZE,
2507 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2508 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2509 OP_ALG_AAI_HMAC_PRECOMP,
2515 .cra_name = "echainiv(authenc(hmac(sha1),"
2517 .cra_driver_name = "echainiv-authenc-"
2518 "hmac-sha1-cbc-des-caam",
2519 .cra_blocksize = DES_BLOCK_SIZE,
2521 .setkey = aead_setkey,
2522 .setauthsize = aead_setauthsize,
2523 .encrypt = aead_encrypt,
2524 .decrypt = aead_decrypt,
2525 .ivsize = DES_BLOCK_SIZE,
2526 .maxauthsize = SHA1_DIGEST_SIZE,
2529 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2530 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2531 OP_ALG_AAI_HMAC_PRECOMP,
2538 .cra_name = "authenc(hmac(sha224),cbc(des))",
2539 .cra_driver_name = "authenc-hmac-sha224-"
2541 .cra_blocksize = DES_BLOCK_SIZE,
2543 .setkey = aead_setkey,
2544 .setauthsize = aead_setauthsize,
2545 .encrypt = aead_encrypt,
2546 .decrypt = aead_decrypt,
2547 .ivsize = DES_BLOCK_SIZE,
2548 .maxauthsize = SHA224_DIGEST_SIZE,
2551 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2552 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2553 OP_ALG_AAI_HMAC_PRECOMP,
2559 .cra_name = "echainiv(authenc(hmac(sha224),"
2561 .cra_driver_name = "echainiv-authenc-"
2562 "hmac-sha224-cbc-des-caam",
2563 .cra_blocksize = DES_BLOCK_SIZE,
2565 .setkey = aead_setkey,
2566 .setauthsize = aead_setauthsize,
2567 .encrypt = aead_encrypt,
2568 .decrypt = aead_decrypt,
2569 .ivsize = DES_BLOCK_SIZE,
2570 .maxauthsize = SHA224_DIGEST_SIZE,
2573 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2574 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2575 OP_ALG_AAI_HMAC_PRECOMP,
2582 .cra_name = "authenc(hmac(sha256),cbc(des))",
2583 .cra_driver_name = "authenc-hmac-sha256-"
2585 .cra_blocksize = DES_BLOCK_SIZE,
2587 .setkey = aead_setkey,
2588 .setauthsize = aead_setauthsize,
2589 .encrypt = aead_encrypt,
2590 .decrypt = aead_decrypt,
2591 .ivsize = DES_BLOCK_SIZE,
2592 .maxauthsize = SHA256_DIGEST_SIZE,
2595 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2596 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2597 OP_ALG_AAI_HMAC_PRECOMP,
2603 .cra_name = "echainiv(authenc(hmac(sha256),"
2605 .cra_driver_name = "echainiv-authenc-"
2606 "hmac-sha256-cbc-des-caam",
2607 .cra_blocksize = DES_BLOCK_SIZE,
2609 .setkey = aead_setkey,
2610 .setauthsize = aead_setauthsize,
2611 .encrypt = aead_encrypt,
2612 .decrypt = aead_decrypt,
2613 .ivsize = DES_BLOCK_SIZE,
2614 .maxauthsize = SHA256_DIGEST_SIZE,
2617 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2618 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2619 OP_ALG_AAI_HMAC_PRECOMP,
2626 .cra_name = "authenc(hmac(sha384),cbc(des))",
2627 .cra_driver_name = "authenc-hmac-sha384-"
2629 .cra_blocksize = DES_BLOCK_SIZE,
2631 .setkey = aead_setkey,
2632 .setauthsize = aead_setauthsize,
2633 .encrypt = aead_encrypt,
2634 .decrypt = aead_decrypt,
2635 .ivsize = DES_BLOCK_SIZE,
2636 .maxauthsize = SHA384_DIGEST_SIZE,
2639 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2640 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2641 OP_ALG_AAI_HMAC_PRECOMP,
2647 .cra_name = "echainiv(authenc(hmac(sha384),"
2649 .cra_driver_name = "echainiv-authenc-"
2650 "hmac-sha384-cbc-des-caam",
2651 .cra_blocksize = DES_BLOCK_SIZE,
2653 .setkey = aead_setkey,
2654 .setauthsize = aead_setauthsize,
2655 .encrypt = aead_encrypt,
2656 .decrypt = aead_decrypt,
2657 .ivsize = DES_BLOCK_SIZE,
2658 .maxauthsize = SHA384_DIGEST_SIZE,
2661 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2662 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2663 OP_ALG_AAI_HMAC_PRECOMP,
2670 .cra_name = "authenc(hmac(sha512),cbc(des))",
2671 .cra_driver_name = "authenc-hmac-sha512-"
2673 .cra_blocksize = DES_BLOCK_SIZE,
2675 .setkey = aead_setkey,
2676 .setauthsize = aead_setauthsize,
2677 .encrypt = aead_encrypt,
2678 .decrypt = aead_decrypt,
2679 .ivsize = DES_BLOCK_SIZE,
2680 .maxauthsize = SHA512_DIGEST_SIZE,
2683 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2684 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2685 OP_ALG_AAI_HMAC_PRECOMP,
2691 .cra_name = "echainiv(authenc(hmac(sha512),"
2693 .cra_driver_name = "echainiv-authenc-"
2694 "hmac-sha512-cbc-des-caam",
2695 .cra_blocksize = DES_BLOCK_SIZE,
2697 .setkey = aead_setkey,
2698 .setauthsize = aead_setauthsize,
2699 .encrypt = aead_encrypt,
2700 .decrypt = aead_decrypt,
2701 .ivsize = DES_BLOCK_SIZE,
2702 .maxauthsize = SHA512_DIGEST_SIZE,
2705 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2706 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2707 OP_ALG_AAI_HMAC_PRECOMP,
2714 .cra_name = "authenc(hmac(md5),"
2715 "rfc3686(ctr(aes)))",
2716 .cra_driver_name = "authenc-hmac-md5-"
2717 "rfc3686-ctr-aes-caam",
2720 .setkey = aead_setkey,
2721 .setauthsize = aead_setauthsize,
2722 .encrypt = aead_encrypt,
2723 .decrypt = aead_decrypt,
2724 .ivsize = CTR_RFC3686_IV_SIZE,
2725 .maxauthsize = MD5_DIGEST_SIZE,
2728 .class1_alg_type = OP_ALG_ALGSEL_AES |
2729 OP_ALG_AAI_CTR_MOD128,
2730 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2731 OP_ALG_AAI_HMAC_PRECOMP,
2738 .cra_name = "seqiv(authenc("
2739 "hmac(md5),rfc3686(ctr(aes))))",
2740 .cra_driver_name = "seqiv-authenc-hmac-md5-"
2741 "rfc3686-ctr-aes-caam",
2744 .setkey = aead_setkey,
2745 .setauthsize = aead_setauthsize,
2746 .encrypt = aead_encrypt,
2747 .decrypt = aead_decrypt,
2748 .ivsize = CTR_RFC3686_IV_SIZE,
2749 .maxauthsize = MD5_DIGEST_SIZE,
2752 .class1_alg_type = OP_ALG_ALGSEL_AES |
2753 OP_ALG_AAI_CTR_MOD128,
2754 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2755 OP_ALG_AAI_HMAC_PRECOMP,
2763 .cra_name = "authenc(hmac(sha1),"
2764 "rfc3686(ctr(aes)))",
2765 .cra_driver_name = "authenc-hmac-sha1-"
2766 "rfc3686-ctr-aes-caam",
2769 .setkey = aead_setkey,
2770 .setauthsize = aead_setauthsize,
2771 .encrypt = aead_encrypt,
2772 .decrypt = aead_decrypt,
2773 .ivsize = CTR_RFC3686_IV_SIZE,
2774 .maxauthsize = SHA1_DIGEST_SIZE,
2777 .class1_alg_type = OP_ALG_ALGSEL_AES |
2778 OP_ALG_AAI_CTR_MOD128,
2779 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2780 OP_ALG_AAI_HMAC_PRECOMP,
2787 .cra_name = "seqiv(authenc("
2788 "hmac(sha1),rfc3686(ctr(aes))))",
2789 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
2790 "rfc3686-ctr-aes-caam",
2793 .setkey = aead_setkey,
2794 .setauthsize = aead_setauthsize,
2795 .encrypt = aead_encrypt,
2796 .decrypt = aead_decrypt,
2797 .ivsize = CTR_RFC3686_IV_SIZE,
2798 .maxauthsize = SHA1_DIGEST_SIZE,
2801 .class1_alg_type = OP_ALG_ALGSEL_AES |
2802 OP_ALG_AAI_CTR_MOD128,
2803 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2804 OP_ALG_AAI_HMAC_PRECOMP,
2812 .cra_name = "authenc(hmac(sha224),"
2813 "rfc3686(ctr(aes)))",
2814 .cra_driver_name = "authenc-hmac-sha224-"
2815 "rfc3686-ctr-aes-caam",
2818 .setkey = aead_setkey,
2819 .setauthsize = aead_setauthsize,
2820 .encrypt = aead_encrypt,
2821 .decrypt = aead_decrypt,
2822 .ivsize = CTR_RFC3686_IV_SIZE,
2823 .maxauthsize = SHA224_DIGEST_SIZE,
2826 .class1_alg_type = OP_ALG_ALGSEL_AES |
2827 OP_ALG_AAI_CTR_MOD128,
2828 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2829 OP_ALG_AAI_HMAC_PRECOMP,
2836 .cra_name = "seqiv(authenc("
2837 "hmac(sha224),rfc3686(ctr(aes))))",
2838 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
2839 "rfc3686-ctr-aes-caam",
2842 .setkey = aead_setkey,
2843 .setauthsize = aead_setauthsize,
2844 .encrypt = aead_encrypt,
2845 .decrypt = aead_decrypt,
2846 .ivsize = CTR_RFC3686_IV_SIZE,
2847 .maxauthsize = SHA224_DIGEST_SIZE,
2850 .class1_alg_type = OP_ALG_ALGSEL_AES |
2851 OP_ALG_AAI_CTR_MOD128,
2852 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2853 OP_ALG_AAI_HMAC_PRECOMP,
2861 .cra_name = "authenc(hmac(sha256),"
2862 "rfc3686(ctr(aes)))",
2863 .cra_driver_name = "authenc-hmac-sha256-"
2864 "rfc3686-ctr-aes-caam",
2867 .setkey = aead_setkey,
2868 .setauthsize = aead_setauthsize,
2869 .encrypt = aead_encrypt,
2870 .decrypt = aead_decrypt,
2871 .ivsize = CTR_RFC3686_IV_SIZE,
2872 .maxauthsize = SHA256_DIGEST_SIZE,
2875 .class1_alg_type = OP_ALG_ALGSEL_AES |
2876 OP_ALG_AAI_CTR_MOD128,
2877 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2878 OP_ALG_AAI_HMAC_PRECOMP,
2885 .cra_name = "seqiv(authenc(hmac(sha256),"
2886 "rfc3686(ctr(aes))))",
2887 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
2888 "rfc3686-ctr-aes-caam",
2891 .setkey = aead_setkey,
2892 .setauthsize = aead_setauthsize,
2893 .encrypt = aead_encrypt,
2894 .decrypt = aead_decrypt,
2895 .ivsize = CTR_RFC3686_IV_SIZE,
2896 .maxauthsize = SHA256_DIGEST_SIZE,
2899 .class1_alg_type = OP_ALG_ALGSEL_AES |
2900 OP_ALG_AAI_CTR_MOD128,
2901 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2902 OP_ALG_AAI_HMAC_PRECOMP,
2910 .cra_name = "authenc(hmac(sha384),"
2911 "rfc3686(ctr(aes)))",
2912 .cra_driver_name = "authenc-hmac-sha384-"
2913 "rfc3686-ctr-aes-caam",
2916 .setkey = aead_setkey,
2917 .setauthsize = aead_setauthsize,
2918 .encrypt = aead_encrypt,
2919 .decrypt = aead_decrypt,
2920 .ivsize = CTR_RFC3686_IV_SIZE,
2921 .maxauthsize = SHA384_DIGEST_SIZE,
2924 .class1_alg_type = OP_ALG_ALGSEL_AES |
2925 OP_ALG_AAI_CTR_MOD128,
2926 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2927 OP_ALG_AAI_HMAC_PRECOMP,
2934 .cra_name = "seqiv(authenc(hmac(sha384),"
2935 "rfc3686(ctr(aes))))",
2936 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
2937 "rfc3686-ctr-aes-caam",
2940 .setkey = aead_setkey,
2941 .setauthsize = aead_setauthsize,
2942 .encrypt = aead_encrypt,
2943 .decrypt = aead_decrypt,
2944 .ivsize = CTR_RFC3686_IV_SIZE,
2945 .maxauthsize = SHA384_DIGEST_SIZE,
2948 .class1_alg_type = OP_ALG_ALGSEL_AES |
2949 OP_ALG_AAI_CTR_MOD128,
2950 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2951 OP_ALG_AAI_HMAC_PRECOMP,
2959 .cra_name = "authenc(hmac(sha512),"
2960 "rfc3686(ctr(aes)))",
2961 .cra_driver_name = "authenc-hmac-sha512-"
2962 "rfc3686-ctr-aes-caam",
2965 .setkey = aead_setkey,
2966 .setauthsize = aead_setauthsize,
2967 .encrypt = aead_encrypt,
2968 .decrypt = aead_decrypt,
2969 .ivsize = CTR_RFC3686_IV_SIZE,
2970 .maxauthsize = SHA512_DIGEST_SIZE,
2973 .class1_alg_type = OP_ALG_ALGSEL_AES |
2974 OP_ALG_AAI_CTR_MOD128,
2975 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2976 OP_ALG_AAI_HMAC_PRECOMP,
2983 .cra_name = "seqiv(authenc(hmac(sha512),"
2984 "rfc3686(ctr(aes))))",
2985 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
2986 "rfc3686-ctr-aes-caam",
2989 .setkey = aead_setkey,
2990 .setauthsize = aead_setauthsize,
2991 .encrypt = aead_encrypt,
2992 .decrypt = aead_decrypt,
2993 .ivsize = CTR_RFC3686_IV_SIZE,
2994 .maxauthsize = SHA512_DIGEST_SIZE,
2997 .class1_alg_type = OP_ALG_ALGSEL_AES |
2998 OP_ALG_AAI_CTR_MOD128,
2999 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3000 OP_ALG_AAI_HMAC_PRECOMP,
3007 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
3010 dma_addr_t dma_addr;
3011 struct caam_drv_private *priv;
3013 ctx->jrdev = caam_jr_alloc();
3014 if (IS_ERR(ctx->jrdev)) {
3015 pr_err("Job Ring Device allocation for transform failed\n");
3016 return PTR_ERR(ctx->jrdev);
3019 priv = dev_get_drvdata(ctx->jrdev->parent);
3020 if (priv->era >= 6 && uses_dkp)
3021 ctx->dir = DMA_BIDIRECTIONAL;
3023 ctx->dir = DMA_TO_DEVICE;
3025 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3026 offsetof(struct caam_ctx,
3028 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3029 if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3030 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3031 caam_jr_free(ctx->jrdev);
3035 ctx->sh_desc_enc_dma = dma_addr;
3036 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3038 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);
3040 /* copy descriptor header template value */
3041 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3042 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
3047 static int caam_cra_init(struct crypto_skcipher *tfm)
3049 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3050 struct caam_skcipher_alg *caam_alg =
3051 container_of(alg, typeof(*caam_alg), skcipher);
3053 return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
3057 static int caam_aead_init(struct crypto_aead *tfm)
3059 struct aead_alg *alg = crypto_aead_alg(tfm);
3060 struct caam_aead_alg *caam_alg =
3061 container_of(alg, struct caam_aead_alg, aead);
3062 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3064 return caam_init_common(ctx, &caam_alg->caam,
3065 alg->setkey == aead_setkey);
3068 static void caam_exit_common(struct caam_ctx *ctx)
3070 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3071 offsetof(struct caam_ctx, sh_desc_enc_dma),
3072 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3073 caam_jr_free(ctx->jrdev);
/* skcipher transform exit callback: delegate to the common teardown */
static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	caam_exit_common(crypto_skcipher_ctx(tfm));
}
/* AEAD transform exit callback: delegate to the common teardown */
static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}
3086 static void __exit caam_algapi_exit(void)
3090 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3091 struct caam_aead_alg *t_alg = driver_aeads + i;
3093 if (t_alg->registered)
3094 crypto_unregister_aead(&t_alg->aead);
3097 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3098 struct caam_skcipher_alg *t_alg = driver_algs + i;
3100 if (t_alg->registered)
3101 crypto_unregister_skcipher(&t_alg->skcipher);
3105 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
3107 struct skcipher_alg *alg = &t_alg->skcipher;
3109 alg->base.cra_module = THIS_MODULE;
3110 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3111 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3112 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3114 alg->init = caam_cra_init;
3115 alg->exit = caam_cra_exit;
3118 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3120 struct aead_alg *alg = &t_alg->aead;
3122 alg->base.cra_module = THIS_MODULE;
3123 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3124 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3125 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3127 alg->init = caam_aead_init;
3128 alg->exit = caam_aead_exit;
3131 static int __init caam_algapi_init(void)
3133 struct device_node *dev_node;
3134 struct platform_device *pdev;
3135 struct device *ctrldev;
3136 struct caam_drv_private *priv;
3138 u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
3139 unsigned int md_limit = SHA512_DIGEST_SIZE;
3140 bool registered = false;
3142 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
3144 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
3149 pdev = of_find_device_by_node(dev_node);
3151 of_node_put(dev_node);
3155 ctrldev = &pdev->dev;
3156 priv = dev_get_drvdata(ctrldev);
3157 of_node_put(dev_node);
3160 * If priv is NULL, it's probably because the caam driver wasn't
3161 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
3168 * Register crypto algorithms the device supports.
3169 * First, detect presence and attributes of DES, AES, and MD blocks.
3171 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3172 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3173 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
3174 aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
3175 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3177 /* If MD is present, limit digest size based on LP256 */
3178 if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
3179 md_limit = SHA256_DIGEST_SIZE;
3181 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3182 struct caam_skcipher_alg *t_alg = driver_algs + i;
3183 u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;
3185 /* Skip DES algorithms if not supported by device */
3187 ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3188 (alg_sel == OP_ALG_ALGSEL_DES)))
3191 /* Skip AES algorithms if not supported by device */
3192 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3196 * Check support for AES modes not available
3199 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3200 if ((t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
3204 caam_skcipher_alg_init(t_alg);
3206 err = crypto_register_skcipher(&t_alg->skcipher);
3208 pr_warn("%s alg registration failed\n",
3209 t_alg->skcipher.base.cra_driver_name);
3213 t_alg->registered = true;
3217 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3218 struct caam_aead_alg *t_alg = driver_aeads + i;
3219 u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3221 u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3223 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3225 /* Skip DES algorithms if not supported by device */
3227 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3228 (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3231 /* Skip AES algorithms if not supported by device */
3232 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3236 * Check support for AES algorithms not available
3239 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3240 if (alg_aai == OP_ALG_AAI_GCM)
3244 * Skip algorithms requiring message digests
3245 * if MD or MD size is not supported by device.
3248 (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
3251 caam_aead_alg_init(t_alg);
3253 err = crypto_register_aead(&t_alg->aead);
3255 pr_warn("%s alg registration failed\n",
3256 t_alg->aead.base.cra_driver_name);
3260 t_alg->registered = true;
3265 pr_info("caam algorithms registered in /proc/crypto\n");
/* Module entry/exit hooks and metadata */
module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");