// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */
8 #include <asm/unaligned.h>
9 #include <linux/device.h>
10 #include <linux/dma-mapping.h>
11 #include <linux/dmapool.h>
12 #include <crypto/aead.h>
13 #include <crypto/aes.h>
14 #include <crypto/authenc.h>
15 #include <crypto/chacha.h>
16 #include <crypto/ctr.h>
17 #include <crypto/internal/des.h>
18 #include <crypto/gcm.h>
19 #include <crypto/ghash.h>
20 #include <crypto/poly1305.h>
21 #include <crypto/sha.h>
22 #include <crypto/sm3.h>
23 #include <crypto/sm4.h>
24 #include <crypto/xts.h>
25 #include <crypto/skcipher.h>
26 #include <crypto/internal/aead.h>
27 #include <crypto/internal/skcipher.h>
31 enum safexcel_cipher_direction {
36 enum safexcel_cipher_alg {
44 struct safexcel_cipher_ctx {
45 struct safexcel_context base;
46 struct safexcel_crypto_priv *priv;
49 enum safexcel_cipher_alg alg;
50 char aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
51 char xcm; /* 0=authenc, 1=GCM, 2 reserved for CCM */
55 unsigned int key_len, xts;
57 /* All the below is AEAD specific */
60 u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
61 u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
63 struct crypto_cipher *hkaes;
64 struct crypto_aead *fback;
67 struct safexcel_cipher_req {
68 enum safexcel_cipher_direction direction;
69 /* Number of result descriptors associated to the request */
75 static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
76 struct safexcel_command_desc *cdesc)
80 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
81 ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
82 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
85 cdesc->control_data.token[0] = ctx->nonce;
87 memcpy(&cdesc->control_data.token[1], iv, 8);
89 if (ctx->alg == SAFEXCEL_CHACHA20 ||
90 ctx->xcm == EIP197_XCM_MODE_CCM) {
91 /* 32 bit counter, starting at 0 */
92 cdesc->control_data.token[3] = 0;
94 /* 32 bit counter, start at 1 (big endian!) */
95 cdesc->control_data.token[3] = cpu_to_be32(1);
99 } else if (ctx->xcm == EIP197_XCM_MODE_GCM ||
100 (ctx->aead && ctx->alg == SAFEXCEL_CHACHA20)) {
101 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
104 memcpy(&cdesc->control_data.token[0], iv, 12);
106 if (ctx->alg == SAFEXCEL_CHACHA20) {
107 /* 32 bit counter, starting at 0 */
108 cdesc->control_data.token[3] = 0;
110 /* 32 bit counter, start at 1 (big endian!) */
111 cdesc->control_data.token[3] = cpu_to_be32(1);
115 } else if (ctx->alg == SAFEXCEL_CHACHA20) {
116 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
118 /* 96 bit nonce part */
119 memcpy(&cdesc->control_data.token[0], &iv[4], 12);
121 cdesc->control_data.token[3] = *(u32 *)iv;
124 } else if (ctx->xcm == EIP197_XCM_MODE_CCM) {
125 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
127 /* Variable length IV part */
128 memcpy(&cdesc->control_data.token[0], iv, 15 - iv[0]);
129 /* Start variable length counter at 0 */
130 memset((u8 *)&cdesc->control_data.token[0] + 15 - iv[0],
136 if (ctx->mode != CONTEXT_CONTROL_CRYPTO_MODE_ECB) {
139 block_sz = DES_BLOCK_SIZE;
140 cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
143 block_sz = DES3_EDE_BLOCK_SIZE;
144 cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
147 block_sz = SM4_BLOCK_SIZE;
148 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
151 block_sz = AES_BLOCK_SIZE;
152 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
157 memcpy(cdesc->control_data.token, iv, block_sz);
161 static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
162 struct safexcel_command_desc *cdesc,
165 struct safexcel_token *token;
167 safexcel_cipher_token(ctx, iv, cdesc);
169 /* skip over worst case IV of 4 dwords, no need to be exact */
170 token = (struct safexcel_token *)(cdesc->control_data.token + 4);
172 token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
173 token[0].packet_length = length;
174 token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
175 EIP197_TOKEN_STAT_LAST_HASH;
176 token[0].instructions = EIP197_TOKEN_INS_LAST |
177 EIP197_TOKEN_INS_TYPE_CRYPTO |
178 EIP197_TOKEN_INS_TYPE_OUTPUT;
181 static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
182 struct safexcel_command_desc *cdesc,
183 enum safexcel_cipher_direction direction,
184 u32 cryptlen, u32 assoclen, u32 digestsize)
186 struct safexcel_token *token;
188 safexcel_cipher_token(ctx, iv, cdesc);
190 if (direction == SAFEXCEL_ENCRYPT) {
191 /* align end of instruction sequence to end of token */
192 token = (struct safexcel_token *)(cdesc->control_data.token +
193 EIP197_MAX_TOKENS - 14);
195 token[13].opcode = EIP197_TOKEN_OPCODE_INSERT;
196 token[13].packet_length = digestsize;
197 token[13].stat = EIP197_TOKEN_STAT_LAST_HASH |
198 EIP197_TOKEN_STAT_LAST_PACKET;
199 token[13].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
200 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
202 cryptlen -= digestsize;
204 /* align end of instruction sequence to end of token */
205 token = (struct safexcel_token *)(cdesc->control_data.token +
206 EIP197_MAX_TOKENS - 15);
208 token[13].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
209 token[13].packet_length = digestsize;
210 token[13].stat = EIP197_TOKEN_STAT_LAST_HASH |
211 EIP197_TOKEN_STAT_LAST_PACKET;
212 token[13].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
214 token[14].opcode = EIP197_TOKEN_OPCODE_VERIFY;
215 token[14].packet_length = digestsize |
216 EIP197_TOKEN_HASH_RESULT_VERIFY;
217 token[14].stat = EIP197_TOKEN_STAT_LAST_HASH |
218 EIP197_TOKEN_STAT_LAST_PACKET;
219 token[14].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
222 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
223 /* For ESP mode (and not GMAC), skip over the IV */
224 token[8].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
225 token[8].packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
227 assoclen -= EIP197_AEAD_IPSEC_IV_SIZE;
230 token[6].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
231 token[6].packet_length = assoclen;
232 token[6].instructions = EIP197_TOKEN_INS_LAST |
233 EIP197_TOKEN_INS_TYPE_HASH;
235 if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
236 token[11].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
237 token[11].packet_length = cryptlen;
238 token[11].stat = EIP197_TOKEN_STAT_LAST_HASH;
239 if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
240 token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;
241 /* Do not send to crypt engine in case of GMAC */
242 token[11].instructions = EIP197_TOKEN_INS_LAST |
243 EIP197_TOKEN_INS_TYPE_HASH |
244 EIP197_TOKEN_INS_TYPE_OUTPUT;
246 token[11].instructions = EIP197_TOKEN_INS_LAST |
247 EIP197_TOKEN_INS_TYPE_CRYPTO |
248 EIP197_TOKEN_INS_TYPE_HASH |
249 EIP197_TOKEN_INS_TYPE_OUTPUT;
251 } else if (ctx->xcm != EIP197_XCM_MODE_CCM) {
252 token[6].stat = EIP197_TOKEN_STAT_LAST_HASH;
258 token[9].opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
259 token[9].packet_length = 0;
260 token[9].instructions = AES_BLOCK_SIZE;
262 token[10].opcode = EIP197_TOKEN_OPCODE_INSERT;
263 token[10].packet_length = AES_BLOCK_SIZE;
264 token[10].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
265 EIP197_TOKEN_INS_TYPE_CRYPTO;
267 if (ctx->xcm != EIP197_XCM_MODE_GCM) {
268 u8 *final_iv = (u8 *)cdesc->control_data.token;
269 u8 *cbcmaciv = (u8 *)&token[1];
270 u32 *aadlen = (u32 *)&token[5];
272 /* Construct IV block B0 for the CBC-MAC */
273 token[0].opcode = EIP197_TOKEN_OPCODE_INSERT;
274 token[0].packet_length = AES_BLOCK_SIZE +
275 ((assoclen > 0) << 1);
276 token[0].instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
277 EIP197_TOKEN_INS_TYPE_HASH;
278 /* Variable length IV part */
279 memcpy(cbcmaciv, final_iv, 15 - final_iv[0]);
280 /* fixup flags byte */
281 cbcmaciv[0] |= ((assoclen > 0) << 6) | ((digestsize - 2) << 2);
282 /* Clear upper bytes of variable message length to 0 */
283 memset(cbcmaciv + 15 - final_iv[0], 0, final_iv[0] - 1);
284 /* insert lower 2 bytes of message length */
285 cbcmaciv[14] = cryptlen >> 8;
286 cbcmaciv[15] = cryptlen & 255;
289 *aadlen = cpu_to_le32(cpu_to_be16(assoclen));
293 token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;
295 /* Align AAD data towards hash engine */
296 token[7].opcode = EIP197_TOKEN_OPCODE_INSERT;
298 token[7].packet_length = assoclen ? 16 - assoclen : 0;
300 if (likely(cryptlen)) {
301 token[7].instructions = EIP197_TOKEN_INS_TYPE_HASH;
303 /* Align crypto data towards hash engine */
306 token[12].opcode = EIP197_TOKEN_OPCODE_INSERT;
308 token[12].packet_length = cryptlen ? 16 - cryptlen : 0;
309 token[12].stat = EIP197_TOKEN_STAT_LAST_HASH;
310 token[12].instructions = EIP197_TOKEN_INS_TYPE_HASH;
312 token[7].stat = EIP197_TOKEN_STAT_LAST_HASH;
313 token[7].instructions = EIP197_TOKEN_INS_LAST |
314 EIP197_TOKEN_INS_TYPE_HASH;
319 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
320 const u8 *key, unsigned int len)
322 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
323 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
324 struct safexcel_crypto_priv *priv = ctx->priv;
325 struct crypto_aes_ctx aes;
328 ret = aes_expandkey(&aes, key, len);
330 crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
334 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
335 for (i = 0; i < len / sizeof(u32); i++) {
336 if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
337 ctx->base.needs_inv = true;
343 for (i = 0; i < len / sizeof(u32); i++)
344 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
348 memzero_explicit(&aes, sizeof(aes));
352 static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
355 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
356 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
357 struct safexcel_ahash_export_state istate, ostate;
358 struct safexcel_crypto_priv *priv = ctx->priv;
359 struct crypto_authenc_keys keys;
360 struct crypto_aes_ctx aes;
363 if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
366 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
367 /* Must have at least space for the nonce here */
368 if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
370 /* last 4 bytes of key are the nonce! */
371 ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
372 CTR_RFC3686_NONCE_SIZE);
373 /* exclude the nonce here */
374 keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
380 err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
382 goto badkey_expflags;
385 err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
387 goto badkey_expflags;
390 err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
395 if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
399 dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
403 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
404 memcmp(ctx->key, keys.enckey, keys.enckeylen))
405 ctx->base.needs_inv = true;
408 switch (ctx->hash_alg) {
409 case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
410 if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
411 keys.authkeylen, &istate, &ostate))
414 case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
415 if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
416 keys.authkeylen, &istate, &ostate))
419 case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
420 if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
421 keys.authkeylen, &istate, &ostate))
424 case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
425 if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
426 keys.authkeylen, &istate, &ostate))
429 case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
430 if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
431 keys.authkeylen, &istate, &ostate))
434 case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
435 if (safexcel_hmac_setkey("safexcel-sm3", keys.authkey,
436 keys.authkeylen, &istate, &ostate))
440 dev_err(priv->dev, "aead: unsupported hash algorithm\n");
444 crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
445 CRYPTO_TFM_RES_MASK);
447 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
448 (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
449 memcmp(ctx->opad, ostate.state, ctx->state_sz)))
450 ctx->base.needs_inv = true;
452 /* Now copy the keys into the context */
453 memcpy(ctx->key, keys.enckey, keys.enckeylen);
454 ctx->key_len = keys.enckeylen;
456 memcpy(ctx->ipad, &istate.state, ctx->state_sz);
457 memcpy(ctx->opad, &ostate.state, ctx->state_sz);
459 memzero_explicit(&keys, sizeof(keys));
463 crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
465 memzero_explicit(&keys, sizeof(keys));
469 static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
470 struct crypto_async_request *async,
471 struct safexcel_cipher_req *sreq,
472 struct safexcel_command_desc *cdesc)
474 struct safexcel_crypto_priv *priv = ctx->priv;
475 int ctrl_size = ctx->key_len / sizeof(u32);
477 cdesc->control_data.control1 = ctx->mode;
480 /* Take in account the ipad+opad digests */
482 ctrl_size += ctx->state_sz / sizeof(u32);
483 cdesc->control_data.control0 =
484 CONTEXT_CONTROL_KEY_EN |
485 CONTEXT_CONTROL_DIGEST_XCM |
487 CONTEXT_CONTROL_SIZE(ctrl_size);
488 } else if (ctx->alg == SAFEXCEL_CHACHA20) {
489 /* Chacha20-Poly1305 */
490 cdesc->control_data.control0 =
491 CONTEXT_CONTROL_KEY_EN |
492 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
493 (sreq->direction == SAFEXCEL_ENCRYPT ?
494 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
495 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
497 CONTEXT_CONTROL_SIZE(ctrl_size);
500 ctrl_size += ctx->state_sz / sizeof(u32) * 2;
501 cdesc->control_data.control0 =
502 CONTEXT_CONTROL_KEY_EN |
503 CONTEXT_CONTROL_DIGEST_HMAC |
505 CONTEXT_CONTROL_SIZE(ctrl_size);
508 if (sreq->direction == SAFEXCEL_ENCRYPT &&
509 (ctx->xcm == EIP197_XCM_MODE_CCM ||
510 ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
511 cdesc->control_data.control0 |=
512 CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
513 else if (sreq->direction == SAFEXCEL_ENCRYPT)
514 cdesc->control_data.control0 |=
515 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
516 else if (ctx->xcm == EIP197_XCM_MODE_CCM)
517 cdesc->control_data.control0 |=
518 CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
520 cdesc->control_data.control0 |=
521 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
523 if (sreq->direction == SAFEXCEL_ENCRYPT)
524 cdesc->control_data.control0 =
525 CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
526 CONTEXT_CONTROL_KEY_EN |
527 CONTEXT_CONTROL_SIZE(ctrl_size);
529 cdesc->control_data.control0 =
530 CONTEXT_CONTROL_TYPE_CRYPTO_IN |
531 CONTEXT_CONTROL_KEY_EN |
532 CONTEXT_CONTROL_SIZE(ctrl_size);
535 if (ctx->alg == SAFEXCEL_DES) {
536 cdesc->control_data.control0 |=
537 CONTEXT_CONTROL_CRYPTO_ALG_DES;
538 } else if (ctx->alg == SAFEXCEL_3DES) {
539 cdesc->control_data.control0 |=
540 CONTEXT_CONTROL_CRYPTO_ALG_3DES;
541 } else if (ctx->alg == SAFEXCEL_AES) {
542 switch (ctx->key_len >> ctx->xts) {
543 case AES_KEYSIZE_128:
544 cdesc->control_data.control0 |=
545 CONTEXT_CONTROL_CRYPTO_ALG_AES128;
547 case AES_KEYSIZE_192:
548 cdesc->control_data.control0 |=
549 CONTEXT_CONTROL_CRYPTO_ALG_AES192;
551 case AES_KEYSIZE_256:
552 cdesc->control_data.control0 |=
553 CONTEXT_CONTROL_CRYPTO_ALG_AES256;
556 dev_err(priv->dev, "aes keysize not supported: %u\n",
557 ctx->key_len >> ctx->xts);
560 } else if (ctx->alg == SAFEXCEL_CHACHA20) {
561 cdesc->control_data.control0 |=
562 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
563 } else if (ctx->alg == SAFEXCEL_SM4) {
564 cdesc->control_data.control0 |=
565 CONTEXT_CONTROL_CRYPTO_ALG_SM4;
571 static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
572 struct crypto_async_request *async,
573 struct scatterlist *src,
574 struct scatterlist *dst,
575 unsigned int cryptlen,
576 struct safexcel_cipher_req *sreq,
577 bool *should_complete, int *ret)
579 struct skcipher_request *areq = skcipher_request_cast(async);
580 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
581 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
582 struct safexcel_result_desc *rdesc;
587 if (unlikely(!sreq->rdescs))
590 while (sreq->rdescs--) {
591 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
594 "cipher: result: could not retrieve the result descriptor\n");
595 *ret = PTR_ERR(rdesc);
600 *ret = safexcel_rdesc_check_errors(priv, rdesc);
605 safexcel_complete(priv, ring);
608 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
610 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
611 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
615 * Update IV in req from last crypto output word for CBC modes
617 if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
618 (sreq->direction == SAFEXCEL_ENCRYPT)) {
619 /* For encrypt take the last output word */
620 sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
621 crypto_skcipher_ivsize(skcipher),
623 crypto_skcipher_ivsize(skcipher)));
626 *should_complete = true;
631 static int safexcel_send_req(struct crypto_async_request *base, int ring,
632 struct safexcel_cipher_req *sreq,
633 struct scatterlist *src, struct scatterlist *dst,
634 unsigned int cryptlen, unsigned int assoclen,
635 unsigned int digestsize, u8 *iv, int *commands,
638 struct skcipher_request *areq = skcipher_request_cast(base);
639 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
640 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
641 struct safexcel_crypto_priv *priv = ctx->priv;
642 struct safexcel_command_desc *cdesc;
643 struct safexcel_command_desc *first_cdesc = NULL;
644 struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
645 struct scatterlist *sg;
647 unsigned int totlen_src = cryptlen + assoclen;
648 unsigned int totlen_dst = totlen_src;
649 int n_cdesc = 0, n_rdesc = 0;
650 int queued, i, ret = 0;
653 sreq->nr_src = sg_nents_for_len(src, totlen_src);
657 * AEAD has auth tag appended to output for encrypt and
658 * removed from the output for decrypt!
660 if (sreq->direction == SAFEXCEL_DECRYPT)
661 totlen_dst -= digestsize;
663 totlen_dst += digestsize;
665 memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
666 ctx->ipad, ctx->state_sz);
668 memcpy(ctx->base.ctxr->data + (ctx->key_len +
669 ctx->state_sz) / sizeof(u32), ctx->opad,
671 } else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
672 (sreq->direction == SAFEXCEL_DECRYPT)) {
674 * Save IV from last crypto input word for CBC modes in decrypt
675 * direction. Need to do this first in case of inplace operation
676 * as it will be overwritten.
678 sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
679 crypto_skcipher_ivsize(skcipher),
681 crypto_skcipher_ivsize(skcipher)));
684 sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
687 * Remember actual input length, source buffer length may be
688 * updated in case of inline operation below.
694 sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
695 sreq->nr_dst = sreq->nr_src;
696 if (unlikely((totlen_src || totlen_dst) &&
697 (sreq->nr_src <= 0))) {
698 dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
699 max(totlen_src, totlen_dst));
702 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
704 if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
705 dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
709 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
711 if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
712 dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
714 dma_unmap_sg(priv->dev, src, sreq->nr_src,
718 dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
721 memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
723 /* The EIP cannot deal with zero length input packets! */
727 /* command descriptors */
728 for_each_sg(src, sg, sreq->nr_src, i) {
729 int len = sg_dma_len(sg);
731 /* Do not overflow the request */
732 if (queued - len < 0)
735 cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
737 sg_dma_address(sg), len, totlen,
740 /* No space left in the command descriptor ring */
741 ret = PTR_ERR(cdesc);
755 if (unlikely(!n_cdesc)) {
757 * Special case: zero length input buffer.
758 * The engine always needs the 1st command descriptor, however!
760 first_cdesc = safexcel_add_cdesc(priv, ring, 1, 1, 0, 0, totlen,
765 /* Add context control words and token to first command descriptor */
766 safexcel_context_control(ctx, base, sreq, first_cdesc);
768 safexcel_aead_token(ctx, iv, first_cdesc,
769 sreq->direction, cryptlen,
770 assoclen, digestsize);
772 safexcel_skcipher_token(ctx, iv, first_cdesc,
775 /* result descriptors */
776 for_each_sg(dst, sg, sreq->nr_dst, i) {
777 bool last = (i == sreq->nr_dst - 1);
778 u32 len = sg_dma_len(sg);
780 /* only allow the part of the buffer we know we need */
781 if (len > totlen_dst)
787 /* skip over AAD space in buffer - not written */
789 if (assoclen >= len) {
793 rdesc = safexcel_add_rdesc(priv, ring, first, last,
799 rdesc = safexcel_add_rdesc(priv, ring, first, last,
804 /* No space left in the result descriptor ring */
805 ret = PTR_ERR(rdesc);
815 if (unlikely(first)) {
817 * Special case: AEAD decrypt with only AAD data.
818 * In this case there is NO output data from the engine,
819 * but the engine still needs a result descriptor!
820 * Create a dummy one just for catching the result token.
822 rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
824 /* No space left in the result descriptor ring */
825 ret = PTR_ERR(rdesc);
832 safexcel_rdr_req_set(priv, ring, first_rdesc, base);
839 for (i = 0; i < n_rdesc; i++)
840 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
842 for (i = 0; i < n_cdesc; i++)
843 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
846 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
848 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
849 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
855 static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
857 struct crypto_async_request *base,
858 struct safexcel_cipher_req *sreq,
859 bool *should_complete, int *ret)
861 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
862 struct safexcel_result_desc *rdesc;
863 int ndesc = 0, enq_ret;
867 if (unlikely(!sreq->rdescs))
870 while (sreq->rdescs--) {
871 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
874 "cipher: invalidate: could not retrieve the result descriptor\n");
875 *ret = PTR_ERR(rdesc);
880 *ret = safexcel_rdesc_check_errors(priv, rdesc);
885 safexcel_complete(priv, ring);
887 if (ctx->base.exit_inv) {
888 dma_pool_free(priv->context_pool, ctx->base.ctxr,
891 *should_complete = true;
896 ring = safexcel_select_ring(priv);
897 ctx->base.ring = ring;
899 spin_lock_bh(&priv->ring[ring].queue_lock);
900 enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
901 spin_unlock_bh(&priv->ring[ring].queue_lock);
903 if (enq_ret != -EINPROGRESS)
906 queue_work(priv->ring[ring].workqueue,
907 &priv->ring[ring].work_data.work);
909 *should_complete = false;
914 static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
916 struct crypto_async_request *async,
917 bool *should_complete, int *ret)
919 struct skcipher_request *req = skcipher_request_cast(async);
920 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
923 if (sreq->needs_inv) {
924 sreq->needs_inv = false;
925 err = safexcel_handle_inv_result(priv, ring, async, sreq,
926 should_complete, ret);
928 err = safexcel_handle_req_result(priv, ring, async, req->src,
929 req->dst, req->cryptlen, sreq,
930 should_complete, ret);
936 static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
938 struct crypto_async_request *async,
939 bool *should_complete, int *ret)
941 struct aead_request *req = aead_request_cast(async);
942 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
943 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
946 if (sreq->needs_inv) {
947 sreq->needs_inv = false;
948 err = safexcel_handle_inv_result(priv, ring, async, sreq,
949 should_complete, ret);
951 err = safexcel_handle_req_result(priv, ring, async, req->src,
953 req->cryptlen + crypto_aead_authsize(tfm),
954 sreq, should_complete, ret);
960 static int safexcel_cipher_send_inv(struct crypto_async_request *base,
961 int ring, int *commands, int *results)
963 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
964 struct safexcel_crypto_priv *priv = ctx->priv;
967 ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
977 static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
978 int *commands, int *results)
980 struct skcipher_request *req = skcipher_request_cast(async);
981 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
982 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
983 struct safexcel_crypto_priv *priv = ctx->priv;
986 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
988 if (sreq->needs_inv) {
989 ret = safexcel_cipher_send_inv(async, ring, commands, results);
991 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
992 u8 input_iv[AES_BLOCK_SIZE];
995 * Save input IV in case of CBC decrypt mode
996 * Will be overwritten with output IV prior to use!
998 memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1000 ret = safexcel_send_req(async, ring, sreq, req->src,
1001 req->dst, req->cryptlen, 0, 0, input_iv,
1005 sreq->rdescs = *results;
1009 static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1010 int *commands, int *results)
1012 struct aead_request *req = aead_request_cast(async);
1013 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1014 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1015 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1016 struct safexcel_crypto_priv *priv = ctx->priv;
1019 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1021 if (sreq->needs_inv)
1022 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1024 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1025 req->cryptlen, req->assoclen,
1026 crypto_aead_authsize(tfm), req->iv,
1028 sreq->rdescs = *results;
1032 static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
1033 struct crypto_async_request *base,
1034 struct safexcel_cipher_req *sreq,
1035 struct safexcel_inv_result *result)
1037 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1038 struct safexcel_crypto_priv *priv = ctx->priv;
1039 int ring = ctx->base.ring;
1041 init_completion(&result->completion);
1043 ctx = crypto_tfm_ctx(base->tfm);
1044 ctx->base.exit_inv = true;
1045 sreq->needs_inv = true;
1047 spin_lock_bh(&priv->ring[ring].queue_lock);
1048 crypto_enqueue_request(&priv->ring[ring].queue, base);
1049 spin_unlock_bh(&priv->ring[ring].queue_lock);
1051 queue_work(priv->ring[ring].workqueue,
1052 &priv->ring[ring].work_data.work);
1054 wait_for_completion(&result->completion);
1056 if (result->error) {
1058 "cipher: sync: invalidate: completion error %d\n",
1060 return result->error;
1066 static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
1068 EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
1069 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1070 struct safexcel_inv_result result = {};
1072 memset(req, 0, sizeof(struct skcipher_request));
1074 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1075 safexcel_inv_complete, &result);
1076 skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
1078 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1081 static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
1083 EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1084 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1085 struct safexcel_inv_result result = {};
1087 memset(req, 0, sizeof(struct aead_request));
1089 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1090 safexcel_inv_complete, &result);
1091 aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1093 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1096 static int safexcel_queue_req(struct crypto_async_request *base,
1097 struct safexcel_cipher_req *sreq,
1098 enum safexcel_cipher_direction dir)
1100 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1101 struct safexcel_crypto_priv *priv = ctx->priv;
1104 sreq->needs_inv = false;
1105 sreq->direction = dir;
1107 if (ctx->base.ctxr) {
1108 if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
1109 sreq->needs_inv = true;
1110 ctx->base.needs_inv = false;
1113 ctx->base.ring = safexcel_select_ring(priv);
1114 ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
1115 EIP197_GFP_FLAGS(*base),
1116 &ctx->base.ctxr_dma);
1117 if (!ctx->base.ctxr)
1121 ring = ctx->base.ring;
1123 spin_lock_bh(&priv->ring[ring].queue_lock);
1124 ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1125 spin_unlock_bh(&priv->ring[ring].queue_lock);
1127 queue_work(priv->ring[ring].workqueue,
1128 &priv->ring[ring].work_data.work);
1133 static int safexcel_encrypt(struct skcipher_request *req)
1135 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1139 static int safexcel_decrypt(struct skcipher_request *req)
1141 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1145 static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1147 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1148 struct safexcel_alg_template *tmpl =
1149 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1152 crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1153 sizeof(struct safexcel_cipher_req));
1155 ctx->priv = tmpl->priv;
1157 ctx->base.send = safexcel_skcipher_send;
1158 ctx->base.handle_result = safexcel_skcipher_handle_result;
1162 static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
1164 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1166 memzero_explicit(ctx->key, sizeof(ctx->key));
1168 /* context not allocated, skip invalidation */
1169 if (!ctx->base.ctxr)
1172 memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
1176 static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1178 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1179 struct safexcel_crypto_priv *priv = ctx->priv;
1182 if (safexcel_cipher_cra_exit(tfm))
1185 if (priv->flags & EIP197_TRC_CACHE) {
1186 ret = safexcel_skcipher_exit_inv(tfm);
1188 dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1191 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1192 ctx->base.ctxr_dma);
1196 static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1198 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1199 struct safexcel_crypto_priv *priv = ctx->priv;
1202 if (safexcel_cipher_cra_exit(tfm))
1205 if (priv->flags & EIP197_TRC_CACHE) {
1206 ret = safexcel_aead_exit_inv(tfm);
1208 dev_warn(priv->dev, "aead: invalidation error %d\n",
1211 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1212 ctx->base.ctxr_dma);
1216 static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1218 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1220 safexcel_skcipher_cra_init(tfm);
1221 ctx->alg = SAFEXCEL_AES;
1222 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1226 struct safexcel_alg_template safexcel_alg_ecb_aes = {
1227 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1228 .algo_mask = SAFEXCEL_ALG_AES,
1230 .setkey = safexcel_skcipher_aes_setkey,
1231 .encrypt = safexcel_encrypt,
1232 .decrypt = safexcel_decrypt,
1233 .min_keysize = AES_MIN_KEY_SIZE,
1234 .max_keysize = AES_MAX_KEY_SIZE,
1236 .cra_name = "ecb(aes)",
1237 .cra_driver_name = "safexcel-ecb-aes",
1238 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1239 .cra_flags = CRYPTO_ALG_ASYNC |
1240 CRYPTO_ALG_KERN_DRIVER_ONLY,
1241 .cra_blocksize = AES_BLOCK_SIZE,
1242 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1244 .cra_init = safexcel_skcipher_aes_ecb_cra_init,
1245 .cra_exit = safexcel_skcipher_cra_exit,
1246 .cra_module = THIS_MODULE,
1251 static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1253 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1255 safexcel_skcipher_cra_init(tfm);
1256 ctx->alg = SAFEXCEL_AES;
1257 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1261 struct safexcel_alg_template safexcel_alg_cbc_aes = {
1262 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1263 .algo_mask = SAFEXCEL_ALG_AES,
1265 .setkey = safexcel_skcipher_aes_setkey,
1266 .encrypt = safexcel_encrypt,
1267 .decrypt = safexcel_decrypt,
1268 .min_keysize = AES_MIN_KEY_SIZE,
1269 .max_keysize = AES_MAX_KEY_SIZE,
1270 .ivsize = AES_BLOCK_SIZE,
1272 .cra_name = "cbc(aes)",
1273 .cra_driver_name = "safexcel-cbc-aes",
1274 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1275 .cra_flags = CRYPTO_ALG_ASYNC |
1276 CRYPTO_ALG_KERN_DRIVER_ONLY,
1277 .cra_blocksize = AES_BLOCK_SIZE,
1278 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1280 .cra_init = safexcel_skcipher_aes_cbc_cra_init,
1281 .cra_exit = safexcel_skcipher_cra_exit,
1282 .cra_module = THIS_MODULE,
1287 static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
1289 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1291 safexcel_skcipher_cra_init(tfm);
1292 ctx->alg = SAFEXCEL_AES;
1293 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1297 struct safexcel_alg_template safexcel_alg_cfb_aes = {
1298 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1299 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1301 .setkey = safexcel_skcipher_aes_setkey,
1302 .encrypt = safexcel_encrypt,
1303 .decrypt = safexcel_decrypt,
1304 .min_keysize = AES_MIN_KEY_SIZE,
1305 .max_keysize = AES_MAX_KEY_SIZE,
1306 .ivsize = AES_BLOCK_SIZE,
1308 .cra_name = "cfb(aes)",
1309 .cra_driver_name = "safexcel-cfb-aes",
1310 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1311 .cra_flags = CRYPTO_ALG_ASYNC |
1312 CRYPTO_ALG_KERN_DRIVER_ONLY,
1314 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1316 .cra_init = safexcel_skcipher_aes_cfb_cra_init,
1317 .cra_exit = safexcel_skcipher_cra_exit,
1318 .cra_module = THIS_MODULE,
1323 static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
1325 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1327 safexcel_skcipher_cra_init(tfm);
1328 ctx->alg = SAFEXCEL_AES;
1329 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1333 struct safexcel_alg_template safexcel_alg_ofb_aes = {
1334 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1335 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1337 .setkey = safexcel_skcipher_aes_setkey,
1338 .encrypt = safexcel_encrypt,
1339 .decrypt = safexcel_decrypt,
1340 .min_keysize = AES_MIN_KEY_SIZE,
1341 .max_keysize = AES_MAX_KEY_SIZE,
1342 .ivsize = AES_BLOCK_SIZE,
1344 .cra_name = "ofb(aes)",
1345 .cra_driver_name = "safexcel-ofb-aes",
1346 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1347 .cra_flags = CRYPTO_ALG_ASYNC |
1348 CRYPTO_ALG_KERN_DRIVER_ONLY,
1350 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1352 .cra_init = safexcel_skcipher_aes_ofb_cra_init,
1353 .cra_exit = safexcel_skcipher_cra_exit,
1354 .cra_module = THIS_MODULE,
1359 static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1360 const u8 *key, unsigned int len)
1362 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1363 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1364 struct safexcel_crypto_priv *priv = ctx->priv;
1365 struct crypto_aes_ctx aes;
1367 unsigned int keylen;
1369 /* last 4 bytes of key are the nonce! */
1370 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1371 /* exclude the nonce here */
1372 keylen = len - CTR_RFC3686_NONCE_SIZE;
1373 ret = aes_expandkey(&aes, key, keylen);
1375 crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
1379 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1380 for (i = 0; i < keylen / sizeof(u32); i++) {
1381 if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
1382 ctx->base.needs_inv = true;
1388 for (i = 0; i < keylen / sizeof(u32); i++)
1389 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1391 ctx->key_len = keylen;
1393 memzero_explicit(&aes, sizeof(aes));
1397 static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1399 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1401 safexcel_skcipher_cra_init(tfm);
1402 ctx->alg = SAFEXCEL_AES;
1403 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1407 struct safexcel_alg_template safexcel_alg_ctr_aes = {
1408 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1409 .algo_mask = SAFEXCEL_ALG_AES,
1411 .setkey = safexcel_skcipher_aesctr_setkey,
1412 .encrypt = safexcel_encrypt,
1413 .decrypt = safexcel_decrypt,
1414 /* Add nonce size */
1415 .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1416 .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1417 .ivsize = CTR_RFC3686_IV_SIZE,
1419 .cra_name = "rfc3686(ctr(aes))",
1420 .cra_driver_name = "safexcel-ctr-aes",
1421 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1422 .cra_flags = CRYPTO_ALG_ASYNC |
1423 CRYPTO_ALG_KERN_DRIVER_ONLY,
1425 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1427 .cra_init = safexcel_skcipher_aes_ctr_cra_init,
1428 .cra_exit = safexcel_skcipher_cra_exit,
1429 .cra_module = THIS_MODULE,
1434 static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
1437 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1440 ret = verify_skcipher_des_key(ctfm, key);
1444 /* if context exits and key changed, need to invalidate it */
1445 if (ctx->base.ctxr_dma)
1446 if (memcmp(ctx->key, key, len))
1447 ctx->base.needs_inv = true;
1449 memcpy(ctx->key, key, len);
1455 static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1457 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1459 safexcel_skcipher_cra_init(tfm);
1460 ctx->alg = SAFEXCEL_DES;
1461 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1465 struct safexcel_alg_template safexcel_alg_cbc_des = {
1466 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1467 .algo_mask = SAFEXCEL_ALG_DES,
1469 .setkey = safexcel_des_setkey,
1470 .encrypt = safexcel_encrypt,
1471 .decrypt = safexcel_decrypt,
1472 .min_keysize = DES_KEY_SIZE,
1473 .max_keysize = DES_KEY_SIZE,
1474 .ivsize = DES_BLOCK_SIZE,
1476 .cra_name = "cbc(des)",
1477 .cra_driver_name = "safexcel-cbc-des",
1478 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1479 .cra_flags = CRYPTO_ALG_ASYNC |
1480 CRYPTO_ALG_KERN_DRIVER_ONLY,
1481 .cra_blocksize = DES_BLOCK_SIZE,
1482 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1484 .cra_init = safexcel_skcipher_des_cbc_cra_init,
1485 .cra_exit = safexcel_skcipher_cra_exit,
1486 .cra_module = THIS_MODULE,
1491 static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1493 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1495 safexcel_skcipher_cra_init(tfm);
1496 ctx->alg = SAFEXCEL_DES;
1497 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1501 struct safexcel_alg_template safexcel_alg_ecb_des = {
1502 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1503 .algo_mask = SAFEXCEL_ALG_DES,
1505 .setkey = safexcel_des_setkey,
1506 .encrypt = safexcel_encrypt,
1507 .decrypt = safexcel_decrypt,
1508 .min_keysize = DES_KEY_SIZE,
1509 .max_keysize = DES_KEY_SIZE,
1511 .cra_name = "ecb(des)",
1512 .cra_driver_name = "safexcel-ecb-des",
1513 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1514 .cra_flags = CRYPTO_ALG_ASYNC |
1515 CRYPTO_ALG_KERN_DRIVER_ONLY,
1516 .cra_blocksize = DES_BLOCK_SIZE,
1517 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1519 .cra_init = safexcel_skcipher_des_ecb_cra_init,
1520 .cra_exit = safexcel_skcipher_cra_exit,
1521 .cra_module = THIS_MODULE,
1526 static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1527 const u8 *key, unsigned int len)
1529 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1532 err = verify_skcipher_des3_key(ctfm, key);
1536 /* if context exits and key changed, need to invalidate it */
1537 if (ctx->base.ctxr_dma) {
1538 if (memcmp(ctx->key, key, len))
1539 ctx->base.needs_inv = true;
1542 memcpy(ctx->key, key, len);
1549 static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1551 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1553 safexcel_skcipher_cra_init(tfm);
1554 ctx->alg = SAFEXCEL_3DES;
1555 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1559 struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
1560 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1561 .algo_mask = SAFEXCEL_ALG_DES,
1563 .setkey = safexcel_des3_ede_setkey,
1564 .encrypt = safexcel_encrypt,
1565 .decrypt = safexcel_decrypt,
1566 .min_keysize = DES3_EDE_KEY_SIZE,
1567 .max_keysize = DES3_EDE_KEY_SIZE,
1568 .ivsize = DES3_EDE_BLOCK_SIZE,
1570 .cra_name = "cbc(des3_ede)",
1571 .cra_driver_name = "safexcel-cbc-des3_ede",
1572 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1573 .cra_flags = CRYPTO_ALG_ASYNC |
1574 CRYPTO_ALG_KERN_DRIVER_ONLY,
1575 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1576 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1578 .cra_init = safexcel_skcipher_des3_cbc_cra_init,
1579 .cra_exit = safexcel_skcipher_cra_exit,
1580 .cra_module = THIS_MODULE,
1585 static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1587 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1589 safexcel_skcipher_cra_init(tfm);
1590 ctx->alg = SAFEXCEL_3DES;
1591 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1595 struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1596 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1597 .algo_mask = SAFEXCEL_ALG_DES,
1599 .setkey = safexcel_des3_ede_setkey,
1600 .encrypt = safexcel_encrypt,
1601 .decrypt = safexcel_decrypt,
1602 .min_keysize = DES3_EDE_KEY_SIZE,
1603 .max_keysize = DES3_EDE_KEY_SIZE,
1605 .cra_name = "ecb(des3_ede)",
1606 .cra_driver_name = "safexcel-ecb-des3_ede",
1607 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1608 .cra_flags = CRYPTO_ALG_ASYNC |
1609 CRYPTO_ALG_KERN_DRIVER_ONLY,
1610 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1611 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1613 .cra_init = safexcel_skcipher_des3_ecb_cra_init,
1614 .cra_exit = safexcel_skcipher_cra_exit,
1615 .cra_module = THIS_MODULE,
1620 static int safexcel_aead_encrypt(struct aead_request *req)
1622 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1624 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1627 static int safexcel_aead_decrypt(struct aead_request *req)
1629 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1631 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1634 static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1636 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1637 struct safexcel_alg_template *tmpl =
1638 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1641 crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1642 sizeof(struct safexcel_cipher_req));
1644 ctx->priv = tmpl->priv;
1646 ctx->alg = SAFEXCEL_AES; /* default */
1647 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1649 ctx->base.send = safexcel_aead_send;
1650 ctx->base.handle_result = safexcel_aead_handle_result;
1654 static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1656 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1658 safexcel_aead_cra_init(tfm);
1659 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1660 ctx->state_sz = SHA1_DIGEST_SIZE;
1664 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
1665 .type = SAFEXCEL_ALG_TYPE_AEAD,
1666 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
1668 .setkey = safexcel_aead_setkey,
1669 .encrypt = safexcel_aead_encrypt,
1670 .decrypt = safexcel_aead_decrypt,
1671 .ivsize = AES_BLOCK_SIZE,
1672 .maxauthsize = SHA1_DIGEST_SIZE,
1674 .cra_name = "authenc(hmac(sha1),cbc(aes))",
1675 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1676 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1677 .cra_flags = CRYPTO_ALG_ASYNC |
1678 CRYPTO_ALG_KERN_DRIVER_ONLY,
1679 .cra_blocksize = AES_BLOCK_SIZE,
1680 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1682 .cra_init = safexcel_aead_sha1_cra_init,
1683 .cra_exit = safexcel_aead_cra_exit,
1684 .cra_module = THIS_MODULE,
1689 static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1691 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1693 safexcel_aead_cra_init(tfm);
1694 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1695 ctx->state_sz = SHA256_DIGEST_SIZE;
1699 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
1700 .type = SAFEXCEL_ALG_TYPE_AEAD,
1701 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1703 .setkey = safexcel_aead_setkey,
1704 .encrypt = safexcel_aead_encrypt,
1705 .decrypt = safexcel_aead_decrypt,
1706 .ivsize = AES_BLOCK_SIZE,
1707 .maxauthsize = SHA256_DIGEST_SIZE,
1709 .cra_name = "authenc(hmac(sha256),cbc(aes))",
1710 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1711 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1712 .cra_flags = CRYPTO_ALG_ASYNC |
1713 CRYPTO_ALG_KERN_DRIVER_ONLY,
1714 .cra_blocksize = AES_BLOCK_SIZE,
1715 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1717 .cra_init = safexcel_aead_sha256_cra_init,
1718 .cra_exit = safexcel_aead_cra_exit,
1719 .cra_module = THIS_MODULE,
1724 static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1726 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1728 safexcel_aead_cra_init(tfm);
1729 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1730 ctx->state_sz = SHA256_DIGEST_SIZE;
1734 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
1735 .type = SAFEXCEL_ALG_TYPE_AEAD,
1736 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1738 .setkey = safexcel_aead_setkey,
1739 .encrypt = safexcel_aead_encrypt,
1740 .decrypt = safexcel_aead_decrypt,
1741 .ivsize = AES_BLOCK_SIZE,
1742 .maxauthsize = SHA224_DIGEST_SIZE,
1744 .cra_name = "authenc(hmac(sha224),cbc(aes))",
1745 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1746 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1747 .cra_flags = CRYPTO_ALG_ASYNC |
1748 CRYPTO_ALG_KERN_DRIVER_ONLY,
1749 .cra_blocksize = AES_BLOCK_SIZE,
1750 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1752 .cra_init = safexcel_aead_sha224_cra_init,
1753 .cra_exit = safexcel_aead_cra_exit,
1754 .cra_module = THIS_MODULE,
1759 static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1761 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1763 safexcel_aead_cra_init(tfm);
1764 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1765 ctx->state_sz = SHA512_DIGEST_SIZE;
1769 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
1770 .type = SAFEXCEL_ALG_TYPE_AEAD,
1771 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1773 .setkey = safexcel_aead_setkey,
1774 .encrypt = safexcel_aead_encrypt,
1775 .decrypt = safexcel_aead_decrypt,
1776 .ivsize = AES_BLOCK_SIZE,
1777 .maxauthsize = SHA512_DIGEST_SIZE,
1779 .cra_name = "authenc(hmac(sha512),cbc(aes))",
1780 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1781 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1782 .cra_flags = CRYPTO_ALG_ASYNC |
1783 CRYPTO_ALG_KERN_DRIVER_ONLY,
1784 .cra_blocksize = AES_BLOCK_SIZE,
1785 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1787 .cra_init = safexcel_aead_sha512_cra_init,
1788 .cra_exit = safexcel_aead_cra_exit,
1789 .cra_module = THIS_MODULE,
1794 static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1796 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1798 safexcel_aead_cra_init(tfm);
1799 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1800 ctx->state_sz = SHA512_DIGEST_SIZE;
1804 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
1805 .type = SAFEXCEL_ALG_TYPE_AEAD,
1806 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1808 .setkey = safexcel_aead_setkey,
1809 .encrypt = safexcel_aead_encrypt,
1810 .decrypt = safexcel_aead_decrypt,
1811 .ivsize = AES_BLOCK_SIZE,
1812 .maxauthsize = SHA384_DIGEST_SIZE,
1814 .cra_name = "authenc(hmac(sha384),cbc(aes))",
1815 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
1816 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1817 .cra_flags = CRYPTO_ALG_ASYNC |
1818 CRYPTO_ALG_KERN_DRIVER_ONLY,
1819 .cra_blocksize = AES_BLOCK_SIZE,
1820 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1822 .cra_init = safexcel_aead_sha384_cra_init,
1823 .cra_exit = safexcel_aead_cra_exit,
1824 .cra_module = THIS_MODULE,
1829 static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1831 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1833 safexcel_aead_sha1_cra_init(tfm);
1834 ctx->alg = SAFEXCEL_3DES; /* override default */
1838 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
1839 .type = SAFEXCEL_ALG_TYPE_AEAD,
1840 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
1842 .setkey = safexcel_aead_setkey,
1843 .encrypt = safexcel_aead_encrypt,
1844 .decrypt = safexcel_aead_decrypt,
1845 .ivsize = DES3_EDE_BLOCK_SIZE,
1846 .maxauthsize = SHA1_DIGEST_SIZE,
1848 .cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
1849 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
1850 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1851 .cra_flags = CRYPTO_ALG_ASYNC |
1852 CRYPTO_ALG_KERN_DRIVER_ONLY,
1853 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1854 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1856 .cra_init = safexcel_aead_sha1_des3_cra_init,
1857 .cra_exit = safexcel_aead_cra_exit,
1858 .cra_module = THIS_MODULE,
1863 static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1865 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1867 safexcel_aead_sha256_cra_init(tfm);
1868 ctx->alg = SAFEXCEL_3DES; /* override default */
1872 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
1873 .type = SAFEXCEL_ALG_TYPE_AEAD,
1874 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1876 .setkey = safexcel_aead_setkey,
1877 .encrypt = safexcel_aead_encrypt,
1878 .decrypt = safexcel_aead_decrypt,
1879 .ivsize = DES3_EDE_BLOCK_SIZE,
1880 .maxauthsize = SHA256_DIGEST_SIZE,
1882 .cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
1883 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
1884 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1885 .cra_flags = CRYPTO_ALG_ASYNC |
1886 CRYPTO_ALG_KERN_DRIVER_ONLY,
1887 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1888 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1890 .cra_init = safexcel_aead_sha256_des3_cra_init,
1891 .cra_exit = safexcel_aead_cra_exit,
1892 .cra_module = THIS_MODULE,
1897 static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1899 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1901 safexcel_aead_sha224_cra_init(tfm);
1902 ctx->alg = SAFEXCEL_3DES; /* override default */
1906 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
1907 .type = SAFEXCEL_ALG_TYPE_AEAD,
1908 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1910 .setkey = safexcel_aead_setkey,
1911 .encrypt = safexcel_aead_encrypt,
1912 .decrypt = safexcel_aead_decrypt,
1913 .ivsize = DES3_EDE_BLOCK_SIZE,
1914 .maxauthsize = SHA224_DIGEST_SIZE,
1916 .cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
1917 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
1918 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1919 .cra_flags = CRYPTO_ALG_ASYNC |
1920 CRYPTO_ALG_KERN_DRIVER_ONLY,
1921 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1922 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1924 .cra_init = safexcel_aead_sha224_des3_cra_init,
1925 .cra_exit = safexcel_aead_cra_exit,
1926 .cra_module = THIS_MODULE,
1931 static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
1933 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1935 safexcel_aead_sha512_cra_init(tfm);
1936 ctx->alg = SAFEXCEL_3DES; /* override default */
1940 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
1941 .type = SAFEXCEL_ALG_TYPE_AEAD,
1942 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
1944 .setkey = safexcel_aead_setkey,
1945 .encrypt = safexcel_aead_encrypt,
1946 .decrypt = safexcel_aead_decrypt,
1947 .ivsize = DES3_EDE_BLOCK_SIZE,
1948 .maxauthsize = SHA512_DIGEST_SIZE,
1950 .cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
1951 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
1952 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1953 .cra_flags = CRYPTO_ALG_ASYNC |
1954 CRYPTO_ALG_KERN_DRIVER_ONLY,
1955 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1956 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1958 .cra_init = safexcel_aead_sha512_des3_cra_init,
1959 .cra_exit = safexcel_aead_cra_exit,
1960 .cra_module = THIS_MODULE,
1965 static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
1967 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1969 safexcel_aead_sha384_cra_init(tfm);
1970 ctx->alg = SAFEXCEL_3DES; /* override default */
1974 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
1975 .type = SAFEXCEL_ALG_TYPE_AEAD,
1976 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
1978 .setkey = safexcel_aead_setkey,
1979 .encrypt = safexcel_aead_encrypt,
1980 .decrypt = safexcel_aead_decrypt,
1981 .ivsize = DES3_EDE_BLOCK_SIZE,
1982 .maxauthsize = SHA384_DIGEST_SIZE,
1984 .cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
1985 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
1986 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1987 .cra_flags = CRYPTO_ALG_ASYNC |
1988 CRYPTO_ALG_KERN_DRIVER_ONLY,
1989 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1990 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1992 .cra_init = safexcel_aead_sha384_des3_cra_init,
1993 .cra_exit = safexcel_aead_cra_exit,
1994 .cra_module = THIS_MODULE,
1999 static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2001 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2003 safexcel_aead_sha1_cra_init(tfm);
2004 ctx->alg = SAFEXCEL_DES; /* override default */
2008 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
2009 .type = SAFEXCEL_ALG_TYPE_AEAD,
2010 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
2012 .setkey = safexcel_aead_setkey,
2013 .encrypt = safexcel_aead_encrypt,
2014 .decrypt = safexcel_aead_decrypt,
2015 .ivsize = DES_BLOCK_SIZE,
2016 .maxauthsize = SHA1_DIGEST_SIZE,
2018 .cra_name = "authenc(hmac(sha1),cbc(des))",
2019 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
2020 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2021 .cra_flags = CRYPTO_ALG_ASYNC |
2022 CRYPTO_ALG_KERN_DRIVER_ONLY,
2023 .cra_blocksize = DES_BLOCK_SIZE,
2024 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2026 .cra_init = safexcel_aead_sha1_des_cra_init,
2027 .cra_exit = safexcel_aead_cra_exit,
2028 .cra_module = THIS_MODULE,
2033 static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2035 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2037 safexcel_aead_sha256_cra_init(tfm);
2038 ctx->alg = SAFEXCEL_DES; /* override default */
2042 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
2043 .type = SAFEXCEL_ALG_TYPE_AEAD,
2044 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2046 .setkey = safexcel_aead_setkey,
2047 .encrypt = safexcel_aead_encrypt,
2048 .decrypt = safexcel_aead_decrypt,
2049 .ivsize = DES_BLOCK_SIZE,
2050 .maxauthsize = SHA256_DIGEST_SIZE,
2052 .cra_name = "authenc(hmac(sha256),cbc(des))",
2053 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
2054 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2055 .cra_flags = CRYPTO_ALG_ASYNC |
2056 CRYPTO_ALG_KERN_DRIVER_ONLY,
2057 .cra_blocksize = DES_BLOCK_SIZE,
2058 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2060 .cra_init = safexcel_aead_sha256_des_cra_init,
2061 .cra_exit = safexcel_aead_cra_exit,
2062 .cra_module = THIS_MODULE,
2067 static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2069 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2071 safexcel_aead_sha224_cra_init(tfm);
2072 ctx->alg = SAFEXCEL_DES; /* override default */
2076 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
2077 .type = SAFEXCEL_ALG_TYPE_AEAD,
2078 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2080 .setkey = safexcel_aead_setkey,
2081 .encrypt = safexcel_aead_encrypt,
2082 .decrypt = safexcel_aead_decrypt,
2083 .ivsize = DES_BLOCK_SIZE,
2084 .maxauthsize = SHA224_DIGEST_SIZE,
2086 .cra_name = "authenc(hmac(sha224),cbc(des))",
2087 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
2088 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2089 .cra_flags = CRYPTO_ALG_ASYNC |
2090 CRYPTO_ALG_KERN_DRIVER_ONLY,
2091 .cra_blocksize = DES_BLOCK_SIZE,
2092 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2094 .cra_init = safexcel_aead_sha224_des_cra_init,
2095 .cra_exit = safexcel_aead_cra_exit,
2096 .cra_module = THIS_MODULE,
2101 static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2103 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2105 safexcel_aead_sha512_cra_init(tfm);
2106 ctx->alg = SAFEXCEL_DES; /* override default */
2110 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
2111 .type = SAFEXCEL_ALG_TYPE_AEAD,
2112 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2114 .setkey = safexcel_aead_setkey,
2115 .encrypt = safexcel_aead_encrypt,
2116 .decrypt = safexcel_aead_decrypt,
2117 .ivsize = DES_BLOCK_SIZE,
2118 .maxauthsize = SHA512_DIGEST_SIZE,
2120 .cra_name = "authenc(hmac(sha512),cbc(des))",
2121 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
2122 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2123 .cra_flags = CRYPTO_ALG_ASYNC |
2124 CRYPTO_ALG_KERN_DRIVER_ONLY,
2125 .cra_blocksize = DES_BLOCK_SIZE,
2126 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2128 .cra_init = safexcel_aead_sha512_des_cra_init,
2129 .cra_exit = safexcel_aead_cra_exit,
2130 .cra_module = THIS_MODULE,
2135 static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2137 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2139 safexcel_aead_sha384_cra_init(tfm);
2140 ctx->alg = SAFEXCEL_DES; /* override default */
2144 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
2145 .type = SAFEXCEL_ALG_TYPE_AEAD,
2146 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2148 .setkey = safexcel_aead_setkey,
2149 .encrypt = safexcel_aead_encrypt,
2150 .decrypt = safexcel_aead_decrypt,
2151 .ivsize = DES_BLOCK_SIZE,
2152 .maxauthsize = SHA384_DIGEST_SIZE,
2154 .cra_name = "authenc(hmac(sha384),cbc(des))",
2155 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
2156 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2157 .cra_flags = CRYPTO_ALG_ASYNC |
2158 CRYPTO_ALG_KERN_DRIVER_ONLY,
2159 .cra_blocksize = DES_BLOCK_SIZE,
2160 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2162 .cra_init = safexcel_aead_sha384_des_cra_init,
2163 .cra_exit = safexcel_aead_cra_exit,
2164 .cra_module = THIS_MODULE,
2169 static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2171 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2173 safexcel_aead_sha1_cra_init(tfm);
2174 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2178 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
2179 .type = SAFEXCEL_ALG_TYPE_AEAD,
2180 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
2182 .setkey = safexcel_aead_setkey,
2183 .encrypt = safexcel_aead_encrypt,
2184 .decrypt = safexcel_aead_decrypt,
2185 .ivsize = CTR_RFC3686_IV_SIZE,
2186 .maxauthsize = SHA1_DIGEST_SIZE,
2188 .cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2189 .cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
2190 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2191 .cra_flags = CRYPTO_ALG_ASYNC |
2192 CRYPTO_ALG_KERN_DRIVER_ONLY,
2194 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2196 .cra_init = safexcel_aead_sha1_ctr_cra_init,
2197 .cra_exit = safexcel_aead_cra_exit,
2198 .cra_module = THIS_MODULE,
2203 static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2205 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2207 safexcel_aead_sha256_cra_init(tfm);
2208 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2212 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
2213 .type = SAFEXCEL_ALG_TYPE_AEAD,
2214 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2216 .setkey = safexcel_aead_setkey,
2217 .encrypt = safexcel_aead_encrypt,
2218 .decrypt = safexcel_aead_decrypt,
2219 .ivsize = CTR_RFC3686_IV_SIZE,
2220 .maxauthsize = SHA256_DIGEST_SIZE,
2222 .cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2223 .cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
2224 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2225 .cra_flags = CRYPTO_ALG_ASYNC |
2226 CRYPTO_ALG_KERN_DRIVER_ONLY,
2228 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2230 .cra_init = safexcel_aead_sha256_ctr_cra_init,
2231 .cra_exit = safexcel_aead_cra_exit,
2232 .cra_module = THIS_MODULE,
2237 static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2239 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2241 safexcel_aead_sha224_cra_init(tfm);
2242 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2246 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
2247 .type = SAFEXCEL_ALG_TYPE_AEAD,
2248 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2250 .setkey = safexcel_aead_setkey,
2251 .encrypt = safexcel_aead_encrypt,
2252 .decrypt = safexcel_aead_decrypt,
2253 .ivsize = CTR_RFC3686_IV_SIZE,
2254 .maxauthsize = SHA224_DIGEST_SIZE,
2256 .cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
2257 .cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
2258 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2259 .cra_flags = CRYPTO_ALG_ASYNC |
2260 CRYPTO_ALG_KERN_DRIVER_ONLY,
2262 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2264 .cra_init = safexcel_aead_sha224_ctr_cra_init,
2265 .cra_exit = safexcel_aead_cra_exit,
2266 .cra_module = THIS_MODULE,
2271 static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2273 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2275 safexcel_aead_sha512_cra_init(tfm);
2276 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2280 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
2281 .type = SAFEXCEL_ALG_TYPE_AEAD,
2282 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2284 .setkey = safexcel_aead_setkey,
2285 .encrypt = safexcel_aead_encrypt,
2286 .decrypt = safexcel_aead_decrypt,
2287 .ivsize = CTR_RFC3686_IV_SIZE,
2288 .maxauthsize = SHA512_DIGEST_SIZE,
2290 .cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2291 .cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
2292 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2293 .cra_flags = CRYPTO_ALG_ASYNC |
2294 CRYPTO_ALG_KERN_DRIVER_ONLY,
2296 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2298 .cra_init = safexcel_aead_sha512_ctr_cra_init,
2299 .cra_exit = safexcel_aead_cra_exit,
2300 .cra_module = THIS_MODULE,
2305 static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2307 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2309 safexcel_aead_sha384_cra_init(tfm);
2310 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2314 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
2315 .type = SAFEXCEL_ALG_TYPE_AEAD,
2316 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2318 .setkey = safexcel_aead_setkey,
2319 .encrypt = safexcel_aead_encrypt,
2320 .decrypt = safexcel_aead_decrypt,
2321 .ivsize = CTR_RFC3686_IV_SIZE,
2322 .maxauthsize = SHA384_DIGEST_SIZE,
2324 .cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2325 .cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
2326 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2327 .cra_flags = CRYPTO_ALG_ASYNC |
2328 CRYPTO_ALG_KERN_DRIVER_ONLY,
2330 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2332 .cra_init = safexcel_aead_sha384_ctr_cra_init,
2333 .cra_exit = safexcel_aead_cra_exit,
2334 .cra_module = THIS_MODULE,
/* XTS-AES setkey: the supplied key is two concatenated AES keys — first
 * half is the data (cipher) key, second half the tweak key.  Both halves
 * are expanded and stored consecutively in ctx->key.  If a record cache
 * (TRC) is active and a context record already exists, compare against the
 * cached key words and mark the context for invalidation on any change.
 * NOTE(review): error-return lines after xts_verify_key()/aes_expandkey()
 * appear elided in this excerpt.
 */
2339 static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2340 					   const u8 *key, unsigned int len)
2342 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2343 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2344 	struct safexcel_crypto_priv *priv = ctx->priv;
2345 	struct crypto_aes_ctx aes;
2347 	unsigned int keylen;
2349 	/* Check for illegal XTS keys */
2350 	ret = xts_verify_key(ctfm, key, len);
2354 	/* Only half of the key data is cipher key */
2355 	keylen = (len >> 1);
2356 	ret = aes_expandkey(&aes, key, keylen);
2358 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2362 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2363 		for (i = 0; i < keylen / sizeof(u32); i++) {
2364 			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
2365 				ctx->base.needs_inv = true;
2371 	for (i = 0; i < keylen / sizeof(u32); i++)
2372 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2374 	/* The other half is the tweak key */
2375 	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2377 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2381 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2382 		for (i = 0; i < keylen / sizeof(u32); i++) {
2383 			if (ctx->key[i + keylen / sizeof(u32)] !=
2384 			    cpu_to_le32(aes.key_enc[i])) {
2385 				ctx->base.needs_inv = true;
/* Tweak key words are stored directly after the cipher key words. */
2391 	for (i = 0; i < keylen / sizeof(u32); i++)
2392 		ctx->key[i + keylen / sizeof(u32)] =
2393 			cpu_to_le32(aes.key_enc[i]);
/* key_len covers both halves (cipher + tweak). */
2395 	ctx->key_len = keylen << 1;
/* Wipe expanded key material from the stack. */
2397 	memzero_explicit(&aes, sizeof(aes));
/* tfm init for xts(aes): generic skcipher init, then select AES + XTS mode. */
2401 static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2403 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2405 	safexcel_skcipher_cra_init(tfm);
2406 	ctx->alg  = SAFEXCEL_AES;
2408 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
/* XTS requires at least one full block; reject shorter requests before
 * queueing to hardware (rejection branch elided in this excerpt).
 */
2412 static int safexcel_encrypt_xts(struct skcipher_request *req)
2414 	if (req->cryptlen < XTS_BLOCK_SIZE)
2416 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2420 static int safexcel_decrypt_xts(struct skcipher_request *req)
2422 	if (req->cryptlen < XTS_BLOCK_SIZE)
2424 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
/* skcipher template: xts(aes). */
2428 struct safexcel_alg_template safexcel_alg_xts_aes = {
2429 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2430 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
2432 		.setkey = safexcel_skcipher_aesxts_setkey,
2433 		.encrypt = safexcel_encrypt_xts,
2434 		.decrypt = safexcel_decrypt_xts,
2435 		/* XTS actually uses 2 AES keys glued together */
2436 		.min_keysize = AES_MIN_KEY_SIZE * 2,
2437 		.max_keysize = AES_MAX_KEY_SIZE * 2,
2438 		.ivsize = XTS_BLOCK_SIZE,
2440 			.cra_name = "xts(aes)",
2441 			.cra_driver_name = "safexcel-xts-aes",
2442 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2443 			.cra_flags = CRYPTO_ALG_ASYNC |
2444 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2445 			.cra_blocksize = XTS_BLOCK_SIZE,
2446 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2448 			.cra_init = safexcel_skcipher_aes_xts_cra_init,
2449 			.cra_exit = safexcel_skcipher_cra_exit,
2450 			.cra_module = THIS_MODULE,
/* GCM setkey: store the AES key in ctx->key and derive the GHASH hash key
 * H = AES_K(0^128) using the software "hkaes" cipher; H is kept in
 * ctx->ipad (big-endian words).  With an active record cache, both the
 * cipher key and H are compared against the cached values and the context
 * is flagged for invalidation on mismatch.
 */
2455 static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2458 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2459 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2460 	struct safexcel_crypto_priv *priv = ctx->priv;
2461 	struct crypto_aes_ctx aes;
2462 	u32 hashkey[AES_BLOCK_SIZE >> 2];
2465 	ret = aes_expandkey(&aes, key, len);
2467 		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2468 		memzero_explicit(&aes, sizeof(aes));
2472 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2473 		for (i = 0; i < len / sizeof(u32); i++) {
2474 			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
2475 				ctx->base.needs_inv = true;
2481 	for (i = 0; i < len / sizeof(u32); i++)
2482 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2486 	/* Compute hash key by encrypting zeroes with cipher key */
2487 	crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
2488 	crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
2489 				CRYPTO_TFM_REQ_MASK);
2490 	ret = crypto_cipher_setkey(ctx->hkaes, key, len);
2491 	crypto_aead_set_flags(ctfm, crypto_cipher_get_flags(ctx->hkaes) &
2492 			      CRYPTO_TFM_RES_MASK);
/* Encrypt an all-zero block in place to obtain H. */
2496 	memset(hashkey, 0, AES_BLOCK_SIZE);
2497 	crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
2499 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2500 		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2501 			if (ctx->ipad[i] != cpu_to_be32(hashkey[i])) {
2502 				ctx->base.needs_inv = true;
2508 	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2509 		ctx->ipad[i] = cpu_to_be32(hashkey[i]);
/* Scrub key material (H and the expanded AES key) from the stack. */
2511 	memzero_explicit(hashkey, AES_BLOCK_SIZE);
2512 	memzero_explicit(&aes, sizeof(aes));
/* GCM tfm init: select GHASH authentication + XCM (GCM) cipher mode and
 * allocate the software AES cipher used to derive the hash key H.
 */
2516 static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2518 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2520 	safexcel_aead_cra_init(tfm);
2521 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2522 	ctx->state_sz = GHASH_BLOCK_SIZE;
2523 	ctx->xcm = EIP197_XCM_MODE_GCM;
2524 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2526 	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
2527 	if (IS_ERR(ctx->hkaes))
2528 		return PTR_ERR(ctx->hkaes);
/* GCM tfm exit: free the hash-key helper cipher, then common AEAD cleanup. */
2533 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2535 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2537 	crypto_free_cipher(ctx->hkaes);
2538 	safexcel_aead_cra_exit(tfm);
/* Validate GCM tag lengths via the generic helper. */
2541 static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
2542 					 unsigned int authsize)
2544 	return crypto_gcm_check_authsize(authsize);
/* AEAD template: gcm(aes). */
2547 struct safexcel_alg_template safexcel_alg_gcm = {
2548 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2549 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
2551 		.setkey = safexcel_aead_gcm_setkey,
2552 		.setauthsize = safexcel_aead_gcm_setauthsize,
2553 		.encrypt = safexcel_aead_encrypt,
2554 		.decrypt = safexcel_aead_decrypt,
2555 		.ivsize = GCM_AES_IV_SIZE,
2556 		.maxauthsize = GHASH_DIGEST_SIZE,
2558 			.cra_name = "gcm(aes)",
2559 			.cra_driver_name = "safexcel-gcm-aes",
2560 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2561 			.cra_flags = CRYPTO_ALG_ASYNC |
2562 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2564 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2566 			.cra_init = safexcel_aead_gcm_cra_init,
2567 			.cra_exit = safexcel_aead_gcm_cra_exit,
2568 			.cra_module = THIS_MODULE,
/* CCM setkey: store the AES key both as cipher key (ctx->key, LE words)
 * and, offset by two AES blocks, as CBC-MAC key material in ctx->ipad
 * (BE words).  The XCBC hash-engine variant is chosen from the key size.
 */
2573 static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2576 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2577 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2578 	struct safexcel_crypto_priv *priv = ctx->priv;
2579 	struct crypto_aes_ctx aes;
2582 	ret = aes_expandkey(&aes, key, len);
2584 		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2585 		memzero_explicit(&aes, sizeof(aes));
/* Record-cache coherency: invalidate on key change. */
2589 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2590 		for (i = 0; i < len / sizeof(u32); i++) {
2591 			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
2592 				ctx->base.needs_inv = true;
2598 	for (i = 0; i < len / sizeof(u32); i++) {
2599 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2600 		ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2601 			cpu_to_be32(aes.key_enc[i]);
/* Hash state = 2 whole AES blocks plus the key itself. */
2605 	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
2607 	if (len == AES_KEYSIZE_192)
2608 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2609 	else if (len == AES_KEYSIZE_256)
2610 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2612 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2614 	memzero_explicit(&aes, sizeof(aes));
/* CCM tfm init: default to XCBC128 CBC-MAC and XCM (CCM) cipher mode;
 * setkey may later upgrade hash_alg/state_sz for longer keys.
 */
2618 static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2620 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2622 	safexcel_aead_cra_init(tfm);
2623 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2624 	ctx->state_sz = 3 * AES_BLOCK_SIZE;
2625 	ctx->xcm = EIP197_XCM_MODE_CCM;
2626 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
/* Tag-size validation; the valid-size switch (from crypto/ccm.c) is
 * elided in this excerpt.
 */
2630 static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2631 					 unsigned int authsize)
2633 	/* Borrowed from crypto/ccm.c */
/* CCM entry points: iv[0] encodes L' (length-field size minus 1); only
 * 1..7 are valid per RFC 3610, so reject anything else before queueing.
 */
2650 static int safexcel_ccm_encrypt(struct aead_request *req)
2652 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2654 	if (req->iv[0] < 1 || req->iv[0] > 7)
2657 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2660 static int safexcel_ccm_decrypt(struct aead_request *req)
2662 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2664 	if (req->iv[0] < 1 || req->iv[0] > 7)
2667 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
/* AEAD template: ccm(aes). */
2670 struct safexcel_alg_template safexcel_alg_ccm = {
2671 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2672 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
2674 		.setkey = safexcel_aead_ccm_setkey,
2675 		.setauthsize = safexcel_aead_ccm_setauthsize,
2676 		.encrypt = safexcel_ccm_encrypt,
2677 		.decrypt = safexcel_ccm_decrypt,
2678 		.ivsize = AES_BLOCK_SIZE,
2679 		.maxauthsize = AES_BLOCK_SIZE,
2681 			.cra_name = "ccm(aes)",
2682 			.cra_driver_name = "safexcel-ccm-aes",
2683 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2684 			.cra_flags = CRYPTO_ALG_ASYNC |
2685 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2687 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2689 			.cra_init = safexcel_aead_ccm_cra_init,
2690 			.cra_exit = safexcel_aead_cra_exit,
2691 			.cra_module = THIS_MODULE,
/* Common ChaCha20 key install: copy the 256-bit key into ctx->key as
 * native-endian words read LE from the byte stream; with an active record
 * cache, flag the context for invalidation when the key changes.
 */
2696 static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2699 	struct safexcel_crypto_priv *priv = ctx->priv;
2702 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2703 		for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++) {
2705 			    get_unaligned_le32(key + i * sizeof(u32))) {
2706 				ctx->base.needs_inv = true;
2712 	for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++)
2713 		ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
2714 	ctx->key_len = CHACHA_KEY_SIZE;
/* skcipher setkey wrapper: only exact 256-bit keys are accepted. */
2717 static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2718 					     const u8 *key, unsigned int len)
2720 	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2722 	if (len != CHACHA_KEY_SIZE) {
2723 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2726 	safexcel_chacha20_setkey(ctx, key);
/* tfm init for plain chacha20: 256-bit key / 32-bit counter mode. */
2731 static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2733 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2735 	safexcel_skcipher_cra_init(tfm);
2736 	ctx->alg  = SAFEXCEL_CHACHA20;
2737 	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
/* skcipher template: chacha20. */
2741 struct safexcel_alg_template safexcel_alg_chacha20 = {
2742 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2743 	.algo_mask = SAFEXCEL_ALG_CHACHA20,
2745 		.setkey = safexcel_skcipher_chacha20_setkey,
2746 		.encrypt = safexcel_encrypt,
2747 		.decrypt = safexcel_decrypt,
2748 		.min_keysize = CHACHA_KEY_SIZE,
2749 		.max_keysize = CHACHA_KEY_SIZE,
2750 		.ivsize = CHACHA_IV_SIZE,
2752 			.cra_name = "chacha20",
2753 			.cra_driver_name = "safexcel-chacha20",
2754 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2755 			.cra_flags = CRYPTO_ALG_ASYNC |
2756 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2758 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2760 			.cra_init = safexcel_skcipher_chacha20_cra_init,
2761 			.cra_exit = safexcel_skcipher_cra_exit,
2762 			.cra_module = THIS_MODULE,
/* ChaCha20-Poly1305 setkey: for the ESP (rfc7539esp) variant the last 4
 * key bytes are the implicit nonce, stripped off and stashed in
 * ctx->nonce before the 256-bit ChaCha key is installed.
 * NOTE(review): *(u32 *)(key + len) reads the nonce from a possibly
 * unaligned buffer — relies on the platform tolerating it or on
 * setkey buffers being aligned; verify against asm/unaligned.h usage.
 */
2767 static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2768 				    const u8 *key, unsigned int len)
2770 	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2772 	if (ctx->aead  == EIP197_AEAD_TYPE_IPSEC_ESP &&
2773 	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2774 		/* ESP variant has nonce appended to key */
2775 		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2776 		ctx->nonce = *(u32 *)(key + len);
2778 	if (len != CHACHA_KEY_SIZE) {
2779 		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2782 	safexcel_chacha20_setkey(ctx, key);
/* Poly1305 has exactly one valid tag size. */
2787 static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2788 					 unsigned int authsize)
2790 	if (authsize != POLY1305_DIGEST_SIZE)
/* Common encrypt/decrypt path: large-enough requests go to hardware;
 * small or zero-length (AAD+payload) requests are re-keyed onto the
 * software fallback AEAD, since the engine cannot handle them.
 */
2795 static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
2796 					  enum safexcel_cipher_direction dir)
2798 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2799 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
2800 	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
2801 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	/* The request ctx doubles as the fallback subrequest (reqsize was
	 * set to cover both in the fallback cra_init). */
2802 	struct aead_request *subreq = aead_request_ctx(req);
2803 	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
2807 	 * Instead of wasting time detecting umpteen silly corner cases,
2808 	 * just dump all "small" requests to the fallback implementation.
2809 	 * HW would not be faster on such small requests anyway.
2811 	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
2812 		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
2813 		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
2814 		return safexcel_queue_req(&req->base, creq, dir);
2817 	/* HW cannot do full (AAD+payload) zero length, use fallback */
	/* Rebuild the raw key bytes from the native-endian words. */
2818 	for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++)
2819 		key[i] = cpu_to_le32(ctx->key[i]);
2820 	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
2821 		/* ESP variant has nonce appended to the key */
2822 		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
2823 		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2825 					 EIP197_AEAD_IPSEC_NONCE_SIZE);
2827 		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2831 	crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
2832 	crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
2833 			      CRYPTO_TFM_REQ_MASK);
/* Mirror the request onto the fallback tfm and run it synchronously
 * through the normal AEAD API.
 */
2837 	aead_request_set_tfm(subreq, ctx->fback);
2838 	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
2840 	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
2842 	aead_request_set_ad(subreq, req->assoclen);
2844 	return (dir ==  SAFEXCEL_ENCRYPT) ?
2845 		crypto_aead_encrypt(subreq) :
2846 		crypto_aead_decrypt(subreq);
2849 static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2851 	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2854 static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2856 	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
/* Shared AEAD-with-fallback init: allocate a software implementation of
 * the same cra_name and size the request ctx to hold either our own
 * request state or a full fallback subrequest.
 */
2859 static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
2861 	struct crypto_aead *aead = __crypto_aead_cast(tfm);
2862 	struct aead_alg *alg = crypto_aead_alg(aead);
2863 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2865 	safexcel_aead_cra_init(tfm);
2867 	/* Allocate fallback implementation */
2868 	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
2870 				       CRYPTO_ALG_NEED_FALLBACK);
2871 	if (IS_ERR(ctx->fback))
2872 		return PTR_ERR(ctx->fback);
2874 	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
2875 					  sizeof(struct aead_request) +
2876 					  crypto_aead_reqsize(ctx->fback)));
/* rfc7539(chacha20,poly1305) init: ChaCha in 256/32 mode with on-the-fly
 * Poly1305 one-time-key calculation; no precomputed hash state needed.
 */
2881 static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2883 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2885 	safexcel_aead_fallback_cra_init(tfm);
2886 	ctx->alg  = SAFEXCEL_CHACHA20;
2887 	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2888 		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2889 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2890 	ctx->state_sz = 0; /* Precomputed by HW */
/* Shared AEAD-with-fallback exit: drop the fallback tfm first. */
2894 static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2896 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2898 	crypto_free_aead(ctx->fback);
2899 	safexcel_aead_cra_exit(tfm);
/* AEAD template: rfc7539(chacha20,poly1305). */
2902 struct safexcel_alg_template safexcel_alg_chachapoly = {
2903 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2904 	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
2906 		.setkey = safexcel_aead_chachapoly_setkey,
2907 		.setauthsize = safexcel_aead_chachapoly_setauthsize,
2908 		.encrypt = safexcel_aead_chachapoly_encrypt,
2909 		.decrypt = safexcel_aead_chachapoly_decrypt,
2910 		.ivsize = CHACHAPOLY_IV_SIZE,
2911 		.maxauthsize = POLY1305_DIGEST_SIZE,
2913 			.cra_name = "rfc7539(chacha20,poly1305)",
2914 			.cra_driver_name = "safexcel-chacha20-poly1305",
2915 			/* +1 to put it above HW chacha + SW poly */
2916 			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
2917 			.cra_flags = CRYPTO_ALG_ASYNC |
2918 				     CRYPTO_ALG_KERN_DRIVER_ONLY |
2919 				     CRYPTO_ALG_NEED_FALLBACK,
2921 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2923 			.cra_init = safexcel_aead_chachapoly_cra_init,
2924 			.cra_exit = safexcel_aead_fallback_cra_exit,
2925 			.cra_module = THIS_MODULE,
/* rfc7539esp variant init: regular chachapoly init plus the IPsec-ESP
 * AEAD flavour flag (implicit nonce carried in the key, shorter IV).
 */
2930 static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
2932 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2935 	ret = safexcel_aead_chachapoly_cra_init(tfm);
2936 	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
/* AEAD template: rfc7539esp(chacha20,poly1305); ivsize excludes the
 * implicit nonce bytes which come from the key.
 */
2940 struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
2941 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2942 	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
2944 		.setkey = safexcel_aead_chachapoly_setkey,
2945 		.setauthsize = safexcel_aead_chachapoly_setauthsize,
2946 		.encrypt = safexcel_aead_chachapoly_encrypt,
2947 		.decrypt = safexcel_aead_chachapoly_decrypt,
2948 		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
2949 		.maxauthsize = POLY1305_DIGEST_SIZE,
2951 			.cra_name = "rfc7539esp(chacha20,poly1305)",
2952 			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
2953 			/* +1 to put it above HW chacha + SW poly */
2954 			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
2955 			.cra_flags = CRYPTO_ALG_ASYNC |
2956 				     CRYPTO_ALG_KERN_DRIVER_ONLY |
2957 				     CRYPTO_ALG_NEED_FALLBACK,
2959 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2961 			.cra_init = safexcel_aead_chachapolyesp_cra_init,
2962 			.cra_exit = safexcel_aead_fallback_cra_exit,
2963 			.cra_module = THIS_MODULE,
/* SM4 setkey (all skcipher modes): accept only 128-bit keys, store as
 * native-endian words read LE from the byte stream, invalidate a cached
 * context record on key change.
 */
2968 static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
2969 					const u8 *key, unsigned int len)
2971 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2972 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2973 	struct safexcel_crypto_priv *priv = ctx->priv;
2976 	if (len != SM4_KEY_SIZE) {
2977 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2981 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2982 		for (i = 0; i < SM4_KEY_SIZE / sizeof(u32); i++) {
2984 			    get_unaligned_le32(key + i * sizeof(u32))) {
2985 				ctx->base.needs_inv = true;
2991 	for (i = 0; i < SM4_KEY_SIZE / sizeof(u32); i++)
2992 		ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
2993 	ctx->key_len = SM4_KEY_SIZE;
/* Block-mode entry points: enforce whole-block lengths in software. */
2998 static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3000 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3001 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3004 		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3008 static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3010 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3011 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3014 		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
/* tfm init for ecb(sm4). */
3018 static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3020 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3022 	safexcel_skcipher_cra_init(tfm);
3023 	ctx->alg  = SAFEXCEL_SM4;
3024 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
/* skcipher template: ecb(sm4). */
3028 struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
3029 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3030 	.algo_mask = SAFEXCEL_ALG_SM4,
3032 		.setkey = safexcel_skcipher_sm4_setkey,
3033 		.encrypt = safexcel_sm4_blk_encrypt,
3034 		.decrypt = safexcel_sm4_blk_decrypt,
3035 		.min_keysize = SM4_KEY_SIZE,
3036 		.max_keysize = SM4_KEY_SIZE,
3038 			.cra_name = "ecb(sm4)",
3039 			.cra_driver_name = "safexcel-ecb-sm4",
3040 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3041 			.cra_flags = CRYPTO_ALG_ASYNC |
3042 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3043 			.cra_blocksize = SM4_BLOCK_SIZE,
3044 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3046 			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
3047 			.cra_exit = safexcel_skcipher_cra_exit,
3048 			.cra_module = THIS_MODULE,
/* tfm init for cbc(sm4). */
3053 static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3055 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3057 	safexcel_skcipher_cra_init(tfm);
3058 	ctx->alg  = SAFEXCEL_SM4;
3059 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
/* skcipher template: cbc(sm4); uses the blocksize-checking entry points. */
3063 struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
3064 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3065 	.algo_mask = SAFEXCEL_ALG_SM4,
3067 		.setkey = safexcel_skcipher_sm4_setkey,
3068 		.encrypt = safexcel_sm4_blk_encrypt,
3069 		.decrypt = safexcel_sm4_blk_decrypt,
3070 		.min_keysize = SM4_KEY_SIZE,
3071 		.max_keysize = SM4_KEY_SIZE,
3072 		.ivsize = SM4_BLOCK_SIZE,
3074 			.cra_name = "cbc(sm4)",
3075 			.cra_driver_name = "safexcel-cbc-sm4",
3076 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3077 			.cra_flags = CRYPTO_ALG_ASYNC |
3078 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3079 			.cra_blocksize = SM4_BLOCK_SIZE,
3080 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3082 			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
3083 			.cra_exit = safexcel_skcipher_cra_exit,
3084 			.cra_module = THIS_MODULE,
/* tfm init for ofb(sm4). */
3089 static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3091 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3093 	safexcel_skcipher_cra_init(tfm);
3094 	ctx->alg  = SAFEXCEL_SM4;
3095 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
/* skcipher template: ofb(sm4) — stream mode, so no block-length check
 * and the plain safexcel_encrypt/decrypt entry points are used.
 */
3099 struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
3100 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3101 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3103 		.setkey = safexcel_skcipher_sm4_setkey,
3104 		.encrypt = safexcel_encrypt,
3105 		.decrypt = safexcel_decrypt,
3106 		.min_keysize = SM4_KEY_SIZE,
3107 		.max_keysize = SM4_KEY_SIZE,
3108 		.ivsize = SM4_BLOCK_SIZE,
3110 			.cra_name = "ofb(sm4)",
3111 			.cra_driver_name = "safexcel-ofb-sm4",
3112 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3113 			.cra_flags = CRYPTO_ALG_ASYNC |
3114 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3116 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3118 			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
3119 			.cra_exit = safexcel_skcipher_cra_exit,
3120 			.cra_module = THIS_MODULE,
/* tfm init for cfb(sm4). */
3125 static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3127 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3129 	safexcel_skcipher_cra_init(tfm);
3130 	ctx->alg  = SAFEXCEL_SM4;
3131 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
/* skcipher template: cfb(sm4) — stream mode like OFB above. */
3135 struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
3136 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3137 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3139 		.setkey = safexcel_skcipher_sm4_setkey,
3140 		.encrypt = safexcel_encrypt,
3141 		.decrypt = safexcel_decrypt,
3142 		.min_keysize = SM4_KEY_SIZE,
3143 		.max_keysize = SM4_KEY_SIZE,
3144 		.ivsize = SM4_BLOCK_SIZE,
3146 			.cra_name = "cfb(sm4)",
3147 			.cra_driver_name = "safexcel-cfb-sm4",
3148 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3149 			.cra_flags = CRYPTO_ALG_ASYNC |
3150 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3152 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3154 			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
3155 			.cra_exit = safexcel_skcipher_cra_exit,
3156 			.cra_module = THIS_MODULE,
/* RFC3686 CTR-SM4 setkey: strip the trailing 4-byte nonce into ctx->nonce
 * and install the remaining bytes as the SM4 key.
 * NOTE(review): the nonce load dereferences an unaligned u32 pointer —
 * same caveat as the other *(u32 *) nonce reads in this file.
 */
3161 static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3162 					   const u8 *key, unsigned int len)
3164 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3165 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3167 	/* last 4 bytes of key are the nonce! */
3168 	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3169 	/* exclude the nonce here */
3170 	len -= CTR_RFC3686_NONCE_SIZE;
3172 	return safexcel_skcipher_sm4_setkey(ctfm, key, len);
/* tfm init for rfc3686(ctr(sm4)). */
3175 static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3177 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3179 	safexcel_skcipher_cra_init(tfm);
3180 	ctx->alg  = SAFEXCEL_SM4;
3181 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
/* skcipher template: rfc3686(ctr(sm4)); key sizes include the nonce. */
3185 struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
3186 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3187 	.algo_mask = SAFEXCEL_ALG_SM4,
3189 		.setkey = safexcel_skcipher_sm4ctr_setkey,
3190 		.encrypt = safexcel_encrypt,
3191 		.decrypt = safexcel_decrypt,
3192 		/* Add nonce size */
3193 		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3194 		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3195 		.ivsize = CTR_RFC3686_IV_SIZE,
3197 			.cra_name = "rfc3686(ctr(sm4))",
3198 			.cra_driver_name = "safexcel-ctr-sm4",
3199 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3200 			.cra_flags = CRYPTO_ALG_ASYNC |
3201 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3203 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3205 			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
3206 			.cra_exit = safexcel_skcipher_cra_exit,
3207 			.cra_module = THIS_MODULE,
/* AEAD SM4 block-mode entry points: enforce whole-block payload lengths
 * in software; on decrypt the appended auth tag is excluded from the
 * length check.
 */
3212 static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3214 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3215 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3218 	return safexcel_queue_req(&req->base, aead_request_ctx(req),
3222 static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3224 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3226 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3227 	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3230 	return safexcel_queue_req(&req->base, aead_request_ctx(req),
/* tfm init for authenc(hmac(sha1),cbc(sm4)). */
3234 static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3236 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3238 	safexcel_aead_cra_init(tfm);
3239 	ctx->alg = SAFEXCEL_SM4;
3240 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3241 	ctx->state_sz = SHA1_DIGEST_SIZE;
/* AEAD template: authenc(hmac(sha1),cbc(sm4)). */
3245 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
3246 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3247 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3249 		.setkey = safexcel_aead_setkey,
3250 		.encrypt = safexcel_aead_sm4_blk_encrypt,
3251 		.decrypt = safexcel_aead_sm4_blk_decrypt,
3252 		.ivsize = SM4_BLOCK_SIZE,
3253 		.maxauthsize = SHA1_DIGEST_SIZE,
3255 			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
3256 			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
3257 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3258 			.cra_flags = CRYPTO_ALG_ASYNC |
3259 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3260 			.cra_blocksize = SM4_BLOCK_SIZE,
3261 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3263 			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
3264 			.cra_exit = safexcel_aead_cra_exit,
3265 			.cra_module = THIS_MODULE,
/* Fallback-synchronized setkey: program the software fallback first and
 * only on its success program our own context, so both tfms always agree.
 */
3270 static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3271 					 const u8 *key, unsigned int len)
3273 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3274 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3276 	/* Keep fallback cipher synchronized */
3277 	return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3278 	       safexcel_aead_setkey(ctfm, key, len);
/* Propagate tag-size changes to the fallback tfm. */
3281 static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
3282 					      unsigned int authsize)
3284 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3285 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3287 	/* Keep fallback cipher synchronized */
3288 	return crypto_aead_setauthsize(ctx->fback, authsize);
/* Run a request on the software fallback: mirror src/dst/AAD/callback onto
 * the subrequest stored in the request ctx and dispatch by direction.
 */
3291 static int safexcel_aead_fallback_crypt(struct aead_request *req,
3292 					enum safexcel_cipher_direction dir)
3294 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
3295 	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
3296 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3297 	struct aead_request *subreq = aead_request_ctx(req);
3299 	aead_request_set_tfm(subreq, ctx->fback);
3300 	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3302 	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
3304 	aead_request_set_ad(subreq, req->assoclen);
3306 	return (dir ==  SAFEXCEL_ENCRYPT) ?
3307 		crypto_aead_encrypt(subreq) :
3308 		crypto_aead_decrypt(subreq);
/* SM4-CBC + SM3 encrypt: reject non-block-multiple payloads, send
 * non-empty requests to hardware, and route fully-empty (no AAD, no
 * payload) requests to the software fallback which can handle them.
 */
3311 static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
3313 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3315 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3316 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3318 	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
3319 		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3321 	/* HW cannot do full (AAD+payload) zero length, use fallback */
3322 	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
/* Decrypt counterpart: the tag is excluded from the block-length check
 * and from the "empty input" decision.
 */
3325 static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
3327 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3328 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3330 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3331 	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3333 	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
3334 		/* If input length > 0 only */
3335 		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3337 	/* HW cannot do full (AAD+payload) zero length, use fallback */
3338 	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
/* tfm init for authenc(hmac(sm3),cbc(sm4)) with software fallback. */
3341 static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3343 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3345 	safexcel_aead_fallback_cra_init(tfm);
3346 	ctx->alg = SAFEXCEL_SM4;
3347 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3348 	ctx->state_sz = SM3_DIGEST_SIZE;
/* AEAD template: authenc(hmac(sm3),cbc(sm4)) with NEED_FALLBACK. */
3352 struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
3353 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3354 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
3356 		.setkey = safexcel_aead_fallback_setkey,
3357 		.setauthsize = safexcel_aead_fallback_setauthsize,
3358 		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
3359 		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
3360 		.ivsize = SM4_BLOCK_SIZE,
3361 		.maxauthsize = SM3_DIGEST_SIZE,
3363 			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
3364 			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
3365 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3366 			.cra_flags = CRYPTO_ALG_ASYNC |
3367 				     CRYPTO_ALG_KERN_DRIVER_ONLY |
3368 				     CRYPTO_ALG_NEED_FALLBACK,
3369 			.cra_blocksize = SM4_BLOCK_SIZE,
3370 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3372 			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
3373 			.cra_exit = safexcel_aead_fallback_cra_exit,
3374 			.cra_module = THIS_MODULE,
/* CTR variants of the SM4 AEADs: reuse the CBC inits, then switch the
 * cipher mode to CTR with loaded nonce (RFC3686).
 */
3379 static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
3381 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3383 	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
3384 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
/* AEAD template: authenc(hmac(sha1),rfc3686(ctr(sm4))). */
3388 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
3389 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3390 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3392 		.setkey = safexcel_aead_setkey,
3393 		.encrypt = safexcel_aead_encrypt,
3394 		.decrypt = safexcel_aead_decrypt,
3395 		.ivsize = CTR_RFC3686_IV_SIZE,
3396 		.maxauthsize = SHA1_DIGEST_SIZE,
3398 			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
3399 			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
3400 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3401 			.cra_flags = CRYPTO_ALG_ASYNC |
3402 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3404 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3406 			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
3407 			.cra_exit = safexcel_aead_cra_exit,
3408 			.cra_module = THIS_MODULE,
/* Same CTR override on top of the SM3 CBC init (keeps the fallback). */
3413 static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
3415 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3417 	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
3418 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
/* AEAD template: authenc(hmac(sm3),rfc3686(ctr(sm4))). */
3422 struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
3423 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3424 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
3426 		.setkey = safexcel_aead_setkey,
3427 		.encrypt = safexcel_aead_encrypt,
3428 		.decrypt = safexcel_aead_decrypt,
3429 		.ivsize = CTR_RFC3686_IV_SIZE,
3430 		.maxauthsize = SM3_DIGEST_SIZE,
3432 			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
3433 			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
3434 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3435 			.cra_flags = CRYPTO_ALG_ASYNC |
3436 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3438 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3440 			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
3441 			.cra_exit = safexcel_aead_cra_exit,
3442 			.cra_module = THIS_MODULE,
/* RFC4106 GCM setkey: strip the trailing 4-byte salt (nonce) into
 * ctx->nonce, then program the remaining bytes via the plain GCM setkey.
 * NOTE(review): unaligned *(u32 *) nonce read, as elsewhere in this file.
 */
3447 static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3450 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3451 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3453 	/* last 4 bytes of key are the nonce! */
3454 	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3456 	len -= CTR_RFC3686_NONCE_SIZE;
3457 	return safexcel_aead_gcm_setkey(ctfm, key, len);
/* RFC4106 restricts the valid tag sizes; delegate to the generic check. */
3460 static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
3461 					    unsigned int authsize)
3463 	return crypto_rfc4106_check_authsize(authsize);
/* IPsec entry points: validate assoclen per RFC4106 before queueing. */
3466 static int safexcel_rfc4106_encrypt(struct aead_request *req)
3468 	return crypto_ipsec_check_assoclen(req->assoclen) ?:
3469 	       safexcel_aead_encrypt(req);
3472 static int safexcel_rfc4106_decrypt(struct aead_request *req)
3474 	return crypto_ipsec_check_assoclen(req->assoclen) ?:
3475 	       safexcel_aead_decrypt(req);
/* tfm init: plain GCM init plus the IPsec-ESP AEAD flavour. */
3478 static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
3480 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3483 	ret = safexcel_aead_gcm_cra_init(tfm);
3484 	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
/* AEAD template: rfc4106(gcm(aes)). */
3488 struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3489 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3490 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3492 		.setkey = safexcel_rfc4106_gcm_setkey,
3493 		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
3494 		.encrypt = safexcel_rfc4106_encrypt,
3495 		.decrypt = safexcel_rfc4106_decrypt,
3496 		.ivsize = GCM_RFC4106_IV_SIZE,
3497 		.maxauthsize = GHASH_DIGEST_SIZE,
3499 			.cra_name = "rfc4106(gcm(aes))",
3500 			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
3501 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3502 			.cra_flags = CRYPTO_ALG_ASYNC |
3503 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3505 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3507 			.cra_init = safexcel_rfc4106_gcm_cra_init,
3508 			.cra_exit = safexcel_aead_gcm_cra_exit,
3513 static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3514 unsigned int authsize)
3516 if (authsize != GHASH_DIGEST_SIZE)
3522 static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
3524 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3527 ret = safexcel_aead_gcm_cra_init(tfm);
3528 ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
3532 struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3533 .type = SAFEXCEL_ALG_TYPE_AEAD,
3534 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3536 .setkey = safexcel_rfc4106_gcm_setkey,
3537 .setauthsize = safexcel_rfc4543_gcm_setauthsize,
3538 .encrypt = safexcel_rfc4106_encrypt,
3539 .decrypt = safexcel_rfc4106_decrypt,
3540 .ivsize = GCM_RFC4543_IV_SIZE,
3541 .maxauthsize = GHASH_DIGEST_SIZE,
3543 .cra_name = "rfc4543(gcm(aes))",
3544 .cra_driver_name = "safexcel-rfc4543-gcm-aes",
3545 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3546 .cra_flags = CRYPTO_ALG_ASYNC |
3547 CRYPTO_ALG_KERN_DRIVER_ONLY,
3549 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3551 .cra_init = safexcel_rfc4543_gcm_cra_init,
3552 .cra_exit = safexcel_aead_gcm_cra_exit,
3557 static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
3560 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3561 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3563 /* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
3564 *(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
3565 /* last 3 bytes of key are the nonce! */
3566 memcpy((u8 *)&ctx->nonce + 1, key + len -
3567 EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
3568 EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);
3570 len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
3571 return safexcel_aead_ccm_setkey(ctfm, key, len);
static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c: RFC4309 allows ICV of 8, 12 or 16 */
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
3590 static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3592 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3594 /* Borrowed from crypto/ccm.c */
3595 if (req->assoclen != 16 && req->assoclen != 20)
3598 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3601 static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3603 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3605 /* Borrowed from crypto/ccm.c */
3606 if (req->assoclen != 16 && req->assoclen != 20)
3609 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3612 static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
3614 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3617 ret = safexcel_aead_ccm_cra_init(tfm);
3618 ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
3622 struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
3623 .type = SAFEXCEL_ALG_TYPE_AEAD,
3624 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
3626 .setkey = safexcel_rfc4309_ccm_setkey,
3627 .setauthsize = safexcel_rfc4309_ccm_setauthsize,
3628 .encrypt = safexcel_rfc4309_ccm_encrypt,
3629 .decrypt = safexcel_rfc4309_ccm_decrypt,
3630 .ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
3631 .maxauthsize = AES_BLOCK_SIZE,
3633 .cra_name = "rfc4309(ccm(aes))",
3634 .cra_driver_name = "safexcel-rfc4309-ccm-aes",
3635 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3636 .cra_flags = CRYPTO_ALG_ASYNC |
3637 CRYPTO_ALG_KERN_DRIVER_ONLY,
3639 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3641 .cra_init = safexcel_rfc4309_ccm_cra_init,
3642 .cra_exit = safexcel_aead_cra_exit,
3643 .cra_module = THIS_MODULE,