crypto: inside-secure - fix spelling mistake "algorithmn" -> "algorithm"
[linux-2.6-microblaze.git] / drivers / crypto / inside-secure / safexcel_cipher.c
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) 2017 Marvell
4  *
5  * Antoine Tenart <antoine.tenart@free-electrons.com>
6  */
7
8 #include <asm/unaligned.h>
9 #include <linux/device.h>
10 #include <linux/dma-mapping.h>
11 #include <linux/dmapool.h>
12 #include <crypto/aead.h>
13 #include <crypto/aes.h>
14 #include <crypto/authenc.h>
15 #include <crypto/chacha.h>
16 #include <crypto/ctr.h>
17 #include <crypto/internal/des.h>
18 #include <crypto/gcm.h>
19 #include <crypto/ghash.h>
20 #include <crypto/poly1305.h>
21 #include <crypto/sha.h>
22 #include <crypto/sm3.h>
23 #include <crypto/sm4.h>
24 #include <crypto/xts.h>
25 #include <crypto/skcipher.h>
26 #include <crypto/internal/aead.h>
27 #include <crypto/internal/skcipher.h>
28
29 #include "safexcel.h"
30
/* Direction of the cipher operation requested for a single request. */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};
35
/* Cipher algorithms this driver can program into the engine context. */
enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};
43
/*
 * Per-transform (tfm) context, shared by the skcipher and AEAD paths.
 * Holds everything needed to (re)build the engine context record.
 */
struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;		/* CONTEXT_CONTROL_CRYPTO_MODE_* for control1 */
	enum safexcel_cipher_alg alg;
	char aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
	char xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */

	__le32 key[16];		/* cipher key material, engine endianness */
	u32 nonce;		/* 32 bit nonce (e.g. RFC3686 CTR: last 4 key bytes) */
	/* xcm is a shift: key_len >> xts gives the effective AES key size */
	unsigned int key_len, xts;

	/* All the below is AEAD specific */
	u32 hash_alg;		/* CONTEXT_CONTROL_CRYPTO_ALG_* hash selector */
	u32 state_sz;		/* size of one hash state in bytes */
	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];	/* HMAC inner digest */
	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];	/* HMAC outer digest */

	/* NOTE(review): presumably the AES tfm used to derive the GCM hash
	 * key and the software fallback aead — usage is outside this chunk,
	 * confirm against the init/setkey code further down the file.
	 */
	struct crypto_cipher *hkaes;
	struct crypto_aead *fback;
};
66
/* Per-request state kept in the crypto request context. */
struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated to the request */
	unsigned int rdescs;
	/* NOTE(review): presumably "this request must invalidate the cached
	 * context record first" — consumer is outside this chunk, confirm.
	 */
	bool needs_inv;
	/* scatterlist entry counts for the source/destination buffers */
	int  nr_src, nr_dst;
};
74
/*
 * Fill the IV/nonce portion of the command descriptor token according to
 * the configured cipher/mode combination, and set the token option bits
 * telling the engine how many token dwords the IV occupies (2 or 4).
 * Modes with an implicit nonce (RFC3686 CTR, IPsec ESP) combine the stored
 * context nonce with the per-request IV; the remaining modes copy the IV
 * verbatim.
 */
static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				  struct safexcel_command_desc *cdesc)
{
	u32 block_sz = 0;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);

		if (ctx->alg == SAFEXCEL_CHACHA20 ||
		    ctx->xcm == EIP197_XCM_MODE_CCM) {
			/* 32 bit counter, starting at 0 */
			cdesc->control_data.token[3] = 0;
		} else {
			/* 32 bit counter, start at 1 (big endian!) */
			cdesc->control_data.token[3] = cpu_to_be32(1);
		}

		return;
	} else if (ctx->xcm == EIP197_XCM_MODE_GCM ||
		   (ctx->aead && ctx->alg == SAFEXCEL_CHACHA20)) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

		/* 96 bit IV part */
		memcpy(&cdesc->control_data.token[0], iv, 12);

		if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* 32 bit counter, starting at 0 */
			cdesc->control_data.token[3] = 0;
		} else {
			/* 32 bit counter, start at 1 (big endian!) */
			cdesc->control_data.token[3] = cpu_to_be32(1);
		}

		return;
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		/* Plain (non-AEAD) ChaCha20: IV = 32 bit counter + 96 bit nonce */
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

		/* 96 bit nonce part */
		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
		/* 32 bit counter */
		cdesc->control_data.token[3] = *(u32 *)iv;

		return;
	} else if (ctx->xcm == EIP197_XCM_MODE_CCM) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

		/* Variable length IV part; iv[0] encodes the counter width */
		memcpy(&cdesc->control_data.token[0], iv, 15 - iv[0]);
		/* Start variable length counter at 0 */
		memset((u8 *)&cdesc->control_data.token[0] + 15 - iv[0],
		       0, iv[0] + 1);

		return;
	}

	/* Generic block cipher modes: copy one block worth of IV (ECB: none) */
	if (ctx->mode != CONTEXT_CONTROL_CRYPTO_MODE_ECB) {
		switch (ctx->alg) {
		case SAFEXCEL_DES:
			block_sz = DES_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_3DES:
			block_sz = DES3_EDE_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_SM4:
			block_sz = SM4_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_AES:
			block_sz = AES_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
			break;
		default:
			break;
		}
		memcpy(cdesc->control_data.token, iv, block_sz);
	}
}
160
161 static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
162                                     struct safexcel_command_desc *cdesc,
163                                     u32 length)
164 {
165         struct safexcel_token *token;
166
167         safexcel_cipher_token(ctx, iv, cdesc);
168
169         /* skip over worst case IV of 4 dwords, no need to be exact */
170         token = (struct safexcel_token *)(cdesc->control_data.token + 4);
171
172         token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
173         token[0].packet_length = length;
174         token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
175                         EIP197_TOKEN_STAT_LAST_HASH;
176         token[0].instructions = EIP197_TOKEN_INS_LAST |
177                                 EIP197_TOKEN_INS_TYPE_CRYPTO |
178                                 EIP197_TOKEN_INS_TYPE_OUTPUT;
179 }
180
/*
 * Build the command token for an AEAD request. The instruction sequence is
 * laid out at FIXED slots within the token buffer (indices 0..14 relative to
 * the chosen base), aligned so that its end coincides with the end of the
 * token area: encrypt needs 14 slots (digest insert), decrypt 15 (digest
 * retrieve + verify). Slots are written out of order below — the slot index
 * is what matters, not the order of the assignments.
 */
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *token;

	safexcel_cipher_token(ctx, iv, cdesc);

	if (direction == SAFEXCEL_ENCRYPT) {
		/* align end of instruction sequence to end of token */
		token = (struct safexcel_token *)(cdesc->control_data.token +
			 EIP197_MAX_TOKENS - 14);

		/* Final instruction: append the computed digest (ICV) */
		token[13].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[13].packet_length = digestsize;
		token[13].stat = EIP197_TOKEN_STAT_LAST_HASH |
				 EIP197_TOKEN_STAT_LAST_PACKET;
		token[13].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
					 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* Decrypt: the trailing digest is input, not payload */
		cryptlen -= digestsize;

		/* align end of instruction sequence to end of token */
		token = (struct safexcel_token *)(cdesc->control_data.token +
			 EIP197_MAX_TOKENS - 15);

		/* Pull the received digest from the input stream ... */
		token[13].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		token[13].packet_length = digestsize;
		token[13].stat = EIP197_TOKEN_STAT_LAST_HASH |
				 EIP197_TOKEN_STAT_LAST_PACKET;
		token[13].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

		/* ... and have the engine verify it against the computed one */
		token[14].opcode = EIP197_TOKEN_OPCODE_VERIFY;
		token[14].packet_length = digestsize |
					  EIP197_TOKEN_HASH_RESULT_VERIFY;
		token[14].stat = EIP197_TOKEN_STAT_LAST_HASH |
				 EIP197_TOKEN_STAT_LAST_PACKET;
		token[14].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* For ESP mode (and not GMAC), skip over the IV */
		token[8].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		token[8].packet_length = EIP197_AEAD_IPSEC_IV_SIZE;

		assoclen -= EIP197_AEAD_IPSEC_IV_SIZE;
	}

	/* Associated data: hashed only, never encrypted */
	token[6].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[6].packet_length = assoclen;
	token[6].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_HASH;

	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
		/* Payload: encrypted (or decrypted) and hashed */
		token[11].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		token[11].packet_length = cryptlen;
		token[11].stat = EIP197_TOKEN_STAT_LAST_HASH;
		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
			token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;
			/* Do not send to crypt engine in case of GMAC */
			token[11].instructions = EIP197_TOKEN_INS_LAST |
						 EIP197_TOKEN_INS_TYPE_HASH |
						 EIP197_TOKEN_INS_TYPE_OUTPUT;
		} else {
			token[11].instructions = EIP197_TOKEN_INS_LAST |
						 EIP197_TOKEN_INS_TYPE_CRYPTO |
						 EIP197_TOKEN_INS_TYPE_HASH |
						 EIP197_TOKEN_INS_TYPE_OUTPUT;
		}
	} else if (ctx->xcm != EIP197_XCM_MODE_CCM) {
		/* No payload: the AAD instruction closes the hash stream */
		token[6].stat = EIP197_TOKEN_STAT_LAST_HASH;
	}

	if (!ctx->xcm)
		return;

	/* GCM/CCM only from here on: handle the block-sized hash result */
	token[9].opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
	token[9].packet_length = 0;
	token[9].instructions = AES_BLOCK_SIZE;

	token[10].opcode = EIP197_TOKEN_OPCODE_INSERT;
	token[10].packet_length = AES_BLOCK_SIZE;
	token[10].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				 EIP197_TOKEN_INS_TYPE_CRYPTO;

	if (ctx->xcm != EIP197_XCM_MODE_GCM) {
		/* CCM: build the CBC-MAC B0 block and AAD length field */
		u8 *final_iv = (u8 *)cdesc->control_data.token;
		u8 *cbcmaciv = (u8 *)&token[1];
		u32 *aadlen = (u32 *)&token[5];

		/* Construct IV block B0 for the CBC-MAC */
		token[0].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[0].packet_length = AES_BLOCK_SIZE +
					 ((assoclen > 0) << 1);
		token[0].instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
					EIP197_TOKEN_INS_TYPE_HASH;
		/* Variable length IV part */
		memcpy(cbcmaciv, final_iv, 15 - final_iv[0]);
		/* fixup flags byte: Adata bit and encoded tag length */
		cbcmaciv[0] |= ((assoclen > 0) << 6) | ((digestsize - 2) << 2);
		/* Clear upper bytes of variable message length to 0 */
		memset(cbcmaciv + 15 - final_iv[0], 0, final_iv[0] - 1);
		/* insert lower 2 bytes of message length */
		cbcmaciv[14] = cryptlen >> 8;
		cbcmaciv[15] = cryptlen & 255;

		if (assoclen) {
			/* 16 bit big-endian AAD length prefix per RFC3610 */
			*aadlen = cpu_to_le32(cpu_to_be16(assoclen));
			assoclen += 2;
		}

		token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;

		/* Align AAD data towards hash engine */
		token[7].opcode = EIP197_TOKEN_OPCODE_INSERT;
		assoclen &= 15;
		token[7].packet_length = assoclen ? 16 - assoclen : 0;

		if (likely(cryptlen)) {
			token[7].instructions = EIP197_TOKEN_INS_TYPE_HASH;

			/* Align crypto data towards hash engine */
			token[11].stat = 0;

			token[12].opcode = EIP197_TOKEN_OPCODE_INSERT;
			cryptlen &= 15;
			token[12].packet_length = cryptlen ? 16 - cryptlen : 0;
			token[12].stat = EIP197_TOKEN_STAT_LAST_HASH;
			token[12].instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			token[7].stat = EIP197_TOKEN_STAT_LAST_HASH;
			token[7].instructions = EIP197_TOKEN_INS_LAST |
						EIP197_TOKEN_INS_TYPE_HASH;
		}
	}
}
318
319 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
320                                         const u8 *key, unsigned int len)
321 {
322         struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
323         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
324         struct safexcel_crypto_priv *priv = ctx->priv;
325         struct crypto_aes_ctx aes;
326         int ret, i;
327
328         ret = aes_expandkey(&aes, key, len);
329         if (ret) {
330                 crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
331                 return ret;
332         }
333
334         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
335                 for (i = 0; i < len / sizeof(u32); i++) {
336                         if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
337                                 ctx->base.needs_inv = true;
338                                 break;
339                         }
340                 }
341         }
342
343         for (i = 0; i < len / sizeof(u32); i++)
344                 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
345
346         ctx->key_len = len;
347
348         memzero_explicit(&aes, sizeof(aes));
349         return 0;
350 }
351
352 static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
353                                 unsigned int len)
354 {
355         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
356         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
357         struct safexcel_ahash_export_state istate, ostate;
358         struct safexcel_crypto_priv *priv = ctx->priv;
359         struct crypto_authenc_keys keys;
360         struct crypto_aes_ctx aes;
361         int err = -EINVAL;
362
363         if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
364                 goto badkey;
365
366         if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
367                 /* Must have at least space for the nonce here */
368                 if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
369                         goto badkey;
370                 /* last 4 bytes of key are the nonce! */
371                 ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
372                                       CTR_RFC3686_NONCE_SIZE);
373                 /* exclude the nonce here */
374                 keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
375         }
376
377         /* Encryption key */
378         switch (ctx->alg) {
379         case SAFEXCEL_DES:
380                 err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
381                 if (unlikely(err))
382                         goto badkey_expflags;
383                 break;
384         case SAFEXCEL_3DES:
385                 err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
386                 if (unlikely(err))
387                         goto badkey_expflags;
388                 break;
389         case SAFEXCEL_AES:
390                 err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
391                 if (unlikely(err))
392                         goto badkey;
393                 break;
394         case SAFEXCEL_SM4:
395                 if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
396                         goto badkey;
397                 break;
398         default:
399                 dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
400                 goto badkey;
401         }
402
403         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
404             memcmp(ctx->key, keys.enckey, keys.enckeylen))
405                 ctx->base.needs_inv = true;
406
407         /* Auth key */
408         switch (ctx->hash_alg) {
409         case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
410                 if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
411                                          keys.authkeylen, &istate, &ostate))
412                         goto badkey;
413                 break;
414         case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
415                 if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
416                                          keys.authkeylen, &istate, &ostate))
417                         goto badkey;
418                 break;
419         case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
420                 if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
421                                          keys.authkeylen, &istate, &ostate))
422                         goto badkey;
423                 break;
424         case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
425                 if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
426                                          keys.authkeylen, &istate, &ostate))
427                         goto badkey;
428                 break;
429         case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
430                 if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
431                                          keys.authkeylen, &istate, &ostate))
432                         goto badkey;
433                 break;
434         case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
435                 if (safexcel_hmac_setkey("safexcel-sm3", keys.authkey,
436                                          keys.authkeylen, &istate, &ostate))
437                         goto badkey;
438                 break;
439         default:
440                 dev_err(priv->dev, "aead: unsupported hash algorithm\n");
441                 goto badkey;
442         }
443
444         crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
445                                     CRYPTO_TFM_RES_MASK);
446
447         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
448             (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
449              memcmp(ctx->opad, ostate.state, ctx->state_sz)))
450                 ctx->base.needs_inv = true;
451
452         /* Now copy the keys into the context */
453         memcpy(ctx->key, keys.enckey, keys.enckeylen);
454         ctx->key_len = keys.enckeylen;
455
456         memcpy(ctx->ipad, &istate.state, ctx->state_sz);
457         memcpy(ctx->opad, &ostate.state, ctx->state_sz);
458
459         memzero_explicit(&keys, sizeof(keys));
460         return 0;
461
462 badkey:
463         crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
464 badkey_expflags:
465         memzero_explicit(&keys, sizeof(keys));
466         return err;
467 }
468
/*
 * Fill in the context control words (control0/control1) of the command
 * descriptor: operation type (encrypt/decrypt, with or without hash),
 * cipher algorithm and key size, digest mode, and the total context
 * record size in 32-bit words (key plus any hash states).
 *
 * Returns 0 on success, -EINVAL for an unsupported AES key size.
 */
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	/* Context size so far: just the key, in 32-bit words */
	int ctrl_size = ctx->key_len / sizeof(u32);

	cdesc->control_data.control1 = ctx->mode;

	if (ctx->aead) {
		/* Take in account the ipad+opad digests */
		if (ctx->xcm) {
			/* GCM/CCM: a single hash state follows the key */
			ctrl_size += ctx->state_sz / sizeof(u32);
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_XCM |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* Chacha20-Poly1305 */
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
				(sreq->direction == SAFEXCEL_ENCRYPT ?
					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
			return 0;
		} else {
			/* HMAC authenc: ipad and opad states follow the key */
			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_HMAC |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		}

		/* CCM and GMAC hash first, even when encrypting */
		if (sreq->direction == SAFEXCEL_ENCRYPT &&
		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
		else if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		/* Plain skcipher: crypto only, no hashing */
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		else
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
	}

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		/* For XTS, key_len holds two keys, hence the >> ctx->xts */
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
	} else if (ctx->alg == SAFEXCEL_SM4) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
	}

	return 0;
}
570
/*
 * Completion handler for a finished skcipher request: consume and check all
 * result descriptors belonging to the request, unmap the DMA scatterlists,
 * and for CBC encrypt copy the last output block back into req->iv (the
 * chained IV for a subsequent request).
 *
 * Returns the number of result descriptors consumed; the overall request
 * status is passed back through *ret and *should_complete.
 */
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	/* Pop every result descriptor of this request off the ring */
	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Keep the first error encountered, but drain all descriptors */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}
630
631 static int safexcel_send_req(struct crypto_async_request *base, int ring,
632                              struct safexcel_cipher_req *sreq,
633                              struct scatterlist *src, struct scatterlist *dst,
634                              unsigned int cryptlen, unsigned int assoclen,
635                              unsigned int digestsize, u8 *iv, int *commands,
636                              int *results)
637 {
638         struct skcipher_request *areq = skcipher_request_cast(base);
639         struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
640         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
641         struct safexcel_crypto_priv *priv = ctx->priv;
642         struct safexcel_command_desc *cdesc;
643         struct safexcel_command_desc *first_cdesc = NULL;
644         struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
645         struct scatterlist *sg;
646         unsigned int totlen;
647         unsigned int totlen_src = cryptlen + assoclen;
648         unsigned int totlen_dst = totlen_src;
649         int n_cdesc = 0, n_rdesc = 0;
650         int queued, i, ret = 0;
651         bool first = true;
652
653         sreq->nr_src = sg_nents_for_len(src, totlen_src);
654
655         if (ctx->aead) {
656                 /*
657                  * AEAD has auth tag appended to output for encrypt and
658                  * removed from the output for decrypt!
659                  */
660                 if (sreq->direction == SAFEXCEL_DECRYPT)
661                         totlen_dst -= digestsize;
662                 else
663                         totlen_dst += digestsize;
664
665                 memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
666                        ctx->ipad, ctx->state_sz);
667                 if (!ctx->xcm)
668                         memcpy(ctx->base.ctxr->data + (ctx->key_len +
669                                ctx->state_sz) / sizeof(u32), ctx->opad,
670                                ctx->state_sz);
671         } else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
672                    (sreq->direction == SAFEXCEL_DECRYPT)) {
673                 /*
674                  * Save IV from last crypto input word for CBC modes in decrypt
675                  * direction. Need to do this first in case of inplace operation
676                  * as it will be overwritten.
677                  */
678                 sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
679                                    crypto_skcipher_ivsize(skcipher),
680                                    (totlen_src -
681                                     crypto_skcipher_ivsize(skcipher)));
682         }
683
684         sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
685
686         /*
687          * Remember actual input length, source buffer length may be
688          * updated in case of inline operation below.
689          */
690         totlen = totlen_src;
691         queued = totlen_src;
692
693         if (src == dst) {
694                 sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
695                 sreq->nr_dst = sreq->nr_src;
696                 if (unlikely((totlen_src || totlen_dst) &&
697                     (sreq->nr_src <= 0))) {
698                         dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
699                                 max(totlen_src, totlen_dst));
700                         return -EINVAL;
701                 }
702                 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
703         } else {
704                 if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
705                         dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
706                                 totlen_src);
707                         return -EINVAL;
708                 }
709                 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
710
711                 if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
712                         dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
713                                 totlen_dst);
714                         dma_unmap_sg(priv->dev, src, sreq->nr_src,
715                                      DMA_TO_DEVICE);
716                         return -EINVAL;
717                 }
718                 dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
719         }
720
721         memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
722
723         /* The EIP cannot deal with zero length input packets! */
724         if (totlen == 0)
725                 totlen = 1;
726
727         /* command descriptors */
728         for_each_sg(src, sg, sreq->nr_src, i) {
729                 int len = sg_dma_len(sg);
730
731                 /* Do not overflow the request */
732                 if (queued - len < 0)
733                         len = queued;
734
735                 cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
736                                            !(queued - len),
737                                            sg_dma_address(sg), len, totlen,
738                                            ctx->base.ctxr_dma);
739                 if (IS_ERR(cdesc)) {
740                         /* No space left in the command descriptor ring */
741                         ret = PTR_ERR(cdesc);
742                         goto cdesc_rollback;
743                 }
744                 n_cdesc++;
745
746                 if (n_cdesc == 1) {
747                         first_cdesc = cdesc;
748                 }
749
750                 queued -= len;
751                 if (!queued)
752                         break;
753         }
754
755         if (unlikely(!n_cdesc)) {
756                 /*
757                  * Special case: zero length input buffer.
758                  * The engine always needs the 1st command descriptor, however!
759                  */
760                 first_cdesc = safexcel_add_cdesc(priv, ring, 1, 1, 0, 0, totlen,
761                                                  ctx->base.ctxr_dma);
762                 n_cdesc = 1;
763         }
764
765         /* Add context control words and token to first command descriptor */
766         safexcel_context_control(ctx, base, sreq, first_cdesc);
767         if (ctx->aead)
768                 safexcel_aead_token(ctx, iv, first_cdesc,
769                                     sreq->direction, cryptlen,
770                                     assoclen, digestsize);
771         else
772                 safexcel_skcipher_token(ctx, iv, first_cdesc,
773                                         cryptlen);
774
775         /* result descriptors */
776         for_each_sg(dst, sg, sreq->nr_dst, i) {
777                 bool last = (i == sreq->nr_dst - 1);
778                 u32 len = sg_dma_len(sg);
779
780                 /* only allow the part of the buffer we know we need */
781                 if (len > totlen_dst)
782                         len = totlen_dst;
783                 if (unlikely(!len))
784                         break;
785                 totlen_dst -= len;
786
787                 /* skip over AAD space in buffer - not written */
788                 if (assoclen) {
789                         if (assoclen >= len) {
790                                 assoclen -= len;
791                                 continue;
792                         }
793                         rdesc = safexcel_add_rdesc(priv, ring, first, last,
794                                                    sg_dma_address(sg) +
795                                                    assoclen,
796                                                    len - assoclen);
797                         assoclen = 0;
798                 } else {
799                         rdesc = safexcel_add_rdesc(priv, ring, first, last,
800                                                    sg_dma_address(sg),
801                                                    len);
802                 }
803                 if (IS_ERR(rdesc)) {
804                         /* No space left in the result descriptor ring */
805                         ret = PTR_ERR(rdesc);
806                         goto rdesc_rollback;
807                 }
808                 if (first) {
809                         first_rdesc = rdesc;
810                         first = false;
811                 }
812                 n_rdesc++;
813         }
814
815         if (unlikely(first)) {
816                 /*
817                  * Special case: AEAD decrypt with only AAD data.
818                  * In this case there is NO output data from the engine,
819                  * but the engine still needs a result descriptor!
820                  * Create a dummy one just for catching the result token.
821                  */
822                 rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
823                 if (IS_ERR(rdesc)) {
824                         /* No space left in the result descriptor ring */
825                         ret = PTR_ERR(rdesc);
826                         goto rdesc_rollback;
827                 }
828                 first_rdesc = rdesc;
829                 n_rdesc = 1;
830         }
831
832         safexcel_rdr_req_set(priv, ring, first_rdesc, base);
833
834         *commands = n_cdesc;
835         *results = n_rdesc;
836         return 0;
837
838 rdesc_rollback:
839         for (i = 0; i < n_rdesc; i++)
840                 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
841 cdesc_rollback:
842         for (i = 0; i < n_cdesc; i++)
843                 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
844
845         if (src == dst) {
846                 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
847         } else {
848                 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
849                 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
850         }
851
852         return ret;
853 }
854
/*
 * Process the result descriptor(s) of a context-invalidation request.
 * Consumes all result descriptors recorded for the request, collects the
 * first error (if any), and either frees the context record (tfm teardown)
 * or requeues the original request so it runs with a fresh context.
 */
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	/* Nothing to do if no result descriptors were produced */
	if (unlikely(!sreq->rdescs))
		return 0;

	/* Consume every result descriptor belonging to this request */
	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Only the first error encountered is reported */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	/*
	 * Tfm teardown path: free the DMA context record and complete;
	 * there is no original crypto request to resubmit.
	 */
	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	/*
	 * Otherwise the invalidation preceded a real operation (e.g. after
	 * a key change): requeue the original request on a freshly selected
	 * ring and kick that ring's worker.
	 */
	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* Not done yet: the requeued request completes it later */
	*should_complete = false;

	return ndesc;
}
913
914 static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
915                                            int ring,
916                                            struct crypto_async_request *async,
917                                            bool *should_complete, int *ret)
918 {
919         struct skcipher_request *req = skcipher_request_cast(async);
920         struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
921         int err;
922
923         if (sreq->needs_inv) {
924                 sreq->needs_inv = false;
925                 err = safexcel_handle_inv_result(priv, ring, async, sreq,
926                                                  should_complete, ret);
927         } else {
928                 err = safexcel_handle_req_result(priv, ring, async, req->src,
929                                                  req->dst, req->cryptlen, sreq,
930                                                  should_complete, ret);
931         }
932
933         return err;
934 }
935
936 static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
937                                        int ring,
938                                        struct crypto_async_request *async,
939                                        bool *should_complete, int *ret)
940 {
941         struct aead_request *req = aead_request_cast(async);
942         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
943         struct safexcel_cipher_req *sreq = aead_request_ctx(req);
944         int err;
945
946         if (sreq->needs_inv) {
947                 sreq->needs_inv = false;
948                 err = safexcel_handle_inv_result(priv, ring, async, sreq,
949                                                  should_complete, ret);
950         } else {
951                 err = safexcel_handle_req_result(priv, ring, async, req->src,
952                                                  req->dst,
953                                                  req->cryptlen + crypto_aead_authsize(tfm),
954                                                  sreq, should_complete, ret);
955         }
956
957         return err;
958 }
959
960 static int safexcel_cipher_send_inv(struct crypto_async_request *base,
961                                     int ring, int *commands, int *results)
962 {
963         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
964         struct safexcel_crypto_priv *priv = ctx->priv;
965         int ret;
966
967         ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
968         if (unlikely(ret))
969                 return ret;
970
971         *commands = 1;
972         *results = 1;
973
974         return 0;
975 }
976
977 static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
978                                   int *commands, int *results)
979 {
980         struct skcipher_request *req = skcipher_request_cast(async);
981         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
982         struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
983         struct safexcel_crypto_priv *priv = ctx->priv;
984         int ret;
985
986         BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
987
988         if (sreq->needs_inv) {
989                 ret = safexcel_cipher_send_inv(async, ring, commands, results);
990         } else {
991                 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
992                 u8 input_iv[AES_BLOCK_SIZE];
993
994                 /*
995                  * Save input IV in case of CBC decrypt mode
996                  * Will be overwritten with output IV prior to use!
997                  */
998                 memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
999
1000                 ret = safexcel_send_req(async, ring, sreq, req->src,
1001                                         req->dst, req->cryptlen, 0, 0, input_iv,
1002                                         commands, results);
1003         }
1004
1005         sreq->rdescs = *results;
1006         return ret;
1007 }
1008
1009 static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1010                               int *commands, int *results)
1011 {
1012         struct aead_request *req = aead_request_cast(async);
1013         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1014         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1015         struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1016         struct safexcel_crypto_priv *priv = ctx->priv;
1017         int ret;
1018
1019         BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1020
1021         if (sreq->needs_inv)
1022                 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1023         else
1024                 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1025                                         req->cryptlen, req->assoclen,
1026                                         crypto_aead_authsize(tfm), req->iv,
1027                                         commands, results);
1028         sreq->rdescs = *results;
1029         return ret;
1030 }
1031
/*
 * Synchronously invalidate this tfm's context record: queue an
 * invalidation request and sleep until its completion callback fires.
 * Used only on the teardown path; must be called from sleepable context.
 */
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	/*
	 * NOTE(review): base->tfm appears to be the same tfm passed in, so
	 * this reassignment looks redundant - confirm before removing.
	 */
	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true; /* makes the result handler free ctxr */
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* Woken by safexcel_inv_complete() set as the request callback */
	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			"cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}
1065
/*
 * Build a dummy on-stack skcipher request and use it to synchronously
 * invalidate the tfm's context record.
 */
static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct skcipher_request));

	/* Completion callback wakes the waiter in safexcel_cipher_exit_inv() */
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
1080
/*
 * Build a dummy on-stack AEAD request and use it to synchronously
 * invalidate the tfm's context record.
 */
static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct aead_request));

	/* Completion callback wakes the waiter in safexcel_cipher_exit_inv() */
	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
1095
/*
 * Common submission path for skcipher and AEAD requests: allocate the
 * per-tfm DMA context record on first use, flag a pending invalidation
 * if one was requested (key change with TRC cache), then enqueue the
 * request on the context's ring and kick the ring worker.
 */
static int safexcel_queue_req(struct crypto_async_request *base,
			struct safexcel_cipher_req *sreq,
			enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx->base.ctxr) {
		/* Context exists: carry a pending invalidation, if any */
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		/* First use: pick a ring and allocate the context record */
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}
1132
1133 static int safexcel_encrypt(struct skcipher_request *req)
1134 {
1135         return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1136                         SAFEXCEL_ENCRYPT);
1137 }
1138
1139 static int safexcel_decrypt(struct skcipher_request *req)
1140 {
1141         return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1142                         SAFEXCEL_DECRYPT);
1143 }
1144
1145 static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1146 {
1147         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1148         struct safexcel_alg_template *tmpl =
1149                 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1150                              alg.skcipher.base);
1151
1152         crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1153                                     sizeof(struct safexcel_cipher_req));
1154
1155         ctx->priv = tmpl->priv;
1156
1157         ctx->base.send = safexcel_skcipher_send;
1158         ctx->base.handle_result = safexcel_skcipher_handle_result;
1159         return 0;
1160 }
1161
/*
 * Common tfm teardown: wipe key material from the software context and,
 * if present, from the DMA-able context record.
 *
 * Returns nonzero when no context record was ever allocated; callers use
 * that as a signal to skip hardware invalidation entirely.
 */
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* memzero_explicit() cannot be optimized away - keys are secret */
	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}
1175
/*
 * skcipher tfm teardown: wipe keys, then either invalidate the context
 * record through the engine (TRC cache present) or free it directly.
 */
static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	/* Nonzero return: no context record exists, nothing to invalidate */
	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		/* Record may be cached in the TRC: invalidate before reuse */
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		/* No cache involved: the record can be freed right away */
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}
1195
/*
 * AEAD tfm teardown: wipe keys, then either invalidate the context
 * record through the engine (TRC cache present) or free it directly.
 */
static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	/* Nonzero return: no context record exists, nothing to invalidate */
	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		/* Record may be cached in the TRC: invalidate before reuse */
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		/* No cache involved: the record can be freed right away */
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}
1215
/* tfm init for ecb(aes): AES cipher in ECB mode */
static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	return 0;
}
1225
/* Registration template for ecb(aes) */
struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1250
/* tfm init for cbc(aes): AES cipher in CBC mode */
static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}
1260
/* Registration template for cbc(aes) */
struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1286
/* tfm init for cfb(aes): AES cipher in CFB mode */
static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
	return 0;
}
1296
/* Registration template for cfb(aes); stream mode, so blocksize is 1 */
struct safexcel_alg_template safexcel_alg_cfb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(aes)",
			.cra_driver_name = "safexcel-cfb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1322
/* tfm init for ofb(aes): AES cipher in OFB mode */
static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
	return 0;
}
1332
/* Registration template for ofb(aes); stream mode, so blocksize is 1 */
struct safexcel_alg_template safexcel_alg_ofb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(aes)",
			.cra_driver_name = "safexcel-ofb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1358
1359 static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1360                                            const u8 *key, unsigned int len)
1361 {
1362         struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1363         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1364         struct safexcel_crypto_priv *priv = ctx->priv;
1365         struct crypto_aes_ctx aes;
1366         int ret, i;
1367         unsigned int keylen;
1368
1369         /* last 4 bytes of key are the nonce! */
1370         ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1371         /* exclude the nonce here */
1372         keylen = len - CTR_RFC3686_NONCE_SIZE;
1373         ret = aes_expandkey(&aes, key, keylen);
1374         if (ret) {
1375                 crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
1376                 return ret;
1377         }
1378
1379         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1380                 for (i = 0; i < keylen / sizeof(u32); i++) {
1381                         if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
1382                                 ctx->base.needs_inv = true;
1383                                 break;
1384                         }
1385                 }
1386         }
1387
1388         for (i = 0; i < keylen / sizeof(u32); i++)
1389                 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1390
1391         ctx->key_len = keylen;
1392
1393         memzero_explicit(&aes, sizeof(aes));
1394         return 0;
1395 }
1396
/* tfm init for rfc3686(ctr(aes)): AES counter mode with loaded counter */
static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}
1406
/* Registration template for rfc3686(ctr(aes)); key sizes include nonce */
struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1433
/*
 * Set a single-DES key; verify_skcipher_des_key() enforces the crypto
 * API's weak-key policy before the key is accepted.
 */
static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	int ret;

	ret = verify_skcipher_des_key(ctfm, key);
	if (ret)
		return ret;

	/* if context exists and key changed, need to invalidate it */
	if (ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}
1454
/* tfm init for cbc(des): single DES in CBC mode */
static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}
1464
/* Registration template for cbc(des) */
struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1490
1491 static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1492 {
1493         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1494
1495         safexcel_skcipher_cra_init(tfm);
1496         ctx->alg  = SAFEXCEL_DES;
1497         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1498         return 0;
1499 }
1500
/* ecb(des) skcipher; no IV, hence no .ivsize. */
struct safexcel_alg_template safexcel_alg_ecb_des = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_DES,
        .alg.skcipher = {
                .setkey = safexcel_des_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                .min_keysize = DES_KEY_SIZE,
                .max_keysize = DES_KEY_SIZE,
                .base = {
                        .cra_name = "ecb(des)",
                        .cra_driver_name = "safexcel-ecb-des",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_des_ecb_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1525
/*
 * Set the Triple-DES (EDE) key for a skcipher transform.
 *
 * The key is validated with verify_skcipher_des3_key() before being
 * accepted; on success it is cached in the context so a later setkey
 * can detect whether the key actually changed.
 *
 * Returns 0 on success or a negative error from key verification.
 */
static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
                                   const u8 *key, unsigned int len)
{
        struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
        int err;

        err = verify_skcipher_des3_key(ctfm, key);
        if (err)
                return err;

        /* if context exists and key changed, need to invalidate it */
        if (ctx->base.ctxr_dma) {
                if (memcmp(ctx->key, key, len))
                        ctx->base.needs_inv = true;
        }

        memcpy(ctx->key, key, len);

        ctx->key_len = len;

        return 0;
}
1548
1549 static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1550 {
1551         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1552
1553         safexcel_skcipher_cra_init(tfm);
1554         ctx->alg  = SAFEXCEL_3DES;
1555         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1556         return 0;
1557 }
1558
/* cbc(des3_ede) skcipher, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_DES,
        .alg.skcipher = {
                .setkey = safexcel_des3_ede_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                .min_keysize = DES3_EDE_KEY_SIZE,
                .max_keysize = DES3_EDE_KEY_SIZE,
                .ivsize = DES3_EDE_BLOCK_SIZE,
                .base = {
                        .cra_name = "cbc(des3_ede)",
                        .cra_driver_name = "safexcel-cbc-des3_ede",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_des3_cbc_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1584
1585 static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1586 {
1587         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1588
1589         safexcel_skcipher_cra_init(tfm);
1590         ctx->alg  = SAFEXCEL_3DES;
1591         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1592         return 0;
1593 }
1594
/* ecb(des3_ede) skcipher; no IV, hence no .ivsize. */
struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_DES,
        .alg.skcipher = {
                .setkey = safexcel_des3_ede_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                .min_keysize = DES3_EDE_KEY_SIZE,
                .max_keysize = DES3_EDE_KEY_SIZE,
                .base = {
                        .cra_name = "ecb(des3_ede)",
                        .cra_driver_name = "safexcel-ecb-des3_ede",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_des3_ecb_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1619
1620 static int safexcel_aead_encrypt(struct aead_request *req)
1621 {
1622         struct safexcel_cipher_req *creq = aead_request_ctx(req);
1623
1624         return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1625 }
1626
1627 static int safexcel_aead_decrypt(struct aead_request *req)
1628 {
1629         struct safexcel_cipher_req *creq = aead_request_ctx(req);
1630
1631         return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1632 }
1633
1634 static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1635 {
1636         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1637         struct safexcel_alg_template *tmpl =
1638                 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1639                              alg.aead.base);
1640
1641         crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1642                                 sizeof(struct safexcel_cipher_req));
1643
1644         ctx->priv = tmpl->priv;
1645
1646         ctx->alg  = SAFEXCEL_AES; /* default */
1647         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1648         ctx->aead = true;
1649         ctx->base.send = safexcel_aead_send;
1650         ctx->base.handle_result = safexcel_aead_handle_result;
1651         return 0;
1652 }
1653
1654 static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1655 {
1656         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1657
1658         safexcel_aead_cra_init(tfm);
1659         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1660         ctx->state_sz = SHA1_DIGEST_SIZE;
1661         return 0;
1662 }
1663
/* authenc(hmac(sha1),cbc(aes)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = AES_BLOCK_SIZE,
                .maxauthsize = SHA1_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha1),cbc(aes))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha1_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1688
1689 static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1690 {
1691         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1692
1693         safexcel_aead_cra_init(tfm);
1694         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1695         ctx->state_sz = SHA256_DIGEST_SIZE;
1696         return 0;
1697 }
1698
/* authenc(hmac(sha256),cbc(aes)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = AES_BLOCK_SIZE,
                .maxauthsize = SHA256_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha256),cbc(aes))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha256_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1723
1724 static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1725 {
1726         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1727
1728         safexcel_aead_cra_init(tfm);
1729         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1730         ctx->state_sz = SHA256_DIGEST_SIZE;
1731         return 0;
1732 }
1733
/* authenc(hmac(sha224),cbc(aes)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = AES_BLOCK_SIZE,
                .maxauthsize = SHA224_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha224),cbc(aes))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha224_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1758
1759 static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1760 {
1761         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1762
1763         safexcel_aead_cra_init(tfm);
1764         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1765         ctx->state_sz = SHA512_DIGEST_SIZE;
1766         return 0;
1767 }
1768
/* authenc(hmac(sha512),cbc(aes)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = AES_BLOCK_SIZE,
                .maxauthsize = SHA512_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha512),cbc(aes))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha512_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1793
1794 static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1795 {
1796         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1797
1798         safexcel_aead_cra_init(tfm);
1799         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1800         ctx->state_sz = SHA512_DIGEST_SIZE;
1801         return 0;
1802 }
1803
/* authenc(hmac(sha384),cbc(aes)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = AES_BLOCK_SIZE,
                .maxauthsize = SHA384_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha384),cbc(aes))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha384_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1828
1829 static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1830 {
1831         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1832
1833         safexcel_aead_sha1_cra_init(tfm);
1834         ctx->alg = SAFEXCEL_3DES; /* override default */
1835         return 0;
1836 }
1837
/* authenc(hmac(sha1),cbc(des3_ede)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = DES3_EDE_BLOCK_SIZE,
                .maxauthsize = SHA1_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha1_des3_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1862
1863 static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1864 {
1865         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1866
1867         safexcel_aead_sha256_cra_init(tfm);
1868         ctx->alg = SAFEXCEL_3DES; /* override default */
1869         return 0;
1870 }
1871
/* authenc(hmac(sha256),cbc(des3_ede)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = DES3_EDE_BLOCK_SIZE,
                .maxauthsize = SHA256_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha256_des3_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1896
1897 static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1898 {
1899         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1900
1901         safexcel_aead_sha224_cra_init(tfm);
1902         ctx->alg = SAFEXCEL_3DES; /* override default */
1903         return 0;
1904 }
1905
/* authenc(hmac(sha224),cbc(des3_ede)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = DES3_EDE_BLOCK_SIZE,
                .maxauthsize = SHA224_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha224_des3_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1930
1931 static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
1932 {
1933         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1934
1935         safexcel_aead_sha512_cra_init(tfm);
1936         ctx->alg = SAFEXCEL_3DES; /* override default */
1937         return 0;
1938 }
1939
/* authenc(hmac(sha512),cbc(des3_ede)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = DES3_EDE_BLOCK_SIZE,
                .maxauthsize = SHA512_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha512_des3_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1964
1965 static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
1966 {
1967         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1968
1969         safexcel_aead_sha384_cra_init(tfm);
1970         ctx->alg = SAFEXCEL_3DES; /* override default */
1971         return 0;
1972 }
1973
/* authenc(hmac(sha384),cbc(des3_ede)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = DES3_EDE_BLOCK_SIZE,
                .maxauthsize = SHA384_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha384_des3_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1998
1999 static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2000 {
2001         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2002
2003         safexcel_aead_sha1_cra_init(tfm);
2004         ctx->alg = SAFEXCEL_DES; /* override default */
2005         return 0;
2006 }
2007
/* authenc(hmac(sha1),cbc(des)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = DES_BLOCK_SIZE,
                .maxauthsize = SHA1_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha1),cbc(des))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha1_des_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
2032
2033 static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2034 {
2035         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2036
2037         safexcel_aead_sha256_cra_init(tfm);
2038         ctx->alg = SAFEXCEL_DES; /* override default */
2039         return 0;
2040 }
2041
/* authenc(hmac(sha256),cbc(des)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = DES_BLOCK_SIZE,
                .maxauthsize = SHA256_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha256),cbc(des))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha256_des_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
2066
2067 static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2068 {
2069         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2070
2071         safexcel_aead_sha224_cra_init(tfm);
2072         ctx->alg = SAFEXCEL_DES; /* override default */
2073         return 0;
2074 }
2075
/* authenc(hmac(sha224),cbc(des)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = DES_BLOCK_SIZE,
                .maxauthsize = SHA224_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha224),cbc(des))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha224_des_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
2100
2101 static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2102 {
2103         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2104
2105         safexcel_aead_sha512_cra_init(tfm);
2106         ctx->alg = SAFEXCEL_DES; /* override default */
2107         return 0;
2108 }
2109
/* authenc(hmac(sha512),cbc(des)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = DES_BLOCK_SIZE,
                .maxauthsize = SHA512_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha512),cbc(des))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha512_des_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
2134
2135 static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2136 {
2137         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2138
2139         safexcel_aead_sha384_cra_init(tfm);
2140         ctx->alg = SAFEXCEL_DES; /* override default */
2141         return 0;
2142 }
2143
/* authenc(hmac(sha384),cbc(des)) AEAD, offloaded to the safexcel engine. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = DES_BLOCK_SIZE,
                .maxauthsize = SHA384_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha384),cbc(des))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha384_des_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
2168
2169 static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2170 {
2171         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2172
2173         safexcel_aead_sha1_cra_init(tfm);
2174         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2175         return 0;
2176 }
2177
/*
 * authenc(hmac(sha1),rfc3686(ctr(aes))): HMAC-SHA1 over AES-CTR with the
 * RFC3686 nonce/IV convention. Stream mode, so cra_blocksize is 1.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2202
2203 static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2204 {
2205         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2206
2207         safexcel_aead_sha256_cra_init(tfm);
2208         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2209         return 0;
2210 }
2211
/*
 * authenc(hmac(sha256),rfc3686(ctr(aes))): HMAC-SHA256 over AES-CTR with
 * the RFC3686 nonce/IV convention. Stream mode, so cra_blocksize is 1.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2236
2237 static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2238 {
2239         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2240
2241         safexcel_aead_sha224_cra_init(tfm);
2242         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2243         return 0;
2244 }
2245
/*
 * authenc(hmac(sha224),rfc3686(ctr(aes))): HMAC-SHA224 over AES-CTR with
 * the RFC3686 nonce/IV convention. SHA224 is computed on the SHA-256
 * engine, hence SAFEXCEL_ALG_SHA2_256 in algo_mask.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2270
2271 static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2272 {
2273         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2274
2275         safexcel_aead_sha512_cra_init(tfm);
2276         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2277         return 0;
2278 }
2279
/*
 * authenc(hmac(sha512),rfc3686(ctr(aes))): HMAC-SHA512 over AES-CTR with
 * the RFC3686 nonce/IV convention. Stream mode, so cra_blocksize is 1.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2304
2305 static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2306 {
2307         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2308
2309         safexcel_aead_sha384_cra_init(tfm);
2310         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2311         return 0;
2312 }
2313
/*
 * authenc(hmac(sha384),rfc3686(ctr(aes))): HMAC-SHA384 over AES-CTR with
 * the RFC3686 nonce/IV convention. SHA384 is computed on the SHA-512
 * engine, hence SAFEXCEL_ALG_SHA2_512 in algo_mask.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2338
/*
 * Set the XTS-AES key. The supplied key material is two equal-size AES
 * keys glued together: the cipher key first, the tweak key second.
 * Both halves are expanded and stored consecutively in ctx->key.
 * Returns 0 on success or a negative errno for an invalid key.
 */
static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* Check for illegal XTS keys */
	ret = xts_verify_key(ctfm, key, len);
	if (ret)
		return ret;

	/* Only half of the key data is cipher key */
	keylen = (len >> 1);
	ret = aes_expandkey(&aes, key, keylen);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	/*
	 * With a record cache and a context already pushed to the engine,
	 * a changed key must invalidate the cached engine context.
	 */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	/* The other half is the tweak key */
	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	/* Same invalidation check for the tweak key half */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (ctx->key[i + keylen / sizeof(u32)] !=
			    cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	/* Tweak key words are stored right after the cipher key words */
	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i + keylen / sizeof(u32)] =
			cpu_to_le32(aes.key_enc[i]);

	/* key_len covers both halves */
	ctx->key_len = keylen << 1;

	/* Wipe expanded key material from the stack */
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2400
2401 static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2402 {
2403         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2404
2405         safexcel_skcipher_cra_init(tfm);
2406         ctx->alg  = SAFEXCEL_AES;
2407         ctx->xts  = 1;
2408         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2409         return 0;
2410 }
2411
2412 static int safexcel_encrypt_xts(struct skcipher_request *req)
2413 {
2414         if (req->cryptlen < XTS_BLOCK_SIZE)
2415                 return -EINVAL;
2416         return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2417                                   SAFEXCEL_ENCRYPT);
2418 }
2419
2420 static int safexcel_decrypt_xts(struct skcipher_request *req)
2421 {
2422         if (req->cryptlen < XTS_BLOCK_SIZE)
2423                 return -EINVAL;
2424         return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2425                                   SAFEXCEL_DECRYPT);
2426 }
2427
/*
 * xts(aes): AES in XTS mode. Key sizes are doubled because the key
 * material carries both the cipher key and the tweak key.
 */
struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2454
/*
 * Set the GCM key: store the AES key for the engine and derive the GHASH
 * hash subkey H by encrypting an all-zero block with a software AES
 * cipher (ctx->hkaes). H is kept in ctx->ipad in big-endian word order.
 * Returns 0 on success or a negative errno for an invalid key.
 */
static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	u32 hashkey[AES_BLOCK_SIZE >> 2];
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	/* A changed key must invalidate any context cached by the engine */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	/* Compute hash key by encrypting zeroes with cipher key */
	crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->hkaes, key, len);
	/* Propagate any result flags from the software cipher back up */
	crypto_aead_set_flags(ctfm, crypto_cipher_get_flags(ctx->hkaes) &
			      CRYPTO_TFM_RES_MASK);
	if (ret)
		return ret;

	memset(hashkey, 0, AES_BLOCK_SIZE);
	crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);

	/* A changed hash subkey also invalidates the cached context */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			if (ctx->ipad[i] != cpu_to_be32(hashkey[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->ipad[i] = cpu_to_be32(hashkey[i]);

	/* Wipe key material from the stack */
	memzero_explicit(hashkey, AES_BLOCK_SIZE);
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2515
/*
 * Init a GCM context: configure GHASH authentication in XCM mode and
 * allocate the software AES cipher used only to derive the GHASH
 * subkey at setkey time.
 * NOTE(review): if crypto_alloc_cipher() fails after
 * safexcel_aead_cra_init() succeeded, cra_exit is not called for a
 * failed cra_init — verify safexcel_aead_cra_init() holds no resources
 * that would leak on this path.
 */
static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
	ctx->state_sz = GHASH_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_GCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */

	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(ctx->hkaes))
		return PTR_ERR(ctx->hkaes);

	return 0;
}
2532
2533 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2534 {
2535         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2536
2537         crypto_free_cipher(ctx->hkaes);
2538         safexcel_aead_cra_exit(tfm);
2539 }
2540
/* GCM tag length: delegate validation to the generic GCM helper. */
static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	int ret = crypto_gcm_check_authsize(authsize);

	return ret;
}
2546
/*
 * gcm(aes): AES-GCM AEAD, authentication done with GHASH in hardware.
 * Stream mode, so cra_blocksize is 1.
 */
struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2572
/*
 * Set the CCM key. The AES key is stored both as the cipher key and,
 * big-endian, at offset 2*AES_BLOCK_SIZE inside ctx->ipad for the
 * hardware's AES-CBC-MAC (XCBC-style) authentication state. The XCBC
 * hash algorithm variant is selected from the key length.
 * Returns 0 on success or a negative errno for an invalid key.
 */
static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	/* A changed key must invalidate any context cached by the engine */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++) {
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
		/* Duplicate into the MAC state, after two reserved blocks */
		ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
			cpu_to_be32(aes.key_enc[i]);
	}

	ctx->key_len = len;
	/* MAC state: 2 AES blocks of running state plus the key itself */
	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;

	if (len == AES_KEYSIZE_192)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
	else if (len == AES_KEYSIZE_256)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
	else
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;

	/* Wipe expanded key material from the stack */
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2617
2618 static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2619 {
2620         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2621
2622         safexcel_aead_cra_init(tfm);
2623         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2624         ctx->state_sz = 3 * AES_BLOCK_SIZE;
2625         ctx->xcm = EIP197_XCM_MODE_CCM;
2626         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2627         return 0;
2628 }
2629
2630 static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2631                                          unsigned int authsize)
2632 {
2633         /* Borrowed from crypto/ccm.c */
2634         switch (authsize) {
2635         case 4:
2636         case 6:
2637         case 8:
2638         case 10:
2639         case 12:
2640         case 14:
2641         case 16:
2642                 break;
2643         default:
2644                 return -EINVAL;
2645         }
2646
2647         return 0;
2648 }
2649
2650 static int safexcel_ccm_encrypt(struct aead_request *req)
2651 {
2652         struct safexcel_cipher_req *creq = aead_request_ctx(req);
2653
2654         if (req->iv[0] < 1 || req->iv[0] > 7)
2655                 return -EINVAL;
2656
2657         return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2658 }
2659
2660 static int safexcel_ccm_decrypt(struct aead_request *req)
2661 {
2662         struct safexcel_cipher_req *creq = aead_request_ctx(req);
2663
2664         if (req->iv[0] < 1 || req->iv[0] > 7)
2665                 return -EINVAL;
2666
2667         return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2668 }
2669
/*
 * ccm(aes): AES-CCM AEAD, authentication via hardware AES-CBC-MAC.
 * Stream mode, so cra_blocksize is 1.
 */
struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2695
2696 static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2697                                      const u8 *key)
2698 {
2699         struct safexcel_crypto_priv *priv = ctx->priv;
2700         int i;
2701
2702         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2703                 for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++) {
2704                         if (ctx->key[i] !=
2705                             get_unaligned_le32(key + i * sizeof(u32))) {
2706                                 ctx->base.needs_inv = true;
2707                                 break;
2708                         }
2709                 }
2710         }
2711
2712         for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++)
2713                 ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
2714         ctx->key_len = CHACHA_KEY_SIZE;
2715 }
2716
2717 static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2718                                              const u8 *key, unsigned int len)
2719 {
2720         struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2721
2722         if (len != CHACHA_KEY_SIZE) {
2723                 crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2724                 return -EINVAL;
2725         }
2726         safexcel_chacha20_setkey(ctx, key);
2727
2728         return 0;
2729 }
2730
2731 static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2732 {
2733         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2734
2735         safexcel_skcipher_cra_init(tfm);
2736         ctx->alg  = SAFEXCEL_CHACHA20;
2737         ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2738         return 0;
2739 }
2740
/*
 * chacha20: plain ChaCha20 stream cipher (256-bit key, 32-bit counter).
 */
struct safexcel_alg_template safexcel_alg_chacha20 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_CHACHA20,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_chacha20_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.base = {
			.cra_name = "chacha20",
			.cra_driver_name = "safexcel-chacha20",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_chacha20_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2766
/*
 * Set the ChaCha20-Poly1305 key. For the IPsec ESP variant the 4-byte
 * salt/nonce is appended to the key material and is peeled off first.
 * Returns 0 on success or -EINVAL for a wrong key length.
 */
static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
				    const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);

	if (ctx->aead  == EIP197_AEAD_TYPE_IPSEC_ESP &&
	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
		/* ESP variant has nonce appended to key */
		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
		/*
		 * NOTE(review): direct u32 load from key + len; presumably
		 * the key buffer is 4-byte aligned here — confirm, or this
		 * may fault on strict-alignment architectures.
		 */
		ctx->nonce = *(u32 *)(key + len);
	}
	if (len != CHACHA_KEY_SIZE) {
		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	safexcel_chacha20_setkey(ctx, key);

	return 0;
}
2786
2787 static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2788                                          unsigned int authsize)
2789 {
2790         if (authsize != POLY1305_DIGEST_SIZE)
2791                 return -EINVAL;
2792         return 0;
2793 }
2794
/*
 * Run a ChaCha20-Poly1305 request, either on the engine or via the
 * software fallback. Small/degenerate requests (zero-length payload, or
 * ESP requests with too-short AAD) go to the fallback, which means the
 * fallback cipher must be re-keyed here from the context copy of the key.
 * Note: creq and subreq alias the same request context memory; creq is
 * fully consumed (queued) before subreq is written.
 */
static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1]; /* +1 word for ESP nonce */
	int i, ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++)
		key[i] = cpu_to_le32(ctx->key[i]);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		/* Propagate the fallback's request flags back to the caller */
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	/* Mirror the original request onto the fallback subrequest */
	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir ==  SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
2848
2849 static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2850 {
2851         return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2852 }
2853
2854 static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2855 {
2856         return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2857 }
2858
/*
 * Common init for AEADs that keep a software fallback for requests the
 * engine cannot handle. Allocates a synchronous, non-fallback software
 * implementation of the same algorithm (type 0 with the ASYNC and
 * NEED_FALLBACK mask bits) and sizes the request context large enough
 * for either the driver's own state or a fallback subrequest.
 */
static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
				       CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	/* Request ctx must fit our own state or a full fallback subrequest */
	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
					  sizeof(struct aead_request) +
					  crypto_aead_reqsize(ctx->fback)));

	return 0;
}
2880
2881 static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2882 {
2883         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2884
2885         safexcel_aead_fallback_cra_init(tfm);
2886         ctx->alg  = SAFEXCEL_CHACHA20;
2887         ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2888                     CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2889         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2890         ctx->state_sz = 0; /* Precomputed by HW */
2891         return 0;
2892 }
2893
2894 static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2895 {
2896         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2897
2898         crypto_free_aead(ctx->fback);
2899         safexcel_aead_cra_exit(tfm);
2900 }
2901
/*
 * rfc7539(chacha20,poly1305) AEAD template. A software fallback tfm is
 * allocated at cra_init time (CRYPTO_ALG_NEED_FALLBACK) and freed on exit.
 */
struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2929
2930 static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
2931 {
2932         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2933         int ret;
2934
2935         ret = safexcel_aead_chachapoly_cra_init(tfm);
2936         ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
2937         return ret;
2938 }
2939
/*
 * rfc7539esp(chacha20,poly1305) template: the ESP variant carries part of
 * the nonce in the key, hence the smaller request ivsize below.
 */
struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		/* ESP nonce is part of the key, not of the request IV */
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2967
2968 static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
2969                                         const u8 *key, unsigned int len)
2970 {
2971         struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2972         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2973         struct safexcel_crypto_priv *priv = ctx->priv;
2974         int i;
2975
2976         if (len != SM4_KEY_SIZE) {
2977                 crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2978                 return -EINVAL;
2979         }
2980
2981         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2982                 for (i = 0; i < SM4_KEY_SIZE / sizeof(u32); i++) {
2983                         if (ctx->key[i] !=
2984                             get_unaligned_le32(key + i * sizeof(u32))) {
2985                                 ctx->base.needs_inv = true;
2986                                 break;
2987                         }
2988                 }
2989         }
2990
2991         for (i = 0; i < SM4_KEY_SIZE / sizeof(u32); i++)
2992                 ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
2993         ctx->key_len = SM4_KEY_SIZE;
2994
2995         return 0;
2996 }
2997
2998 static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
2999 {
3000         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3001         if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3002                 return -EINVAL;
3003         else
3004                 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3005                                           SAFEXCEL_ENCRYPT);
3006 }
3007
3008 static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3009 {
3010         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3011         if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3012                 return -EINVAL;
3013         else
3014                 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3015                                           SAFEXCEL_DECRYPT);
3016 }
3017
3018 static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3019 {
3020         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3021
3022         safexcel_skcipher_cra_init(tfm);
3023         ctx->alg  = SAFEXCEL_SM4;
3024         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3025         return 0;
3026 }
3027
/*
 * ecb(sm4) template. Block-multiple input lengths are enforced in SW
 * (see safexcel_sm4_blk_encrypt) to work around an EIP96 4.3 HW bug.
 */
struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3052
3053 static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3054 {
3055         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3056
3057         safexcel_skcipher_cra_init(tfm);
3058         ctx->alg  = SAFEXCEL_SM4;
3059         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3060         return 0;
3061 }
3062
/*
 * cbc(sm4) template. Uses the block-mode entry points that enforce
 * block-multiple input lengths in SW (EIP96 4.3 workaround).
 */
struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(sm4)",
			.cra_driver_name = "safexcel-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3088
3089 static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3090 {
3091         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3092
3093         safexcel_skcipher_cra_init(tfm);
3094         ctx->alg  = SAFEXCEL_SM4;
3095         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3096         return 0;
3097 }
3098
/*
 * ofb(sm4) template. Stream mode: blocksize 1, any input length accepted,
 * so it uses the generic queue entry points rather than the blk_ ones.
 */
struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	/* NOTE(review): also requires the AES_XFB engine capability — confirm */
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(sm4)",
			.cra_driver_name = "safexcel-ofb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3124
3125 static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3126 {
3127         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3128
3129         safexcel_skcipher_cra_init(tfm);
3130         ctx->alg  = SAFEXCEL_SM4;
3131         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3132         return 0;
3133 }
3134
/*
 * cfb(sm4) template. Stream mode: blocksize 1, any input length accepted,
 * so it uses the generic queue entry points rather than the blk_ ones.
 */
struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	/* NOTE(review): also requires the AES_XFB engine capability — confirm */
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(sm4)",
			.cra_driver_name = "safexcel-cfb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3160
3161 static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3162                                            const u8 *key, unsigned int len)
3163 {
3164         struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3165         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3166
3167         /* last 4 bytes of key are the nonce! */
3168         ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3169         /* exclude the nonce here */
3170         len -= CTR_RFC3686_NONCE_SIZE;
3171
3172         return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3173 }
3174
3175 static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3176 {
3177         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3178
3179         safexcel_skcipher_cra_init(tfm);
3180         ctx->alg  = SAFEXCEL_SM4;
3181         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3182         return 0;
3183 }
3184
/*
 * rfc3686(ctr(sm4)) template. Key material includes a trailing 4-byte
 * nonce (stripped in the setkey handler), hence the larger keysizes.
 */
struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3211
3212 static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3213 {
3214         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3215         if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3216                 return -EINVAL;
3217
3218         return safexcel_queue_req(&req->base, aead_request_ctx(req),
3219                                   SAFEXCEL_ENCRYPT);
3220 }
3221
3222 static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3223 {
3224         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3225
3226         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3227         if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3228                 return -EINVAL;
3229
3230         return safexcel_queue_req(&req->base, aead_request_ctx(req),
3231                                   SAFEXCEL_DECRYPT);
3232 }
3233
3234 static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3235 {
3236         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3237
3238         safexcel_aead_cra_init(tfm);
3239         ctx->alg = SAFEXCEL_SM4;
3240         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3241         ctx->state_sz = SHA1_DIGEST_SIZE;
3242         return 0;
3243 }
3244
/* authenc(hmac(sha1),cbc(sm4)) template, block lengths checked in SW. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3269
3270 static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3271                                          const u8 *key, unsigned int len)
3272 {
3273         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3274         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3275
3276         /* Keep fallback cipher synchronized */
3277         return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3278                safexcel_aead_setkey(ctfm, key, len);
3279 }
3280
3281 static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
3282                                               unsigned int authsize)
3283 {
3284         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3285         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3286
3287         /* Keep fallback cipher synchronized */
3288         return crypto_aead_setauthsize(ctx->fback, authsize);
3289 }
3290
3291 static int safexcel_aead_fallback_crypt(struct aead_request *req,
3292                                         enum safexcel_cipher_direction dir)
3293 {
3294         struct crypto_aead *aead = crypto_aead_reqtfm(req);
3295         struct crypto_tfm *tfm = crypto_aead_tfm(aead);
3296         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3297         struct aead_request *subreq = aead_request_ctx(req);
3298
3299         aead_request_set_tfm(subreq, ctx->fback);
3300         aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3301                                   req->base.data);
3302         aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
3303                                req->iv);
3304         aead_request_set_ad(subreq, req->assoclen);
3305
3306         return (dir ==  SAFEXCEL_ENCRYPT) ?
3307                 crypto_aead_encrypt(subreq) :
3308                 crypto_aead_decrypt(subreq);
3309 }
3310
3311 static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
3312 {
3313         struct safexcel_cipher_req *creq = aead_request_ctx(req);
3314
3315         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3316         if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3317                 return -EINVAL;
3318         else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
3319                 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3320
3321         /* HW cannot do full (AAD+payload) zero length, use fallback */
3322         return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
3323 }
3324
3325 static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
3326 {
3327         struct safexcel_cipher_req *creq = aead_request_ctx(req);
3328         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3329
3330         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3331         if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3332                 return -EINVAL;
3333         else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
3334                 /* If input length > 0 only */
3335                 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3336
3337         /* HW cannot do full (AAD+payload) zero length, use fallback */
3338         return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
3339 }
3340
3341 static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3342 {
3343         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3344
3345         safexcel_aead_fallback_cra_init(tfm);
3346         ctx->alg = SAFEXCEL_SM4;
3347         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3348         ctx->state_sz = SM3_DIGEST_SIZE;
3349         return 0;
3350 }
3351
/*
 * authenc(hmac(sm3),cbc(sm4)) template. Needs a SW fallback for requests
 * with zero total (AAD + payload) length, hence CRYPTO_ALG_NEED_FALLBACK
 * and the fallback-aware setkey/setauthsize/exit handlers.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3378
3379 static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
3380 {
3381         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3382
3383         safexcel_aead_sm4cbc_sha1_cra_init(tfm);
3384         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3385         return 0;
3386 }
3387
/* authenc(hmac(sha1),rfc3686(ctr(sm4))) template: stream mode, no SW checks. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3412
3413 static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
3414 {
3415         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3416
3417         safexcel_aead_sm4cbc_sm3_cra_init(tfm);
3418         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3419         return 0;
3420 }
3421
/* authenc(hmac(sm3),rfc3686(ctr(sm4))) template: stream mode, no SW checks. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3446
3447 static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3448                                        unsigned int len)
3449 {
3450         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3451         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3452
3453         /* last 4 bytes of key are the nonce! */
3454         ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3455
3456         len -= CTR_RFC3686_NONCE_SIZE;
3457         return safexcel_aead_gcm_setkey(ctfm, key, len);
3458 }
3459
/* Validate the RFC 4106 ICV size via the crypto core helper. */
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}
3465
/* IPsec ESP encrypt: validate the AAD length, then queue as usual. */
static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}
3471
/* IPsec ESP decrypt: validate the AAD length, then queue as usual. */
static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}
3477
3478 static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
3479 {
3480         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3481         int ret;
3482
3483         ret = safexcel_aead_gcm_cra_init(tfm);
3484         ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3485         return ret;
3486 }
3487
3488 struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3489         .type = SAFEXCEL_ALG_TYPE_AEAD,
3490         .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3491         .alg.aead = {
3492                 .setkey = safexcel_rfc4106_gcm_setkey,
3493                 .setauthsize = safexcel_rfc4106_gcm_setauthsize,
3494                 .encrypt = safexcel_rfc4106_encrypt,
3495                 .decrypt = safexcel_rfc4106_decrypt,
3496                 .ivsize = GCM_RFC4106_IV_SIZE,
3497                 .maxauthsize = GHASH_DIGEST_SIZE,
3498                 .base = {
3499                         .cra_name = "rfc4106(gcm(aes))",
3500                         .cra_driver_name = "safexcel-rfc4106-gcm-aes",
3501                         .cra_priority = SAFEXCEL_CRA_PRIORITY,
3502                         .cra_flags = CRYPTO_ALG_ASYNC |
3503                                      CRYPTO_ALG_KERN_DRIVER_ONLY,
3504                         .cra_blocksize = 1,
3505                         .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3506                         .cra_alignmask = 0,
3507                         .cra_init = safexcel_rfc4106_gcm_cra_init,
3508                         .cra_exit = safexcel_aead_gcm_cra_exit,
3509                 },
3510         },
3511 };
3512
3513 static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3514                                             unsigned int authsize)
3515 {
3516         if (authsize != GHASH_DIGEST_SIZE)
3517                 return -EINVAL;
3518
3519         return 0;
3520 }
3521
3522 static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
3523 {
3524         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3525         int ret;
3526
3527         ret = safexcel_aead_gcm_cra_init(tfm);
3528         ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
3529         return ret;
3530 }
3531
3532 struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3533         .type = SAFEXCEL_ALG_TYPE_AEAD,
3534         .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3535         .alg.aead = {
3536                 .setkey = safexcel_rfc4106_gcm_setkey,
3537                 .setauthsize = safexcel_rfc4543_gcm_setauthsize,
3538                 .encrypt = safexcel_rfc4106_encrypt,
3539                 .decrypt = safexcel_rfc4106_decrypt,
3540                 .ivsize = GCM_RFC4543_IV_SIZE,
3541                 .maxauthsize = GHASH_DIGEST_SIZE,
3542                 .base = {
3543                         .cra_name = "rfc4543(gcm(aes))",
3544                         .cra_driver_name = "safexcel-rfc4543-gcm-aes",
3545                         .cra_priority = SAFEXCEL_CRA_PRIORITY,
3546                         .cra_flags = CRYPTO_ALG_ASYNC |
3547                                      CRYPTO_ALG_KERN_DRIVER_ONLY,
3548                         .cra_blocksize = 1,
3549                         .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3550                         .cra_alignmask = 0,
3551                         .cra_init = safexcel_rfc4543_gcm_cra_init,
3552                         .cra_exit = safexcel_aead_gcm_cra_exit,
3553                 },
3554         },
3555 };
3556
/*
 * rfc4309(ccm(aes)) setkey: pack the CCM L value and the 3-byte implicit
 * nonce (taken from the end of the key material) into ctx->nonce, then
 * pass the remaining bytes on to the plain CCM setkey.
 */
static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}
3573
3574 static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
3575                                             unsigned int authsize)
3576 {
3577         /* Borrowed from crypto/ccm.c */
3578         switch (authsize) {
3579         case 8:
3580         case 12:
3581         case 16:
3582                 break;
3583         default:
3584                 return -EINVAL;
3585         }
3586
3587         return 0;
3588 }
3589
3590 static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3591 {
3592         struct safexcel_cipher_req *creq = aead_request_ctx(req);
3593
3594         /* Borrowed from crypto/ccm.c */
3595         if (req->assoclen != 16 && req->assoclen != 20)
3596                 return -EINVAL;
3597
3598         return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3599 }
3600
3601 static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3602 {
3603         struct safexcel_cipher_req *creq = aead_request_ctx(req);
3604
3605         /* Borrowed from crypto/ccm.c */
3606         if (req->assoclen != 16 && req->assoclen != 20)
3607                 return -EINVAL;
3608
3609         return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3610 }
3611
3612 static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
3613 {
3614         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3615         int ret;
3616
3617         ret = safexcel_aead_ccm_cra_init(tfm);
3618         ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3619         return ret;
3620 }
3621
/* Algorithm registration template for rfc4309(ccm(aes)) — CCM-AES for IPsec ESP */
struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* Stream-like: no blocksize constraint on input */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};