crypto: inside-secure - Move priv pointer into safexcel_context
[linux-2.6-microblaze.git] / drivers / crypto / inside-secure / safexcel_cipher.c
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) 2017 Marvell
4  *
5  * Antoine Tenart <antoine.tenart@free-electrons.com>
6  */
7
8 #include <asm/unaligned.h>
9 #include <linux/device.h>
10 #include <linux/dma-mapping.h>
11 #include <linux/dmapool.h>
12 #include <crypto/aead.h>
13 #include <crypto/aes.h>
14 #include <crypto/authenc.h>
15 #include <crypto/chacha.h>
16 #include <crypto/ctr.h>
17 #include <crypto/internal/des.h>
18 #include <crypto/gcm.h>
19 #include <crypto/ghash.h>
20 #include <crypto/poly1305.h>
21 #include <crypto/sha.h>
22 #include <crypto/sm3.h>
23 #include <crypto/sm4.h>
24 #include <crypto/xts.h>
25 #include <crypto/skcipher.h>
26 #include <crypto/internal/aead.h>
27 #include <crypto/internal/skcipher.h>
28
29 #include "safexcel.h"
30
/* Direction of a cipher operation as programmed into the engine. */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};
35
/* Cipher algorithms this driver programs into the EIP97/EIP197 engine. */
enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};
43
44 struct safexcel_cipher_ctx {
45         struct safexcel_context base;
46         struct safexcel_crypto_priv *priv;
47
48         u32 mode;
49         enum safexcel_cipher_alg alg;
50         u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
51         u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
52         u8 aadskip;
53         u8 blocksz;
54         u32 ivmask;
55         u32 ctrinit;
56
57         __le32 key[16];
58         u32 nonce;
59         unsigned int key_len, xts;
60
61         /* All the below is AEAD specific */
62         u32 hash_alg;
63         u32 state_sz;
64         __be32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
65         __be32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
66
67         struct crypto_cipher *hkaes;
68         struct crypto_aead *fback;
69 };
70
/* Per-request state, stored in the request context. */
struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated to the request */
	unsigned int rdescs;
	/* NOTE(review): presumably set when a context invalidation must be
	 * sent before this request -- confirm against the send path. */
	bool needs_inv;
	int  nr_src, nr_dst;	/* mapped src/dst scatterlist entry counts */
};
78
79 static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
80                                 struct safexcel_command_desc *cdesc)
81 {
82         if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
83                 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
84                 /* 32 bit nonce */
85                 cdesc->control_data.token[0] = ctx->nonce;
86                 /* 64 bit IV part */
87                 memcpy(&cdesc->control_data.token[1], iv, 8);
88                 /* 32 bit counter, start at 0 or 1 (big endian!) */
89                 cdesc->control_data.token[3] =
90                         (__force u32)cpu_to_be32(ctx->ctrinit);
91                 return 4;
92         }
93         if (ctx->alg == SAFEXCEL_CHACHA20) {
94                 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
95                 /* 96 bit nonce part */
96                 memcpy(&cdesc->control_data.token[0], &iv[4], 12);
97                 /* 32 bit counter */
98                 cdesc->control_data.token[3] = *(u32 *)iv;
99                 return 4;
100         }
101
102         cdesc->control_data.options |= ctx->ivmask;
103         memcpy(cdesc->control_data.token, iv, ctx->blocksz);
104         return ctx->blocksz / sizeof(u32);
105 }
106
107 static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
108                                     struct safexcel_command_desc *cdesc,
109                                     struct safexcel_token *atoken,
110                                     u32 length)
111 {
112         struct safexcel_token *token;
113         int ivlen;
114
115         ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
116         if (ivlen == 4) {
117                 /* No space in cdesc, instruction moves to atoken */
118                 cdesc->additional_cdata_size = 1;
119                 token = atoken;
120         } else {
121                 /* Everything fits in cdesc */
122                 token = (struct safexcel_token *)(cdesc->control_data.token + 2);
123                 /* Need to pad with NOP */
124                 eip197_noop_token(&token[1]);
125         }
126
127         token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
128         token->packet_length = length;
129         token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
130                       EIP197_TOKEN_STAT_LAST_HASH;
131         token->instructions = EIP197_TOKEN_INS_LAST |
132                               EIP197_TOKEN_INS_TYPE_CRYPTO |
133                               EIP197_TOKEN_INS_TYPE_OUTPUT;
134 }
135
136 static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
137                              struct safexcel_command_desc *cdesc)
138 {
139         if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
140             ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
141                 /* 32 bit nonce */
142                 cdesc->control_data.token[0] = ctx->nonce;
143                 /* 64 bit IV part */
144                 memcpy(&cdesc->control_data.token[1], iv, 8);
145                 /* 32 bit counter, start at 0 or 1 (big endian!) */
146                 cdesc->control_data.token[3] =
147                         (__force u32)cpu_to_be32(ctx->ctrinit);
148                 return;
149         }
150         if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
151                 /* 96 bit IV part */
152                 memcpy(&cdesc->control_data.token[0], iv, 12);
153                 /* 32 bit counter, start at 0 or 1 (big endian!) */
154                 cdesc->control_data.token[3] =
155                         (__force u32)cpu_to_be32(ctx->ctrinit);
156                 return;
157         }
158         /* CBC */
159         memcpy(cdesc->control_data.token, iv, ctx->blocksz);
160 }
161
/*
 * Build the EIP197 instruction token stream for an AEAD request in the
 * additional token buffer (atoken), loading the IV into the cdesc on the
 * way. The stream covers: (CCM only) the synthesized B0 block and AAD
 * length, the AAD itself, optional IV skip / engine quirks, crypto data,
 * and finally ICV insertion (encrypt) or retrieval+verification
 * (decrypt). atoksize tracks the number of token words actually emitted
 * and is written back into the command descriptor at the end.
 */
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				struct safexcel_token *atoken,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *aadref;
	int atoksize = 2; /* Start with minimum size */
	int assocadj = assoclen - ctx->aadskip, aadalign;

	/* Always 4 dwords of embedded IV  for AEAD modes */
	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

	/* On decrypt, cryptlen includes the appended ICV: strip it */
	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
		/* Construct IV block B0 for the CBC-MAC */
		u8 *final_iv = (u8 *)cdesc->control_data.token;
		u8 *cbcmaciv = (u8 *)&atoken[1];
		__le32 *aadlen = (__le32 *)&atoken[5];

		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
			/* Length + nonce */
			cdesc->control_data.token[0] = ctx->nonce;
			/* Fixup flags byte */
			*(__le32 *)cbcmaciv =
				cpu_to_le32(ctx->nonce |
					    ((assocadj > 0) << 6) |
					    ((digestsize - 2) << 2));
			/* 64 bit IV part */
			memcpy(&cdesc->control_data.token[1], iv, 8);
			memcpy(cbcmaciv + 4, iv, 8);
			/* Start counter at 0 */
			cdesc->control_data.token[3] = 0;
			/* Message length */
			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
		} else {
			/* Variable length IV part (iv[0] = L' = L - 1) */
			memcpy(final_iv, iv, 15 - iv[0]);
			memcpy(cbcmaciv, iv, 15 - iv[0]);
			/* Start variable length counter at 0; note the B0
			 * copy zeroes 2 bytes less, as its last 2 bytes
			 * carry the message length inserted below */
			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
			/* fixup flags byte */
			cbcmaciv[0] |= ((assocadj > 0) << 6) |
				       ((digestsize - 2) << 2);
			/* insert lower 2 bytes of message length */
			cbcmaciv[14] = cryptlen >> 8;
			cbcmaciv[15] = cryptlen & 255;
		}

		/* Feed B0 (and optionally the AAD length) to the hash */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE +
					((assocadj > 0) << 1);
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
				       EIP197_TOKEN_INS_TYPE_HASH;

		if (likely(assocadj)) {
			/* 16 bit big-endian adjusted AAD length */
			*aadlen = cpu_to_le32((assocadj >> 8) |
					      (assocadj & 255) << 8);
			atoken += 6;
			atoksize += 7;
		} else {
			atoken += 5;
			atoksize += 6;
		}

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		atoken++;

		/* For CCM only, align AAD data towards hash engine */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		aadalign = (assocadj + 2) & 15;
		atoken->packet_length = assocadj && aadalign ?
						16 - aadalign :
						0;
		if (likely(cryptlen)) {
			atoken->stat = 0;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			/* No payload: this is the last hash input */
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH;
		}
	} else {
		safexcel_aead_iv(ctx, iv, cdesc);

		/* Process AAD data; stat/instructions may be fixed up
		 * below once we know whether payload follows */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		atoken->instructions = EIP197_TOKEN_INS_LAST |
				       EIP197_TOKEN_INS_TYPE_HASH;
	}
	atoken++;

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* For ESP mode (and not GMAC), skip over the IV */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
			    direction == SAFEXCEL_DECRYPT)) {
		/* Poly-chacha decryption needs a dummy NOP here ... */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = 16; /* According to Op Manual */
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	}

	if  (ctx->xcm) {
		/* For GCM and CCM, obtain enc(Y0) */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
		atoken->packet_length = 0;
		atoken->stat = 0;
		atoken->instructions = AES_BLOCK_SIZE;
		atoken++;

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_TYPE_CRYPTO;
		atoken++;
		atoksize += 2;
	}

	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
		/* Fixup stat field for AAD direction instruction */
		aadref->stat = 0;

		/* Process crypto data */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = cryptlen;

		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
			/* Fixup instruction field for AAD dir instruction */
			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;

			/* Do not send to crypt engine in case of GMAC */
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		} else {
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_CRYPTO |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		}

		/* Remaining bytes in the last 16-byte hash block */
		cryptlen &= 15;
		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
			atoken->stat = 0;
			/* For CCM only, pad crypto data to the hash engine */
			atoken++;
			atoksize++;
			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
			atoken->packet_length = 16 - cryptlen;
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		}
		atoken++;
		atoksize++;
	}

	if (direction == SAFEXCEL_ENCRYPT) {
		/* Append ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* Extract ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
		atoken++;
		atoksize++;

		/* Verify ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
		atoken->packet_length = digestsize |
					EIP197_TOKEN_HASH_RESULT_VERIFY;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	/* Fixup length of the token in the command descriptor */
	cdesc->additional_cdata_size = atoksize;
}
372
373 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
374                                         const u8 *key, unsigned int len)
375 {
376         struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
377         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
378         struct safexcel_crypto_priv *priv = ctx->base.priv;
379         struct crypto_aes_ctx aes;
380         int ret, i;
381
382         ret = aes_expandkey(&aes, key, len);
383         if (ret)
384                 return ret;
385
386         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
387                 for (i = 0; i < len / sizeof(u32); i++) {
388                         if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
389                                 ctx->base.needs_inv = true;
390                                 break;
391                         }
392                 }
393         }
394
395         for (i = 0; i < len / sizeof(u32); i++)
396                 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
397
398         ctx->key_len = len;
399
400         memzero_explicit(&aes, sizeof(aes));
401         return 0;
402 }
403
404 static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
405                                 unsigned int len)
406 {
407         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
408         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
409         struct safexcel_ahash_export_state istate, ostate;
410         struct safexcel_crypto_priv *priv = ctx->base.priv;
411         struct crypto_authenc_keys keys;
412         struct crypto_aes_ctx aes;
413         int err = -EINVAL, i;
414
415         if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
416                 goto badkey;
417
418         if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
419                 /* Must have at least space for the nonce here */
420                 if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
421                         goto badkey;
422                 /* last 4 bytes of key are the nonce! */
423                 ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
424                                       CTR_RFC3686_NONCE_SIZE);
425                 /* exclude the nonce here */
426                 keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
427         }
428
429         /* Encryption key */
430         switch (ctx->alg) {
431         case SAFEXCEL_DES:
432                 err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
433                 if (unlikely(err))
434                         goto badkey;
435                 break;
436         case SAFEXCEL_3DES:
437                 err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
438                 if (unlikely(err))
439                         goto badkey;
440                 break;
441         case SAFEXCEL_AES:
442                 err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
443                 if (unlikely(err))
444                         goto badkey;
445                 break;
446         case SAFEXCEL_SM4:
447                 if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
448                         goto badkey;
449                 break;
450         default:
451                 dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
452                 goto badkey;
453         }
454
455         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
456                 for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
457                         if (le32_to_cpu(ctx->key[i]) !=
458                             ((u32 *)keys.enckey)[i]) {
459                                 ctx->base.needs_inv = true;
460                                 break;
461                         }
462                 }
463         }
464
465         /* Auth key */
466         switch (ctx->hash_alg) {
467         case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
468                 if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
469                                          keys.authkeylen, &istate, &ostate))
470                         goto badkey;
471                 break;
472         case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
473                 if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
474                                          keys.authkeylen, &istate, &ostate))
475                         goto badkey;
476                 break;
477         case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
478                 if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
479                                          keys.authkeylen, &istate, &ostate))
480                         goto badkey;
481                 break;
482         case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
483                 if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
484                                          keys.authkeylen, &istate, &ostate))
485                         goto badkey;
486                 break;
487         case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
488                 if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
489                                          keys.authkeylen, &istate, &ostate))
490                         goto badkey;
491                 break;
492         case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
493                 if (safexcel_hmac_setkey("safexcel-sm3", keys.authkey,
494                                          keys.authkeylen, &istate, &ostate))
495                         goto badkey;
496                 break;
497         default:
498                 dev_err(priv->dev, "aead: unsupported hash algorithm\n");
499                 goto badkey;
500         }
501
502         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
503             (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
504              memcmp(ctx->opad, ostate.state, ctx->state_sz)))
505                 ctx->base.needs_inv = true;
506
507         /* Now copy the keys into the context */
508         for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
509                 ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
510         ctx->key_len = keys.enckeylen;
511
512         memcpy(ctx->ipad, &istate.state, ctx->state_sz);
513         memcpy(ctx->opad, &ostate.state, ctx->state_sz);
514
515         memzero_explicit(&keys, sizeof(keys));
516         return 0;
517
518 badkey:
519         memzero_explicit(&keys, sizeof(keys));
520         return err;
521 }
522
523 static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
524                                     struct crypto_async_request *async,
525                                     struct safexcel_cipher_req *sreq,
526                                     struct safexcel_command_desc *cdesc)
527 {
528         struct safexcel_crypto_priv *priv = ctx->base.priv;
529         int ctrl_size = ctx->key_len / sizeof(u32);
530
531         cdesc->control_data.control1 = ctx->mode;
532
533         if (ctx->aead) {
534                 /* Take in account the ipad+opad digests */
535                 if (ctx->xcm) {
536                         ctrl_size += ctx->state_sz / sizeof(u32);
537                         cdesc->control_data.control0 =
538                                 CONTEXT_CONTROL_KEY_EN |
539                                 CONTEXT_CONTROL_DIGEST_XCM |
540                                 ctx->hash_alg |
541                                 CONTEXT_CONTROL_SIZE(ctrl_size);
542                 } else if (ctx->alg == SAFEXCEL_CHACHA20) {
543                         /* Chacha20-Poly1305 */
544                         cdesc->control_data.control0 =
545                                 CONTEXT_CONTROL_KEY_EN |
546                                 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
547                                 (sreq->direction == SAFEXCEL_ENCRYPT ?
548                                         CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
549                                         CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
550                                 ctx->hash_alg |
551                                 CONTEXT_CONTROL_SIZE(ctrl_size);
552                         return 0;
553                 } else {
554                         ctrl_size += ctx->state_sz / sizeof(u32) * 2;
555                         cdesc->control_data.control0 =
556                                 CONTEXT_CONTROL_KEY_EN |
557                                 CONTEXT_CONTROL_DIGEST_HMAC |
558                                 ctx->hash_alg |
559                                 CONTEXT_CONTROL_SIZE(ctrl_size);
560                 }
561
562                 if (sreq->direction == SAFEXCEL_ENCRYPT &&
563                     (ctx->xcm == EIP197_XCM_MODE_CCM ||
564                      ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
565                         cdesc->control_data.control0 |=
566                                 CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
567                 else if (sreq->direction == SAFEXCEL_ENCRYPT)
568                         cdesc->control_data.control0 |=
569                                 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
570                 else if (ctx->xcm == EIP197_XCM_MODE_CCM)
571                         cdesc->control_data.control0 |=
572                                 CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
573                 else
574                         cdesc->control_data.control0 |=
575                                 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
576         } else {
577                 if (sreq->direction == SAFEXCEL_ENCRYPT)
578                         cdesc->control_data.control0 =
579                                 CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
580                                 CONTEXT_CONTROL_KEY_EN |
581                                 CONTEXT_CONTROL_SIZE(ctrl_size);
582                 else
583                         cdesc->control_data.control0 =
584                                 CONTEXT_CONTROL_TYPE_CRYPTO_IN |
585                                 CONTEXT_CONTROL_KEY_EN |
586                                 CONTEXT_CONTROL_SIZE(ctrl_size);
587         }
588
589         if (ctx->alg == SAFEXCEL_DES) {
590                 cdesc->control_data.control0 |=
591                         CONTEXT_CONTROL_CRYPTO_ALG_DES;
592         } else if (ctx->alg == SAFEXCEL_3DES) {
593                 cdesc->control_data.control0 |=
594                         CONTEXT_CONTROL_CRYPTO_ALG_3DES;
595         } else if (ctx->alg == SAFEXCEL_AES) {
596                 switch (ctx->key_len >> ctx->xts) {
597                 case AES_KEYSIZE_128:
598                         cdesc->control_data.control0 |=
599                                 CONTEXT_CONTROL_CRYPTO_ALG_AES128;
600                         break;
601                 case AES_KEYSIZE_192:
602                         cdesc->control_data.control0 |=
603                                 CONTEXT_CONTROL_CRYPTO_ALG_AES192;
604                         break;
605                 case AES_KEYSIZE_256:
606                         cdesc->control_data.control0 |=
607                                 CONTEXT_CONTROL_CRYPTO_ALG_AES256;
608                         break;
609                 default:
610                         dev_err(priv->dev, "aes keysize not supported: %u\n",
611                                 ctx->key_len >> ctx->xts);
612                         return -EINVAL;
613                 }
614         } else if (ctx->alg == SAFEXCEL_CHACHA20) {
615                 cdesc->control_data.control0 |=
616                         CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
617         } else if (ctx->alg == SAFEXCEL_SM4) {
618                 cdesc->control_data.control0 |=
619                         CONTEXT_CONTROL_CRYPTO_ALG_SM4;
620         }
621
622         return 0;
623 }
624
/*
 * Completion handler for a cipher request on a given ring: pop and
 * error-check this request's result descriptors, release the ring slot,
 * unmap the DMA scatterlists and, for CBC encrypt, copy the last output
 * block back into req->iv as the chained IV. Returns the number of
 * result descriptors consumed; the first descriptor error (if any) is
 * reported through *ret.
 */
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	/* Drain all result descriptors belonging to this request.
	 * NOTE(review): the post-decrement leaves sreq->rdescs at
	 * (unsigned)-1 after the loop; presumably it is re-initialized
	 * before reuse -- confirm against the send path. */
	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Keep only the first error encountered */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		/* In-place operation was mapped bidirectionally */
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}
684
/*
 * Build the command and result descriptor chains for one cipher/AEAD
 * request and push them onto the given ring.
 *
 * @base:       generic async request being processed
 * @ring:       index of the descriptor ring to use
 * @sreq:       driver per-request state (nr_src/nr_dst are filled in here)
 * @src/@dst:   input/output scatterlists (may be the same for in-place)
 * @cryptlen:   payload length in bytes
 * @assoclen:   AAD length (0 for plain skciphers)
 * @digestsize: auth tag size (0 for plain skciphers)
 * @iv:         IV to embed in the token
 * @commands/@results: out - number of command/result descriptors written
 *
 * Returns 0 on success or a negative errno; on error all descriptors
 * written so far are rolled back and the DMA mappings are undone.
 */
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct skcipher_request *areq = skcipher_request_cast(base);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_command_desc *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen;
	unsigned int totlen_src = cryptlen + assoclen;
	unsigned int totlen_dst = totlen_src;
	struct safexcel_token *atoken;
	int n_cdesc = 0, n_rdesc = 0;
	int queued, i, ret = 0;
	bool first = true;

	sreq->nr_src = sg_nents_for_len(src, totlen_src);

	if (ctx->aead) {
		/*
		 * AEAD has auth tag appended to output for encrypt and
		 * removed from the output for decrypt!
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			totlen_dst -= digestsize;
		else
			totlen_dst += digestsize;

		/* Copy the hash state(s) behind the key in the context record */
		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       ctx->ipad, ctx->state_sz);
		if (!ctx->xcm)
			memcpy(ctx->base.ctxr->data + (ctx->key_len +
			       ctx->state_sz) / sizeof(u32), ctx->opad,
			       ctx->state_sz);
	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
		   (sreq->direction == SAFEXCEL_DECRYPT)) {
		/*
		 * Save IV from last crypto input word for CBC modes in decrypt
		 * direction. Need to do this first in case of inplace operation
		 * as it will be overwritten.
		 */
		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (totlen_src -
				    crypto_skcipher_ivsize(skcipher)));
	}

	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	/*
	 * Remember actual input length, source buffer length may be
	 * updated in case of inline operation below.
	 */
	totlen = totlen_src;
	queued = totlen_src;

	if (src == dst) {
		/* In-place: one bidirectional mapping covering both views */
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		if (unlikely((totlen_src || totlen_dst) &&
		    (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
				max(totlen_src, totlen_dst));
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
				totlen_src);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);

		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
				totlen_dst);
			/* src was already mapped above - undo before bailing */
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	/* (Re)write the cipher key into the hardware context record */
	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (!totlen) {
		/*
		 * The EIP97 cannot deal with zero length input packets!
		 * So stuff a dummy command descriptor indicating a 1 byte
		 * (dummy) input packet, using the context record as source.
		 */
		first_cdesc = safexcel_add_cdesc(priv, ring,
						 1, 1, ctx->base.ctxr_dma,
						 1, 1, ctx->base.ctxr_dma,
						 &atoken);
		if (IS_ERR(first_cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(first_cdesc);
			goto cdesc_rollback;
		}
		n_cdesc = 1;
		goto skip_cdesc;
	}

	/* command descriptors - one per source scatterlist segment */
	for_each_sg(src, sg, sreq->nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued < len)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma, &atoken);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;

		n_cdesc++;
		queued -= len;
		if (!queued)
			break;
	}
skip_cdesc:
	/* Add context control words and token to first command descriptor */
	safexcel_context_control(ctx, base, sreq, first_cdesc);
	if (ctx->aead)
		safexcel_aead_token(ctx, iv, first_cdesc, atoken,
				    sreq->direction, cryptlen,
				    assoclen, digestsize);
	else
		safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
					cryptlen);

	/* result descriptors - one per destination segment actually used */
	for_each_sg(dst, sg, sreq->nr_dst, i) {
		bool last = (i == sreq->nr_dst - 1);
		u32 len = sg_dma_len(sg);

		/* only allow the part of the buffer we know we need */
		if (len > totlen_dst)
			len = totlen_dst;
		if (unlikely(!len))
			break;
		totlen_dst -= len;

		/* skip over AAD space in buffer - not written */
		if (assoclen) {
			if (assoclen >= len) {
				assoclen -= len;
				continue;
			}
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg) +
						   assoclen,
						   len - assoclen);
			assoclen = 0;
		} else {
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg),
						   len);
		}
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first) {
			first_rdesc = rdesc;
			first = false;
		}
		n_rdesc++;
	}

	if (unlikely(first)) {
		/*
		 * Special case: AEAD decrypt with only AAD data.
		 * In this case there is NO output data from the engine,
		 * but the engine still needs a result descriptor!
		 * Create a dummy one just for catching the result token.
		 */
		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		first_rdesc = rdesc;
		n_rdesc = 1;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	/* Drop every descriptor written so far, result ring first */
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	return ret;
}
913
/*
 * Process the completion of a context-invalidation request: pop and
 * error-check its result descriptors, then either free the hardware
 * context record (tfm teardown, ctx->base.exit_inv set) or re-enqueue
 * the original crypto request on a freshly selected ring.
 *
 * Returns the number of result descriptors consumed; *ret carries the
 * first descriptor error (if any) and *should_complete tells the caller
 * whether to complete the request towards the crypto API.
 */
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	/* Consume all result descriptors belonging to this request */
	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Only the first error encountered is reported */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		/* tfm is going away: release the context record and finish */
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	/* Invalidation done; resubmit the original request on a new ring */
	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* Not complete yet: the re-enqueued request still has to run */
	*should_complete = false;

	return ndesc;
}
972
973 static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
974                                            int ring,
975                                            struct crypto_async_request *async,
976                                            bool *should_complete, int *ret)
977 {
978         struct skcipher_request *req = skcipher_request_cast(async);
979         struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
980         int err;
981
982         if (sreq->needs_inv) {
983                 sreq->needs_inv = false;
984                 err = safexcel_handle_inv_result(priv, ring, async, sreq,
985                                                  should_complete, ret);
986         } else {
987                 err = safexcel_handle_req_result(priv, ring, async, req->src,
988                                                  req->dst, req->cryptlen, sreq,
989                                                  should_complete, ret);
990         }
991
992         return err;
993 }
994
995 static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
996                                        int ring,
997                                        struct crypto_async_request *async,
998                                        bool *should_complete, int *ret)
999 {
1000         struct aead_request *req = aead_request_cast(async);
1001         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1002         struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1003         int err;
1004
1005         if (sreq->needs_inv) {
1006                 sreq->needs_inv = false;
1007                 err = safexcel_handle_inv_result(priv, ring, async, sreq,
1008                                                  should_complete, ret);
1009         } else {
1010                 err = safexcel_handle_req_result(priv, ring, async, req->src,
1011                                                  req->dst,
1012                                                  req->cryptlen + crypto_aead_authsize(tfm),
1013                                                  sreq, should_complete, ret);
1014         }
1015
1016         return err;
1017 }
1018
1019 static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1020                                     int ring, int *commands, int *results)
1021 {
1022         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1023         struct safexcel_crypto_priv *priv = ctx->base.priv;
1024         int ret;
1025
1026         ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1027         if (unlikely(ret))
1028                 return ret;
1029
1030         *commands = 1;
1031         *results = 1;
1032
1033         return 0;
1034 }
1035
1036 static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
1037                                   int *commands, int *results)
1038 {
1039         struct skcipher_request *req = skcipher_request_cast(async);
1040         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1041         struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1042         struct safexcel_crypto_priv *priv = ctx->base.priv;
1043         int ret;
1044
1045         BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1046
1047         if (sreq->needs_inv) {
1048                 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1049         } else {
1050                 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1051                 u8 input_iv[AES_BLOCK_SIZE];
1052
1053                 /*
1054                  * Save input IV in case of CBC decrypt mode
1055                  * Will be overwritten with output IV prior to use!
1056                  */
1057                 memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1058
1059                 ret = safexcel_send_req(async, ring, sreq, req->src,
1060                                         req->dst, req->cryptlen, 0, 0, input_iv,
1061                                         commands, results);
1062         }
1063
1064         sreq->rdescs = *results;
1065         return ret;
1066 }
1067
1068 static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1069                               int *commands, int *results)
1070 {
1071         struct aead_request *req = aead_request_cast(async);
1072         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1073         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1074         struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1075         struct safexcel_crypto_priv *priv = ctx->base.priv;
1076         int ret;
1077
1078         BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1079
1080         if (sreq->needs_inv)
1081                 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1082         else
1083                 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1084                                         req->cryptlen, req->assoclen,
1085                                         crypto_aead_authsize(tfm), req->iv,
1086                                         commands, results);
1087         sreq->rdescs = *results;
1088         return ret;
1089 }
1090
/*
 * Synchronously invalidate the hardware context record of @tfm: flag the
 * dummy request @base as an invalidation, queue it on the context's
 * current ring and block until the completion callback fires.
 *
 * @result must be the safexcel_inv_result wired into @base's completion
 * callback by the caller. Returns 0 on success or the completion error.
 */
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	/* NOTE(review): base->tfm appears to be the same tfm for the
	 * on-stack requests built by the callers - re-fetch kept as-is */
	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	/* Kick the ring worker to process the queued invalidation */
	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* Block until safexcel_inv_complete() fires */
	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			"cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}
1124
1125 static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
1126 {
1127         EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
1128         struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1129         struct safexcel_inv_result result = {};
1130
1131         memset(req, 0, sizeof(struct skcipher_request));
1132
1133         skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1134                                       safexcel_inv_complete, &result);
1135         skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
1136
1137         return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1138 }
1139
1140 static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
1141 {
1142         EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1143         struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1144         struct safexcel_inv_result result = {};
1145
1146         memset(req, 0, sizeof(struct aead_request));
1147
1148         aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1149                                   safexcel_inv_complete, &result);
1150         aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1151
1152         return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1153 }
1154
/*
 * Queue a cipher/AEAD request for processing.
 *
 * Allocates the per-context hardware context record on first use (and
 * picks a ring for the context), or - when the record cache is enabled
 * and the key changed - converts the request into an invalidation first.
 * The request is then enqueued on the context's ring and the ring worker
 * is kicked. Returns the crypto_enqueue_request() status (normally
 * -EINPROGRESS) or -ENOMEM if the context record cannot be allocated.
 */
static int safexcel_queue_req(struct crypto_async_request *base,
			struct safexcel_cipher_req *sreq,
			enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx->base.ctxr) {
		/* Existing record: invalidate first if flagged stale */
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		/* First request on this tfm: pick a ring, alloc the record */
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}
1191
1192 static int safexcel_encrypt(struct skcipher_request *req)
1193 {
1194         return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1195                         SAFEXCEL_ENCRYPT);
1196 }
1197
1198 static int safexcel_decrypt(struct skcipher_request *req)
1199 {
1200         return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1201                         SAFEXCEL_DECRYPT);
1202 }
1203
1204 static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1205 {
1206         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1207         struct safexcel_alg_template *tmpl =
1208                 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1209                              alg.skcipher.base);
1210
1211         crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1212                                     sizeof(struct safexcel_cipher_req));
1213
1214         ctx->base.priv = tmpl->priv;
1215
1216         ctx->base.send = safexcel_skcipher_send;
1217         ctx->base.handle_result = safexcel_skcipher_handle_result;
1218         ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1219         ctx->ctrinit = 1;
1220         return 0;
1221 }
1222
1223 static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
1224 {
1225         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1226
1227         memzero_explicit(ctx->key, sizeof(ctx->key));
1228
1229         /* context not allocated, skip invalidation */
1230         if (!ctx->base.ctxr)
1231                 return -ENOMEM;
1232
1233         memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
1234         return 0;
1235 }
1236
1237 static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1238 {
1239         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1240         struct safexcel_crypto_priv *priv = ctx->base.priv;
1241         int ret;
1242
1243         if (safexcel_cipher_cra_exit(tfm))
1244                 return;
1245
1246         if (priv->flags & EIP197_TRC_CACHE) {
1247                 ret = safexcel_skcipher_exit_inv(tfm);
1248                 if (ret)
1249                         dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1250                                  ret);
1251         } else {
1252                 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1253                               ctx->base.ctxr_dma);
1254         }
1255 }
1256
1257 static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1258 {
1259         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1260         struct safexcel_crypto_priv *priv = ctx->base.priv;
1261         int ret;
1262
1263         if (safexcel_cipher_cra_exit(tfm))
1264                 return;
1265
1266         if (priv->flags & EIP197_TRC_CACHE) {
1267                 ret = safexcel_aead_exit_inv(tfm);
1268                 if (ret)
1269                         dev_warn(priv->dev, "aead: invalidation error %d\n",
1270                                  ret);
1271         } else {
1272                 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1273                               ctx->base.ctxr_dma);
1274         }
1275 }
1276
1277 static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1278 {
1279         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1280
1281         safexcel_skcipher_cra_init(tfm);
1282         ctx->alg  = SAFEXCEL_AES;
1283         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1284         ctx->blocksz = 0;
1285         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1286         return 0;
1287 }
1288
/* Algorithm registration template for ecb(aes) (no IV). */
struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1314
1315 static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1316 {
1317         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1318
1319         safexcel_skcipher_cra_init(tfm);
1320         ctx->alg  = SAFEXCEL_AES;
1321         ctx->blocksz = AES_BLOCK_SIZE;
1322         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1323         return 0;
1324 }
1325
/* Algorithm registration template for cbc(aes) (block-sized IV). */
struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1352
1353 static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
1354 {
1355         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1356
1357         safexcel_skcipher_cra_init(tfm);
1358         ctx->alg  = SAFEXCEL_AES;
1359         ctx->blocksz = AES_BLOCK_SIZE;
1360         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1361         return 0;
1362 }
1363
/* Algorithm registration template for cfb(aes) (stream mode, blocksize 1). */
struct safexcel_alg_template safexcel_alg_cfb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(aes)",
			.cra_driver_name = "safexcel-cfb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1390
1391 static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
1392 {
1393         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1394
1395         safexcel_skcipher_cra_init(tfm);
1396         ctx->alg  = SAFEXCEL_AES;
1397         ctx->blocksz = AES_BLOCK_SIZE;
1398         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1399         return 0;
1400 }
1401
/* Algorithm registration template for ofb(aes) (stream mode, blocksize 1). */
struct safexcel_alg_template safexcel_alg_ofb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(aes)",
			.cra_driver_name = "safexcel-ofb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1428
1429 static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1430                                            const u8 *key, unsigned int len)
1431 {
1432         struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1433         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1434         struct safexcel_crypto_priv *priv = ctx->base.priv;
1435         struct crypto_aes_ctx aes;
1436         int ret, i;
1437         unsigned int keylen;
1438
1439         /* last 4 bytes of key are the nonce! */
1440         ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1441         /* exclude the nonce here */
1442         keylen = len - CTR_RFC3686_NONCE_SIZE;
1443         ret = aes_expandkey(&aes, key, keylen);
1444         if (ret)
1445                 return ret;
1446
1447         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1448                 for (i = 0; i < keylen / sizeof(u32); i++) {
1449                         if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1450                                 ctx->base.needs_inv = true;
1451                                 break;
1452                         }
1453                 }
1454         }
1455
1456         for (i = 0; i < keylen / sizeof(u32); i++)
1457                 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1458
1459         ctx->key_len = keylen;
1460
1461         memzero_explicit(&aes, sizeof(aes));
1462         return 0;
1463 }
1464
1465 static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1466 {
1467         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1468
1469         safexcel_skcipher_cra_init(tfm);
1470         ctx->alg  = SAFEXCEL_AES;
1471         ctx->blocksz = AES_BLOCK_SIZE;
1472         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1473         return 0;
1474 }
1475
/*
 * rfc3686(ctr(aes)): AES counter mode with a keyed 32-bit nonce; the nonce
 * is carried in the key, hence the enlarged min/max keysize.
 */
struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1503
1504 static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
1505                                unsigned int len)
1506 {
1507         struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1508         struct safexcel_crypto_priv *priv = ctx->base.priv;
1509         int ret;
1510
1511         ret = verify_skcipher_des_key(ctfm, key);
1512         if (ret)
1513                 return ret;
1514
1515         /* if context exits and key changed, need to invalidate it */
1516         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1517                 if (memcmp(ctx->key, key, len))
1518                         ctx->base.needs_inv = true;
1519
1520         memcpy(ctx->key, key, len);
1521         ctx->key_len = len;
1522
1523         return 0;
1524 }
1525
1526 static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1527 {
1528         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1529
1530         safexcel_skcipher_cra_init(tfm);
1531         ctx->alg  = SAFEXCEL_DES;
1532         ctx->blocksz = DES_BLOCK_SIZE;
1533         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1534         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1535         return 0;
1536 }
1537
/* cbc(des): single DES in CBC mode. */
struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1564
1565 static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1566 {
1567         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1568
1569         safexcel_skcipher_cra_init(tfm);
1570         ctx->alg  = SAFEXCEL_DES;
1571         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1572         ctx->blocksz = 0;
1573         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1574         return 0;
1575 }
1576
/* ecb(des): single DES in ECB mode; no IV, hence no .ivsize. */
struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1602
1603 static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1604                                    const u8 *key, unsigned int len)
1605 {
1606         struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1607         struct safexcel_crypto_priv *priv = ctx->base.priv;
1608         int err;
1609
1610         err = verify_skcipher_des3_key(ctfm, key);
1611         if (err)
1612                 return err;
1613
1614         /* if context exits and key changed, need to invalidate it */
1615         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1616                 if (memcmp(ctx->key, key, len))
1617                         ctx->base.needs_inv = true;
1618
1619         memcpy(ctx->key, key, len);
1620         ctx->key_len = len;
1621
1622         return 0;
1623 }
1624
1625 static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1626 {
1627         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1628
1629         safexcel_skcipher_cra_init(tfm);
1630         ctx->alg  = SAFEXCEL_3DES;
1631         ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1632         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1633         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1634         return 0;
1635 }
1636
/* cbc(des3_ede): triple DES (EDE) in CBC mode. */
struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1663
1664 static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1665 {
1666         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1667
1668         safexcel_skcipher_cra_init(tfm);
1669         ctx->alg  = SAFEXCEL_3DES;
1670         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1671         ctx->blocksz = 0;
1672         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1673         return 0;
1674 }
1675
/* ecb(des3_ede): triple DES (EDE) in ECB mode; no IV, hence no .ivsize. */
struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1701
1702 static int safexcel_aead_encrypt(struct aead_request *req)
1703 {
1704         struct safexcel_cipher_req *creq = aead_request_ctx(req);
1705
1706         return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1707 }
1708
1709 static int safexcel_aead_decrypt(struct aead_request *req)
1710 {
1711         struct safexcel_cipher_req *creq = aead_request_ctx(req);
1712
1713         return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1714 }
1715
1716 static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1717 {
1718         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1719         struct safexcel_alg_template *tmpl =
1720                 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1721                              alg.aead.base);
1722
1723         crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1724                                 sizeof(struct safexcel_cipher_req));
1725
1726         ctx->base.priv = tmpl->priv;
1727
1728         ctx->alg  = SAFEXCEL_AES; /* default */
1729         ctx->blocksz = AES_BLOCK_SIZE;
1730         ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1731         ctx->ctrinit = 1;
1732         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1733         ctx->aead = true;
1734         ctx->base.send = safexcel_aead_send;
1735         ctx->base.handle_result = safexcel_aead_handle_result;
1736         return 0;
1737 }
1738
1739 static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1740 {
1741         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1742
1743         safexcel_aead_cra_init(tfm);
1744         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1745         ctx->state_sz = SHA1_DIGEST_SIZE;
1746         return 0;
1747 }
1748
/* authenc(hmac(sha1),cbc(aes)): AES-CBC encryption with HMAC-SHA1 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1774
1775 static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1776 {
1777         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1778
1779         safexcel_aead_cra_init(tfm);
1780         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1781         ctx->state_sz = SHA256_DIGEST_SIZE;
1782         return 0;
1783 }
1784
/* authenc(hmac(sha256),cbc(aes)): AES-CBC encryption with HMAC-SHA256 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1810
1811 static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1812 {
1813         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1814
1815         safexcel_aead_cra_init(tfm);
1816         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1817         ctx->state_sz = SHA256_DIGEST_SIZE;
1818         return 0;
1819 }
1820
/* authenc(hmac(sha224),cbc(aes)): AES-CBC encryption with HMAC-SHA224 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1846
1847 static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1848 {
1849         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1850
1851         safexcel_aead_cra_init(tfm);
1852         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1853         ctx->state_sz = SHA512_DIGEST_SIZE;
1854         return 0;
1855 }
1856
/* authenc(hmac(sha512),cbc(aes)): AES-CBC encryption with HMAC-SHA512 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1882
1883 static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1884 {
1885         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1886
1887         safexcel_aead_cra_init(tfm);
1888         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1889         ctx->state_sz = SHA512_DIGEST_SIZE;
1890         return 0;
1891 }
1892
/* authenc(hmac(sha384),cbc(aes)): AES-CBC encryption with HMAC-SHA384 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1918
1919 static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1920 {
1921         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1922
1923         safexcel_aead_sha1_cra_init(tfm);
1924         ctx->alg = SAFEXCEL_3DES; /* override default */
1925         ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1926         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1927         return 0;
1928 }
1929
/* authenc(hmac(sha1),cbc(des3_ede)): 3DES-CBC with HMAC-SHA1 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1955
1956 static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1957 {
1958         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1959
1960         safexcel_aead_sha256_cra_init(tfm);
1961         ctx->alg = SAFEXCEL_3DES; /* override default */
1962         ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1963         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1964         return 0;
1965 }
1966
/* authenc(hmac(sha256),cbc(des3_ede)): 3DES-CBC with HMAC-SHA256 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1992
1993 static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1994 {
1995         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1996
1997         safexcel_aead_sha224_cra_init(tfm);
1998         ctx->alg = SAFEXCEL_3DES; /* override default */
1999         ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2000         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2001         return 0;
2002 }
2003
/* authenc(hmac(sha224),cbc(des3_ede)): 3DES-CBC with HMAC-SHA224 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2029
2030 static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
2031 {
2032         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2033
2034         safexcel_aead_sha512_cra_init(tfm);
2035         ctx->alg = SAFEXCEL_3DES; /* override default */
2036         ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2037         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2038         return 0;
2039 }
2040
/* authenc(hmac(sha512),cbc(des3_ede)): 3DES-CBC with HMAC-SHA512 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2066
2067 static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
2068 {
2069         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2070
2071         safexcel_aead_sha384_cra_init(tfm);
2072         ctx->alg = SAFEXCEL_3DES; /* override default */
2073         ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2074         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2075         return 0;
2076 }
2077
/* authenc(hmac(sha384),cbc(des3_ede)): 3DES-CBC with HMAC-SHA384 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2103
2104 static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2105 {
2106         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2107
2108         safexcel_aead_sha1_cra_init(tfm);
2109         ctx->alg = SAFEXCEL_DES; /* override default */
2110         ctx->blocksz = DES_BLOCK_SIZE;
2111         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2112         return 0;
2113 }
2114
/* authenc(hmac(sha1),cbc(des)): single-DES-CBC with HMAC-SHA1 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2140
2141 static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2142 {
2143         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2144
2145         safexcel_aead_sha256_cra_init(tfm);
2146         ctx->alg = SAFEXCEL_DES; /* override default */
2147         ctx->blocksz = DES_BLOCK_SIZE;
2148         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2149         return 0;
2150 }
2151
/* Registration entry for the authenc(hmac(sha256),cbc(des)) AEAD offload */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2177
2178 static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2179 {
2180         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2181
2182         safexcel_aead_sha224_cra_init(tfm);
2183         ctx->alg = SAFEXCEL_DES; /* override default */
2184         ctx->blocksz = DES_BLOCK_SIZE;
2185         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2186         return 0;
2187 }
2188
/*
 * Registration entry for the authenc(hmac(sha224),cbc(des)) AEAD offload.
 * NOTE(review): algo_mask uses SAFEXCEL_ALG_SHA2_256 — presumably SHA-224 is
 * served by the SHA2-256 engine; matches the sha256 template above.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2214
2215 static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2216 {
2217         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2218
2219         safexcel_aead_sha512_cra_init(tfm);
2220         ctx->alg = SAFEXCEL_DES; /* override default */
2221         ctx->blocksz = DES_BLOCK_SIZE;
2222         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2223         return 0;
2224 }
2225
/* Registration entry for the authenc(hmac(sha512),cbc(des)) AEAD offload */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2251
2252 static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2253 {
2254         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2255
2256         safexcel_aead_sha384_cra_init(tfm);
2257         ctx->alg = SAFEXCEL_DES; /* override default */
2258         ctx->blocksz = DES_BLOCK_SIZE;
2259         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2260         return 0;
2261 }
2262
/*
 * Registration entry for the authenc(hmac(sha384),cbc(des)) AEAD offload.
 * NOTE(review): algo_mask uses SAFEXCEL_ALG_SHA2_512 — presumably SHA-384 is
 * served by the SHA2-512 engine; matches the sha512 template above.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2288
/* Init for authenc(hmac(sha1),rfc3686(ctr(aes))): CBC default -> CTR mode */
static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}
2297
/* Registration entry for authenc(hmac(sha1),rfc3686(ctr(aes))) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2323
/* Init for authenc(hmac(sha256),rfc3686(ctr(aes))): CBC default -> CTR mode */
static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}
2332
/* Registration entry for authenc(hmac(sha256),rfc3686(ctr(aes))) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2358
/* Init for authenc(hmac(sha224),rfc3686(ctr(aes))): CBC default -> CTR mode */
static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}
2367
/* Registration entry for authenc(hmac(sha224),rfc3686(ctr(aes))) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2393
/* Init for authenc(hmac(sha512),rfc3686(ctr(aes))): CBC default -> CTR mode */
static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}
2402
/* Registration entry for authenc(hmac(sha512),rfc3686(ctr(aes))) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2428
/* Init for authenc(hmac(sha384),rfc3686(ctr(aes))): CBC default -> CTR mode */
static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}
2437
/* Registration entry for authenc(hmac(sha384),rfc3686(ctr(aes))) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2463
2464 static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2465                                            const u8 *key, unsigned int len)
2466 {
2467         struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2468         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2469         struct safexcel_crypto_priv *priv = ctx->base.priv;
2470         struct crypto_aes_ctx aes;
2471         int ret, i;
2472         unsigned int keylen;
2473
2474         /* Check for illegal XTS keys */
2475         ret = xts_verify_key(ctfm, key, len);
2476         if (ret)
2477                 return ret;
2478
2479         /* Only half of the key data is cipher key */
2480         keylen = (len >> 1);
2481         ret = aes_expandkey(&aes, key, keylen);
2482         if (ret)
2483                 return ret;
2484
2485         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2486                 for (i = 0; i < keylen / sizeof(u32); i++) {
2487                         if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2488                                 ctx->base.needs_inv = true;
2489                                 break;
2490                         }
2491                 }
2492         }
2493
2494         for (i = 0; i < keylen / sizeof(u32); i++)
2495                 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2496
2497         /* The other half is the tweak key */
2498         ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2499         if (ret)
2500                 return ret;
2501
2502         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2503                 for (i = 0; i < keylen / sizeof(u32); i++) {
2504                         if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2505                             aes.key_enc[i]) {
2506                                 ctx->base.needs_inv = true;
2507                                 break;
2508                         }
2509                 }
2510         }
2511
2512         for (i = 0; i < keylen / sizeof(u32); i++)
2513                 ctx->key[i + keylen / sizeof(u32)] =
2514                         cpu_to_le32(aes.key_enc[i]);
2515
2516         ctx->key_len = keylen << 1;
2517
2518         memzero_explicit(&aes, sizeof(aes));
2519         return 0;
2520 }
2521
2522 static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2523 {
2524         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2525
2526         safexcel_skcipher_cra_init(tfm);
2527         ctx->alg  = SAFEXCEL_AES;
2528         ctx->blocksz = AES_BLOCK_SIZE;
2529         ctx->xts  = 1;
2530         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2531         return 0;
2532 }
2533
2534 static int safexcel_encrypt_xts(struct skcipher_request *req)
2535 {
2536         if (req->cryptlen < XTS_BLOCK_SIZE)
2537                 return -EINVAL;
2538         return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2539                                   SAFEXCEL_ENCRYPT);
2540 }
2541
2542 static int safexcel_decrypt_xts(struct skcipher_request *req)
2543 {
2544         if (req->cryptlen < XTS_BLOCK_SIZE)
2545                 return -EINVAL;
2546         return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2547                                   SAFEXCEL_DECRYPT);
2548 }
2549
/* Registration entry for the xts(aes) skcipher offload */
struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2577
2578 static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2579                                     unsigned int len)
2580 {
2581         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2582         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2583         struct safexcel_crypto_priv *priv = ctx->base.priv;
2584         struct crypto_aes_ctx aes;
2585         u32 hashkey[AES_BLOCK_SIZE >> 2];
2586         int ret, i;
2587
2588         ret = aes_expandkey(&aes, key, len);
2589         if (ret) {
2590                 memzero_explicit(&aes, sizeof(aes));
2591                 return ret;
2592         }
2593
2594         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2595                 for (i = 0; i < len / sizeof(u32); i++) {
2596                         if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2597                                 ctx->base.needs_inv = true;
2598                                 break;
2599                         }
2600                 }
2601         }
2602
2603         for (i = 0; i < len / sizeof(u32); i++)
2604                 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2605
2606         ctx->key_len = len;
2607
2608         /* Compute hash key by encrypting zeroes with cipher key */
2609         crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
2610         crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
2611                                 CRYPTO_TFM_REQ_MASK);
2612         ret = crypto_cipher_setkey(ctx->hkaes, key, len);
2613         if (ret)
2614                 return ret;
2615
2616         memset(hashkey, 0, AES_BLOCK_SIZE);
2617         crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
2618
2619         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2620                 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2621                         if (be32_to_cpu(ctx->ipad[i]) != hashkey[i]) {
2622                                 ctx->base.needs_inv = true;
2623                                 break;
2624                         }
2625                 }
2626         }
2627
2628         for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2629                 ctx->ipad[i] = cpu_to_be32(hashkey[i]);
2630
2631         memzero_explicit(hashkey, AES_BLOCK_SIZE);
2632         memzero_explicit(&aes, sizeof(aes));
2633         return 0;
2634 }
2635
2636 static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2637 {
2638         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2639
2640         safexcel_aead_cra_init(tfm);
2641         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2642         ctx->state_sz = GHASH_BLOCK_SIZE;
2643         ctx->xcm = EIP197_XCM_MODE_GCM;
2644         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2645
2646         ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
2647         return PTR_ERR_OR_ZERO(ctx->hkaes);
2648 }
2649
/* Teardown for gcm(aes): free the hash-key helper cipher allocated in
 * safexcel_aead_gcm_cra_init(), then run the common AEAD teardown.
 */
static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->hkaes);
	safexcel_aead_cra_exit(tfm);
}
2657
/* Delegate GCM tag-size validation to the generic helper */
static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}
2663
/* Registration entry for the gcm(aes) AEAD offload */
struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2690
2691 static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2692                                     unsigned int len)
2693 {
2694         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2695         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2696         struct safexcel_crypto_priv *priv = ctx->base.priv;
2697         struct crypto_aes_ctx aes;
2698         int ret, i;
2699
2700         ret = aes_expandkey(&aes, key, len);
2701         if (ret) {
2702                 memzero_explicit(&aes, sizeof(aes));
2703                 return ret;
2704         }
2705
2706         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2707                 for (i = 0; i < len / sizeof(u32); i++) {
2708                         if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2709                                 ctx->base.needs_inv = true;
2710                                 break;
2711                         }
2712                 }
2713         }
2714
2715         for (i = 0; i < len / sizeof(u32); i++) {
2716                 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2717                 ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2718                         cpu_to_be32(aes.key_enc[i]);
2719         }
2720
2721         ctx->key_len = len;
2722         ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
2723
2724         if (len == AES_KEYSIZE_192)
2725                 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2726         else if (len == AES_KEYSIZE_256)
2727                 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2728         else
2729                 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2730
2731         memzero_explicit(&aes, sizeof(aes));
2732         return 0;
2733 }
2734
2735 static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2736 {
2737         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2738
2739         safexcel_aead_cra_init(tfm);
2740         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2741         ctx->state_sz = 3 * AES_BLOCK_SIZE;
2742         ctx->xcm = EIP197_XCM_MODE_CCM;
2743         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2744         ctx->ctrinit = 0;
2745         return 0;
2746 }
2747
2748 static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2749                                          unsigned int authsize)
2750 {
2751         /* Borrowed from crypto/ccm.c */
2752         switch (authsize) {
2753         case 4:
2754         case 6:
2755         case 8:
2756         case 10:
2757         case 12:
2758         case 14:
2759         case 16:
2760                 break;
2761         default:
2762                 return -EINVAL;
2763         }
2764
2765         return 0;
2766 }
2767
2768 static int safexcel_ccm_encrypt(struct aead_request *req)
2769 {
2770         struct safexcel_cipher_req *creq = aead_request_ctx(req);
2771
2772         if (req->iv[0] < 1 || req->iv[0] > 7)
2773                 return -EINVAL;
2774
2775         return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2776 }
2777
2778 static int safexcel_ccm_decrypt(struct aead_request *req)
2779 {
2780         struct safexcel_cipher_req *creq = aead_request_ctx(req);
2781
2782         if (req->iv[0] < 1 || req->iv[0] > 7)
2783                 return -EINVAL;
2784
2785         return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2786 }
2787
/* Registration entry for the ccm(aes) AEAD offload */
struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2814
2815 static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2816                                      const u8 *key)
2817 {
2818         struct safexcel_crypto_priv *priv = ctx->base.priv;
2819
2820         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2821                 if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2822                         ctx->base.needs_inv = true;
2823
2824         memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2825         ctx->key_len = CHACHA_KEY_SIZE;
2826 }
2827
2828 static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2829                                              const u8 *key, unsigned int len)
2830 {
2831         struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2832
2833         if (len != CHACHA_KEY_SIZE)
2834                 return -EINVAL;
2835
2836         safexcel_chacha20_setkey(ctx, key);
2837
2838         return 0;
2839 }
2840
2841 static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2842 {
2843         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2844
2845         safexcel_skcipher_cra_init(tfm);
2846         ctx->alg  = SAFEXCEL_CHACHA20;
2847         ctx->ctrinit = 0;
2848         ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2849         return 0;
2850 }
2851
/* Registration entry for the chacha20 skcipher offload */
struct safexcel_alg_template safexcel_alg_chacha20 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_CHACHA20,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_chacha20_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.base = {
			.cra_name = "chacha20",
			.cra_driver_name = "safexcel-chacha20",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_chacha20_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2878
2879 static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2880                                     const u8 *key, unsigned int len)
2881 {
2882         struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2883
2884         if (ctx->aead  == EIP197_AEAD_TYPE_IPSEC_ESP &&
2885             len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2886                 /* ESP variant has nonce appended to key */
2887                 len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2888                 ctx->nonce = *(u32 *)(key + len);
2889         }
2890         if (len != CHACHA_KEY_SIZE)
2891                 return -EINVAL;
2892
2893         safexcel_chacha20_setkey(ctx, key);
2894
2895         return 0;
2896 }
2897
2898 static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2899                                          unsigned int authsize)
2900 {
2901         if (authsize != POLY1305_DIGEST_SIZE)
2902                 return -EINVAL;
2903         return 0;
2904 }
2905
/*
 * Common handler for both chacha20-poly1305 directions.
 *
 * Requests large enough for the HW are queued to the engine directly;
 * requests the HW cannot handle (payload not larger than the tag, or ESP
 * requests whose assoclen does not cover the IV) are redirected to the
 * SW fallback tfm, which first has to be (re)keyed from the cached key.
 */
static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	/*
	 * NOTE: aliases creq above - the request context doubles as the
	 * fallback subrequest; only one of the two is used on any path.
	 */
	struct aead_request *subreq = aead_request_ctx(req);
	/* Room for the key plus the appended nonce for the ESP variant */
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		/* Propagate the fallback's request flags to the caller */
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	/* Hand the request over to the freshly keyed fallback tfm */
	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir ==  SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
2958
2959 static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2960 {
2961         return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2962 }
2963
2964 static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2965 {
2966         return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2967 }
2968
2969 static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
2970 {
2971         struct crypto_aead *aead = __crypto_aead_cast(tfm);
2972         struct aead_alg *alg = crypto_aead_alg(aead);
2973         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2974
2975         safexcel_aead_cra_init(tfm);
2976
2977         /* Allocate fallback implementation */
2978         ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
2979                                        CRYPTO_ALG_ASYNC |
2980                                        CRYPTO_ALG_NEED_FALLBACK);
2981         if (IS_ERR(ctx->fback))
2982                 return PTR_ERR(ctx->fback);
2983
2984         crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
2985                                           sizeof(struct aead_request) +
2986                                           crypto_aead_reqsize(ctx->fback)));
2987
2988         return 0;
2989 }
2990
2991 static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2992 {
2993         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2994
2995         safexcel_aead_fallback_cra_init(tfm);
2996         ctx->alg  = SAFEXCEL_CHACHA20;
2997         ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2998                     CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2999         ctx->ctrinit = 0;
3000         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
3001         ctx->state_sz = 0; /* Precomputed by HW */
3002         return 0;
3003 }
3004
3005 static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
3006 {
3007         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3008
3009         crypto_free_aead(ctx->fback);
3010         safexcel_aead_cra_exit(tfm);
3011 }
3012
/* rfc7539 chacha20-poly1305 AEAD, with SW fallback for small requests. */
struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3041
3042 static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
3043 {
3044         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3045         int ret;
3046
3047         ret = safexcel_aead_chachapoly_cra_init(tfm);
3048         ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3049         ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3050         return ret;
3051 }
3052
/* rfc7539esp chacha20-poly1305: nonce supplied with the key, shorter IV. */
struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3081
3082 static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
3083                                         const u8 *key, unsigned int len)
3084 {
3085         struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3086         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3087         struct safexcel_crypto_priv *priv = ctx->base.priv;
3088
3089         if (len != SM4_KEY_SIZE)
3090                 return -EINVAL;
3091
3092         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3093                 if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3094                         ctx->base.needs_inv = true;
3095
3096         memcpy(ctx->key, key, SM4_KEY_SIZE);
3097         ctx->key_len = SM4_KEY_SIZE;
3098
3099         return 0;
3100 }
3101
3102 static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3103 {
3104         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3105         if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3106                 return -EINVAL;
3107         else
3108                 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3109                                           SAFEXCEL_ENCRYPT);
3110 }
3111
3112 static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3113 {
3114         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3115         if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3116                 return -EINVAL;
3117         else
3118                 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3119                                           SAFEXCEL_DECRYPT);
3120 }
3121
3122 static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3123 {
3124         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3125
3126         safexcel_skcipher_cra_init(tfm);
3127         ctx->alg  = SAFEXCEL_SM4;
3128         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3129         ctx->blocksz = 0;
3130         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3131         return 0;
3132 }
3133
/* ecb(sm4) skcipher, block-aligned lengths enforced in SW. */
struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3159
3160 static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3161 {
3162         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3163
3164         safexcel_skcipher_cra_init(tfm);
3165         ctx->alg  = SAFEXCEL_SM4;
3166         ctx->blocksz = SM4_BLOCK_SIZE;
3167         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3168         return 0;
3169 }
3170
/* cbc(sm4) skcipher, block-aligned lengths enforced in SW. */
struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(sm4)",
			.cra_driver_name = "safexcel-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3197
3198 static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3199 {
3200         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3201
3202         safexcel_skcipher_cra_init(tfm);
3203         ctx->alg  = SAFEXCEL_SM4;
3204         ctx->blocksz = SM4_BLOCK_SIZE;
3205         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3206         return 0;
3207 }
3208
/* ofb(sm4) stream mode skcipher. */
struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(sm4)",
			.cra_driver_name = "safexcel-ofb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3235
3236 static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3237 {
3238         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3239
3240         safexcel_skcipher_cra_init(tfm);
3241         ctx->alg  = SAFEXCEL_SM4;
3242         ctx->blocksz = SM4_BLOCK_SIZE;
3243         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3244         return 0;
3245 }
3246
/* cfb(sm4) stream mode skcipher. */
struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(sm4)",
			.cra_driver_name = "safexcel-cfb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3273
3274 static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3275                                            const u8 *key, unsigned int len)
3276 {
3277         struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3278         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3279
3280         /* last 4 bytes of key are the nonce! */
3281         ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3282         /* exclude the nonce here */
3283         len -= CTR_RFC3686_NONCE_SIZE;
3284
3285         return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3286 }
3287
3288 static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3289 {
3290         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3291
3292         safexcel_skcipher_cra_init(tfm);
3293         ctx->alg  = SAFEXCEL_SM4;
3294         ctx->blocksz = SM4_BLOCK_SIZE;
3295         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3296         return 0;
3297 }
3298
/* rfc3686(ctr(sm4)) skcipher; nonce travels appended to the key. */
struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3326
3327 static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3328 {
3329         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3330         if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3331                 return -EINVAL;
3332
3333         return safexcel_queue_req(&req->base, aead_request_ctx(req),
3334                                   SAFEXCEL_ENCRYPT);
3335 }
3336
3337 static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3338 {
3339         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3340
3341         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3342         if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3343                 return -EINVAL;
3344
3345         return safexcel_queue_req(&req->base, aead_request_ctx(req),
3346                                   SAFEXCEL_DECRYPT);
3347 }
3348
3349 static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3350 {
3351         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3352
3353         safexcel_aead_cra_init(tfm);
3354         ctx->alg = SAFEXCEL_SM4;
3355         ctx->blocksz = SM4_BLOCK_SIZE;
3356         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3357         ctx->state_sz = SHA1_DIGEST_SIZE;
3358         return 0;
3359 }
3360
/* authenc(hmac(sha1),cbc(sm4)) AEAD. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3386
3387 static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3388                                          const u8 *key, unsigned int len)
3389 {
3390         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3391         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3392
3393         /* Keep fallback cipher synchronized */
3394         return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3395                safexcel_aead_setkey(ctfm, key, len);
3396 }
3397
3398 static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
3399                                               unsigned int authsize)
3400 {
3401         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3402         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3403
3404         /* Keep fallback cipher synchronized */
3405         return crypto_aead_setauthsize(ctx->fback, authsize);
3406 }
3407
3408 static int safexcel_aead_fallback_crypt(struct aead_request *req,
3409                                         enum safexcel_cipher_direction dir)
3410 {
3411         struct crypto_aead *aead = crypto_aead_reqtfm(req);
3412         struct crypto_tfm *tfm = crypto_aead_tfm(aead);
3413         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3414         struct aead_request *subreq = aead_request_ctx(req);
3415
3416         aead_request_set_tfm(subreq, ctx->fback);
3417         aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3418                                   req->base.data);
3419         aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
3420                                req->iv);
3421         aead_request_set_ad(subreq, req->assoclen);
3422
3423         return (dir ==  SAFEXCEL_ENCRYPT) ?
3424                 crypto_aead_encrypt(subreq) :
3425                 crypto_aead_decrypt(subreq);
3426 }
3427
3428 static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
3429 {
3430         struct safexcel_cipher_req *creq = aead_request_ctx(req);
3431
3432         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3433         if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3434                 return -EINVAL;
3435         else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
3436                 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3437
3438         /* HW cannot do full (AAD+payload) zero length, use fallback */
3439         return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
3440 }
3441
3442 static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
3443 {
3444         struct safexcel_cipher_req *creq = aead_request_ctx(req);
3445         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3446
3447         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3448         if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3449                 return -EINVAL;
3450         else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
3451                 /* If input length > 0 only */
3452                 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3453
3454         /* HW cannot do full (AAD+payload) zero length, use fallback */
3455         return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
3456 }
3457
3458 static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3459 {
3460         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3461
3462         safexcel_aead_fallback_cra_init(tfm);
3463         ctx->alg = SAFEXCEL_SM4;
3464         ctx->blocksz = SM4_BLOCK_SIZE;
3465         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3466         ctx->state_sz = SM3_DIGEST_SIZE;
3467         return 0;
3468 }
3469
/* authenc(hmac(sm3),cbc(sm4)) AEAD, with SW fallback for empty input. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3497
3498 static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
3499 {
3500         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3501
3502         safexcel_aead_sm4cbc_sha1_cra_init(tfm);
3503         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3504         return 0;
3505 }
3506
/* authenc(hmac(sha1),rfc3686(ctr(sm4))) AEAD. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3532
3533 static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
3534 {
3535         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3536
3537         safexcel_aead_sm4cbc_sm3_cra_init(tfm);
3538         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3539         return 0;
3540 }
3541
/* authenc(hmac(sm3),rfc3686(ctr(sm4))) AEAD. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3567
3568 static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3569                                        unsigned int len)
3570 {
3571         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3572         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3573
3574         /* last 4 bytes of key are the nonce! */
3575         ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3576
3577         len -= CTR_RFC3686_NONCE_SIZE;
3578         return safexcel_aead_gcm_setkey(ctfm, key, len);
3579 }
3580
/* Validate the requested ICV length against what RFC4106 permits. */
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}
3586
3587 static int safexcel_rfc4106_encrypt(struct aead_request *req)
3588 {
3589         return crypto_ipsec_check_assoclen(req->assoclen) ?:
3590                safexcel_aead_encrypt(req);
3591 }
3592
3593 static int safexcel_rfc4106_decrypt(struct aead_request *req)
3594 {
3595         return crypto_ipsec_check_assoclen(req->assoclen) ?:
3596                safexcel_aead_decrypt(req);
3597 }
3598
3599 static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
3600 {
3601         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3602         int ret;
3603
3604         ret = safexcel_aead_gcm_cra_init(tfm);
3605         ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3606         ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3607         return ret;
3608 }
3609
3610 struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3611         .type = SAFEXCEL_ALG_TYPE_AEAD,
3612         .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3613         .alg.aead = {
3614                 .setkey = safexcel_rfc4106_gcm_setkey,
3615                 .setauthsize = safexcel_rfc4106_gcm_setauthsize,
3616                 .encrypt = safexcel_rfc4106_encrypt,
3617                 .decrypt = safexcel_rfc4106_decrypt,
3618                 .ivsize = GCM_RFC4106_IV_SIZE,
3619                 .maxauthsize = GHASH_DIGEST_SIZE,
3620                 .base = {
3621                         .cra_name = "rfc4106(gcm(aes))",
3622                         .cra_driver_name = "safexcel-rfc4106-gcm-aes",
3623                         .cra_priority = SAFEXCEL_CRA_PRIORITY,
3624                         .cra_flags = CRYPTO_ALG_ASYNC |
3625                                      CRYPTO_ALG_ALLOCATES_MEMORY |
3626                                      CRYPTO_ALG_KERN_DRIVER_ONLY,
3627                         .cra_blocksize = 1,
3628                         .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3629                         .cra_alignmask = 0,
3630                         .cra_init = safexcel_rfc4106_gcm_cra_init,
3631                         .cra_exit = safexcel_aead_gcm_cra_exit,
3632                 },
3633         },
3634 };
3635
3636 static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3637                                             unsigned int authsize)
3638 {
3639         if (authsize != GHASH_DIGEST_SIZE)
3640                 return -EINVAL;
3641
3642         return 0;
3643 }
3644
3645 static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
3646 {
3647         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3648         int ret;
3649
3650         ret = safexcel_aead_gcm_cra_init(tfm);
3651         ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
3652         return ret;
3653 }
3654
3655 struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3656         .type = SAFEXCEL_ALG_TYPE_AEAD,
3657         .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3658         .alg.aead = {
3659                 .setkey = safexcel_rfc4106_gcm_setkey,
3660                 .setauthsize = safexcel_rfc4543_gcm_setauthsize,
3661                 .encrypt = safexcel_rfc4106_encrypt,
3662                 .decrypt = safexcel_rfc4106_decrypt,
3663                 .ivsize = GCM_RFC4543_IV_SIZE,
3664                 .maxauthsize = GHASH_DIGEST_SIZE,
3665                 .base = {
3666                         .cra_name = "rfc4543(gcm(aes))",
3667                         .cra_driver_name = "safexcel-rfc4543-gcm-aes",
3668                         .cra_priority = SAFEXCEL_CRA_PRIORITY,
3669                         .cra_flags = CRYPTO_ALG_ASYNC |
3670                                      CRYPTO_ALG_ALLOCATES_MEMORY |
3671                                      CRYPTO_ALG_KERN_DRIVER_ONLY,
3672                         .cra_blocksize = 1,
3673                         .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3674                         .cra_alignmask = 0,
3675                         .cra_init = safexcel_rfc4543_gcm_cra_init,
3676                         .cra_exit = safexcel_aead_gcm_cra_exit,
3677                 },
3678         },
3679 };
3680
/*
 * rfc4309 setkey: split the trailing 3 byte implicit nonce off the key
 * material and pack it, together with the CCM L parameter, byte-wise
 * into ctx->nonce before handing the remaining raw AES key to the
 * generic CCM setkey. The packing is deliberately done through byte
 * accesses so the in-memory layout is endian-independent.
 */
static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}
3697
3698 static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
3699                                             unsigned int authsize)
3700 {
3701         /* Borrowed from crypto/ccm.c */
3702         switch (authsize) {
3703         case 8:
3704         case 12:
3705         case 16:
3706                 break;
3707         default:
3708                 return -EINVAL;
3709         }
3710
3711         return 0;
3712 }
3713
3714 static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3715 {
3716         struct safexcel_cipher_req *creq = aead_request_ctx(req);
3717
3718         /* Borrowed from crypto/ccm.c */
3719         if (req->assoclen != 16 && req->assoclen != 20)
3720                 return -EINVAL;
3721
3722         return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3723 }
3724
3725 static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3726 {
3727         struct safexcel_cipher_req *creq = aead_request_ctx(req);
3728
3729         /* Borrowed from crypto/ccm.c */
3730         if (req->assoclen != 16 && req->assoclen != 20)
3731                 return -EINVAL;
3732
3733         return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3734 }
3735
3736 static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
3737 {
3738         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3739         int ret;
3740
3741         ret = safexcel_aead_ccm_cra_init(tfm);
3742         ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3743         ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3744         return ret;
3745 }
3746
/*
 * rfc4309(ccm(aes)): AES-CCM for IPsec ESP with a 3 byte implicit nonce
 * taken from the key and an 8 byte explicit IV.
 */
struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};