Merge tag 'erofs-for-5.10-rc1' of git://git.kernel.org/pub/scm/linux/kernel/git/xiang...
[linux-2.6-microblaze.git] / drivers / crypto / inside-secure / safexcel_cipher.c
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) 2017 Marvell
4  *
5  * Antoine Tenart <antoine.tenart@free-electrons.com>
6  */
7
8 #include <asm/unaligned.h>
9 #include <linux/device.h>
10 #include <linux/dma-mapping.h>
11 #include <linux/dmapool.h>
12 #include <crypto/aead.h>
13 #include <crypto/aes.h>
14 #include <crypto/authenc.h>
15 #include <crypto/chacha.h>
16 #include <crypto/ctr.h>
17 #include <crypto/internal/des.h>
18 #include <crypto/gcm.h>
19 #include <crypto/ghash.h>
20 #include <crypto/poly1305.h>
21 #include <crypto/sha.h>
22 #include <crypto/sm3.h>
23 #include <crypto/sm4.h>
24 #include <crypto/xts.h>
25 #include <crypto/skcipher.h>
26 #include <crypto/internal/aead.h>
27 #include <crypto/internal/skcipher.h>
28
29 #include "safexcel.h"
30
/* Direction of a cipher operation as carried in the per-request state */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};
35
/* Base cipher algorithms this driver can program into the engine */
enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};
43
/* Per-transform (tfm) context, shared by all requests on the transform */
struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;		/* CONTEXT_CONTROL_CRYPTO_MODE_* cipher mode */
	enum safexcel_cipher_alg alg;
	u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
	u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
	u8 aadskip;	/* bytes subtracted from assoclen (e.g. ESP IV) */
	u8 blocksz;	/* cipher block size; also IV length for block modes */
	u32 ivmask;	/* EIP197_OPTION_* flags ORed into cdesc options */
	u32 ctrinit;	/* initial counter value for CTR-like modes (0 or 1) */

	__le32 key[16];	/* cipher key as little-endian words */
	u32 nonce;	/* salt taken from the key tail (RFC3686/ESP modes) */
	unsigned int key_len, xts;	/* key_len is halved (>>xts) for XTS */

	/* All the below is AEAD specific */
	u32 hash_alg;	/* CONTEXT_CONTROL_CRYPTO_ALG_* hash selector */
	u32 state_sz;	/* size in bytes of one hash state (ipad or opad) */

	/* Helper transforms; set up in code outside this view */
	struct crypto_cipher *hkaes;
	struct crypto_aead *fback;
};
68
/* Per-request state */
struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated to the request */
	unsigned int rdescs;
	/* NOTE(review): presumably flags a pending context invalidation;
	 * consumed by code outside this view — confirm in the send path. */
	bool needs_inv;
	/* Scatterlist entry counts for the source/destination buffers */
	int  nr_src, nr_dst;
};
76
/*
 * Copy the request IV (plus any context nonce/counter) into the command
 * descriptor's control token, in the layout the engine expects for the
 * configured cipher mode.
 *
 * Returns the number of 32-bit token words occupied by the IV data.
 */
static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return 4;
	}
	if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 96 bit nonce part */
		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
		/* 32 bit counter */
		cdesc->control_data.token[3] = *(u32 *)iv;
		return 4;
	}

	/* Block modes (e.g. CBC): IV is simply the first blocksz bytes */
	cdesc->control_data.options |= ctx->ivmask;
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
	return ctx->blocksz / sizeof(u32);
}
104
105 static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
106                                     struct safexcel_command_desc *cdesc,
107                                     struct safexcel_token *atoken,
108                                     u32 length)
109 {
110         struct safexcel_token *token;
111         int ivlen;
112
113         ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
114         if (ivlen == 4) {
115                 /* No space in cdesc, instruction moves to atoken */
116                 cdesc->additional_cdata_size = 1;
117                 token = atoken;
118         } else {
119                 /* Everything fits in cdesc */
120                 token = (struct safexcel_token *)(cdesc->control_data.token + 2);
121                 /* Need to pad with NOP */
122                 eip197_noop_token(&token[1]);
123         }
124
125         token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
126         token->packet_length = length;
127         token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
128                       EIP197_TOKEN_STAT_LAST_HASH;
129         token->instructions = EIP197_TOKEN_INS_LAST |
130                               EIP197_TOKEN_INS_TYPE_CRYPTO |
131                               EIP197_TOKEN_INS_TYPE_OUTPUT;
132 }
133
/*
 * Write the AEAD IV (plus nonce/counter as required by the mode) into
 * the 4-word token IV area of the command descriptor.
 */
static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
			     struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
		/* 96 bit IV part */
		memcpy(&cdesc->control_data.token[0], iv, 12);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	/* CBC */
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
}
159
/*
 * Build the EIP197 processing token for an AEAD request.
 *
 * Emits the instruction sequence into @atoken: AAD hashing, an optional
 * CCM B0 block plus alignment padding, the crypto data instruction, and
 * ICV insertion (encrypt) or retrieval + verification (decrypt).  The
 * final token length is written back into the command descriptor.
 */
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				struct safexcel_token *atoken,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *aadref;
	int atoksize = 2; /* Start with minimum size */
	int assocadj = assoclen - ctx->aadskip, aadalign;

	/* Always 4 dwords of embedded IV  for AEAD modes */
	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

	/* On decrypt, the trailing digest is not part of the crypto data */
	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
		/* Construct IV block B0 for the CBC-MAC */
		u8 *final_iv = (u8 *)cdesc->control_data.token;
		u8 *cbcmaciv = (u8 *)&atoken[1];
		__le32 *aadlen = (__le32 *)&atoken[5];

		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
			/* Length + nonce */
			cdesc->control_data.token[0] = ctx->nonce;
			/* Fixup flags byte */
			*(__le32 *)cbcmaciv =
				cpu_to_le32(ctx->nonce |
					    ((assocadj > 0) << 6) |
					    ((digestsize - 2) << 2));
			/* 64 bit IV part */
			memcpy(&cdesc->control_data.token[1], iv, 8);
			memcpy(cbcmaciv + 4, iv, 8);
			/* Start counter at 0 */
			cdesc->control_data.token[3] = 0;
			/* Message length */
			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
		} else {
			/* Variable length IV part */
			memcpy(final_iv, iv, 15 - iv[0]);
			memcpy(cbcmaciv, iv, 15 - iv[0]);
			/* Start variable length counter at 0 */
			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
			/* B0 keeps its last 2 bytes for the length below */
			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
			/* fixup flags byte */
			cbcmaciv[0] |= ((assocadj > 0) << 6) |
				       ((digestsize - 2) << 2);
			/* insert lower 2 bytes of message length */
			cbcmaciv[14] = cryptlen >> 8;
			cbcmaciv[15] = cryptlen & 255;
		}

		/* Feed B0 (and the AAD length prefix, if any) to the hash */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE +
					((assocadj > 0) << 1);
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
				       EIP197_TOKEN_INS_TYPE_HASH;

		if (likely(assocadj)) {
			/* 16-bit big-endian adjusted AAD length */
			*aadlen = cpu_to_le32((assocadj >> 8) |
					      (assocadj & 255) << 8);
			atoken += 6;
			atoksize += 7;
		} else {
			atoken += 5;
			atoksize += 6;
		}

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		atoken++;

		/* For CCM only, align AAD data towards hash engine */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		aadalign = (assocadj + 2) & 15;
		atoken->packet_length = assocadj && aadalign ?
						16 - aadalign :
						0;
		if (likely(cryptlen)) {
			atoken->stat = 0;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH;
		}
	} else {
		safexcel_aead_iv(ctx, iv, cdesc);

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		atoken->instructions = EIP197_TOKEN_INS_LAST |
				       EIP197_TOKEN_INS_TYPE_HASH;
	}
	atoken++;

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* For ESP mode (and not GMAC), skip over the IV */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
			    direction == SAFEXCEL_DECRYPT)) {
		/* Poly-chacha decryption needs a dummy NOP here ... */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = 16; /* According to Op Manual */
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	}

	if  (ctx->xcm) {
		/* For GCM and CCM, obtain enc(Y0) */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
		atoken->packet_length = 0;
		atoken->stat = 0;
		atoken->instructions = AES_BLOCK_SIZE;
		atoken++;

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_TYPE_CRYPTO;
		atoken++;
		atoksize += 2;
	}

	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
		/* Fixup stat field for AAD direction instruction */
		aadref->stat = 0;

		/* Process crypto data */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = cryptlen;

		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
			/* Fixup instruction field for AAD dir instruction */
			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;

			/* Do not send to crypt engine in case of GMAC */
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		} else {
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_CRYPTO |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		}

		/* Only the residue modulo 16 matters from here on */
		cryptlen &= 15;
		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
			atoken->stat = 0;
			/* For CCM only, pad crypto data to the hash engine */
			atoken++;
			atoksize++;
			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
			atoken->packet_length = 16 - cryptlen;
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		}
		atoken++;
		atoksize++;
	}

	if (direction == SAFEXCEL_ENCRYPT) {
		/* Append ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* Extract ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
		atoken++;
		atoksize++;

		/* Verify ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
		atoken->packet_length = digestsize |
					EIP197_TOKEN_HASH_RESULT_VERIFY;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	/* Fixup length of the token in the command descriptor */
	cdesc->additional_cdata_size = atoksize;
}
370
371 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
372                                         const u8 *key, unsigned int len)
373 {
374         struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
375         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
376         struct safexcel_crypto_priv *priv = ctx->base.priv;
377         struct crypto_aes_ctx aes;
378         int ret, i;
379
380         ret = aes_expandkey(&aes, key, len);
381         if (ret)
382                 return ret;
383
384         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
385                 for (i = 0; i < len / sizeof(u32); i++) {
386                         if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
387                                 ctx->base.needs_inv = true;
388                                 break;
389                         }
390                 }
391         }
392
393         for (i = 0; i < len / sizeof(u32); i++)
394                 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
395
396         ctx->key_len = len;
397
398         memzero_explicit(&aes, sizeof(aes));
399         return 0;
400 }
401
402 static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
403                                 unsigned int len)
404 {
405         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
406         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
407         struct safexcel_crypto_priv *priv = ctx->base.priv;
408         struct crypto_authenc_keys keys;
409         struct crypto_aes_ctx aes;
410         int err = -EINVAL, i;
411         const char *alg;
412
413         if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
414                 goto badkey;
415
416         if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
417                 /* Must have at least space for the nonce here */
418                 if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
419                         goto badkey;
420                 /* last 4 bytes of key are the nonce! */
421                 ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
422                                       CTR_RFC3686_NONCE_SIZE);
423                 /* exclude the nonce here */
424                 keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
425         }
426
427         /* Encryption key */
428         switch (ctx->alg) {
429         case SAFEXCEL_DES:
430                 err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
431                 if (unlikely(err))
432                         goto badkey;
433                 break;
434         case SAFEXCEL_3DES:
435                 err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
436                 if (unlikely(err))
437                         goto badkey;
438                 break;
439         case SAFEXCEL_AES:
440                 err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
441                 if (unlikely(err))
442                         goto badkey;
443                 break;
444         case SAFEXCEL_SM4:
445                 if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
446                         goto badkey;
447                 break;
448         default:
449                 dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
450                 goto badkey;
451         }
452
453         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
454                 for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
455                         if (le32_to_cpu(ctx->key[i]) !=
456                             ((u32 *)keys.enckey)[i]) {
457                                 ctx->base.needs_inv = true;
458                                 break;
459                         }
460                 }
461         }
462
463         /* Auth key */
464         switch (ctx->hash_alg) {
465         case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
466                 alg = "safexcel-sha1";
467                 break;
468         case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
469                 alg = "safexcel-sha224";
470                 break;
471         case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
472                 alg = "safexcel-sha256";
473                 break;
474         case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
475                 alg = "safexcel-sha384";
476                 break;
477         case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
478                 alg = "safexcel-sha512";
479                 break;
480         case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
481                 alg = "safexcel-sm3";
482                 break;
483         default:
484                 dev_err(priv->dev, "aead: unsupported hash algorithm\n");
485                 goto badkey;
486         }
487
488         if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
489                                  alg, ctx->state_sz))
490                 goto badkey;
491
492         /* Now copy the keys into the context */
493         for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
494                 ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
495         ctx->key_len = keys.enckeylen;
496
497         memzero_explicit(&keys, sizeof(keys));
498         return 0;
499
500 badkey:
501         memzero_explicit(&keys, sizeof(keys));
502         return err;
503 }
504
/*
 * Fill in the control0/control1 words of the command descriptor:
 * cipher algorithm and key size, context record size, operation
 * direction and digest type, derived from the transform context and the
 * request direction.
 *
 * Returns 0 on success, -EINVAL for an unsupported AES key length.
 */
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	/* Context size in 32-bit words, starting with the key itself */
	int ctrl_size = ctx->key_len / sizeof(u32);

	cdesc->control_data.control1 = ctx->mode;

	if (ctx->aead) {
		/* Take in account the ipad+opad digests */
		if (ctx->xcm) {
			/* GCM/CCM: a single hash state follows the key */
			ctrl_size += ctx->state_sz / sizeof(u32);
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_XCM |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* Chacha20-Poly1305 */
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
				(sreq->direction == SAFEXCEL_ENCRYPT ?
					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
			/* Direction already encoded above, done */
			return 0;
		} else {
			/* HMAC authenc: both ipad and opad states follow */
			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_HMAC |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		}

		/* CCM and GMAC hash before encrypt; others encrypt first */
		if (sreq->direction == SAFEXCEL_ENCRYPT &&
		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
		else if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		else
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
	}

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		/* For XTS (ctx->xts == 1) the stored key is two keys, so
		 * halve the length to get the effective AES key size */
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
	} else if (ctx->alg == SAFEXCEL_SM4) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
	}

	return 0;
}
606
/*
 * Completion handler for a skcipher request: consume all result
 * descriptors belonging to the request, record the first error reported
 * by the engine, unmap the DMA scatterlists and, for CBC encryption,
 * copy the last output block back into the request IV for chaining.
 *
 * Returns the number of result descriptors processed; *should_complete
 * tells the caller whether the request may be completed, *ret carries
 * the request status.
 */
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Only keep the first error encountered */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}
666
667 static int safexcel_send_req(struct crypto_async_request *base, int ring,
668                              struct safexcel_cipher_req *sreq,
669                              struct scatterlist *src, struct scatterlist *dst,
670                              unsigned int cryptlen, unsigned int assoclen,
671                              unsigned int digestsize, u8 *iv, int *commands,
672                              int *results)
673 {
674         struct skcipher_request *areq = skcipher_request_cast(base);
675         struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
676         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
677         struct safexcel_crypto_priv *priv = ctx->base.priv;
678         struct safexcel_command_desc *cdesc;
679         struct safexcel_command_desc *first_cdesc = NULL;
680         struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
681         struct scatterlist *sg;
682         unsigned int totlen;
683         unsigned int totlen_src = cryptlen + assoclen;
684         unsigned int totlen_dst = totlen_src;
685         struct safexcel_token *atoken;
686         int n_cdesc = 0, n_rdesc = 0;
687         int queued, i, ret = 0;
688         bool first = true;
689
690         sreq->nr_src = sg_nents_for_len(src, totlen_src);
691
692         if (ctx->aead) {
693                 /*
694                  * AEAD has auth tag appended to output for encrypt and
695                  * removed from the output for decrypt!
696                  */
697                 if (sreq->direction == SAFEXCEL_DECRYPT)
698                         totlen_dst -= digestsize;
699                 else
700                         totlen_dst += digestsize;
701
702                 memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
703                        &ctx->base.ipad, ctx->state_sz);
704                 if (!ctx->xcm)
705                         memcpy(ctx->base.ctxr->data + (ctx->key_len +
706                                ctx->state_sz) / sizeof(u32), &ctx->base.opad,
707                                ctx->state_sz);
708         } else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
709                    (sreq->direction == SAFEXCEL_DECRYPT)) {
710                 /*
711                  * Save IV from last crypto input word for CBC modes in decrypt
712                  * direction. Need to do this first in case of inplace operation
713                  * as it will be overwritten.
714                  */
715                 sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
716                                    crypto_skcipher_ivsize(skcipher),
717                                    (totlen_src -
718                                     crypto_skcipher_ivsize(skcipher)));
719         }
720
721         sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
722
723         /*
724          * Remember actual input length, source buffer length may be
725          * updated in case of inline operation below.
726          */
727         totlen = totlen_src;
728         queued = totlen_src;
729
730         if (src == dst) {
731                 sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
732                 sreq->nr_dst = sreq->nr_src;
733                 if (unlikely((totlen_src || totlen_dst) &&
734                     (sreq->nr_src <= 0))) {
735                         dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
736                                 max(totlen_src, totlen_dst));
737                         return -EINVAL;
738                 }
739                 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
740         } else {
741                 if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
742                         dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
743                                 totlen_src);
744                         return -EINVAL;
745                 }
746                 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
747
748                 if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
749                         dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
750                                 totlen_dst);
751                         dma_unmap_sg(priv->dev, src, sreq->nr_src,
752                                      DMA_TO_DEVICE);
753                         return -EINVAL;
754                 }
755                 dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
756         }
757
758         memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
759
760         if (!totlen) {
761                 /*
762                  * The EIP97 cannot deal with zero length input packets!
763                  * So stuff a dummy command descriptor indicating a 1 byte
764                  * (dummy) input packet, using the context record as source.
765                  */
766                 first_cdesc = safexcel_add_cdesc(priv, ring,
767                                                  1, 1, ctx->base.ctxr_dma,
768                                                  1, 1, ctx->base.ctxr_dma,
769                                                  &atoken);
770                 if (IS_ERR(first_cdesc)) {
771                         /* No space left in the command descriptor ring */
772                         ret = PTR_ERR(first_cdesc);
773                         goto cdesc_rollback;
774                 }
775                 n_cdesc = 1;
776                 goto skip_cdesc;
777         }
778
779         /* command descriptors */
780         for_each_sg(src, sg, sreq->nr_src, i) {
781                 int len = sg_dma_len(sg);
782
783                 /* Do not overflow the request */
784                 if (queued < len)
785                         len = queued;
786
787                 cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
788                                            !(queued - len),
789                                            sg_dma_address(sg), len, totlen,
790                                            ctx->base.ctxr_dma, &atoken);
791                 if (IS_ERR(cdesc)) {
792                         /* No space left in the command descriptor ring */
793                         ret = PTR_ERR(cdesc);
794                         goto cdesc_rollback;
795                 }
796
797                 if (!n_cdesc)
798                         first_cdesc = cdesc;
799
800                 n_cdesc++;
801                 queued -= len;
802                 if (!queued)
803                         break;
804         }
805 skip_cdesc:
806         /* Add context control words and token to first command descriptor */
807         safexcel_context_control(ctx, base, sreq, first_cdesc);
808         if (ctx->aead)
809                 safexcel_aead_token(ctx, iv, first_cdesc, atoken,
810                                     sreq->direction, cryptlen,
811                                     assoclen, digestsize);
812         else
813                 safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
814                                         cryptlen);
815
816         /* result descriptors */
817         for_each_sg(dst, sg, sreq->nr_dst, i) {
818                 bool last = (i == sreq->nr_dst - 1);
819                 u32 len = sg_dma_len(sg);
820
821                 /* only allow the part of the buffer we know we need */
822                 if (len > totlen_dst)
823                         len = totlen_dst;
824                 if (unlikely(!len))
825                         break;
826                 totlen_dst -= len;
827
828                 /* skip over AAD space in buffer - not written */
829                 if (assoclen) {
830                         if (assoclen >= len) {
831                                 assoclen -= len;
832                                 continue;
833                         }
834                         rdesc = safexcel_add_rdesc(priv, ring, first, last,
835                                                    sg_dma_address(sg) +
836                                                    assoclen,
837                                                    len - assoclen);
838                         assoclen = 0;
839                 } else {
840                         rdesc = safexcel_add_rdesc(priv, ring, first, last,
841                                                    sg_dma_address(sg),
842                                                    len);
843                 }
844                 if (IS_ERR(rdesc)) {
845                         /* No space left in the result descriptor ring */
846                         ret = PTR_ERR(rdesc);
847                         goto rdesc_rollback;
848                 }
849                 if (first) {
850                         first_rdesc = rdesc;
851                         first = false;
852                 }
853                 n_rdesc++;
854         }
855
856         if (unlikely(first)) {
857                 /*
858                  * Special case: AEAD decrypt with only AAD data.
859                  * In this case there is NO output data from the engine,
860                  * but the engine still needs a result descriptor!
861                  * Create a dummy one just for catching the result token.
862                  */
863                 rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
864                 if (IS_ERR(rdesc)) {
865                         /* No space left in the result descriptor ring */
866                         ret = PTR_ERR(rdesc);
867                         goto rdesc_rollback;
868                 }
869                 first_rdesc = rdesc;
870                 n_rdesc = 1;
871         }
872
873         safexcel_rdr_req_set(priv, ring, first_rdesc, base);
874
875         *commands = n_cdesc;
876         *results = n_rdesc;
877         return 0;
878
879 rdesc_rollback:
880         for (i = 0; i < n_rdesc; i++)
881                 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
882 cdesc_rollback:
883         for (i = 0; i < n_cdesc; i++)
884                 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
885
886         if (src == dst) {
887                 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
888         } else {
889                 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
890                 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
891         }
892
893         return ret;
894 }
895
/*
 * Process the completion of a context-record cache invalidation request.
 *
 * Drains all result descriptors owned by this request from the result
 * descriptor ring, recording the first error seen in *ret.  If the
 * invalidation was issued for tfm teardown (base.exit_inv), the context
 * record is freed and the request completes.  Otherwise the original
 * crypto request is re-enqueued on a (possibly different) ring so it can
 * now run against a clean record cache.
 *
 * Returns the number of result descriptors consumed.
 */
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *base,
                                      struct safexcel_cipher_req *sreq,
                                      bool *should_complete, int *ret)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
        struct safexcel_result_desc *rdesc;
        int ndesc = 0, enq_ret;

        *ret = 0;

        if (unlikely(!sreq->rdescs))
                return 0;

        /* Consume every result descriptor belonging to this request */
        while (sreq->rdescs--) {
                rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
                if (IS_ERR(rdesc)) {
                        dev_err(priv->dev,
                                "cipher: invalidate: could not retrieve the result descriptor\n");
                        *ret = PTR_ERR(rdesc);
                        break;
                }

                /* Keep only the first error encountered */
                if (likely(!*ret))
                        *ret = safexcel_rdesc_check_errors(priv, rdesc);

                ndesc++;
        }

        safexcel_complete(priv, ring);

        if (ctx->base.exit_inv) {
                /* tfm is going away: release the context record and finish */
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);

                *should_complete = true;

                return ndesc;
        }

        /* Re-queue the original request, load-balancing onto a fresh ring */
        ring = safexcel_select_ring(priv);
        ctx->base.ring = ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        /* -EINPROGRESS is the expected enqueue result; anything else is final */
        if (enq_ret != -EINPROGRESS)
                *ret = enq_ret;

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        /* Not complete yet: the re-queued request will complete it later */
        *should_complete = false;

        return ndesc;
}
954
955 static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
956                                            int ring,
957                                            struct crypto_async_request *async,
958                                            bool *should_complete, int *ret)
959 {
960         struct skcipher_request *req = skcipher_request_cast(async);
961         struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
962         int err;
963
964         if (sreq->needs_inv) {
965                 sreq->needs_inv = false;
966                 err = safexcel_handle_inv_result(priv, ring, async, sreq,
967                                                  should_complete, ret);
968         } else {
969                 err = safexcel_handle_req_result(priv, ring, async, req->src,
970                                                  req->dst, req->cryptlen, sreq,
971                                                  should_complete, ret);
972         }
973
974         return err;
975 }
976
977 static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
978                                        int ring,
979                                        struct crypto_async_request *async,
980                                        bool *should_complete, int *ret)
981 {
982         struct aead_request *req = aead_request_cast(async);
983         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
984         struct safexcel_cipher_req *sreq = aead_request_ctx(req);
985         int err;
986
987         if (sreq->needs_inv) {
988                 sreq->needs_inv = false;
989                 err = safexcel_handle_inv_result(priv, ring, async, sreq,
990                                                  should_complete, ret);
991         } else {
992                 err = safexcel_handle_req_result(priv, ring, async, req->src,
993                                                  req->dst,
994                                                  req->cryptlen + crypto_aead_authsize(tfm),
995                                                  sreq, should_complete, ret);
996         }
997
998         return err;
999 }
1000
1001 static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1002                                     int ring, int *commands, int *results)
1003 {
1004         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1005         struct safexcel_crypto_priv *priv = ctx->base.priv;
1006         int ret;
1007
1008         ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1009         if (unlikely(ret))
1010                 return ret;
1011
1012         *commands = 1;
1013         *results = 1;
1014
1015         return 0;
1016 }
1017
1018 static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
1019                                   int *commands, int *results)
1020 {
1021         struct skcipher_request *req = skcipher_request_cast(async);
1022         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1023         struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1024         struct safexcel_crypto_priv *priv = ctx->base.priv;
1025         int ret;
1026
1027         BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1028
1029         if (sreq->needs_inv) {
1030                 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1031         } else {
1032                 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1033                 u8 input_iv[AES_BLOCK_SIZE];
1034
1035                 /*
1036                  * Save input IV in case of CBC decrypt mode
1037                  * Will be overwritten with output IV prior to use!
1038                  */
1039                 memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1040
1041                 ret = safexcel_send_req(async, ring, sreq, req->src,
1042                                         req->dst, req->cryptlen, 0, 0, input_iv,
1043                                         commands, results);
1044         }
1045
1046         sreq->rdescs = *results;
1047         return ret;
1048 }
1049
1050 static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1051                               int *commands, int *results)
1052 {
1053         struct aead_request *req = aead_request_cast(async);
1054         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1055         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1056         struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1057         struct safexcel_crypto_priv *priv = ctx->base.priv;
1058         int ret;
1059
1060         BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1061
1062         if (sreq->needs_inv)
1063                 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1064         else
1065                 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1066                                         req->cryptlen, req->assoclen,
1067                                         crypto_aead_authsize(tfm), req->iv,
1068                                         commands, results);
1069         sreq->rdescs = *results;
1070         return ret;
1071 }
1072
/*
 * Synchronously invalidate the context record of a tfm being torn down.
 *
 * Marks the context for exit invalidation, enqueues the caller-provided
 * dummy request (base) on the tfm's current ring and blocks until the
 * invalidation completes.  Returns 0 on success or the completion error.
 */
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
                                    struct crypto_async_request *base,
                                    struct safexcel_cipher_req *sreq,
                                    struct safexcel_inv_result *result)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ring = ctx->base.ring;

        init_completion(&result->completion);

        /* base wraps the same tfm (set by the callers), so this re-derives
         * the same context; flag it for teardown invalidation. */
        ctx = crypto_tfm_ctx(base->tfm);
        ctx->base.exit_inv = true;
        sreq->needs_inv = true;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        crypto_enqueue_request(&priv->ring[ring].queue, base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        /* Block until safexcel_inv_complete() fires */
        wait_for_completion(&result->completion);

        if (result->error) {
                dev_warn(priv->dev,
                        "cipher: sync: invalidate: completion error %d\n",
                         result->error);
                return result->error;
        }

        return 0;
}
1106
/*
 * Build an on-stack dummy skcipher request and use it to synchronously
 * invalidate this tfm's context record (see safexcel_cipher_exit_inv()).
 */
static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
        EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
        struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
        struct safexcel_inv_result result = {};

        memset(req, 0, sizeof(struct skcipher_request));

        /* Completion callback signals the on-stack result */
        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                      safexcel_inv_complete, &result);
        skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

        return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
1121
/*
 * Build an on-stack dummy AEAD request and use it to synchronously
 * invalidate this tfm's context record (see safexcel_cipher_exit_inv()).
 */
static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
        EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
        struct safexcel_cipher_req *sreq = aead_request_ctx(req);
        struct safexcel_inv_result result = {};

        memset(req, 0, sizeof(struct aead_request));

        /* Completion callback signals the on-stack result */
        aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                  safexcel_inv_complete, &result);
        aead_request_set_tfm(req, __crypto_aead_cast(tfm));

        return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
1136
1137 static int safexcel_queue_req(struct crypto_async_request *base,
1138                         struct safexcel_cipher_req *sreq,
1139                         enum safexcel_cipher_direction dir)
1140 {
1141         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1142         struct safexcel_crypto_priv *priv = ctx->base.priv;
1143         int ret, ring;
1144
1145         sreq->needs_inv = false;
1146         sreq->direction = dir;
1147
1148         if (ctx->base.ctxr) {
1149                 if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
1150                         sreq->needs_inv = true;
1151                         ctx->base.needs_inv = false;
1152                 }
1153         } else {
1154                 ctx->base.ring = safexcel_select_ring(priv);
1155                 ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
1156                                                  EIP197_GFP_FLAGS(*base),
1157                                                  &ctx->base.ctxr_dma);
1158                 if (!ctx->base.ctxr)
1159                         return -ENOMEM;
1160         }
1161
1162         ring = ctx->base.ring;
1163
1164         spin_lock_bh(&priv->ring[ring].queue_lock);
1165         ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1166         spin_unlock_bh(&priv->ring[ring].queue_lock);
1167
1168         queue_work(priv->ring[ring].workqueue,
1169                    &priv->ring[ring].work_data.work);
1170
1171         return ret;
1172 }
1173
1174 static int safexcel_encrypt(struct skcipher_request *req)
1175 {
1176         return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1177                         SAFEXCEL_ENCRYPT);
1178 }
1179
1180 static int safexcel_decrypt(struct skcipher_request *req)
1181 {
1182         return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1183                         SAFEXCEL_DECRYPT);
1184 }
1185
1186 static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1187 {
1188         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1189         struct safexcel_alg_template *tmpl =
1190                 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1191                              alg.skcipher.base);
1192
1193         crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1194                                     sizeof(struct safexcel_cipher_req));
1195
1196         ctx->base.priv = tmpl->priv;
1197
1198         ctx->base.send = safexcel_skcipher_send;
1199         ctx->base.handle_result = safexcel_skcipher_handle_result;
1200         ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1201         ctx->ctrinit = 1;
1202         return 0;
1203 }
1204
/*
 * Common cipher tfm exit: scrub key material from the software context
 * and from the DMA context record.
 *
 * Returns non-zero (-ENOMEM is used purely as a sentinel here) when no
 * context record was ever allocated, telling the callers to skip the
 * cache invalidation / dma_pool_free steps.
 */
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        memzero_explicit(ctx->key, sizeof(ctx->key));

        /* context not allocated, skip invalidation */
        if (!ctx->base.ctxr)
                return -ENOMEM;

        memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
        return 0;
}
1218
1219 static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1220 {
1221         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1222         struct safexcel_crypto_priv *priv = ctx->base.priv;
1223         int ret;
1224
1225         if (safexcel_cipher_cra_exit(tfm))
1226                 return;
1227
1228         if (priv->flags & EIP197_TRC_CACHE) {
1229                 ret = safexcel_skcipher_exit_inv(tfm);
1230                 if (ret)
1231                         dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1232                                  ret);
1233         } else {
1234                 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1235                               ctx->base.ctxr_dma);
1236         }
1237 }
1238
1239 static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1240 {
1241         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1242         struct safexcel_crypto_priv *priv = ctx->base.priv;
1243         int ret;
1244
1245         if (safexcel_cipher_cra_exit(tfm))
1246                 return;
1247
1248         if (priv->flags & EIP197_TRC_CACHE) {
1249                 ret = safexcel_aead_exit_inv(tfm);
1250                 if (ret)
1251                         dev_warn(priv->dev, "aead: invalidation error %d\n",
1252                                  ret);
1253         } else {
1254                 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1255                               ctx->base.ctxr_dma);
1256         }
1257 }
1258
1259 static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1260 {
1261         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1262
1263         safexcel_skcipher_cra_init(tfm);
1264         ctx->alg  = SAFEXCEL_AES;
1265         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1266         ctx->blocksz = 0;
1267         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1268         return 0;
1269 }
1270
/* Algorithm template for ecb(aes) */
struct safexcel_alg_template safexcel_alg_ecb_aes = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_AES,
        .alg.skcipher = {
                .setkey = safexcel_skcipher_aes_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .base = {
                        .cra_name = "ecb(aes)",
                        .cra_driver_name = "safexcel-ecb-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_aes_ecb_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1296
1297 static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1298 {
1299         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1300
1301         safexcel_skcipher_cra_init(tfm);
1302         ctx->alg  = SAFEXCEL_AES;
1303         ctx->blocksz = AES_BLOCK_SIZE;
1304         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1305         return 0;
1306 }
1307
/* Algorithm template for cbc(aes) */
struct safexcel_alg_template safexcel_alg_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_AES,
        .alg.skcipher = {
                .setkey = safexcel_skcipher_aes_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .ivsize = AES_BLOCK_SIZE,
                .base = {
                        .cra_name = "cbc(aes)",
                        .cra_driver_name = "safexcel-cbc-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_aes_cbc_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1334
1335 static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
1336 {
1337         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1338
1339         safexcel_skcipher_cra_init(tfm);
1340         ctx->alg  = SAFEXCEL_AES;
1341         ctx->blocksz = AES_BLOCK_SIZE;
1342         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1343         return 0;
1344 }
1345
/* Algorithm template for cfb(aes) - stream mode, hence cra_blocksize 1 */
struct safexcel_alg_template safexcel_alg_cfb_aes = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
        .alg.skcipher = {
                .setkey = safexcel_skcipher_aes_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .ivsize = AES_BLOCK_SIZE,
                .base = {
                        .cra_name = "cfb(aes)",
                        .cra_driver_name = "safexcel-cfb-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = 1,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_aes_cfb_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1372
1373 static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
1374 {
1375         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1376
1377         safexcel_skcipher_cra_init(tfm);
1378         ctx->alg  = SAFEXCEL_AES;
1379         ctx->blocksz = AES_BLOCK_SIZE;
1380         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1381         return 0;
1382 }
1383
/* Algorithm template for ofb(aes) - stream mode, hence cra_blocksize 1 */
struct safexcel_alg_template safexcel_alg_ofb_aes = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
        .alg.skcipher = {
                .setkey = safexcel_skcipher_aes_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .ivsize = AES_BLOCK_SIZE,
                .base = {
                        .cra_name = "ofb(aes)",
                        .cra_driver_name = "safexcel-ofb-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = 1,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_aes_ofb_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1410
1411 static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1412                                            const u8 *key, unsigned int len)
1413 {
1414         struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1415         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1416         struct safexcel_crypto_priv *priv = ctx->base.priv;
1417         struct crypto_aes_ctx aes;
1418         int ret, i;
1419         unsigned int keylen;
1420
1421         /* last 4 bytes of key are the nonce! */
1422         ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1423         /* exclude the nonce here */
1424         keylen = len - CTR_RFC3686_NONCE_SIZE;
1425         ret = aes_expandkey(&aes, key, keylen);
1426         if (ret)
1427                 return ret;
1428
1429         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1430                 for (i = 0; i < keylen / sizeof(u32); i++) {
1431                         if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1432                                 ctx->base.needs_inv = true;
1433                                 break;
1434                         }
1435                 }
1436         }
1437
1438         for (i = 0; i < keylen / sizeof(u32); i++)
1439                 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1440
1441         ctx->key_len = keylen;
1442
1443         memzero_explicit(&aes, sizeof(aes));
1444         return 0;
1445 }
1446
1447 static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1448 {
1449         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1450
1451         safexcel_skcipher_cra_init(tfm);
1452         ctx->alg  = SAFEXCEL_AES;
1453         ctx->blocksz = AES_BLOCK_SIZE;
1454         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1455         return 0;
1456 }
1457
/* Algorithm template for rfc3686(ctr(aes)) - key sizes include the nonce */
struct safexcel_alg_template safexcel_alg_ctr_aes = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_AES,
        .alg.skcipher = {
                .setkey = safexcel_skcipher_aesctr_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                /* Add nonce size */
                .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
                .ivsize = CTR_RFC3686_IV_SIZE,
                .base = {
                        .cra_name = "rfc3686(ctr(aes))",
                        .cra_driver_name = "safexcel-ctr-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = 1,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_aes_ctr_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
1485
1486 static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
1487                                unsigned int len)
1488 {
1489         struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1490         struct safexcel_crypto_priv *priv = ctx->base.priv;
1491         int ret;
1492
1493         ret = verify_skcipher_des_key(ctfm, key);
1494         if (ret)
1495                 return ret;
1496
1497         /* if context exits and key changed, need to invalidate it */
1498         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1499                 if (memcmp(ctx->key, key, len))
1500                         ctx->base.needs_inv = true;
1501
1502         memcpy(ctx->key, key, len);
1503         ctx->key_len = len;
1504
1505         return 0;
1506 }
1507
1508 static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1509 {
1510         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1511
1512         safexcel_skcipher_cra_init(tfm);
1513         ctx->alg  = SAFEXCEL_DES;
1514         ctx->blocksz = DES_BLOCK_SIZE;
1515         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1516         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1517         return 0;
1518 }
1519
/* cbc(des): single DES in CBC mode (legacy; kept for compatibility). */
struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1546
1547 static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1548 {
1549         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1550
1551         safexcel_skcipher_cra_init(tfm);
1552         ctx->alg  = SAFEXCEL_DES;
1553         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1554         ctx->blocksz = 0;
1555         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1556         return 0;
1557 }
1558
/* ecb(des): single DES in ECB mode (no IV; legacy compatibility). */
struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1584
1585 static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1586                                    const u8 *key, unsigned int len)
1587 {
1588         struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1589         struct safexcel_crypto_priv *priv = ctx->base.priv;
1590         int err;
1591
1592         err = verify_skcipher_des3_key(ctfm, key);
1593         if (err)
1594                 return err;
1595
1596         /* if context exits and key changed, need to invalidate it */
1597         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1598                 if (memcmp(ctx->key, key, len))
1599                         ctx->base.needs_inv = true;
1600
1601         memcpy(ctx->key, key, len);
1602         ctx->key_len = len;
1603
1604         return 0;
1605 }
1606
1607 static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1608 {
1609         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1610
1611         safexcel_skcipher_cra_init(tfm);
1612         ctx->alg  = SAFEXCEL_3DES;
1613         ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1614         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1615         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1616         return 0;
1617 }
1618
/* cbc(des3_ede): triple DES (EDE) in CBC mode. */
struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1645
1646 static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1647 {
1648         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1649
1650         safexcel_skcipher_cra_init(tfm);
1651         ctx->alg  = SAFEXCEL_3DES;
1652         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1653         ctx->blocksz = 0;
1654         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1655         return 0;
1656 }
1657
/* ecb(des3_ede): triple DES (EDE) in ECB mode (no IV). */
struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1683
1684 static int safexcel_aead_encrypt(struct aead_request *req)
1685 {
1686         struct safexcel_cipher_req *creq = aead_request_ctx(req);
1687
1688         return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1689 }
1690
1691 static int safexcel_aead_decrypt(struct aead_request *req)
1692 {
1693         struct safexcel_cipher_req *creq = aead_request_ctx(req);
1694
1695         return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1696 }
1697
/*
 * Common AEAD transform init: wires up the per-request context size,
 * driver private data and the send/handle_result callbacks, and fills
 * in AES-CBC defaults that the per-algorithm init functions override
 * as needed (alg, blocksz, ivmask, mode).
 */
static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->base.priv = tmpl->priv;

	ctx->alg  = SAFEXCEL_AES; /* default */
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
	ctx->ctrinit = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}
1720
1721 static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1722 {
1723         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1724
1725         safexcel_aead_cra_init(tfm);
1726         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1727         ctx->state_sz = SHA1_DIGEST_SIZE;
1728         return 0;
1729 }
1730
/* authenc(hmac(sha1),cbc(aes)): AES-CBC encryption with HMAC-SHA1 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1756
1757 static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1758 {
1759         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1760
1761         safexcel_aead_cra_init(tfm);
1762         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1763         ctx->state_sz = SHA256_DIGEST_SIZE;
1764         return 0;
1765 }
1766
/* authenc(hmac(sha256),cbc(aes)): AES-CBC with HMAC-SHA256 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1792
1793 static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1794 {
1795         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1796
1797         safexcel_aead_cra_init(tfm);
1798         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1799         ctx->state_sz = SHA256_DIGEST_SIZE;
1800         return 0;
1801 }
1802
/* authenc(hmac(sha224),cbc(aes)): AES-CBC with HMAC-SHA224 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1828
1829 static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1830 {
1831         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1832
1833         safexcel_aead_cra_init(tfm);
1834         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1835         ctx->state_sz = SHA512_DIGEST_SIZE;
1836         return 0;
1837 }
1838
/* authenc(hmac(sha512),cbc(aes)): AES-CBC with HMAC-SHA512 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1864
1865 static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1866 {
1867         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1868
1869         safexcel_aead_cra_init(tfm);
1870         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1871         ctx->state_sz = SHA512_DIGEST_SIZE;
1872         return 0;
1873 }
1874
/* authenc(hmac(sha384),cbc(aes)): AES-CBC with HMAC-SHA384 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1900
1901 static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1902 {
1903         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1904
1905         safexcel_aead_sha1_cra_init(tfm);
1906         ctx->alg = SAFEXCEL_3DES; /* override default */
1907         ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1908         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1909         return 0;
1910 }
1911
/* authenc(hmac(sha1),cbc(des3_ede)): 3DES-CBC with HMAC-SHA1 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1937
1938 static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1939 {
1940         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1941
1942         safexcel_aead_sha256_cra_init(tfm);
1943         ctx->alg = SAFEXCEL_3DES; /* override default */
1944         ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1945         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1946         return 0;
1947 }
1948
/* authenc(hmac(sha256),cbc(des3_ede)): 3DES-CBC with HMAC-SHA256 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1974
1975 static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1976 {
1977         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1978
1979         safexcel_aead_sha224_cra_init(tfm);
1980         ctx->alg = SAFEXCEL_3DES; /* override default */
1981         ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1982         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1983         return 0;
1984 }
1985
/* authenc(hmac(sha224),cbc(des3_ede)): 3DES-CBC with HMAC-SHA224 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2011
2012 static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
2013 {
2014         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2015
2016         safexcel_aead_sha512_cra_init(tfm);
2017         ctx->alg = SAFEXCEL_3DES; /* override default */
2018         ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2019         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2020         return 0;
2021 }
2022
/* authenc(hmac(sha512),cbc(des3_ede)): 3DES-CBC with HMAC-SHA512 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2048
2049 static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
2050 {
2051         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2052
2053         safexcel_aead_sha384_cra_init(tfm);
2054         ctx->alg = SAFEXCEL_3DES; /* override default */
2055         ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2056         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2057         return 0;
2058 }
2059
/* authenc(hmac(sha384),cbc(des3_ede)): 3DES-CBC with HMAC-SHA384 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2085
2086 static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2087 {
2088         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2089
2090         safexcel_aead_sha1_cra_init(tfm);
2091         ctx->alg = SAFEXCEL_DES; /* override default */
2092         ctx->blocksz = DES_BLOCK_SIZE;
2093         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2094         return 0;
2095 }
2096
/* authenc(hmac(sha1),cbc(des)): single-DES CBC with HMAC-SHA1 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2122
2123 static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2124 {
2125         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2126
2127         safexcel_aead_sha256_cra_init(tfm);
2128         ctx->alg = SAFEXCEL_DES; /* override default */
2129         ctx->blocksz = DES_BLOCK_SIZE;
2130         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2131         return 0;
2132 }
2133
/* authenc(hmac(sha256),cbc(des)): single-DES CBC with HMAC-SHA256 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2159
2160 static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2161 {
2162         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2163
2164         safexcel_aead_sha224_cra_init(tfm);
2165         ctx->alg = SAFEXCEL_DES; /* override default */
2166         ctx->blocksz = DES_BLOCK_SIZE;
2167         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2168         return 0;
2169 }
2170
/*
 * authenc(hmac(sha224),cbc(des)) AEAD template.
 * Mask uses SAFEXCEL_ALG_SHA2_256: SHA-224 runs on the SHA-256 engine.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2196
2197 static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2198 {
2199         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2200
2201         safexcel_aead_sha512_cra_init(tfm);
2202         ctx->alg = SAFEXCEL_DES; /* override default */
2203         ctx->blocksz = DES_BLOCK_SIZE;
2204         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2205         return 0;
2206 }
2207
/* authenc(hmac(sha512),cbc(des)) AEAD template, registered by the driver core */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2233
2234 static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2235 {
2236         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2237
2238         safexcel_aead_sha384_cra_init(tfm);
2239         ctx->alg = SAFEXCEL_DES; /* override default */
2240         ctx->blocksz = DES_BLOCK_SIZE;
2241         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2242         return 0;
2243 }
2244
/*
 * authenc(hmac(sha384),cbc(des)) AEAD template.
 * Mask uses SAFEXCEL_ALG_SHA2_512: SHA-384 runs on the SHA-512 engine.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2270
2271 static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2272 {
2273         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2274
2275         safexcel_aead_sha1_cra_init(tfm);
2276         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2277         return 0;
2278 }
2279
/* authenc(hmac(sha1),rfc3686(ctr(aes))) AEAD template; CTR is a stream mode,
 * hence cra_blocksize = 1.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2305
2306 static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2307 {
2308         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2309
2310         safexcel_aead_sha256_cra_init(tfm);
2311         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2312         return 0;
2313 }
2314
/* authenc(hmac(sha256),rfc3686(ctr(aes))) AEAD template; stream mode, blocksize 1 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2340
2341 static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2342 {
2343         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2344
2345         safexcel_aead_sha224_cra_init(tfm);
2346         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2347         return 0;
2348 }
2349
/*
 * authenc(hmac(sha224),rfc3686(ctr(aes))) AEAD template.
 * Mask uses SAFEXCEL_ALG_SHA2_256: SHA-224 runs on the SHA-256 engine.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2375
2376 static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2377 {
2378         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2379
2380         safexcel_aead_sha512_cra_init(tfm);
2381         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2382         return 0;
2383 }
2384
/* authenc(hmac(sha512),rfc3686(ctr(aes))) AEAD template; stream mode, blocksize 1 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2410
2411 static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2412 {
2413         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2414
2415         safexcel_aead_sha384_cra_init(tfm);
2416         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2417         return 0;
2418 }
2419
/*
 * authenc(hmac(sha384),rfc3686(ctr(aes))) AEAD template.
 * Mask uses SAFEXCEL_ALG_SHA2_512: SHA-384 runs on the SHA-512 engine.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2445
/*
 * Set the key for AES-XTS. The supplied blob is twice the AES key length:
 * the first half is the data (cipher) key, the second half the tweak key.
 * Both are expanded and stored back-to-back in ctx->key as LE words.
 * Returns 0 or a negative errno (illegal XTS key / bad key length).
 */
static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* Check for illegal XTS keys */
	ret = xts_verify_key(ctfm, key, len);
	if (ret)
		return ret;

	/* Only half of the key data is cipher key */
	keylen = (len >> 1);
	ret = aes_expandkey(&aes, key, keylen);
	if (ret)
		return ret;

	/*
	 * With the engine's record cache in use, a cached context must be
	 * invalidated whenever the cipher key half actually changes.
	 */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	/* The other half is the tweak key */
	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
	if (ret)
		return ret;

	/* Same invalidation check for the tweak key half */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
			    aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i + keylen / sizeof(u32)] =
			cpu_to_le32(aes.key_enc[i]);

	/* Total stored key = cipher key + tweak key */
	ctx->key_len = keylen << 1;

	/* Wipe the expanded key schedule from the stack */
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2503
2504 static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2505 {
2506         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2507
2508         safexcel_skcipher_cra_init(tfm);
2509         ctx->alg  = SAFEXCEL_AES;
2510         ctx->blocksz = AES_BLOCK_SIZE;
2511         ctx->xts  = 1;
2512         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2513         return 0;
2514 }
2515
2516 static int safexcel_encrypt_xts(struct skcipher_request *req)
2517 {
2518         if (req->cryptlen < XTS_BLOCK_SIZE)
2519                 return -EINVAL;
2520         return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2521                                   SAFEXCEL_ENCRYPT);
2522 }
2523
2524 static int safexcel_decrypt_xts(struct skcipher_request *req)
2525 {
2526         if (req->cryptlen < XTS_BLOCK_SIZE)
2527                 return -EINVAL;
2528         return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2529                                   SAFEXCEL_DECRYPT);
2530 }
2531
/* xts(aes) skcipher template, registered by the driver core */
struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2559
2560 static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2561                                     unsigned int len)
2562 {
2563         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2564         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2565         struct safexcel_crypto_priv *priv = ctx->base.priv;
2566         struct crypto_aes_ctx aes;
2567         u32 hashkey[AES_BLOCK_SIZE >> 2];
2568         int ret, i;
2569
2570         ret = aes_expandkey(&aes, key, len);
2571         if (ret) {
2572                 memzero_explicit(&aes, sizeof(aes));
2573                 return ret;
2574         }
2575
2576         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2577                 for (i = 0; i < len / sizeof(u32); i++) {
2578                         if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2579                                 ctx->base.needs_inv = true;
2580                                 break;
2581                         }
2582                 }
2583         }
2584
2585         for (i = 0; i < len / sizeof(u32); i++)
2586                 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2587
2588         ctx->key_len = len;
2589
2590         /* Compute hash key by encrypting zeroes with cipher key */
2591         crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
2592         crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
2593                                 CRYPTO_TFM_REQ_MASK);
2594         ret = crypto_cipher_setkey(ctx->hkaes, key, len);
2595         if (ret)
2596                 return ret;
2597
2598         memset(hashkey, 0, AES_BLOCK_SIZE);
2599         crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
2600
2601         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2602                 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2603                         if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
2604                                 ctx->base.needs_inv = true;
2605                                 break;
2606                         }
2607                 }
2608         }
2609
2610         for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2611                 ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
2612
2613         memzero_explicit(hashkey, AES_BLOCK_SIZE);
2614         memzero_explicit(&aes, sizeof(aes));
2615         return 0;
2616 }
2617
2618 static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2619 {
2620         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2621
2622         safexcel_aead_cra_init(tfm);
2623         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2624         ctx->state_sz = GHASH_BLOCK_SIZE;
2625         ctx->xcm = EIP197_XCM_MODE_GCM;
2626         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2627
2628         ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
2629         return PTR_ERR_OR_ZERO(ctx->hkaes);
2630 }
2631
2632 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2633 {
2634         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2635
2636         crypto_free_cipher(ctx->hkaes);
2637         safexcel_aead_cra_exit(tfm);
2638 }
2639
/* Validate the requested GCM tag size; the generic helper does the check */
static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}
2645
/* gcm(aes) AEAD template; stream mode, hence cra_blocksize = 1 */
struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2672
/*
 * Set the AES key for CCM. The key is stored twice: as the cipher key in
 * LE word order, and - at an offset of two AES blocks into the hash state
 * (ipad) - in BE word order for the CBC-MAC (XCBC-style) authentication.
 * Returns 0 or a negative errno on bad key length.
 */
static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	/* A cached context record must be invalidated if the key changed */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++) {
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
		ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
			cpu_to_be32(aes.key_enc[i]);
	}

	ctx->key_len = len;
	/* Hash state = 2 AES blocks of MAC state plus the key itself */
	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;

	/* Select the XCBC variant matching the AES key size */
	if (len == AES_KEYSIZE_192)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
	else if (len == AES_KEYSIZE_256)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
	else
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;

	/* Wipe the expanded key schedule from the stack */
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2716
2717 static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2718 {
2719         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2720
2721         safexcel_aead_cra_init(tfm);
2722         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2723         ctx->state_sz = 3 * AES_BLOCK_SIZE;
2724         ctx->xcm = EIP197_XCM_MODE_CCM;
2725         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2726         ctx->ctrinit = 0;
2727         return 0;
2728 }
2729
2730 static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2731                                          unsigned int authsize)
2732 {
2733         /* Borrowed from crypto/ccm.c */
2734         switch (authsize) {
2735         case 4:
2736         case 6:
2737         case 8:
2738         case 10:
2739         case 12:
2740         case 14:
2741         case 16:
2742                 break;
2743         default:
2744                 return -EINVAL;
2745         }
2746
2747         return 0;
2748 }
2749
2750 static int safexcel_ccm_encrypt(struct aead_request *req)
2751 {
2752         struct safexcel_cipher_req *creq = aead_request_ctx(req);
2753
2754         if (req->iv[0] < 1 || req->iv[0] > 7)
2755                 return -EINVAL;
2756
2757         return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2758 }
2759
2760 static int safexcel_ccm_decrypt(struct aead_request *req)
2761 {
2762         struct safexcel_cipher_req *creq = aead_request_ctx(req);
2763
2764         if (req->iv[0] < 1 || req->iv[0] > 7)
2765                 return -EINVAL;
2766
2767         return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2768 }
2769
/* ccm(aes) AEAD template; stream mode, hence cra_blocksize = 1 */
struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2796
2797 static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2798                                      const u8 *key)
2799 {
2800         struct safexcel_crypto_priv *priv = ctx->base.priv;
2801
2802         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2803                 if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2804                         ctx->base.needs_inv = true;
2805
2806         memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2807         ctx->key_len = CHACHA_KEY_SIZE;
2808 }
2809
2810 static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2811                                              const u8 *key, unsigned int len)
2812 {
2813         struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2814
2815         if (len != CHACHA_KEY_SIZE)
2816                 return -EINVAL;
2817
2818         safexcel_chacha20_setkey(ctx, key);
2819
2820         return 0;
2821 }
2822
2823 static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2824 {
2825         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2826
2827         safexcel_skcipher_cra_init(tfm);
2828         ctx->alg  = SAFEXCEL_CHACHA20;
2829         ctx->ctrinit = 0;
2830         ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2831         return 0;
2832 }
2833
2834 struct safexcel_alg_template safexcel_alg_chacha20 = {
2835         .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2836         .algo_mask = SAFEXCEL_ALG_CHACHA20,
2837         .alg.skcipher = {
2838                 .setkey = safexcel_skcipher_chacha20_setkey,
2839                 .encrypt = safexcel_encrypt,
2840                 .decrypt = safexcel_decrypt,
2841                 .min_keysize = CHACHA_KEY_SIZE,
2842                 .max_keysize = CHACHA_KEY_SIZE,
2843                 .ivsize = CHACHA_IV_SIZE,
2844                 .base = {
2845                         .cra_name = "chacha20",
2846                         .cra_driver_name = "safexcel-chacha20",
2847                         .cra_priority = SAFEXCEL_CRA_PRIORITY,
2848                         .cra_flags = CRYPTO_ALG_ASYNC |
2849                                      CRYPTO_ALG_ALLOCATES_MEMORY |
2850                                      CRYPTO_ALG_KERN_DRIVER_ONLY,
2851                         .cra_blocksize = 1,
2852                         .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2853                         .cra_alignmask = 0,
2854                         .cra_init = safexcel_skcipher_chacha20_cra_init,
2855                         .cra_exit = safexcel_skcipher_cra_exit,
2856                         .cra_module = THIS_MODULE,
2857                 },
2858         },
2859 };
2860
2861 static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2862                                     const u8 *key, unsigned int len)
2863 {
2864         struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2865
2866         if (ctx->aead  == EIP197_AEAD_TYPE_IPSEC_ESP &&
2867             len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2868                 /* ESP variant has nonce appended to key */
2869                 len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2870                 ctx->nonce = *(u32 *)(key + len);
2871         }
2872         if (len != CHACHA_KEY_SIZE)
2873                 return -EINVAL;
2874
2875         safexcel_chacha20_setkey(ctx, key);
2876
2877         return 0;
2878 }
2879
2880 static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2881                                          unsigned int authsize)
2882 {
2883         if (authsize != POLY1305_DIGEST_SIZE)
2884                 return -EINVAL;
2885         return 0;
2886 }
2887
/*
 * Chacha20-poly1305 en/decrypt dispatch. Normal-sized requests are queued
 * straight to the HW; "small" corner cases (payload no larger than the
 * Poly1305 tag, or ESP requests with less AAD than the ESP IV size) are
 * handed to the SW fallback cipher, since the HW cannot process a fully
 * zero-length (AAD + payload) request.
 */
static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	/*
	 * subreq deliberately aliases creq: the request ctx was sized by
	 * safexcel_aead_fallback_cra_init() to hold either one, and only
	 * one of the two is ever used per request.
	 */
	struct aead_request *subreq = aead_request_ctx(req);
	/* Key words plus one extra u32 slot for the ESP nonce */
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		/* Propagate the fallback's request flags to our own tfm */
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	/* Re-route the entire request to the SW fallback implementation */
	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir ==  SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
2940
2941 static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2942 {
2943         return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2944 }
2945
2946 static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2947 {
2948         return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2949 }
2950
/*
 * Common transform init for AEADs that need a SW fallback for corner
 * cases the HW cannot handle. Allocates a fallback AEAD of the same
 * cra_name (masking out ASYNC and NEED_FALLBACK candidates) and sizes
 * the request context large enough to hold either our own request
 * state or a complete fallback subrequest.
 *
 * Returns 0 on success or the PTR_ERR of the failed fallback allocation.
 */
static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
				       CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	/* Request ctx must fit both our state and a fallback subrequest */
	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
					  sizeof(struct aead_request) +
					  crypto_aead_reqsize(ctx->fback)));

	return 0;
}
2972
2973 static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2974 {
2975         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2976
2977         safexcel_aead_fallback_cra_init(tfm);
2978         ctx->alg  = SAFEXCEL_CHACHA20;
2979         ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2980                     CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2981         ctx->ctrinit = 0;
2982         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2983         ctx->state_sz = 0; /* Precomputed by HW */
2984         return 0;
2985 }
2986
2987 static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2988 {
2989         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2990
2991         crypto_free_aead(ctx->fback);
2992         safexcel_aead_cra_exit(tfm);
2993 }
2994
/*
 * rfc7539 chacha20-poly1305 AEAD. Requires a SW fallback for requests
 * the HW cannot process (see safexcel_aead_chachapoly_crypt()).
 */
struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3023
3024 static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
3025 {
3026         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3027         int ret;
3028
3029         ret = safexcel_aead_chachapoly_cra_init(tfm);
3030         ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3031         ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3032         return ret;
3033 }
3034
/*
 * rfc7539esp chacha20-poly1305 for IPsec ESP: the 4-byte salt is part
 * of the key, so the exposed ivsize shrinks by the nonce size.
 */
struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3063
3064 static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
3065                                         const u8 *key, unsigned int len)
3066 {
3067         struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3068         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3069         struct safexcel_crypto_priv *priv = ctx->base.priv;
3070
3071         if (len != SM4_KEY_SIZE)
3072                 return -EINVAL;
3073
3074         if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3075                 if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3076                         ctx->base.needs_inv = true;
3077
3078         memcpy(ctx->key, key, SM4_KEY_SIZE);
3079         ctx->key_len = SM4_KEY_SIZE;
3080
3081         return 0;
3082 }
3083
3084 static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3085 {
3086         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3087         if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3088                 return -EINVAL;
3089         else
3090                 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3091                                           SAFEXCEL_ENCRYPT);
3092 }
3093
3094 static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3095 {
3096         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3097         if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3098                 return -EINVAL;
3099         else
3100                 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3101                                           SAFEXCEL_DECRYPT);
3102 }
3103
3104 static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3105 {
3106         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3107
3108         safexcel_skcipher_cra_init(tfm);
3109         ctx->alg  = SAFEXCEL_SM4;
3110         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3111         ctx->blocksz = 0;
3112         ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3113         return 0;
3114 }
3115
/* ecb(sm4): blocksize enforced in SW due to the EIP96 4.3 HW bug. */
struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3141
3142 static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3143 {
3144         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3145
3146         safexcel_skcipher_cra_init(tfm);
3147         ctx->alg  = SAFEXCEL_SM4;
3148         ctx->blocksz = SM4_BLOCK_SIZE;
3149         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3150         return 0;
3151 }
3152
/* cbc(sm4): blocksize enforced in SW due to the EIP96 4.3 HW bug. */
struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(sm4)",
			.cra_driver_name = "safexcel-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3179
3180 static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3181 {
3182         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3183
3184         safexcel_skcipher_cra_init(tfm);
3185         ctx->alg  = SAFEXCEL_SM4;
3186         ctx->blocksz = SM4_BLOCK_SIZE;
3187         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3188         return 0;
3189 }
3190
/* ofb(sm4): stream-like mode (cra_blocksize 1), straight to HW queue. */
struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(sm4)",
			.cra_driver_name = "safexcel-ofb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3217
3218 static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3219 {
3220         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3221
3222         safexcel_skcipher_cra_init(tfm);
3223         ctx->alg  = SAFEXCEL_SM4;
3224         ctx->blocksz = SM4_BLOCK_SIZE;
3225         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3226         return 0;
3227 }
3228
/* cfb(sm4): stream-like mode (cra_blocksize 1), straight to HW queue. */
struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(sm4)",
			.cra_driver_name = "safexcel-cfb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3255
3256 static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3257                                            const u8 *key, unsigned int len)
3258 {
3259         struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3260         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3261
3262         /* last 4 bytes of key are the nonce! */
3263         ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3264         /* exclude the nonce here */
3265         len -= CTR_RFC3686_NONCE_SIZE;
3266
3267         return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3268 }
3269
3270 static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3271 {
3272         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3273
3274         safexcel_skcipher_cra_init(tfm);
3275         ctx->alg  = SAFEXCEL_SM4;
3276         ctx->blocksz = SM4_BLOCK_SIZE;
3277         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3278         return 0;
3279 }
3280
/* rfc3686(ctr(sm4)): keysizes include the 4-byte rfc3686 nonce. */
struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3308
3309 static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3310 {
3311         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3312         if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3313                 return -EINVAL;
3314
3315         return safexcel_queue_req(&req->base, aead_request_ctx(req),
3316                                   SAFEXCEL_ENCRYPT);
3317 }
3318
3319 static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3320 {
3321         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3322
3323         /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3324         if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3325                 return -EINVAL;
3326
3327         return safexcel_queue_req(&req->base, aead_request_ctx(req),
3328                                   SAFEXCEL_DECRYPT);
3329 }
3330
3331 static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3332 {
3333         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3334
3335         safexcel_aead_cra_init(tfm);
3336         ctx->alg = SAFEXCEL_SM4;
3337         ctx->blocksz = SM4_BLOCK_SIZE;
3338         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3339         ctx->state_sz = SHA1_DIGEST_SIZE;
3340         return 0;
3341 }
3342
/* authenc(hmac(sha1),cbc(sm4)): SW blocksize check, no fallback needed. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3368
3369 static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3370                                          const u8 *key, unsigned int len)
3371 {
3372         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3373         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3374
3375         /* Keep fallback cipher synchronized */
3376         return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3377                safexcel_aead_setkey(ctfm, key, len);
3378 }
3379
3380 static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
3381                                               unsigned int authsize)
3382 {
3383         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3384         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3385
3386         /* Keep fallback cipher synchronized */
3387         return crypto_aead_setauthsize(ctx->fback, authsize);
3388 }
3389
/*
 * Re-route an entire AEAD request to the SW fallback cipher. The
 * subrequest reuses this request's context memory, which was sized by
 * safexcel_aead_fallback_cra_init() to hold it.
 */
static int safexcel_aead_fallback_crypt(struct aead_request *req,
					enum safexcel_cipher_direction dir)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);

	/* Mirror src/dst/IV/AAD and completion info onto the subrequest */
	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir ==  SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
3409
/*
 * Encrypt for authenc(hmac(sm3),cbc(sm4)): check blocksize in SW, queue
 * to HW when there is any input, otherwise (zero AAD and zero payload)
 * fall back to the SW implementation, which the HW cannot handle.
 */
static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
}
3423
/*
 * Decrypt for authenc(hmac(sm3),cbc(sm4)): ciphertext minus the auth tag
 * must be block aligned (SW check); queue to HW when there is any real
 * input, otherwise use the SW fallback for the zero-length corner case.
 */
static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
		/* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
}
3439
3440 static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3441 {
3442         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3443
3444         safexcel_aead_fallback_cra_init(tfm);
3445         ctx->alg = SAFEXCEL_SM4;
3446         ctx->blocksz = SM4_BLOCK_SIZE;
3447         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3448         ctx->state_sz = SM3_DIGEST_SIZE;
3449         return 0;
3450 }
3451
/* authenc(hmac(sm3),cbc(sm4)): needs SW fallback for zero-length input. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3479
3480 static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
3481 {
3482         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3483
3484         safexcel_aead_sm4cbc_sha1_cra_init(tfm);
3485         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3486         return 0;
3487 }
3488
/* authenc(hmac(sha1),rfc3686(ctr(sm4))): stream mode, no SW fallback. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3514
3515 static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
3516 {
3517         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3518
3519         safexcel_aead_sm4cbc_sm3_cra_init(tfm);
3520         ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3521         return 0;
3522 }
3523
/* authenc(hmac(sm3),rfc3686(ctr(sm4))): stream mode, no SW fallback path. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3549
3550 static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3551                                        unsigned int len)
3552 {
3553         struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3554         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3555
3556         /* last 4 bytes of key are the nonce! */
3557         ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3558
3559         len -= CTR_RFC3686_NONCE_SIZE;
3560         return safexcel_aead_gcm_setkey(ctfm, key, len);
3561 }
3562
/* RFC4106 restricts the ICV length; delegate validation to the core helper. */
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	int ret;

	ret = crypto_rfc4106_check_authsize(authsize);
	return ret;
}
3568
3569 static int safexcel_rfc4106_encrypt(struct aead_request *req)
3570 {
3571         return crypto_ipsec_check_assoclen(req->assoclen) ?:
3572                safexcel_aead_encrypt(req);
3573 }
3574
3575 static int safexcel_rfc4106_decrypt(struct aead_request *req)
3576 {
3577         return crypto_ipsec_check_assoclen(req->assoclen) ?:
3578                safexcel_aead_decrypt(req);
3579 }
3580
3581 static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
3582 {
3583         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3584         int ret;
3585
3586         ret = safexcel_aead_gcm_cra_init(tfm);
3587         ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3588         ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3589         return ret;
3590 }
3591
3592 struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3593         .type = SAFEXCEL_ALG_TYPE_AEAD,
3594         .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3595         .alg.aead = {
3596                 .setkey = safexcel_rfc4106_gcm_setkey,
3597                 .setauthsize = safexcel_rfc4106_gcm_setauthsize,
3598                 .encrypt = safexcel_rfc4106_encrypt,
3599                 .decrypt = safexcel_rfc4106_decrypt,
3600                 .ivsize = GCM_RFC4106_IV_SIZE,
3601                 .maxauthsize = GHASH_DIGEST_SIZE,
3602                 .base = {
3603                         .cra_name = "rfc4106(gcm(aes))",
3604                         .cra_driver_name = "safexcel-rfc4106-gcm-aes",
3605                         .cra_priority = SAFEXCEL_CRA_PRIORITY,
3606                         .cra_flags = CRYPTO_ALG_ASYNC |
3607                                      CRYPTO_ALG_ALLOCATES_MEMORY |
3608                                      CRYPTO_ALG_KERN_DRIVER_ONLY,
3609                         .cra_blocksize = 1,
3610                         .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3611                         .cra_alignmask = 0,
3612                         .cra_init = safexcel_rfc4106_gcm_cra_init,
3613                         .cra_exit = safexcel_aead_gcm_cra_exit,
3614                 },
3615         },
3616 };
3617
3618 static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3619                                             unsigned int authsize)
3620 {
3621         if (authsize != GHASH_DIGEST_SIZE)
3622                 return -EINVAL;
3623
3624         return 0;
3625 }
3626
3627 static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
3628 {
3629         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3630         int ret;
3631
3632         ret = safexcel_aead_gcm_cra_init(tfm);
3633         ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
3634         return ret;
3635 }
3636
3637 struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3638         .type = SAFEXCEL_ALG_TYPE_AEAD,
3639         .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3640         .alg.aead = {
3641                 .setkey = safexcel_rfc4106_gcm_setkey,
3642                 .setauthsize = safexcel_rfc4543_gcm_setauthsize,
3643                 .encrypt = safexcel_rfc4106_encrypt,
3644                 .decrypt = safexcel_rfc4106_decrypt,
3645                 .ivsize = GCM_RFC4543_IV_SIZE,
3646                 .maxauthsize = GHASH_DIGEST_SIZE,
3647                 .base = {
3648                         .cra_name = "rfc4543(gcm(aes))",
3649                         .cra_driver_name = "safexcel-rfc4543-gcm-aes",
3650                         .cra_priority = SAFEXCEL_CRA_PRIORITY,
3651                         .cra_flags = CRYPTO_ALG_ASYNC |
3652                                      CRYPTO_ALG_ALLOCATES_MEMORY |
3653                                      CRYPTO_ALG_KERN_DRIVER_ONLY,
3654                         .cra_blocksize = 1,
3655                         .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3656                         .cra_alignmask = 0,
3657                         .cra_init = safexcel_rfc4543_gcm_cra_init,
3658                         .cra_exit = safexcel_aead_gcm_cra_exit,
3659                 },
3660         },
3661 };
3662
/*
 * rfc4309(ccm(aes)) setkey: build the CCM B0 flags/nonce prefix in
 * ctx->nonce byte-by-byte, then strip the 3-byte salt off the key and
 * hand the remaining AES key to the plain CCM setkey.
 *
 * NOTE: ctx->nonce is a u32 written through byte pointers here — byte 0
 * is the CCM L field, bytes 1..3 the RFC4309 salt; the in-memory byte
 * order (not the integer value) is what the engine consumes.
 */
static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}
3679
/*
 * Validate the requested ICV length for rfc4309(ccm(aes)).
 * RFC 4309 only permits 8, 12 or 16 byte authentication tags.
 */
static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	if (authsize != 8 && authsize != 12 && authsize != 16)
		return -EINVAL;

	return 0;
}
3695
3696 static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3697 {
3698         struct safexcel_cipher_req *creq = aead_request_ctx(req);
3699
3700         /* Borrowed from crypto/ccm.c */
3701         if (req->assoclen != 16 && req->assoclen != 20)
3702                 return -EINVAL;
3703
3704         return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3705 }
3706
3707 static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3708 {
3709         struct safexcel_cipher_req *creq = aead_request_ctx(req);
3710
3711         /* Borrowed from crypto/ccm.c */
3712         if (req->assoclen != 16 && req->assoclen != 20)
3713                 return -EINVAL;
3714
3715         return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3716 }
3717
3718 static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
3719 {
3720         struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3721         int ret;
3722
3723         ret = safexcel_aead_ccm_cra_init(tfm);
3724         ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3725         ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3726         return ret;
3727 }
3728
/*
 * rfc4309(ccm(aes)): AES-CCM for IPsec ESP with implicit 3-byte salt.
 * Stream mode (counter based), hence cra_blocksize = 1.
 */
struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};