/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005, 2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *		Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"

#define AES_KEYLEN_128		1
#define AES_KEYLEN_192		2
#define AES_KEYLEN_256		4

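/*
 * ctrblk is a page-sized counter buffer allocated at module init for the
 * CTR-mode batching further down; keylen_flag collects one AES_KEYLEN_*
 * bit per key length the CPU supports, as probed in aes_s390_init().
 */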
static u8 *ctrblk;
static char keylen_flag;

struct s390_aes_ctx {
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
	union {
		struct crypto_blkcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};

struct pcc_param {
	u8 key[32];
	u8 tweak[16];
	u8 block[16];
	u8 bit[16];
	u8 xts[16];
};

struct s390_xts_ctx {
	u8 key[32];
	u8 xts_param[16];
	struct pcc_param pcc;
	long enc;
	long dec;
	int key_len;
	struct crypto_blkcipher *fallback;
};

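/*
 * Both contexts keep the CPACF function codes (enc/dec) chosen at setkey
 * time next to the raw key material.  The fallback members point to a
 * software implementation of the same algorithm, used whenever the CPU
 * lacks the required key length (and, for XTS, for the 48-byte key size
 * that has no machine instruction at all).
 */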
/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not (and the software
 * fallback is required), or a negative number if the key size is invalid.
 */
static int need_fallback(unsigned int key_len)
{
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			return 1;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			return 1;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			return 1;
		break;
	default:
		return -1;
	}
	return 0;
}

static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	ret = need_fallback(key_len);
	if (ret < 0) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	sctx->key_len = key_len;
	if (!ret) {
		memcpy(sctx->key, in_key, key_len);
		return 0;
	}

	return setkey_fallback_cip(tfm, in_key, key_len);
}

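/*
 * The single-block paths below drive the KM (cipher message) instruction
 * via crypt_s390_km(); the function code selects AES-128/192/256 and the
 * parameter block is simply the raw key.  KM reports the number of bytes
 * processed, which the multi-block callers later compare against the
 * requested length.
 */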
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}

static struct crypto_alg aes_alg = {
	.cra_name		=	"aes",
	.cra_driver_name	=	"aes-s390",
	.cra_priority		=	CRYPT_S390_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_cip,
	.cra_exit		=	fallback_exit_cip,
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
			.cia_max_keysize	=	AES_MAX_KEY_SIZE,
			.cia_setkey		=	aes_set_key,
			.cia_encrypt		=	aes_encrypt,
			.cia_decrypt		=	aes_decrypt,
		}
	}
};

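/*
 * A minimal usage sketch (hypothetical caller, not part of this driver):
 * users reach this implementation through the generic crypto API, which
 * picks "aes-s390" over "aes-generic" because of its higher cra_priority:
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_cipher_setkey(tfm, key, AES_MIN_KEY_SIZE);
 *		crypto_cipher_encrypt_one(tfm, dst, src);
 *		crypto_free_cipher(tfm);
 *	}
 */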
static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
		unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int fallback_blk_dec(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

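/*
 * Both helpers above temporarily swap desc->tfm to the software blkcipher,
 * run the request through the generic implementation, and restore the
 * original tfm before returning.
 */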
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}

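/*
 * The walk above presents virtually mapped, contiguous chunks; only whole
 * AES blocks are handed to KM, and any partial remainder
 * (nbytes & (AES_BLOCK_SIZE - 1)) is passed back to blkcipher_walk_done()
 * so the blkcipher layer can report it to the caller.
 */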
static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(sctx->fallback.blk);
	sctx->fallback.blk = NULL;
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name		=	"ecb(aes)",
	.cra_driver_name	=	"ecb-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.setkey			=	ecb_aes_set_key,
			.encrypt		=	ecb_aes_encrypt,
			.decrypt		=	ecb_aes_decrypt,
		}
	}
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];
	} param;

	if (!nbytes)
		goto out;

	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, &param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);

out:
	return ret;
}

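/*
 * KMC (cipher message with chaining) takes a parameter block of the form
 * IV || key and updates the chaining value in place, so copying param.iv
 * back into walk->iv after the loop keeps the API's notion of the IV in
 * sync for chained requests.
 */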
static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		=	"cbc(aes)",
	.cra_driver_name	=	"cbc-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	cbc_aes_set_key,
			.encrypt		=	cbc_aes_encrypt,
			.decrypt		=	cbc_aes_decrypt,
		}
	}
};

static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	unsigned int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	unsigned int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	switch (key_len) {
	case 32:
		xts_ctx->enc = KM_XTS_128_ENCRYPT;
		xts_ctx->dec = KM_XTS_128_DECRYPT;
		memcpy(xts_ctx->key + 16, in_key, 16);
		memcpy(xts_ctx->pcc.key + 16, in_key + 16, 16);
		break;
	case 48:
		xts_ctx->enc = 0;
		xts_ctx->dec = 0;
		xts_fallback_setkey(tfm, in_key, key_len);
		break;
	case 64:
		xts_ctx->enc = KM_XTS_256_ENCRYPT;
		xts_ctx->dec = KM_XTS_256_DECRYPT;
		memcpy(xts_ctx->key, in_key, 32);
		memcpy(xts_ctx->pcc.key, in_key + 32, 32);
		break;
	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	xts_ctx->key_len = key_len;
	return 0;
}

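/*
 * An XTS key is really two AES keys: the data key followed by the tweak
 * key.  For XTS-128 (32-byte key) the CPACF parameter blocks keep them in
 * the upper 16 bytes of xts_ctx->key and xts_ctx->pcc.key, hence the
 * "+ 16" offsets above; XTS-256 (64-byte key) fills all 32 bytes.  The
 * 48-byte case (two 192-bit keys) has no machine instruction and is
 * handled entirely by the software fallback.
 */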
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_xts_ctx *xts_ctx,
			 struct blkcipher_walk *walk)
{
	unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	unsigned int n;
	u8 *in, *out;
	void *param;

	if (!nbytes)
		goto out;

	memset(xts_ctx->pcc.block, 0, sizeof(xts_ctx->pcc.block));
	memset(xts_ctx->pcc.bit, 0, sizeof(xts_ctx->pcc.bit));
	memset(xts_ctx->pcc.xts, 0, sizeof(xts_ctx->pcc.xts));
	memcpy(xts_ctx->pcc.tweak, walk->iv, sizeof(xts_ctx->pcc.tweak));
	param = xts_ctx->pcc.key + offset;
	ret = crypt_s390_pcc(func, param);
	if (ret < 0)
		return -EIO;

	memcpy(xts_ctx->xts_param, xts_ctx->pcc.xts, 16);
	param = xts_ctx->key + offset;
	do {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
out:
	return ret;
}

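/*
 * PCC (perform cryptographic computation) derives the XTS parameter from
 * the tweak in pcc.tweak (block-sequence and bit-index fields zeroed)
 * using the tweak half of the key.  The result in pcc.xts is copied to
 * xts_param, which directly follows key[] in the context layout, so that
 * xts_ctx->key + offset addresses a contiguous key || XTS-parameter block
 * for the KM loop.
 */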
static int xts_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_encrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
}

static int xts_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_decrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
}

static int xts_fallback_init(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
	}
	return 0;
}

static void xts_fallback_exit(struct crypto_tfm *tfm)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(xts_ctx->fallback);
	xts_ctx->fallback = NULL;
}

static struct crypto_alg xts_aes_alg = {
	.cra_name		=	"xts(aes)",
	.cra_driver_name	=	"xts-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_xts_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	xts_fallback_init,
	.cra_exit		=	xts_fallback_exit,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	2 * AES_MIN_KEY_SIZE,
			.max_keysize		=	2 * AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	xts_aes_set_key,
			.encrypt		=	xts_aes_encrypt,
			.decrypt		=	xts_aes_decrypt,
		}
	}
};

static int xts_aes_alg_reg;

static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KMCTR_AES_128_ENCRYPT;
		sctx->dec = KMCTR_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMCTR_AES_192_ENCRYPT;
		sctx->dec = KMCTR_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMCTR_AES_256_ENCRYPT;
		sctx->dec = KMCTR_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	unsigned int i, n, nbytes;
	u8 buf[AES_BLOCK_SIZE];
	u8 *out, *in;

	if (!walk->nbytes)
		return ret;

	memcpy(ctrblk, walk->iv, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= AES_BLOCK_SIZE) {
			/* only use complete blocks, max. PAGE_SIZE */
			n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
						   nbytes & ~(AES_BLOCK_SIZE - 1);
			for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
				memcpy(ctrblk + i, ctrblk + i - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
				crypto_inc(ctrblk + i, AES_BLOCK_SIZE);
			}
			ret = crypt_s390_kmctr(func, sctx->key, out, in, n, ctrblk);
			if (ret < 0 || ret != n)
				return -EIO;
			if (n > AES_BLOCK_SIZE)
				memcpy(ctrblk, ctrblk + n - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(ctrblk, AES_BLOCK_SIZE);
			out += n;
			in += n;
			nbytes -= n;
		}
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = crypt_s390_kmctr(func, sctx->key, buf, in,
				       AES_BLOCK_SIZE, ctrblk);
		if (ret < 0 || ret != AES_BLOCK_SIZE)
			return -EIO;
		memcpy(out, buf, nbytes);
		crypto_inc(ctrblk, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}
	memcpy(walk->iv, ctrblk, AES_BLOCK_SIZE);
	return ret;
}

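/*
 * KMCTR consumes one counter value per block, so the inner loop first
 * expands the counter into the page-sized ctrblk buffer (each 16-byte
 * slot is the previous slot incremented) and then processes up to
 * PAGE_SIZE of payload with a single instruction.  The trailing partial
 * block is encrypted into a stack buffer, of which only nbytes are copied
 * out; this is what makes CTR usable as a stream mode.
 */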
static int ctr_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
}

static int ctr_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
}

static struct crypto_alg ctr_aes_alg = {
	.cra_name		=	"ctr(aes)",
	.cra_driver_name	=	"ctr-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	1,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	ctr_aes_set_key,
			.encrypt		=	ctr_aes_encrypt,
			.decrypt		=	ctr_aes_decrypt,
		}
	}
};

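/*
 * Note the block size of 1: CTR turns AES into a stream cipher, so the
 * blkcipher layer must not round request sizes to 16-byte blocks;
 * ctr_aes_crypt() handles the trailing partial block itself.
 */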
static int ctr_aes_alg_reg;

static int __init aes_s390_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&xts_aes_alg);
		if (ret)
			goto xts_aes_err;
		xts_aes_alg_reg = 1;
	}

	if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto ctr_aes_err;
		}
		ret = crypto_register_alg(&ctr_aes_alg);
		if (ret) {
			free_page((unsigned long) ctrblk);
			goto ctr_aes_err;
		}
		ctr_aes_alg_reg = 1;
	}

out:
	return ret;

ctr_aes_err:
	crypto_unregister_alg(&xts_aes_alg);
xts_aes_err:
	crypto_unregister_alg(&cbc_aes_alg);
cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}

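/*
 * Registration failures above unwind through the goto chain in reverse
 * order, so a partially completed init never leaves algorithms behind.
 */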
static void __exit aes_s390_fini(void)
{
	if (ctr_aes_alg_reg) {
		crypto_unregister_alg(&ctr_aes_alg);
		free_page((unsigned long) ctrblk);
	}
	if (xts_aes_alg_reg)
		crypto_unregister_alg(&xts_aes_alg);
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}

module_init(aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");