// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Linear symmetric key cipher operations.
 *
 * Generic encrypt/decrypt wrapper for ciphers.
 *
 * Copyright (c) 2023 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/export.h>
#include <linux/kernel.h>
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "skcipher.h"

static inline struct crypto_lskcipher *__crypto_lskcipher_cast(
	struct crypto_tfm *tfm)
{
	return container_of(tfm, struct crypto_lskcipher, base);
}

static inline struct lskcipher_alg *__crypto_lskcipher_alg(
	struct crypto_alg *alg)
{
	return container_of(alg, struct lskcipher_alg, co.base);
}

static inline struct crypto_istat_cipher *lskcipher_get_stat(
	struct lskcipher_alg *alg)
{
	return skcipher_get_stat_common(&alg->co);
}

static inline int crypto_lskcipher_errstat(struct lskcipher_alg *alg, int err)
{
	struct crypto_istat_cipher *istat = lskcipher_get_stat(alg);

	if (!IS_ENABLED(CONFIG_CRYPTO_STATS))
		return err;

	if (err)
		atomic64_inc(&istat->err_cnt);

	return err;
}

static int lskcipher_setkey_unaligned(struct crypto_lskcipher *tfm,
				      const u8 *key, unsigned int keylen)
{
	unsigned long alignmask = crypto_lskcipher_alignmask(tfm);
	struct lskcipher_alg *cipher = crypto_lskcipher_alg(tfm);
	u8 *buffer, *alignbuffer;
	unsigned long absize;
	int ret;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = cipher->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return ret;
}

int crypto_lskcipher_setkey(struct crypto_lskcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	unsigned long alignmask = crypto_lskcipher_alignmask(tfm);
	struct lskcipher_alg *cipher = crypto_lskcipher_alg(tfm);

	if (keylen < cipher->co.min_keysize || keylen > cipher->co.max_keysize)
		return -EINVAL;

	if ((unsigned long)key & alignmask)
		return lskcipher_setkey_unaligned(tfm, key, keylen);

	return cipher->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_lskcipher_setkey);

static int crypto_lskcipher_crypt_unaligned(
	struct crypto_lskcipher *tfm, const u8 *src, u8 *dst, unsigned len,
	u8 *iv, int (*crypt)(struct crypto_lskcipher *tfm, const u8 *src,
			     u8 *dst, unsigned len, u8 *iv, u32 flags))
{
	unsigned statesize = crypto_lskcipher_statesize(tfm);
	unsigned ivsize = crypto_lskcipher_ivsize(tfm);
	unsigned bs = crypto_lskcipher_blocksize(tfm);
	unsigned cs = crypto_lskcipher_chunksize(tfm);
	int err;
	u8 *tiv;
	u8 *p;

	BUILD_BUG_ON(MAX_CIPHER_BLOCKSIZE > PAGE_SIZE ||
		     MAX_CIPHER_ALIGNMASK >= PAGE_SIZE);

	tiv = kmalloc(PAGE_SIZE, GFP_ATOMIC);
	if (!tiv)
		return -ENOMEM;

	memcpy(tiv, iv, ivsize + statesize);

	p = kmalloc(PAGE_SIZE, GFP_ATOMIC);
	err = -ENOMEM;
	if (!p)
		goto out;

	/* Bounce the data through an aligned scratch page, chunk by chunk. */
	while (len >= bs) {
		unsigned chunk = min((unsigned)PAGE_SIZE, len);

		if (chunk > cs)
			chunk &= ~(cs - 1);

		memcpy(p, src, chunk);
		err = crypt(tfm, p, p, chunk, tiv, CRYPTO_LSKCIPHER_FLAG_FINAL);
		if (err)
			goto out;

		memcpy(dst, p, chunk);

		src += chunk;
		dst += chunk;
		len -= chunk;
	}

	err = len ? -EINVAL : 0;

out:
	memcpy(iv, tiv, ivsize + statesize);
	kfree_sensitive(p);
	kfree_sensitive(tiv);
	return err;
}

static int crypto_lskcipher_crypt(struct crypto_lskcipher *tfm, const u8 *src,
				  u8 *dst, unsigned len, u8 *iv,
				  int (*crypt)(struct crypto_lskcipher *tfm,
					       const u8 *src, u8 *dst,
					       unsigned len, u8 *iv,
					       u32 flags))
{
	unsigned long alignmask = crypto_lskcipher_alignmask(tfm);
	struct lskcipher_alg *alg = crypto_lskcipher_alg(tfm);
	int ret;

	if (((unsigned long)src | (unsigned long)dst | (unsigned long)iv) &
	    alignmask) {
		ret = crypto_lskcipher_crypt_unaligned(tfm, src, dst, len, iv,
						       crypt);
		goto out;
	}

	ret = crypt(tfm, src, dst, len, iv, CRYPTO_LSKCIPHER_FLAG_FINAL);

out:
	return crypto_lskcipher_errstat(alg, ret);
}

int crypto_lskcipher_encrypt(struct crypto_lskcipher *tfm, const u8 *src,
			     u8 *dst, unsigned len, u8 *iv)
{
	struct lskcipher_alg *alg = crypto_lskcipher_alg(tfm);

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_cipher *istat = lskcipher_get_stat(alg);

		atomic64_inc(&istat->encrypt_cnt);
		atomic64_add(len, &istat->encrypt_tlen);
	}

	return crypto_lskcipher_crypt(tfm, src, dst, len, iv, alg->encrypt);
}
EXPORT_SYMBOL_GPL(crypto_lskcipher_encrypt);

int crypto_lskcipher_decrypt(struct crypto_lskcipher *tfm, const u8 *src,
			     u8 *dst, unsigned len, u8 *iv)
{
	struct lskcipher_alg *alg = crypto_lskcipher_alg(tfm);

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_cipher *istat = lskcipher_get_stat(alg);

		atomic64_inc(&istat->decrypt_cnt);
		atomic64_add(len, &istat->decrypt_tlen);
	}

	return crypto_lskcipher_crypt(tfm, src, dst, len, iv, alg->decrypt);
}
EXPORT_SYMBOL_GPL(crypto_lskcipher_decrypt);

static int crypto_lskcipher_crypt_sg(struct skcipher_request *req,
				     int (*crypt)(struct crypto_lskcipher *tfm,
						  const u8 *src, u8 *dst,
						  unsigned len, u8 *ivs,
						  u32 flags))
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct crypto_lskcipher **ctx = crypto_skcipher_ctx(skcipher);
	u8 *ivs = skcipher_request_ctx(req);
	struct crypto_lskcipher *tfm = *ctx;
	struct skcipher_walk walk;
	unsigned ivsize;
	u32 flags;
	int err;

	ivsize = crypto_lskcipher_ivsize(tfm);
	ivs = PTR_ALIGN(ivs, crypto_skcipher_alignmask(skcipher) + 1);

	flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;

	/* Load the IV into the aligned request context unless continuing. */
	if (req->base.flags & CRYPTO_SKCIPHER_REQ_CONT)
		flags |= CRYPTO_LSKCIPHER_FLAG_CONT;
	else
		memcpy(ivs, req->iv, ivsize);

	if (!(req->base.flags & CRYPTO_SKCIPHER_REQ_NOTFINAL))
		flags |= CRYPTO_LSKCIPHER_FLAG_FINAL;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		err = crypt(tfm, walk.src.virt.addr, walk.dst.virt.addr,
			    walk.nbytes, ivs,
			    flags & ~(walk.nbytes == walk.total ?
			    0 : CRYPTO_LSKCIPHER_FLAG_FINAL));
		err = skcipher_walk_done(&walk, err);
		flags |= CRYPTO_LSKCIPHER_FLAG_CONT;
	}

	if (flags & CRYPTO_LSKCIPHER_FLAG_FINAL)
		memcpy(req->iv, ivs, ivsize);

	return err;
}

int crypto_lskcipher_encrypt_sg(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct crypto_lskcipher **ctx = crypto_skcipher_ctx(skcipher);
	struct lskcipher_alg *alg = crypto_lskcipher_alg(*ctx);

	return crypto_lskcipher_crypt_sg(req, alg->encrypt);
}

int crypto_lskcipher_decrypt_sg(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct crypto_lskcipher **ctx = crypto_skcipher_ctx(skcipher);
	struct lskcipher_alg *alg = crypto_lskcipher_alg(*ctx);

	return crypto_lskcipher_crypt_sg(req, alg->decrypt);
}

static void crypto_lskcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_lskcipher *skcipher = __crypto_lskcipher_cast(tfm);
	struct lskcipher_alg *alg = crypto_lskcipher_alg(skcipher);

	alg->exit(skcipher);
}

static int crypto_lskcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_lskcipher *skcipher = __crypto_lskcipher_cast(tfm);
	struct lskcipher_alg *alg = crypto_lskcipher_alg(skcipher);

	if (alg->exit)
		skcipher->base.exit = crypto_lskcipher_exit_tfm;

	if (alg->init)
		return alg->init(skcipher);

	return 0;
}

static void crypto_lskcipher_free_instance(struct crypto_instance *inst)
{
	struct lskcipher_instance *skcipher =
		container_of(inst, struct lskcipher_instance, s.base);

	skcipher->free(skcipher);
}

static void __maybe_unused crypto_lskcipher_show(
	struct seq_file *m, struct crypto_alg *alg)
{
	struct lskcipher_alg *skcipher = __crypto_lskcipher_alg(alg);

	seq_printf(m, "type         : lskcipher\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "min keysize  : %u\n", skcipher->co.min_keysize);
	seq_printf(m, "max keysize  : %u\n", skcipher->co.max_keysize);
	seq_printf(m, "ivsize       : %u\n", skcipher->co.ivsize);
	seq_printf(m, "chunksize    : %u\n", skcipher->co.chunksize);
	seq_printf(m, "statesize    : %u\n", skcipher->co.statesize);
}

static int __maybe_unused crypto_lskcipher_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct lskcipher_alg *skcipher = __crypto_lskcipher_alg(alg);
	struct crypto_report_blkcipher rblkcipher;

	memset(&rblkcipher, 0, sizeof(rblkcipher));

	strscpy(rblkcipher.type, "lskcipher", sizeof(rblkcipher.type));
	strscpy(rblkcipher.geniv, "<none>", sizeof(rblkcipher.geniv));

	rblkcipher.blocksize = alg->cra_blocksize;
	rblkcipher.min_keysize = skcipher->co.min_keysize;
	rblkcipher.max_keysize = skcipher->co.max_keysize;
	rblkcipher.ivsize = skcipher->co.ivsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
		       sizeof(rblkcipher), &rblkcipher);
}

static int __maybe_unused crypto_lskcipher_report_stat(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct lskcipher_alg *skcipher = __crypto_lskcipher_alg(alg);
	struct crypto_istat_cipher *istat;
	struct crypto_stat_cipher rcipher;

	istat = lskcipher_get_stat(skcipher);

	memset(&rcipher, 0, sizeof(rcipher));

	strscpy(rcipher.type, "cipher", sizeof(rcipher.type));

	rcipher.stat_encrypt_cnt = atomic64_read(&istat->encrypt_cnt);
	rcipher.stat_encrypt_tlen = atomic64_read(&istat->encrypt_tlen);
	rcipher.stat_decrypt_cnt = atomic64_read(&istat->decrypt_cnt);
	rcipher.stat_decrypt_tlen = atomic64_read(&istat->decrypt_tlen);
	rcipher.stat_err_cnt = atomic64_read(&istat->err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_CIPHER, sizeof(rcipher), &rcipher);
}

static const struct crypto_type crypto_lskcipher_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_lskcipher_init_tfm,
	.free = crypto_lskcipher_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_lskcipher_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_lskcipher_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
	.report_stat = crypto_lskcipher_report_stat,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_LSKCIPHER,
	.tfmsize = offsetof(struct crypto_lskcipher, base),
};

static void crypto_lskcipher_exit_tfm_sg(struct crypto_tfm *tfm)
{
	struct crypto_lskcipher **ctx = crypto_tfm_ctx(tfm);

	crypto_free_lskcipher(*ctx);
}

int crypto_init_lskcipher_ops_sg(struct crypto_tfm *tfm)
{
	struct crypto_lskcipher **ctx = crypto_tfm_ctx(tfm);
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_lskcipher *skcipher;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	skcipher = crypto_create_tfm(calg, &crypto_lskcipher_type);
	if (IS_ERR(skcipher)) {
		crypto_mod_put(calg);
		return PTR_ERR(skcipher);
	}

	*ctx = skcipher;
	tfm->exit = crypto_lskcipher_exit_tfm_sg;

	return 0;
}

int crypto_grab_lskcipher(struct crypto_lskcipher_spawn *spawn,
			  struct crypto_instance *inst,
			  const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_lskcipher_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_lskcipher);

struct crypto_lskcipher *crypto_alloc_lskcipher(const char *alg_name,
						u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_lskcipher_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_lskcipher);

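/*
 * Example (illustrative sketch, not a normative part of this API): a typical
 * caller allocates a tfm by algorithm name, sets a key and then performs a
 * linear, in-place encryption in a single call.  The "cbc(aes)" name and the
 * key, IV and buffer sizes below are assumptions chosen for the example.
 *
 *	struct crypto_lskcipher *tfm;
 *	u8 key[16] = {}, iv[16] = {};
 *	u8 buf[64] = {};
 *	int err;
 *
 *	tfm = crypto_alloc_lskcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_lskcipher_setkey(tfm, key, sizeof(key));
 *	if (!err)
 *		err = crypto_lskcipher_encrypt(tfm, buf, buf, sizeof(buf), iv);
 *
 *	crypto_free_lskcipher(tfm);
 *	return err;
 */
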
static int lskcipher_prepare_alg(struct lskcipher_alg *alg)
{
	struct crypto_alg *base = &alg->co.base;
	int err;

	err = skcipher_prepare_alg_common(&alg->co);
	if (err)
		return err;

	/* The chunk size must be a power of two. */
	if (alg->co.chunksize & (alg->co.chunksize - 1))
		return -EINVAL;

	base->cra_type = &crypto_lskcipher_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_LSKCIPHER;

	return 0;
}

int crypto_register_lskcipher(struct lskcipher_alg *alg)
{
	struct crypto_alg *base = &alg->co.base;
	int err;

	err = lskcipher_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_lskcipher);

void crypto_unregister_lskcipher(struct lskcipher_alg *alg)
{
	crypto_unregister_alg(&alg->co.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_lskcipher);

int crypto_register_lskciphers(struct lskcipher_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_lskcipher(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	/* Unwind the registrations that already succeeded. */
	for (--i; i >= 0; --i)
		crypto_unregister_lskcipher(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_lskciphers);

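/*
 * Example (hedged sketch): a driver providing several lskcipher algorithms
 * would typically register them as an array from its module init hook and
 * drop them again on exit.  The my_algs[] array and module hook names are
 * assumptions invented for this illustration.
 *
 *	static struct lskcipher_alg my_algs[] = { ... };
 *
 *	static int __init my_mod_init(void)
 *	{
 *		return crypto_register_lskciphers(my_algs, ARRAY_SIZE(my_algs));
 *	}
 *
 *	static void __exit my_mod_exit(void)
 *	{
 *		crypto_unregister_lskciphers(my_algs, ARRAY_SIZE(my_algs));
 *	}
 */
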
void crypto_unregister_lskciphers(struct lskcipher_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_lskcipher(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_lskciphers);

int lskcipher_register_instance(struct crypto_template *tmpl,
				struct lskcipher_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = lskcipher_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, lskcipher_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(lskcipher_register_instance);

static int lskcipher_setkey_simple(struct crypto_lskcipher *tfm, const u8 *key,
				   unsigned int keylen)
{
	struct crypto_lskcipher *cipher = lskcipher_cipher_simple(tfm);

	crypto_lskcipher_clear_flags(cipher, CRYPTO_TFM_REQ_MASK);
	crypto_lskcipher_set_flags(cipher, crypto_lskcipher_get_flags(tfm) &
				   CRYPTO_TFM_REQ_MASK);
	return crypto_lskcipher_setkey(cipher, key, keylen);
}

static int lskcipher_init_tfm_simple(struct crypto_lskcipher *tfm)
{
	struct lskcipher_instance *inst = lskcipher_alg_instance(tfm);
	struct crypto_lskcipher **ctx = crypto_lskcipher_ctx(tfm);
	struct crypto_lskcipher_spawn *spawn;
	struct crypto_lskcipher *cipher;

	spawn = lskcipher_instance_ctx(inst);
	cipher = crypto_spawn_lskcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	*ctx = cipher;
	return 0;
}

static void lskcipher_exit_tfm_simple(struct crypto_lskcipher *tfm)
{
	struct crypto_lskcipher **ctx = crypto_lskcipher_ctx(tfm);

	crypto_free_lskcipher(*ctx);
}

static void lskcipher_free_instance_simple(struct lskcipher_instance *inst)
{
	crypto_drop_lskcipher(lskcipher_instance_ctx(inst));
	kfree(inst);
}

/**
 * lskcipher_alloc_instance_simple - allocate instance of simple block cipher
 *
 * Allocate an lskcipher_instance for a simple block cipher mode of operation,
 * e.g. cbc or ecb.  The instance context will have just a single crypto_spawn,
 * that for the underlying cipher.  The {min,max}_keysize, ivsize, blocksize,
 * alignmask, and priority are set from the underlying cipher but can be
 * overridden if needed.  The tfm context defaults to
 * struct crypto_lskcipher *, and default ->setkey(), ->init(), and
 * ->exit() methods are installed.
 *
 * @tmpl: the template being instantiated
 * @tb: the template parameters
 *
 * Return: a pointer to the new instance, or an ERR_PTR().  The caller still
 *	   needs to register the instance.
 */
struct lskcipher_instance *lskcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb)
{
	u32 mask;
	struct lskcipher_instance *inst;
	struct crypto_lskcipher_spawn *spawn;
	char ecb_name[CRYPTO_MAX_ALG_NAME];
	struct lskcipher_alg *cipher_alg;
	const char *cipher_name;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_LSKCIPHER, &mask);
	if (err)
		return ERR_PTR(err);

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return ERR_CAST(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = lskcipher_instance_ctx(inst);
	err = crypto_grab_lskcipher(spawn,
				    lskcipher_crypto_instance(inst),
				    cipher_name, 0, mask);

	/* Fall back to the ecb-wrapped cipher unless nesting under ecb. */
	ecb_name[0] = 0;
	if (err == -ENOENT && !!memcmp(tmpl->name, "ecb", 4)) {
		err = -ENAMETOOLONG;
		if (snprintf(ecb_name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
			     cipher_name) >= CRYPTO_MAX_ALG_NAME)
			goto err_free_inst;

		err = crypto_grab_lskcipher(spawn,
					    lskcipher_crypto_instance(inst),
					    ecb_name, 0, mask);
	}

	if (err)
		goto err_free_inst;

	cipher_alg = crypto_lskcipher_spawn_alg(spawn);

	err = crypto_inst_setname(lskcipher_crypto_instance(inst), tmpl->name,
				  &cipher_alg->co.base);
	if (err)
		goto err_free_inst;

	if (ecb_name[0]) {
		int len;

		err = -EINVAL;
		len = strscpy(ecb_name, &cipher_alg->co.base.cra_name[4],
			      sizeof(ecb_name));
		if (len < 2)
			goto err_free_inst;

		if (ecb_name[len - 1] != ')')
			goto err_free_inst;

		ecb_name[len - 1] = 0;

		err = -ENAMETOOLONG;
		if (snprintf(inst->alg.co.base.cra_name, CRYPTO_MAX_ALG_NAME,
			     "%s(%s)", tmpl->name, ecb_name) >=
		    CRYPTO_MAX_ALG_NAME)
			goto err_free_inst;

		if (strcmp(ecb_name, cipher_name) &&
		    snprintf(inst->alg.co.base.cra_driver_name,
			     CRYPTO_MAX_ALG_NAME,
			     "%s(%s)", tmpl->name, cipher_name) >=
		    CRYPTO_MAX_ALG_NAME)
			goto err_free_inst;
	}

	/* Don't allow nesting. */
	err = -ELOOP;
	if ((cipher_alg->co.base.cra_flags & CRYPTO_ALG_INSTANCE))
		goto err_free_inst;

	err = -EINVAL;
	if (cipher_alg->co.ivsize)
		goto err_free_inst;

	inst->free = lskcipher_free_instance_simple;

	/* Default algorithm properties, can be overridden */
	inst->alg.co.base.cra_blocksize = cipher_alg->co.base.cra_blocksize;
	inst->alg.co.base.cra_alignmask = cipher_alg->co.base.cra_alignmask;
	inst->alg.co.base.cra_priority = cipher_alg->co.base.cra_priority;
	inst->alg.co.min_keysize = cipher_alg->co.min_keysize;
	inst->alg.co.max_keysize = cipher_alg->co.max_keysize;
	inst->alg.co.ivsize = cipher_alg->co.base.cra_blocksize;
	inst->alg.co.statesize = cipher_alg->co.statesize;

	/* Use struct crypto_lskcipher * by default, can be overridden */
	inst->alg.co.base.cra_ctxsize = sizeof(struct crypto_lskcipher *);
	inst->alg.setkey = lskcipher_setkey_simple;
	inst->alg.init = lskcipher_init_tfm_simple;
	inst->alg.exit = lskcipher_exit_tfm_simple;

	return inst;

err_free_inst:
	lskcipher_free_instance_simple(inst);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(lskcipher_alloc_instance_simple);

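/*
 * Example (hedged sketch): a block cipher mode template would typically call
 * lskcipher_alloc_instance_simple() from its ->create() hook, fill in the
 * mode-specific handlers and register the instance.  The crypto_xyz_* names
 * below are assumptions invented for this illustration.
 *
 *	static int crypto_xyz_create(struct crypto_template *tmpl,
 *				     struct rtattr **tb)
 *	{
 *		struct lskcipher_instance *inst;
 *		int err;
 *
 *		inst = lskcipher_alloc_instance_simple(tmpl, tb);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		// Override defaults as needed, e.g. give the mode an IV.
 *		inst->alg.co.ivsize = inst->alg.co.base.cra_blocksize;
 *		inst->alg.encrypt = crypto_xyz_encrypt;
 *		inst->alg.decrypt = crypto_xyz_decrypt;
 *
 *		err = lskcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *		return err;
 *	}
 */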