1 // SPDX-License-Identifier: GPL-2.0-or-later
3 * Scatterlist Cryptographic API.
5 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
6 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
7 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
9 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
10 * and Nettle, by Niels Möller.
13 #include <linux/err.h>
14 #include <linux/errno.h>
15 #include <linux/jump_label.h>
16 #include <linux/kernel.h>
17 #include <linux/kmod.h>
18 #include <linux/module.h>
19 #include <linux/param.h>
20 #include <linux/sched/signal.h>
21 #include <linux/slab.h>
22 #include <linux/string.h>
23 #include <linux/completion.h>
/* Global list of every registered algorithm; protected by crypto_alg_sem. */
26 LIST_HEAD(crypto_alg_list);
27 EXPORT_SYMBOL_GPL(crypto_alg_list);
28 DECLARE_RWSEM(crypto_alg_sem);
29 EXPORT_SYMBOL_GPL(crypto_alg_sem);
/* Notifier chain used to ask crypto managers to construct/test algorithms. */
31 BLOCKING_NOTIFIER_HEAD(crypto_chain);
32 EXPORT_SYMBOL_GPL(crypto_chain);
/* False until boot-time self-tests finish; checked in crypto_larval_wait(). */
34 DEFINE_STATIC_KEY_FALSE(crypto_boot_test_finished);
/* Forward declaration: crypto_larval_add() must be able to wait on larvals. */
36 static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);
/*
 * Take a reference on @alg while pinning its owning module.
 * Returns NULL when try_module_get() fails (module being unloaded).
 */
38 struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
40 return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
42 EXPORT_SYMBOL_GPL(crypto_mod_get);
/*
 * Drop a reference taken by crypto_mod_get().  Interior lines are elided
 * in this view; the visible code snapshots the owning module pointer,
 * presumably so the module ref can be released after the alg ref — verify.
 */
44 void crypto_mod_put(struct crypto_alg *alg)
46 struct module *module = alg->cra_module;
51 EXPORT_SYMBOL_GPL(crypto_mod_put);
/*
 * Find the best match for @name on crypto_alg_list; callers hold
 * crypto_alg_sem (see crypto_larval_add()/crypto_alg_lookup()).
 * An exact cra_driver_name match wins; otherwise the highest-priority
 * cra_name ("fuzzy") match is chosen.  A reference is taken on the
 * result via crypto_mod_get().  Several interior lines are elided here.
 */
53 static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
56 struct crypto_alg *q, *alg = NULL;
59 list_for_each_entry(q, &crypto_alg_list, cra_list) {
/* Skip dead/dying entries. */
62 if (crypto_is_moribund(q))
/* Type bits covered by the mask must match the request. */
65 if ((q->cra_flags ^ type) & mask)
/* Non-test larvals only match a request made with the same mask. */
68 if (crypto_is_larval(q) &&
69 !crypto_is_test_larval((struct crypto_larval *)q) &&
70 ((struct crypto_larval *)q)->mask != mask)
73 exact = !strcmp(q->cra_driver_name, name);
74 fuzzy = !strcmp(q->cra_name, name);
/* Keep only exact hits, or fuzzy hits that beat the best priority so far. */
75 if (!exact && !(fuzzy && q->cra_priority > best))
78 if (unlikely(!crypto_mod_get(q)))
81 best = q->cra_priority;
/*
 * cra_destroy handler for larvals: drop the reference on the adult
 * algorithm if one materialised.  The final free of the larval itself
 * is elided from this view — verify against the full file.
 */
93 static void crypto_larval_destroy(struct crypto_alg *alg)
95 struct crypto_larval *larval = (void *)alg;
97 BUG_ON(!crypto_is_larval(alg));
98 if (!IS_ERR_OR_NULL(larval->adult))
99 crypto_mod_put(larval->adult);
/*
 * Allocate and initialise a larval placeholder for algorithm @name.
 * Returns ERR_PTR(-ENOMEM) on allocation failure.
 * NOTE(review): strlcpy() is deprecated upstream in favour of strscpy();
 * consider converting once the full file is in view.
 */
103 struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
105 struct crypto_larval *larval;
107 larval = kzalloc(sizeof(*larval), GFP_KERNEL);
109 return ERR_PTR(-ENOMEM);
112 larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
/* Priority -1: presumably below any real registration — TODO confirm. */
113 larval->alg.cra_priority = -1;
114 larval->alg.cra_destroy = crypto_larval_destroy;
116 strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
/* Waiters block on this until the larval becomes an adult or dies. */
117 init_completion(&larval->completion);
121 EXPORT_SYMBOL_GPL(crypto_larval_alloc);
/*
 * Create a larval for @name and publish it on crypto_alg_list, unless a
 * matching entry already exists — in which case the existing entry is
 * used and, if it is itself a larval, waited upon.
 * Refcount starts at 2: presumably one for the list, one for the caller
 * — verify against the elided lines.
 */
123 static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
126 struct crypto_alg *alg;
127 struct crypto_larval *larval;
129 larval = crypto_larval_alloc(name, type, mask);
131 return ERR_CAST(larval);
133 refcount_set(&larval->alg.cra_refcnt, 2);
135 down_write(&crypto_alg_sem);
/* Re-check under the lock: someone may have registered meanwhile. */
136 alg = __crypto_alg_lookup(name, type, mask);
139 list_add(&alg->cra_list, &crypto_alg_list);
141 up_write(&crypto_alg_sem);
143 if (alg != &larval->alg) {
/* Lost the race; wait on the pre-existing larval if that's what we found. */
145 if (crypto_is_larval(alg))
146 alg = crypto_larval_wait(alg);
/*
 * Unpublish a larval: remove it from the global list and wake all
 * waiters on its completion.  The final reference drop is elided here.
 */
152 void crypto_larval_kill(struct crypto_alg *alg)
154 struct crypto_larval *larval = (void *)alg;
156 down_write(&crypto_alg_sem);
157 list_del(&alg->cra_list);
158 up_write(&crypto_alg_sem);
159 complete_all(&larval->completion);
162 EXPORT_SYMBOL_GPL(crypto_larval_kill);
/*
 * Ask the crypto manager (via the notifier chain) to test the larval's
 * adult algorithm, then wait for the result.  When no notifier handles
 * the request (NOTIFY_DONE), the algorithm is marked tested directly.
 * The larval is always killed before returning.
 */
164 void crypto_wait_for_test(struct crypto_larval *larval)
168 err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
169 if (err != NOTIFY_STOP) {
/* Anything other than NOTIFY_DONE here is unexpected. */
170 if (WARN_ON(err != NOTIFY_DONE))
172 crypto_alg_tested(larval->alg.cra_driver_name, 0);
175 err = wait_for_completion_killable(&larval->completion);
178 crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);
181 crypto_larval_kill(&larval->alg);
183 EXPORT_SYMBOL_GPL(crypto_wait_for_test);
/*
 * Start testing for a boot-time test larval exactly once.
 * test_started is double-checked: first locklessly, then again under
 * crypto_alg_sem, so only one caller reaches crypto_wait_for_test().
 */
185 static void crypto_start_test(struct crypto_larval *larval)
187 if (!crypto_is_test_larval(larval))
190 if (larval->test_started)
193 down_write(&crypto_alg_sem);
194 if (larval->test_started) {
195 up_write(&crypto_alg_sem);
199 larval->test_started = true;
200 up_write(&crypto_alg_sem);
202 crypto_wait_for_test(larval);
/*
 * Wait (killable, up to 60s) for a larval to become a real algorithm.
 * Returns the adult with a reference held, or an ERR_PTR.  The visible
 * error values are -EINTR, -ETIMEDOUT, -ENOENT and -EAGAIN; the guard
 * conditions selecting between them are elided in this view — the
 * mapping below is inferred and should be confirmed against the file.
 * The caller's reference on the larval is always dropped.
 */
205 static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
207 struct crypto_larval *larval = (void *)alg;
/* Before boot self-tests finish we may need to run the test ourselves. */
210 if (!static_branch_likely(&crypto_boot_test_finished))
211 crypto_start_test(larval);
213 timeout = wait_for_completion_killable_timeout(
214 &larval->completion, 60 * HZ);
218 alg = ERR_PTR(-EINTR);
220 alg = ERR_PTR(-ETIMEDOUT);
222 alg = ERR_PTR(-ENOENT);
223 else if (IS_ERR(alg))
/* An untested adult from a test larval is not usable yet. */
225 else if (crypto_is_test_larval(larval) &&
226 !(alg->cra_flags & CRYPTO_ALG_TESTED))
227 alg = ERR_PTR(-EAGAIN);
228 else if (!crypto_mod_get(alg))
229 alg = ERR_PTR(-EAGAIN);
230 crypto_mod_put(&larval->alg);
/*
 * Look up @name under a read lock, preferring instances that carry
 * CRYPTO_ALG_TESTED.  If only the plain lookup succeeds on a non-larval
 * entry, -ELIBBAD is returned (the guard lines around it are elided;
 * presumably this is the "exists but failed/never ran tests" case).
 */
235 static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
238 struct crypto_alg *alg;
/* Unless the caller already constrains TESTED, require it first. */
241 if (!((type | mask) & CRYPTO_ALG_TESTED))
242 test |= CRYPTO_ALG_TESTED;
244 down_read(&crypto_alg_sem);
245 alg = __crypto_alg_lookup(name, type | test, mask | test);
/* Fall back to a lookup without the TESTED requirement. */
247 alg = __crypto_alg_lookup(name, type, mask);
248 if (alg && !crypto_is_larval(alg)) {
251 alg = ERR_PTR(-ELIBBAD);
254 up_read(&crypto_alg_sem);
/*
 * Resolve @name to an algorithm or larval, loading modules as needed.
 * Tries the "crypto-<name>" module alias and, when a fallback-free
 * variant is acceptable, "crypto-<name>-all".  If nothing turns up and
 * loading is permitted, a fresh larval is added so a crypto manager can
 * construct the algorithm on the fly.
 */
259 static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
262 struct crypto_alg *alg;
265 return ERR_PTR(-ENOENT);
/* LARVAL/DEAD are internal bookkeeping bits; never match on them. */
267 type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
268 mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
270 alg = crypto_alg_lookup(name, type, mask);
271 if (!alg && !(mask & CRYPTO_NOLOAD)) {
272 request_module("crypto-%s", name);
274 if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
275 CRYPTO_ALG_NEED_FALLBACK))
276 request_module("crypto-%s-all", name);
/* Retry after the module-load attempts. */
278 alg = crypto_alg_lookup(name, type, mask);
281 if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
282 alg = crypto_larval_wait(alg);
284 alg = crypto_larval_add(name, type, mask);
/*
 * Notify the crypto chain of @val; if no listener handled it
 * (NOTIFY_DONE), load the "cryptomgr" module and retry once.
 */
289 int crypto_probing_notify(unsigned long val, void *v)
293 ok = blocking_notifier_call_chain(&crypto_chain, val, v);
294 if (ok == NOTIFY_DONE) {
295 request_module("cryptomgr");
296 ok = blocking_notifier_call_chain(&crypto_chain, val, v);
301 EXPORT_SYMBOL_GPL(crypto_probing_notify);
/*
 * Top-level lookup: resolve @name to a fully constructed, referenced
 * crypto_alg, engaging module loading and the crypto manager as needed.
 */
303 struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
305 struct crypto_alg *alg;
306 struct crypto_alg *larval;
310 * If the internal flag is set for a cipher, require a caller to
311 * invoke the cipher with the internal flag to use that cipher.
312 * Also, if a caller wants to allocate a cipher that may or may
313 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
314 * !(mask & CRYPTO_ALG_INTERNAL).
316 if (!((type | mask) & CRYPTO_ALG_INTERNAL))
317 mask |= CRYPTO_ALG_INTERNAL;
319 larval = crypto_larval_lookup(name, type, mask);
320 if (IS_ERR(larval) || !crypto_is_larval(larval))
/* Got a larval: ask a crypto manager to construct the algorithm. */
323 ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);
325 if (ok == NOTIFY_STOP)
326 alg = crypto_larval_wait(larval);
/* Nobody could construct it: drop the larval ref and fail. */
328 crypto_mod_put(larval);
329 alg = ERR_PTR(-ENOENT);
331 crypto_larval_kill(larval);
334 EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
/*
 * Initialise @tfm via the algorithm's frontend type object when one
 * provides an init hook (remaining branches are elided in this view).
 */
336 static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
338 const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;
341 return type_obj->init(tfm, type, mask);
/* Run the tfm's exit hook, if the frontend type installed one. */
345 static void crypto_exit_ops(struct crypto_tfm *tfm)
347 const struct crypto_type *type = tfm->__crt_alg->cra_type;
349 if (type && tfm->exit)
/*
 * Compute the per-tfm context size for @alg: alignment padding plus a
 * per-type context size — delegated to cra_type->ctxsize() when a type
 * object is present, otherwise switched on the legacy algorithm type.
 */
353 static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
355 const struct crypto_type *type_obj = alg->cra_type;
/* Padding needed to honour cra_alignmask beyond the default ctx alignment. */
358 len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
360 return len + type_obj->ctxsize(alg, type, mask);
362 switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
366 case CRYPTO_ALG_TYPE_CIPHER:
367 len += crypto_cipher_ctxsize(alg);
370 case CRYPTO_ALG_TYPE_COMPRESS:
371 len += crypto_compress_ctxsize(alg);
/* Mark @alg as dying under the write lock (used on tfm init failure). */
378 void crypto_shoot_alg(struct crypto_alg *alg)
380 down_write(&crypto_alg_sem);
381 alg->cra_flags |= CRYPTO_ALG_DYING;
382 up_write(&crypto_alg_sem);
384 EXPORT_SYMBOL_GPL(crypto_shoot_alg);
/*
 * Allocate and initialise a legacy crypto_tfm for @alg.  On an init
 * failure the algorithm is shot (marked dying) and exit ops run; the
 * error-path labels themselves are elided in this view.
 */
386 struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
389 struct crypto_tfm *tfm = NULL;
390 unsigned int tfm_size;
393 tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
394 tfm = kzalloc(tfm_size, GFP_KERNEL);
398 tfm->__crt_alg = alg;
400 err = crypto_init_ops(tfm, type, mask);
/* Only call cra_init when the type init did not install its own exit hook. */
404 if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
405 goto cra_init_failed;
410 crypto_exit_ops(tfm);
413 crypto_shoot_alg(alg);
420 EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
423  * crypto_alloc_base - Locate algorithm and allocate transform
424  * @alg_name: Name of algorithm
425  * @type: Type of algorithm
426  * @mask: Mask for type comparison
428  * This function should not be used by new algorithm types.
429  * Please use crypto_alloc_tfm instead.
431  * crypto_alloc_base() will first attempt to locate an already loaded
432  * algorithm. If that fails and the kernel supports dynamically loadable
433  * modules, it will then attempt to load a module of the same name or
434  * alias. If that fails it will send a query to any loaded crypto manager
435  * to construct an algorithm on the fly. A refcount is grabbed on the
436  * algorithm which is then associated with the new transform.
438  * The returned transform is of a non-determinate type. Most people
439  * should use one of the more specific allocation functions such as
440  * crypto_alloc_skcipher().
442  * In case of error the return value is an error pointer.
444 struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
446 struct crypto_tfm *tfm;
450 struct crypto_alg *alg;
452 alg = crypto_alg_mod_lookup(alg_name, type, mask);
458 tfm = __crypto_alloc_tfm(alg, type, mask);
/* Retry loop (surrounding lines elided): give up only on a fatal signal. */
468 if (fatal_signal_pending(current)) {
476 EXPORT_SYMBOL_GPL(crypto_alloc_base);
/*
 * Allocate a frontend-typed tfm for @alg on NUMA node @node.
 * Memory layout: [frontend area of frontend->tfmsize][struct crypto_tfm]
 * [context of frontend->extsize(alg)].  Error paths (labels elided)
 * shoot the algorithm and run exit ops, mirroring __crypto_alloc_tfm().
 */
478 void *crypto_create_tfm_node(struct crypto_alg *alg,
479 const struct crypto_type *frontend,
483 struct crypto_tfm *tfm = NULL;
484 unsigned int tfmsize;
488 tfmsize = frontend->tfmsize;
489 total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);
491 mem = kzalloc_node(total, GFP_KERNEL, node);
/* The crypto_tfm lives immediately after the frontend's private area. */
495 tfm = (struct crypto_tfm *)(mem + tfmsize);
496 tfm->__crt_alg = alg;
499 err = frontend->init_tfm(tfm);
/* Only call cra_init when init_tfm did not install an exit hook. */
503 if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
504 goto cra_init_failed;
509 crypto_exit_ops(tfm);
512 crypto_shoot_alg(alg);
519 EXPORT_SYMBOL_GPL(crypto_create_tfm_node);
/*
 * Apply the frontend's type/mask adjustments, then fall through to the
 * generic module-aware lookup.
 */
521 struct crypto_alg *crypto_find_alg(const char *alg_name,
522 const struct crypto_type *frontend,
/* Clear frontend-owned bits, then force the frontend's own bits on. */
526 type &= frontend->maskclear;
527 mask &= frontend->maskclear;
528 type |= frontend->type;
529 mask |= frontend->maskset;
532 return crypto_alg_mod_lookup(alg_name, type, mask);
534 EXPORT_SYMBOL_GPL(crypto_find_alg);
537  * crypto_alloc_tfm_node - Locate algorithm and allocate transform
538  * @alg_name: Name of algorithm
539  * @frontend: Frontend algorithm type
540  * @type: Type of algorithm
541  * @mask: Mask for type comparison
542  * @node: NUMA node in which users desire to put requests, if node is
543  * NUMA_NO_NODE, it means users have no special requirement.
545  * crypto_alloc_tfm() will first attempt to locate an already loaded
546  * algorithm. If that fails and the kernel supports dynamically loadable
547  * modules, it will then attempt to load a module of the same name or
548  * alias. If that fails it will send a query to any loaded crypto manager
549  * to construct an algorithm on the fly. A refcount is grabbed on the
550  * algorithm which is then associated with the new transform.
552  * The returned transform is of a non-determinate type. Most people
553  * should use one of the more specific allocation functions such as
554  * crypto_alloc_skcipher().
556  * In case of error the return value is an error pointer.
559 void *crypto_alloc_tfm_node(const char *alg_name,
560 const struct crypto_type *frontend, u32 type, u32 mask,
567 struct crypto_alg *alg;
569 alg = crypto_find_alg(alg_name, frontend, type, mask);
575 tfm = crypto_create_tfm_node(alg, frontend, node);
/* Retry loop (surrounding lines elided): give up only on a fatal signal. */
585 if (fatal_signal_pending(current)) {
593 EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
596  * crypto_destroy_tfm - Free crypto transform
597  * @mem: Start of tfm slab
598  * @tfm: Transform to free
600  * This function frees up the transform and any associated resources,
601  * then drops the refcount on the associated algorithm.
603 void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
605 struct crypto_alg *alg;
/* Tolerate NULL / ERR_PTR so callers may free unconditionally. */
607 if (IS_ERR_OR_NULL(mem))
610 alg = tfm->__crt_alg;
612 if (!tfm->exit && alg->cra_exit)
614 crypto_exit_ops(tfm);
/* kfree_sensitive() zeroises before freeing — the ctx may hold key material. */
616 kfree_sensitive(mem);
618 EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
/*
 * Report whether algorithm @name is available; the lookup may load
 * modules as a side effect.  The reference-drop and return lines are
 * elided in this view.
 */
620 int crypto_has_alg(const char *name, u32 type, u32 mask)
623 struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);
632 EXPORT_SYMBOL_GPL(crypto_has_alg);
/*
 * Generic completion callback for synchronous waiters (crypto_wait).
 * -EINPROGRESS notifications are ignored; a final result completes the
 * wait (the line storing @err into the wait is elided here).
 */
634 void crypto_req_done(struct crypto_async_request *req, int err)
636 struct crypto_wait *wait = req->data;
638 if (err == -EINPROGRESS)
642 complete(&wait->completion);
644 EXPORT_SYMBOL_GPL(crypto_req_done);
646 MODULE_DESCRIPTION("Cryptographic core API");
647 MODULE_LICENSE("GPL");
/* Ensure the cryptomgr module (algorithm construction/testing) loads first. */
648 MODULE_SOFTDEP("pre: cryptomgr");