/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 * Copyright (c) 2019 Google LLC
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/module.h>
#include <linux/once.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>
#include <crypto/drbg.h>
#include <crypto/akcipher.h>
#include <crypto/kpp.h>
#include <crypto/acompress.h>

#include "internal.h"

static bool notests;
module_param(notests, bool, 0644);
MODULE_PARM_DESC(notests, "disable crypto self-tests");

#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS

/* a perfect nop */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}

#else
/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Indexes into the xbuf to simulate cross-page access.
 */

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0

struct aead_test_suite {
	const struct aead_testvec *vecs;
	unsigned int count;
};

struct cipher_test_suite {
	const struct cipher_testvec *vecs;
	unsigned int count;
};

struct comp_test_suite {
	struct {
		const struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	const struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	const struct cprng_testvec *vecs;
	unsigned int count;
};

struct drbg_test_suite {
	const struct drbg_testvec *vecs;
	unsigned int count;
};

struct akcipher_test_suite {
	const struct akcipher_testvec *vecs;
	unsigned int count;
};

struct kpp_test_suite {
	const struct kpp_testvec *vecs;
	unsigned int count;
};

struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
		struct kpp_test_suite kpp;
	} suite;
};

static const unsigned int IDX[8] = {
	IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
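
/*
 * Each IDXn value is a flat byte index into the xbuf page array; the tests
 * below convert it to a (page, offset) pair as
 * xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]), so consecutive
 * chunks of a test vector can be made to land on different pages.
 */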
static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}

static int __testmgr_alloc_buf(char *buf[XBUFSIZE], int order)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (char *)__get_free_pages(GFP_KERNEL, order);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_pages((unsigned long)buf[i], order);

	return -ENOMEM;
}

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	return __testmgr_alloc_buf(buf, 0);
}

static void __testmgr_free_buf(char *buf[XBUFSIZE], int order)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_pages((unsigned long)buf[i], order);
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	__testmgr_free_buf(buf, 0);
}

#define TESTMGR_POISON_BYTE	0xfe
#define TESTMGR_POISON_LEN	16

static inline void testmgr_poison(void *addr, size_t len)
{
	memset(addr, TESTMGR_POISON_BYTE, len);
}

/* Is the memory region still fully poisoned? */
static inline bool testmgr_is_poison(const void *addr, size_t len)
{
	return memchr_inv(addr, TESTMGR_POISON_BYTE, len) == NULL;
}
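
/*
 * Illustrative sketch (not used by the tests themselves) of the intended
 * poisoning pattern: poison the bytes an operation must not touch, run the
 * operation, then confirm the poison survived.  "buf" and "digestsize"
 * here are hypothetical:
 *
 *	testmgr_poison(buf + digestsize, TESTMGR_POISON_LEN);
 *	... run the hash, which may write at most digestsize bytes to buf ...
 *	if (!testmgr_is_poison(buf + digestsize, TESTMGR_POISON_LEN))
 *		pr_err("overrun past the digest detected\n");
 */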
/* flush type for hash algorithms */
enum flush_type {
	/* merge with update of previous buffer(s) */
	FLUSH_TYPE_NONE = 0,

	/* update with previous buffer(s) before doing this one */
	FLUSH_TYPE_FLUSH,

	/* likewise, but also export and re-import the intermediate state */
	FLUSH_TYPE_REIMPORT,
};

/* finalization function for hash algorithms */
enum finalization_type {
	FINALIZATION_TYPE_FINAL,	/* use final() */
	FINALIZATION_TYPE_FINUP,	/* use finup() */
	FINALIZATION_TYPE_DIGEST,	/* use digest() */
};

#define TEST_SG_TOTAL	10000

/**
 * struct test_sg_division - description of a scatterlist entry
 *
 * This struct describes one entry of a scatterlist being constructed to check a
 * crypto test vector.
 *
 * @proportion_of_total: length of this chunk relative to the total length,
 *			 given as a proportion out of TEST_SG_TOTAL so that it
 *			 scales to fit any test vector
 * @offset: byte offset into a 2-page buffer at which this chunk will start
 * @offset_relative_to_alignmask: if true, add the algorithm's alignmask to the
 *				  @offset
 * @flush_type: for hashes, whether an update() should be done now vs.
 *		continuing to accumulate data
 */
struct test_sg_division {
	unsigned int proportion_of_total;
	unsigned int offset;
	bool offset_relative_to_alignmask;
	enum flush_type flush_type;
};

/**
 * struct testvec_config - configuration for testing a crypto test vector
 *
 * This struct describes the data layout and other parameters with which each
 * crypto test vector can be tested.
 *
 * @name: name of this config, logged for debugging purposes if a test fails
 * @inplace: operate on the data in-place, if applicable for the algorithm type?
 * @req_flags: extra request_flags, e.g. CRYPTO_TFM_REQ_MAY_SLEEP
 * @src_divs: description of how to arrange the source scatterlist
 * @dst_divs: description of how to arrange the dst scatterlist, if applicable
 *	      for the algorithm type.  Defaults to @src_divs if unset.
 * @iv_offset: misalignment of the IV in the range [0..MAX_ALGAPI_ALIGNMASK+1],
 *	       where 0 is aligned to a 2*(MAX_ALGAPI_ALIGNMASK+1) byte boundary
 * @iv_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
 *				     the @iv_offset
 * @finalization_type: what finalization function to use for hashes
 */
struct testvec_config {
	const char *name;
	bool inplace;
	u32 req_flags;
	struct test_sg_division src_divs[XBUFSIZE];
	struct test_sg_division dst_divs[XBUFSIZE];
	unsigned int iv_offset;
	bool iv_offset_relative_to_alignmask;
	enum finalization_type finalization_type;
};

#define TESTVEC_CONFIG_NAMELEN	192
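
/*
 * Hypothetical example of a config (for illustration only): split the data
 * into two scatterlist entries covering 35% and 65% of the total length,
 * with the second entry starting at byte offset 3 of its 2-page buffer:
 *
 *	static const struct testvec_config example_cfg = {
 *		.name = "two uneven splits, second one misaligned",
 *		.src_divs = {
 *			{ .proportion_of_total = 3500 },
 *			{ .proportion_of_total = 6500, .offset = 3 },
 *		},
 *	};
 *
 * The proportions are out of TEST_SG_TOTAL (10000), so they scale to any
 * test vector length.
 */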
static unsigned int count_test_sg_divisions(const struct test_sg_division *divs)
{
	unsigned int remaining = TEST_SG_TOTAL;
	unsigned int ndivs = 0;

	do {
		remaining -= divs[ndivs++].proportion_of_total;
	} while (remaining);

	return ndivs;
}

static bool valid_sg_divisions(const struct test_sg_division *divs,
			       unsigned int count, bool *any_flushes_ret)
{
	unsigned int total = 0;
	unsigned int i;

	for (i = 0; i < count && total != TEST_SG_TOTAL; i++) {
		if (divs[i].proportion_of_total <= 0 ||
		    divs[i].proportion_of_total > TEST_SG_TOTAL - total)
			return false;
		total += divs[i].proportion_of_total;
		if (divs[i].flush_type != FLUSH_TYPE_NONE)
			*any_flushes_ret = true;
	}
	return total == TEST_SG_TOTAL &&
		memchr_inv(&divs[i], 0, (count - i) * sizeof(divs[0])) == NULL;
}

/*
 * Check whether the given testvec_config is valid.  This isn't strictly needed
 * since every testvec_config should be valid, but check anyway so that people
 * don't unknowingly add broken configs that don't do what they wanted.
 */
static bool valid_testvec_config(const struct testvec_config *cfg)
{
	bool any_flushes = false;

	if (cfg->name == NULL)
		return false;

	if (!valid_sg_divisions(cfg->src_divs, ARRAY_SIZE(cfg->src_divs),
				&any_flushes))
		return false;

	if (cfg->dst_divs[0].proportion_of_total) {
		if (!valid_sg_divisions(cfg->dst_divs,
					ARRAY_SIZE(cfg->dst_divs),
					&any_flushes))
			return false;
	} else {
		if (memchr_inv(cfg->dst_divs, 0, sizeof(cfg->dst_divs)))
			return false;
		/* defaults to dst_divs=src_divs */
	}

	if (cfg->iv_offset +
	    (cfg->iv_offset_relative_to_alignmask ? MAX_ALGAPI_ALIGNMASK : 0) >
	    MAX_ALGAPI_ALIGNMASK + 1)
		return false;

	if (any_flushes && cfg->finalization_type == FINALIZATION_TYPE_DIGEST)
		return false;

	return true;
}
struct test_sglist {
	char *bufs[XBUFSIZE];
	struct scatterlist sgl[XBUFSIZE];
	struct scatterlist sgl_saved[XBUFSIZE];
	struct scatterlist *sgl_ptr;
	unsigned int nents;
};

static int init_test_sglist(struct test_sglist *tsgl)
{
	return __testmgr_alloc_buf(tsgl->bufs, 1 /* two pages per buffer */);
}

static void destroy_test_sglist(struct test_sglist *tsgl)
{
	return __testmgr_free_buf(tsgl->bufs, 1 /* two pages per buffer */);
}

/**
 * build_test_sglist() - build a scatterlist for a crypto test
 *
 * @tsgl: the scatterlist to build.  @tsgl->bufs[] contains an array of 2-page
 *	  buffers which the scatterlist @tsgl->sgl[] will be made to point into.
 * @divs: the layout specification on which the scatterlist will be based
 * @alignmask: the algorithm's alignmask
 * @total_len: the total length of the scatterlist to build in bytes
 * @data: if non-NULL, the buffers will be filled with this data until it ends.
 *	  Otherwise the buffers will be poisoned.  In both cases, some bytes
 *	  past the end of each buffer will be poisoned to help detect overruns.
 * @out_divs: if non-NULL, the test_sg_division to which each scatterlist entry
 *	      corresponds will be returned here.  This will match @divs except
 *	      that divisions resolving to a length of 0 are omitted as they are
 *	      not included in the scatterlist.
 *
 * Return: 0 or a -errno value
 */
static int build_test_sglist(struct test_sglist *tsgl,
			     const struct test_sg_division *divs,
			     const unsigned int alignmask,
			     const unsigned int total_len,
			     struct iov_iter *data,
			     const struct test_sg_division *out_divs[XBUFSIZE])
{
	struct {
		const struct test_sg_division *div;
		size_t length;
	} partitions[XBUFSIZE];
	const unsigned int ndivs = count_test_sg_divisions(divs);
	unsigned int len_remaining = total_len;
	unsigned int i;

	BUILD_BUG_ON(ARRAY_SIZE(partitions) != ARRAY_SIZE(tsgl->sgl));
	if (WARN_ON(ndivs > ARRAY_SIZE(partitions)))
		return -EINVAL;

	/* Calculate the (div, length) pairs */
	tsgl->nents = 0;
	for (i = 0; i < ndivs; i++) {
		unsigned int len_this_sg =
			min(len_remaining,
			    (total_len * divs[i].proportion_of_total +
			     TEST_SG_TOTAL / 2) / TEST_SG_TOTAL);

		if (len_this_sg != 0) {
			partitions[tsgl->nents].div = &divs[i];
			partitions[tsgl->nents].length = len_this_sg;
			tsgl->nents++;
			len_remaining -= len_this_sg;
		}
	}
	if (tsgl->nents == 0) {
		partitions[tsgl->nents].div = &divs[0];
		partitions[tsgl->nents].length = 0;
		tsgl->nents++;
	}
	partitions[tsgl->nents - 1].length += len_remaining;

	/* Set up the sgl entries and fill the data or poison */
	sg_init_table(tsgl->sgl, tsgl->nents);
	for (i = 0; i < tsgl->nents; i++) {
		unsigned int offset = partitions[i].div->offset;
		void *addr;

		if (partitions[i].div->offset_relative_to_alignmask)
			offset += alignmask;

		while (offset + partitions[i].length + TESTMGR_POISON_LEN >
		       2 * PAGE_SIZE) {
			if (WARN_ON(offset <= 0))
				return -EINVAL;
			offset /= 2;
		}

		addr = &tsgl->bufs[i][offset];
		sg_set_buf(&tsgl->sgl[i], addr, partitions[i].length);

		if (out_divs)
			out_divs[i] = partitions[i].div;

		if (data) {
			size_t copy_len, copied;

			copy_len = min(partitions[i].length, data->count);
			copied = copy_from_iter(addr, copy_len, data);
			if (WARN_ON(copied != copy_len))
				return -EINVAL;
			testmgr_poison(addr + copy_len, partitions[i].length +
				       TESTMGR_POISON_LEN - copy_len);
		} else {
			testmgr_poison(addr, partitions[i].length +
				       TESTMGR_POISON_LEN);
		}
	}

	sg_mark_end(&tsgl->sgl[tsgl->nents - 1]);
	tsgl->sgl_ptr = tsgl->sgl;
	memcpy(tsgl->sgl_saved, tsgl->sgl, tsgl->nents * sizeof(tsgl->sgl[0]));
	return 0;
}
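
/*
 * Worked example of the rounding above, assuming total_len == 100 and the
 * two-division config sketched earlier: entry 0 gets
 * (100 * 3500 + 5000) / 10000 = 35 bytes, entry 1 gets 65 bytes, and any
 * remainder left over from rounding is folded into the final entry.
 */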
/*
 * Verify that a scatterlist crypto operation produced the correct output.
 *
 * @tsgl: scatterlist containing the actual output
 * @expected_output: buffer containing the expected output
 * @len_to_check: length of @expected_output in bytes
 * @unchecked_prefix_len: number of ignored bytes in @tsgl prior to real result
 * @check_poison: verify that the poison bytes after each chunk are intact?
 *
 * Return: 0 if correct, -EINVAL if incorrect, -EOVERFLOW if buffer overrun.
 */
static int verify_correct_output(const struct test_sglist *tsgl,
				 const char *expected_output,
				 unsigned int len_to_check,
				 unsigned int unchecked_prefix_len,
				 bool check_poison)
{
	unsigned int i;

	for (i = 0; i < tsgl->nents; i++) {
		struct scatterlist *sg = &tsgl->sgl_ptr[i];
		unsigned int len = sg->length;
		unsigned int offset = sg->offset;
		const char *actual_output;

		if (unchecked_prefix_len) {
			if (unchecked_prefix_len >= len) {
				unchecked_prefix_len -= len;
				continue;
			}
			offset += unchecked_prefix_len;
			len -= unchecked_prefix_len;
			unchecked_prefix_len = 0;
		}
		len = min(len, len_to_check);
		actual_output = page_address(sg_page(sg)) + offset;
		if (memcmp(expected_output, actual_output, len) != 0)
			return -EINVAL;
		if (check_poison &&
		    !testmgr_is_poison(actual_output + len, TESTMGR_POISON_LEN))
			return -EOVERFLOW;
		len_to_check -= len;
		expected_output += len;
	}
	if (WARN_ON(len_to_check != 0))
		return -EINVAL;
	return 0;
}

static bool is_test_sglist_corrupted(const struct test_sglist *tsgl)
{
	unsigned int i;

	for (i = 0; i < tsgl->nents; i++) {
		if (tsgl->sgl[i].page_link != tsgl->sgl_saved[i].page_link)
			return true;
		if (tsgl->sgl[i].offset != tsgl->sgl_saved[i].offset)
			return true;
		if (tsgl->sgl[i].length != tsgl->sgl_saved[i].length)
			return true;
	}
	return false;
}

struct cipher_test_sglists {
	struct test_sglist src;
	struct test_sglist dst;
};

static struct cipher_test_sglists *alloc_cipher_test_sglists(void)
{
	struct cipher_test_sglists *tsgls;

	tsgls = kmalloc(sizeof(*tsgls), GFP_KERNEL);
	if (!tsgls)
		return NULL;

	if (init_test_sglist(&tsgls->src) != 0)
		goto fail_kfree;
	if (init_test_sglist(&tsgls->dst) != 0)
		goto fail_destroy_src;

	return tsgls;

fail_destroy_src:
	destroy_test_sglist(&tsgls->src);
fail_kfree:
	kfree(tsgls);
	return NULL;
}

static void free_cipher_test_sglists(struct cipher_test_sglists *tsgls)
{
	if (tsgls) {
		destroy_test_sglist(&tsgls->src);
		destroy_test_sglist(&tsgls->dst);
		kfree(tsgls);
	}
}

/* Build the src and dst scatterlists for an skcipher or AEAD test */
static int build_cipher_test_sglists(struct cipher_test_sglists *tsgls,
				     const struct testvec_config *cfg,
				     unsigned int alignmask,
				     unsigned int src_total_len,
				     unsigned int dst_total_len,
				     const struct kvec *inputs,
				     unsigned int nr_inputs)
{
	struct iov_iter input;
	int err;

	iov_iter_kvec(&input, WRITE, inputs, nr_inputs, src_total_len);
	err = build_test_sglist(&tsgls->src, cfg->src_divs, alignmask,
				cfg->inplace ?
					max(dst_total_len, src_total_len) :
					src_total_len,
				&input, NULL);
	if (err)
		return err;

	if (cfg->inplace) {
		tsgls->dst.sgl_ptr = tsgls->src.sgl;
		tsgls->dst.nents = tsgls->src.nents;
		return 0;
	}
	return build_test_sglist(&tsgls->dst,
				 cfg->dst_divs[0].proportion_of_total ?
					cfg->dst_divs : cfg->src_divs,
				 alignmask, dst_total_len, NULL, NULL);
}
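
/*
 * Note that in the in-place case above the destination test_sglist simply
 * aliases the source scatterlist (dst.sgl_ptr = src.sgl), so output
 * verification walks the very same buffers that supplied the input.
 */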
static int ahash_guard_result(char *result, char c, int size)
{
	int i;

	for (i = 0; i < size; i++) {
		if (result[i] != c)
			return -EINVAL;
	}

	return 0;
}

static int ahash_partial_update(struct ahash_request **preq,
	struct crypto_ahash *tfm, const struct hash_testvec *template,
	void *hash_buff, int k, int temp, struct scatterlist *sg,
	const char *algo, char *result, struct crypto_wait *wait)
{
	char *state;
	struct ahash_request *req;
	int statesize, ret = -EINVAL;
	static const unsigned char guard[] = { 0x00, 0xba, 0xad, 0x00 };
	int digestsize = crypto_ahash_digestsize(tfm);

	req = *preq;
	statesize = crypto_ahash_statesize(
			crypto_ahash_reqtfm(req));
	state = kmalloc(statesize + sizeof(guard), GFP_KERNEL);
	if (!state) {
		pr_err("alg: hash: Failed to alloc state for %s\n", algo);
		goto out_nostate;
	}
	memcpy(state + statesize, guard, sizeof(guard));
	memset(result, 1, digestsize);
	ret = crypto_ahash_export(req, state);
	WARN_ON(memcmp(state + statesize, guard, sizeof(guard)));
	if (ret) {
		pr_err("alg: hash: Failed to export() for %s\n", algo);
		goto out;
	}
	ret = ahash_guard_result(result, 1, digestsize);
	if (ret) {
		pr_err("alg: hash: Failed, export used req->result for %s\n",
		       algo);
		goto out;
	}
	ahash_request_free(req);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: hash: Failed to alloc request for %s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req,
				   CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, wait);

	memcpy(hash_buff, template->plaintext + temp,
	       template->tap[k]);
	sg_init_one(&sg[0], hash_buff, template->tap[k]);
	ahash_request_set_crypt(req, sg, result, template->tap[k]);
	ret = crypto_ahash_import(req, state);
	if (ret) {
		pr_err("alg: hash: Failed to import() for %s\n", algo);
		goto out;
	}
	ret = ahash_guard_result(result, 1, digestsize);
	if (ret) {
		pr_err("alg: hash: Failed, import used req->result for %s\n",
		       algo);
		goto out;
	}
	ret = crypto_wait_req(crypto_ahash_update(req), wait);
	if (ret)
		goto out;
	*preq = req;
	ret = 0;
	goto out_noreq;
out:
	ahash_request_free(req);
out_noreq:
	kfree(state);
out_nostate:
	return ret;
}
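
/*
 * Sketch of the partial-update flow exercised by the helper above, for a
 * vector split into np chunks: init() and update(chunk 0) are done on the
 * original request; then, for each remaining chunk, the state is export()ed,
 * a fresh request is allocated, the state is import()ed into it, and
 * update(chunk k) is issued; a concluding final() produces the digest.
 */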
enum hash_test {
	HASH_TEST_DIGEST,
	HASH_TEST_FINAL,
	HASH_TEST_FINUP
};

static int __test_hash(struct crypto_ahash *tfm,
		       const struct hash_testvec *template, unsigned int tcount,
		       enum hash_test test_type, const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	size_t digest_size = crypto_ahash_digestsize(tfm);
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char *result;
	char *key;
	struct ahash_request *req;
	struct crypto_wait wait;
	void *hash_buff;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	result = kmalloc(digest_size, GFP_KERNEL);
	if (!result)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_nobuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	crypto_init_wait(&wait);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
			goto out;

		j++;
		memset(result, 0, digest_size);

		hash_buff = xbuf[0];
		hash_buff += align_offset;

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", j, algo,
				       -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		switch (test_type) {
		case HASH_TEST_DIGEST:
			ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
			if (ret) {
				pr_err("alg: hash: digest failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			break;

		case HASH_TEST_FINAL:
			memset(result, 1, digest_size);
			ret = crypto_wait_req(crypto_ahash_init(req), &wait);
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = ahash_guard_result(result, 1, digest_size);
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: used req->result\n", j, algo);
				goto out;
			}
			ret = crypto_wait_req(crypto_ahash_update(req), &wait);
			if (ret) {
				pr_err("alg: hash: update failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = ahash_guard_result(result, 1, digest_size);
			if (ret) {
				pr_err("alg: hash: update failed on test %d "
				       "for %s: used req->result\n", j, algo);
				goto out;
			}
			ret = crypto_wait_req(crypto_ahash_final(req), &wait);
			if (ret) {
				pr_err("alg: hash: final failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			break;

		case HASH_TEST_FINUP:
			memset(result, 1, digest_size);
			ret = crypto_wait_req(crypto_ahash_init(req), &wait);
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = ahash_guard_result(result, 1, digest_size);
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: used req->result\n", j, algo);
				goto out;
			}
			ret = crypto_wait_req(crypto_ahash_finup(req), &wait);
			if (ret) {
				pr_err("alg: hash: final failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			break;
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		j++;
		memset(result, 0, digest_size);

		temp = 0;
		sg_init_table(sg, template[i].np);
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].plaintext + temp,
					  template[i].tap[k]),
				   template[i].tap[k]);
			temp += template[i].tap[k];
		}

		if (template[i].ksize) {
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			crypto_ahash_clear_flags(tfm, ~0);
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);

			if (ret) {
				printk(KERN_ERR "alg: hash: setkey "
				       "failed on chunking test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
		if (ret) {
			pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Chunking test %d "
			       "failed for %s\n", j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* partial update exercise */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (template[i].np < 2)
			continue;

		j++;
		memset(result, 0, digest_size);

		ret = -EINVAL;
		hash_buff = xbuf[0];
		memcpy(hash_buff, template[i].plaintext,
		       template[i].tap[0]);
		sg_init_one(&sg[0], hash_buff, template[i].tap[0]);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
				       j, algo, -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
		ret = crypto_wait_req(crypto_ahash_init(req), &wait);
		if (ret) {
			pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}
		ret = crypto_wait_req(crypto_ahash_update(req), &wait);
		if (ret) {
			pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}

		temp = template[i].tap[0];
		for (k = 1; k < template[i].np; k++) {
			ret = ahash_partial_update(&req, tfm, &template[i],
				hash_buff, k, temp, &sg[0], algo, result,
				&wait);
			if (ret) {
				pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n",
				       j, algo, -ret);
				goto out_noreq;
			}
			temp += template[i].tap[k];
		}
		ret = crypto_wait_req(crypto_ahash_final(req), &wait);
		if (ret) {
			pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}
		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			pr_err("alg: hash: Partial Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	testmgr_free_buf(xbuf);
out_nobuf:
	kfree(key);
	kfree(result);
	return ret;
}
static int test_hash(struct crypto_ahash *tfm,
		     const struct hash_testvec *template,
		     unsigned int tcount, enum hash_test test_type)
{
	unsigned int alignmask;
	int ret;

	ret = __test_hash(tfm, template, tcount, test_type, 0);
	if (ret)
		return ret;

	/* test unaligned buffers, check with one byte offset */
	ret = __test_hash(tfm, template, tcount, test_type, 1);
	if (ret)
		return ret;

	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
	if (alignmask) {
		/* Check if alignment mask for tfm is correctly set. */
		ret = __test_hash(tfm, template, tcount, test_type,
				  alignmask + 1);
		if (ret)
			return ret;
	}

	return 0;
}
static int __test_aead(struct crypto_aead *tfm, int enc,
		       const struct aead_testvec *template, unsigned int tcount,
		       const bool diff_dst, const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = -ENOMEM;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e, *d;
	struct crypto_wait wait;
	unsigned int authsize, iv_len;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_noxbuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
	sg = kmalloc(array3_size(sizeof(*sg), 8, (diff_dst ? 4 : 2)),
		     GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[16];

	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	crypto_init_wait(&wait);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);

	iv_len = crypto_aead_ivsize(tfm);

	for (i = 0, j = 0; i < tcount; i++) {
		const char *input, *expected_output;
		unsigned int inlen, outlen;
		char *inbuf, *outbuf, *assocbuf;

		if (template[i].np)
			continue;
		if (enc) {
			if (template[i].novrfy)
				continue;
			input = template[i].ptext;
			inlen = template[i].plen;
			expected_output = template[i].ctext;
			outlen = template[i].clen;
		} else {
			input = template[i].ctext;
			inlen = template[i].clen;
			expected_output = template[i].ptext;
			outlen = template[i].plen;
		}

		j++;

		/* some templates have no input data but they will
		 * touch input
		 */
		inbuf = xbuf[0] + align_offset;
		assocbuf = axbuf[0];
		outbuf = inbuf;

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].clen > PAGE_SIZE ||
			    template[i].alen > PAGE_SIZE))
			goto out;

		memcpy(inbuf, input, inlen);
		memcpy(assocbuf, template[i].assoc, template[i].alen);
		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, iv_len);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm,
					      CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen,
			       MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		authsize = template[i].clen - template[i].plen;
		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		k = !!template[i].alen;
		sg_init_table(sg, k + 1);
		sg_set_buf(&sg[0], assocbuf, template[i].alen);
		sg_set_buf(&sg[k], inbuf, template[i].clen);

		if (diff_dst) {
			sg_init_table(sgout, k + 1);
			sg_set_buf(&sgout[0], assocbuf, template[i].alen);

			outbuf = xoutbuf[0] + align_offset;
			sg_set_buf(&sgout[k], outbuf, template[i].clen);
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, inlen,
				       iv);

		aead_request_set_ad(req, template[i].alen);

		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
				      : crypto_aead_decrypt(req), &wait);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		if (memcmp(outbuf, expected_output, outlen)) {
			pr_err("alg: aead%s: Test %d failed on %s for %s\n",
			       d, j, e, algo);
			hexdump(outbuf, outlen);
			ret = -EINVAL;
			goto out;
		}
	}
	for (i = 0, j = 0; i < tcount; i++) {
		const char *input, *expected_output;
		unsigned int inlen, outlen;

		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		if (enc) {
			if (template[i].novrfy)
				continue;
			input = template[i].ptext;
			inlen = template[i].plen;
			expected_output = template[i].ctext;
			outlen = template[i].clen;
		} else {
			input = template[i].ctext;
			inlen = template[i].clen;
			expected_output = template[i].ptext;
			outlen = template[i].plen;
		}

		j++;

		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, MAX_IVLEN);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm,
					      CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen, MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		authsize = template[i].clen - template[i].plen;

		ret = -EINVAL;
		sg_init_table(sg, template[i].anp + template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].anp + template[i].np);

		for (k = 0, temp = 0; k < template[i].anp; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].atap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].assoc + temp,
					  template[i].atap[k]),
				   template[i].atap[k]);
			if (diff_dst)
				sg_set_buf(&sgout[k],
					   axbuf[IDX[k] >> PAGE_SHIFT] +
					   offset_in_page(IDX[k]),
					   template[i].atap[k]);
			temp += template[i].atap[k];
		}

		for (k = 0, temp = 0; k < template[i].np; k++) {
			n = template[i].tap[k];
			if (k == template[i].np - 1 && !enc)
				n += authsize;

			if (WARN_ON(offset_in_page(IDX[k]) + n > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
			memcpy(q, input + temp, n);
			sg_set_buf(&sg[template[i].anp + k], q, n);

			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memset(q, 0, n);

				sg_set_buf(&sgout[template[i].anp + k], q, n);
			}

			if (k == template[i].np - 1 && enc)
				n += authsize;
			if (offset_in_page(q) + n < PAGE_SIZE)
				q[n] = 0;

			temp += template[i].tap[k];
		}

		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		if (enc) {
			if (WARN_ON(sg[template[i].anp + k - 1].offset +
				    sg[template[i].anp + k - 1].length +
				    authsize > PAGE_SIZE)) {
				ret = -EINVAL;
				goto out;
			}

			if (diff_dst)
				sgout[template[i].anp + k - 1].length +=
					authsize;
			sg[template[i].anp + k - 1].length += authsize;
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
				       inlen, iv);

		aead_request_set_ad(req, template[i].alen);

		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
				      : crypto_aead_decrypt(req), &wait);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		ret = -EINVAL;
		for (k = 0, temp = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			n = template[i].tap[k];
			if (k == template[i].np - 1 && enc)
				n += authsize;

			if (memcmp(q, expected_output + temp, n)) {
				pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, n);
				goto out;
			}

			q += n;
			if (k == template[i].np - 1 && !enc) {
				if (!diff_dst && memcmp(q, input + temp + n,
							authsize))
					n = authsize;
				else
					n = 0;
			} else {
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
			}
			if (n) {
				pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}

			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	kfree(sg);
out_nosg:
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(key);
	kfree(iv);
	return ret;
}
static int test_aead(struct crypto_aead *tfm, int enc,
		     const struct aead_testvec *template, unsigned int tcount)
{
	unsigned int alignmask;
	int ret;

	/* test 'dst == src' case */
	ret = __test_aead(tfm, enc, template, tcount, false, 0);
	if (ret)
		return ret;

	/* test 'dst != src' case */
	ret = __test_aead(tfm, enc, template, tcount, true, 0);
	if (ret)
		return ret;

	/* test unaligned buffers, check with one byte offset */
	ret = __test_aead(tfm, enc, template, tcount, true, 1);
	if (ret)
		return ret;

	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
	if (alignmask) {
		/* Check if alignment mask for tfm is correctly set. */
		ret = __test_aead(tfm, enc, template, tcount, true,
				  alignmask + 1);
		if (ret)
			return ret;
	}

	return 0;
}
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       const struct cipher_testvec *template,
		       unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	const char *input, *result;
	void *data;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	j = 0;
	for (i = 0; i < tcount; i++) {

		if (fips_enabled && template[i].fips_skip)
			continue;

		input  = enc ? template[i].ptext : template[i].ctext;
		result = enc ? template[i].ctext : template[i].ptext;
		j++;

		ret = -EINVAL;
		if (WARN_ON(template[i].len > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		memcpy(data, input, template[i].len);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (template[i].fail == !ret) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", j,
			       algo, crypto_cipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		for (k = 0; k < template[i].len;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, result, template[i].len)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].len);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
			   const struct cipher_testvec *template,
			   unsigned int tcount,
			   const bool diff_dst, const int align_offset)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	char *q;
	struct skcipher_request *req;
	struct scatterlist sg[8];
	struct scatterlist sgout[8];
	const char *e, *d;
	struct crypto_wait wait;
	const char *input, *result;
	void *data;
	char iv[MAX_IVLEN];
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	int ret = -ENOMEM;
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	crypto_init_wait(&wait);

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np && !template[i].also_non_np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		if (template[i].iv && !(template[i].generates_iv && enc))
			memcpy(iv, template[i].iv, ivsize);
		else
			memset(iv, 0, MAX_IVLEN);

		input  = enc ? template[i].ptext : template[i].ctext;
		result = enc ? template[i].ctext : template[i].ptext;
		j++;
		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].len > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		data += align_offset;
		memcpy(data, input, template[i].len);

		crypto_skcipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

		ret = crypto_skcipher_setkey(tfm, template[i].key,
					     template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_skcipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		sg_init_one(&sg[0], data, template[i].len);
		if (diff_dst) {
			data = xoutbuf[0];
			data += align_offset;
			sg_init_one(&sgout[0], data, template[i].len);
		}

		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					   template[i].len, iv);
		ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
				      crypto_skcipher_decrypt(req), &wait);

		if (ret) {
			pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		q = data;
		if (memcmp(q, result, template[i].len)) {
			pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
			       d, j, e, algo);
			hexdump(q, template[i].len);
			ret = -EINVAL;
			goto out;
		}

		if (template[i].generates_iv && enc &&
		    memcmp(iv, template[i].iv, crypto_skcipher_ivsize(tfm))) {
			pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
			       d, j, e, algo);
			hexdump(iv, crypto_skcipher_ivsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		if (template[i].iv && !(template[i].generates_iv && enc))
			memcpy(iv, template[i].iv, ivsize);
		else
			memset(iv, 0, MAX_IVLEN);

		input  = enc ? template[i].ptext : template[i].ctext;
		result = enc ? template[i].ctext : template[i].ptext;
		j++;
		crypto_skcipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

		ret = crypto_skcipher_setkey(tfm, template[i].key,
					     template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_skcipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		temp = 0;
		ret = -EINVAL;
		sg_init_table(sg, template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].np);
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);

			memcpy(q, input + temp, template[i].tap[k]);

			if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
				q[template[i].tap[k]] = 0;

			sg_set_buf(&sg[k], q, template[i].tap[k]);
			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				sg_set_buf(&sgout[k], q, template[i].tap[k]);

				memset(q, 0, template[i].tap[k]);
				if (offset_in_page(q) +
				    template[i].tap[k] < PAGE_SIZE)
					q[template[i].tap[k]] = 0;
			}

			temp += template[i].tap[k];
		}

		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					   template[i].len, iv);

		ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
				      crypto_skcipher_decrypt(req), &wait);

		if (ret) {
			pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		temp = 0;
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			if (memcmp(q, result + temp, template[i].tap[k])) {
				pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, template[i].tap[k]);
				goto out;
			}

			q += template[i].tap[k];
			for (n = 0; offset_in_page(q + n) && q[n]; n++)
				;
			if (n) {
				pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}
			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	skcipher_request_free(req);
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
static int test_skcipher(struct crypto_skcipher *tfm, int enc,
			 const struct cipher_testvec *template,
			 unsigned int tcount)
{
	unsigned int alignmask;
	int ret;

	/* test 'dst == src' case */
	ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
	if (ret)
		return ret;

	/* test 'dst != src' case */
	ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
	if (ret)
		return ret;

	/* test unaligned buffers, check with one byte offset */
	ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
	if (ret)
		return ret;

	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
	if (alignmask) {
		/* Check if alignment mask for tfm is correctly set. */
		ret = __test_skcipher(tfm, enc, template, tcount, true,
				      alignmask + 1);
		if (ret)
			return ret;
	}

	return 0;
}
static int test_comp(struct crypto_comp *tfm,
		     const struct comp_testvec *ctemplate,
		     const struct comp_testvec *dtemplate,
		     int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	char *output, *decomp_output;
	unsigned int i;
	int ret;

	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!output)
		return -ENOMEM;

	decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!decomp_output) {
		kfree(output);
		return -ENOMEM;
	}

	for (i = 0; i < ctcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(output, 0, COMP_BUF_SIZE);
		memset(decomp_output, 0, COMP_BUF_SIZE);

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, output, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		ilen = dlen;
		dlen = COMP_BUF_SIZE;
		ret = crypto_comp_decompress(tfm, output,
					     ilen, decomp_output, &dlen);
		if (ret) {
			pr_err("alg: comp: compression failed: decompress: on test %d for %s failed: ret=%d\n",
			       i + 1, algo, -ret);
			goto out;
		}

		if (dlen != ctemplate[i].inlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(decomp_output, ctemplate[i].input,
			   ctemplate[i].inlen)) {
			pr_err("alg: comp: compression failed: output differs: on test %d for %s\n",
			       i + 1, algo);
			hexdump(decomp_output, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0; i < dtcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(decomp_output, 0, COMP_BUF_SIZE);

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, decomp_output, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(decomp_output, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(decomp_output, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	kfree(decomp_output);
	kfree(output);
	return ret;
}
static int test_acomp(struct crypto_acomp *tfm,
		      const struct comp_testvec *ctemplate,
		      const struct comp_testvec *dtemplate,
		      int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
	unsigned int i;
	char *output, *decomp_out;
	int ret;
	struct scatterlist src, dst;
	struct acomp_req *req;
	struct crypto_wait wait;

	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!output)
		return -ENOMEM;

	decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!decomp_out) {
		kfree(output);
		return -ENOMEM;
	}

	for (i = 0; i < ctcount; i++) {
		unsigned int dlen = COMP_BUF_SIZE;
		int ilen = ctemplate[i].inlen;
		void *input_vec;

		input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
		if (!input_vec) {
			ret = -ENOMEM;
			goto out;
		}

		memset(output, 0, dlen);
		crypto_init_wait(&wait);
		sg_init_one(&src, input_vec, ilen);
		sg_init_one(&dst, output, dlen);

		req = acomp_request_alloc(tfm);
		if (!req) {
			pr_err("alg: acomp: request alloc failed for %s\n",
			       algo);
			kfree(input_vec);
			ret = -ENOMEM;
			goto out;
		}

		acomp_request_set_params(req, &src, &dst, ilen, dlen);
		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   crypto_req_done, &wait);

		ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		ilen = req->dlen;
		dlen = COMP_BUF_SIZE;
		sg_init_one(&src, output, ilen);
		sg_init_one(&dst, decomp_out, dlen);
		crypto_init_wait(&wait);
		acomp_request_set_params(req, &src, &dst, ilen, dlen);

		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (req->dlen != ctemplate[i].inlen) {
			pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
			       i + 1, algo, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (memcmp(input_vec, decomp_out, req->dlen)) {
			pr_err("alg: acomp: Compression test %d failed for %s\n",
			       i + 1, algo);
			hexdump(output, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		kfree(input_vec);
		acomp_request_free(req);
	}

	for (i = 0; i < dtcount; i++) {
		unsigned int dlen = COMP_BUF_SIZE;
		int ilen = dtemplate[i].inlen;
		void *input_vec;

		input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
		if (!input_vec) {
			ret = -ENOMEM;
			goto out;
		}

		memset(output, 0, dlen);
		crypto_init_wait(&wait);
		sg_init_one(&src, input_vec, ilen);
		sg_init_one(&dst, output, dlen);

		req = acomp_request_alloc(tfm);
		if (!req) {
			pr_err("alg: acomp: request alloc failed for %s\n",
			       algo);
			kfree(input_vec);
			ret = -ENOMEM;
			goto out;
		}

		acomp_request_set_params(req, &src, &dst, ilen, dlen);
		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   crypto_req_done, &wait);

		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (req->dlen != dtemplate[i].outlen) {
			pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
			       i + 1, algo, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (memcmp(output, dtemplate[i].output, req->dlen)) {
			pr_err("alg: acomp: Decompression test %d failed for %s\n",
			       i + 1, algo);
			hexdump(output, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		kfree(input_vec);
		acomp_request_free(req);
	}

	ret = 0;

out:
	kfree(decomp_out);
	kfree(output);
	return ret;
}
static int test_cprng(struct crypto_rng *tfm,
		      const struct cprng_testvec *template,
		      unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
	int err = 0, i, j, seedsize;
	u8 *seed;
	char result[32];

	seedsize = crypto_rng_seedsize(tfm);

	seed = kmalloc(seedsize, GFP_KERNEL);
	if (!seed) {
		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
		       "for %s\n", algo);
		return -ENOMEM;
	}

	for (i = 0; i < tcount; i++) {
		memset(result, 0, 32);

		memcpy(seed, template[i].v, template[i].vlen);
		memcpy(seed + template[i].vlen, template[i].key,
		       template[i].klen);
		memcpy(seed + template[i].vlen + template[i].klen,
		       template[i].dt, template[i].dtlen);

		err = crypto_rng_reset(tfm, seed, seedsize);
		if (err) {
			printk(KERN_ERR "alg: cprng: Failed to reset rng "
			       "for %s\n", algo);
			goto out;
		}

		for (j = 0; j < template[i].loops; j++) {
			err = crypto_rng_get_bytes(tfm, result,
						   template[i].rlen);
			if (err < 0) {
				printk(KERN_ERR "alg: cprng: Failed to obtain "
				       "the correct amount of random data for "
				       "%s (requested %d)\n", algo,
				       template[i].rlen);
				goto out;
			}
		}

		err = memcmp(result, template[i].result,
			     template[i].rlen);
		if (err) {
			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
			       i, algo);
			hexdump(result, template[i].rlen);
			err = -EINVAL;
			goto out;
		}
	}

out:
	kfree(seed);
	return err;
}
static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	const struct aead_test_suite *suite = &desc->suite.aead;
	struct crypto_aead *tfm;
	int err;

	tfm = crypto_alloc_aead(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_aead(tfm, ENCRYPT, suite->vecs, suite->count);
	if (!err)
		err = test_aead(tfm, DECRYPT, suite->vecs, suite->count);

	crypto_free_aead(tfm);
	return err;
}

static int alg_test_cipher(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	const struct cipher_test_suite *suite = &desc->suite.cipher;
	struct crypto_cipher *tfm;
	int err;

	tfm = crypto_alloc_cipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: cipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_cipher(tfm, ENCRYPT, suite->vecs, suite->count);
	if (!err)
		err = test_cipher(tfm, DECRYPT, suite->vecs, suite->count);

	crypto_free_cipher(tfm);
	return err;
}

static int alg_test_skcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	const struct cipher_test_suite *suite = &desc->suite.cipher;
	struct crypto_skcipher *tfm;
	int err;

	tfm = crypto_alloc_skcipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_skcipher(tfm, ENCRYPT, suite->vecs, suite->count);
	if (!err)
		err = test_skcipher(tfm, DECRYPT, suite->vecs, suite->count);

	crypto_free_skcipher(tfm);
	return err;
}

static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_comp *comp;
	struct crypto_acomp *acomp;
	int err;
	u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;

	if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
		acomp = crypto_alloc_acomp(driver, type, mask);
		if (IS_ERR(acomp)) {
			pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
			       driver, PTR_ERR(acomp));
			return PTR_ERR(acomp);
		}
		err = test_acomp(acomp, desc->suite.comp.comp.vecs,
				 desc->suite.comp.decomp.vecs,
				 desc->suite.comp.comp.count,
				 desc->suite.comp.decomp.count);
		crypto_free_acomp(acomp);
	} else {
		comp = crypto_alloc_comp(driver, type, mask);
		if (IS_ERR(comp)) {
			pr_err("alg: comp: Failed to load transform for %s: %ld\n",
			       driver, PTR_ERR(comp));
			return PTR_ERR(comp);
		}

		err = test_comp(comp, desc->suite.comp.comp.vecs,
				desc->suite.comp.decomp.vecs,
				desc->suite.comp.comp.count,
				desc->suite.comp.decomp.count);

		crypto_free_comp(comp);
	}
	return err;
}
static int __alg_test_hash(const struct hash_testvec *template,
			   unsigned int tcount, const char *driver,
			   u32 type, u32 mask)
{
	struct crypto_ahash *tfm;
	int err;

	tfm = crypto_alloc_ahash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_hash(tfm, template, tcount, HASH_TEST_DIGEST);
	if (!err)
		err = test_hash(tfm, template, tcount, HASH_TEST_FINAL);
	if (!err)
		err = test_hash(tfm, template, tcount, HASH_TEST_FINUP);
	crypto_free_ahash(tfm);
	return err;
}

static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	const struct hash_testvec *template = desc->suite.hash.vecs;
	unsigned int tcount = desc->suite.hash.count;
	unsigned int nr_unkeyed, nr_keyed;
	int err;

	/*
	 * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests
	 * first, before setting a key on the tfm.  To make this easier, we
	 * require that the unkeyed test vectors (if any) are listed first.
	 */

	for (nr_unkeyed = 0; nr_unkeyed < tcount; nr_unkeyed++) {
		if (template[nr_unkeyed].ksize)
			break;
	}
	for (nr_keyed = 0; nr_unkeyed + nr_keyed < tcount; nr_keyed++) {
		if (!template[nr_unkeyed + nr_keyed].ksize) {
			pr_err("alg: hash: test vectors for %s out of order, "
			       "unkeyed ones must come first\n", desc->alg);
			return -EINVAL;
		}
	}

	err = 0;
	if (nr_unkeyed) {
		err = __alg_test_hash(template, nr_unkeyed, driver, type, mask);
		template += nr_unkeyed;
	}

	if (!err && nr_keyed)
		err = __alg_test_hash(template, nr_keyed, driver, type, mask);

	return err;
}
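
/*
 * Hypothetical example of the required ordering (crc32c, for instance,
 * carries the OPTIONAL_KEY flag): any unkeyed vectors must precede the
 * keyed ones in the template array:
 *
 *	{ .plaintext = ..., .psize = ..., .digest = ... },	unkeyed first
 *	{ .key = ..., .ksize = 4, .plaintext = ..., ... },	keyed after
 */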
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	__le32 val;
	int err;

	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		return err;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		if (PTR_ERR(tfm) == -ENOENT) {
			/*
			 * This crc32c implementation is only available through
			 * the ahash API, not the shash API, so the remaining
			 * part of the test is not applicable to it.
			 */
			return 0;
		}
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}
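
	/*
	 * crc32c defines its digest as the bitwise inverse of the internal
	 * CRC state, stored little-endian.  Seeding the state with 420553207
	 * and finalizing without any update() must therefore yield
	 * cpu_to_le32(~420553207), which is what is checked below.
	 */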
	do {
		SHASH_DESC_ON_STACK(shash, tfm);
		u32 *ctx = (u32 *)shash_desc_ctx(shash);

		shash->tfm = tfm;

		*ctx = 420553207;
		err = crypto_shash_final(shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		if (val != cpu_to_le32(~420553207)) {
			pr_err("alg: crc32c: Test failed for %s: %u\n",
			       driver, le32_to_cpu(val));
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);

	return err;
}
static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
			  u32 type, u32 mask)
{
	struct crypto_rng *rng;
	int err;

	rng = crypto_alloc_rng(driver, type, mask);
	if (IS_ERR(rng)) {
		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(rng));
		return PTR_ERR(rng);
	}

	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);

	crypto_free_rng(rng);

	return err;
}
static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
			  const char *driver, u32 type, u32 mask)
{
	int ret = -EAGAIN;
	struct crypto_rng *drng;
	struct drbg_test_data test_data;
	struct drbg_string addtl, pers, testentropy;
	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);

	if (!buf)
		return -ENOMEM;

	drng = crypto_alloc_rng(driver, type, mask);
	if (IS_ERR(drng)) {
		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
		       "%s\n", driver);
		kzfree(buf);
		return -ENOMEM;
	}

	test_data.testentropy = &testentropy;
	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
	drbg_string_fill(&pers, test->pers, test->perslen);
	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
	if (ret) {
		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
		goto outbuf;
	}

	drbg_string_fill(&addtl, test->addtla, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	ret = memcmp(test->expected, buf, test->expectedlen);

outbuf:
	crypto_free_rng(drng);
	kzfree(buf);
	return ret;
}

static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	int err = 0;
	int pr = 0;
	int i = 0;
	const struct drbg_testvec *template = desc->suite.drbg.vecs;
	unsigned int tcount = desc->suite.drbg.count;

	if (0 == memcmp(driver, "drbg_pr_", 8))
		pr = 1;

	for (i = 0; i < tcount; i++) {
		err = drbg_cavs_test(&template[i], pr, driver, type, mask);
		if (err) {
			printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
			       i, driver);
			err = -EINVAL;
			break;
		}
	}
	return err;
}
static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
		       const char *alg)
{
	struct kpp_request *req;
	void *input_buf = NULL;
	void *output_buf = NULL;
	void *a_public = NULL;
	void *a_ss = NULL;
	void *shared_secret = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max;
	int err = -ENOMEM;
	struct scatterlist src, dst;

	req = kpp_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return err;

	crypto_init_wait(&wait);

	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
	if (err < 0)
		goto free_req;

	out_len_max = crypto_kpp_maxsize(tfm);
	output_buf = kzalloc(out_len_max, GFP_KERNEL);
	if (!output_buf) {
		err = -ENOMEM;
		goto free_req;
	}

	/* Use appropriate parameter as base */
	kpp_request_set_input(req, NULL, 0);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);

	/* Compute party A's public key */
	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
		       alg, err);
		goto free_output;
	}

	if (vec->genkey) {
		/* Save party A's public key */
		a_public = kmemdup(sg_virt(req->dst), out_len_max, GFP_KERNEL);
		if (!a_public) {
			err = -ENOMEM;
			goto free_output;
		}
	} else {
		/* Verify calculated public key */
		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
			   vec->expected_a_public_size)) {
			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
			       alg);
			err = -EINVAL;
			goto free_output;
		}
	}

	/* Calculate shared secret key by using counter part (b) public key. */
	input_buf = kmemdup(vec->b_public, vec->b_public_size, GFP_KERNEL);
	if (!input_buf) {
		err = -ENOMEM;
		goto free_output;
	}

	sg_init_one(&src, input_buf, vec->b_public_size);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_input(req, &src, vec->b_public_size);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);
	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
		       alg, err);
		goto free_all;
	}

	if (vec->genkey) {
		/* Save the shared secret obtained by party A */
		a_ss = kmemdup(sg_virt(req->dst), vec->expected_ss_size,
			       GFP_KERNEL);
		if (!a_ss) {
			err = -ENOMEM;
			goto free_all;
		}

		/*
		 * Calculate party B's shared secret by using party A's
		 * public key.
		 */
		err = crypto_kpp_set_secret(tfm, vec->b_secret,
					    vec->b_secret_size);
		if (err < 0)
			goto free_all;

		sg_init_one(&src, a_public, vec->expected_a_public_size);
		sg_init_one(&dst, output_buf, out_len_max);
		kpp_request_set_input(req, &src, vec->expected_a_public_size);
		kpp_request_set_output(req, &dst, out_len_max);
		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					 crypto_req_done, &wait);
		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
				      &wait);
		if (err) {
			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
			       alg, err);
			goto free_all;
		}

		shared_secret = a_ss;
	} else {
		shared_secret = (void *)vec->expected_ss;
	}

	/*
	 * verify shared secret from which the user will derive
	 * secret key by executing whatever hash it has chosen
	 */
	if (memcmp(shared_secret, sg_virt(req->dst),
		   vec->expected_ss_size)) {
		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
		       alg);
		err = -EINVAL;
	}

free_all:
	kfree(a_ss);
	kfree(input_buf);
free_output:
	kfree(a_public);
	kfree(output_buf);
free_req:
	kpp_request_free(req);
	return err;
}

static int test_kpp(struct crypto_kpp *tfm, const char *alg,
		    const struct kpp_testvec *vecs, unsigned int tcount)
{
	int ret, i;

	for (i = 0; i < tcount; i++) {
		ret = do_test_kpp(tfm, vecs++, alg);
		if (ret) {
			pr_err("alg: %s: test failed on vector %d, err=%d\n",
			       alg, i + 1, ret);
			return ret;
		}
	}
	return 0;
}

static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
			u32 type, u32 mask)
{
	struct crypto_kpp *tfm;
	int err = 0;

	tfm = crypto_alloc_kpp(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}
	if (desc->suite.kpp.vecs)
		err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
			       desc->suite.kpp.count);

	crypto_free_kpp(tfm);
	return err;
}
2668 static int test_akcipher_one(struct crypto_akcipher *tfm,
2669 const struct akcipher_testvec *vecs)
2671 char *xbuf[XBUFSIZE];
2672 struct akcipher_request *req;
2673 void *outbuf_enc = NULL;
2674 void *outbuf_dec = NULL;
2675 struct crypto_wait wait;
2676 unsigned int out_len_max, out_len = 0;
2678 struct scatterlist src, dst, src_tab[2];
2680 unsigned int m_size, c_size;
2683 if (testmgr_alloc_buf(xbuf))
2686 req = akcipher_request_alloc(tfm, GFP_KERNEL);
2690 crypto_init_wait(&wait);
2692 if (vecs->public_key_vec)
2693 err = crypto_akcipher_set_pub_key(tfm, vecs->key,
2696 err = crypto_akcipher_set_priv_key(tfm, vecs->key,
2702 out_len_max = crypto_akcipher_maxsize(tfm);
2705 * First run test which do not require a private key, such as
2706 * encrypt or verify.
2708 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
2712 if (!vecs->siggen_sigver_test) {
2714 m_size = vecs->m_size;
2716 c_size = vecs->c_size;
2719 /* Swap args so we can keep the plaintext (digest)
2720 * in vecs->m and the cooked signature in vecs->c.
2722 m = vecs->c; /* signature */
2723 m_size = vecs->c_size;
2724 c = vecs->m; /* digest */
2725 c_size = vecs->m_size;
2729 if (WARN_ON(m_size > PAGE_SIZE))
2731 memcpy(xbuf[0], m, m_size);
2733 sg_init_table(src_tab, 2);
2734 sg_set_buf(&src_tab[0], xbuf[0], 8);
2735 sg_set_buf(&src_tab[1], xbuf[0] + 8, m_size - 8);
2736 sg_init_one(&dst, outbuf_enc, out_len_max);
2737 akcipher_request_set_crypt(req, src_tab, &dst, m_size,
2739 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2740 crypto_req_done, &wait);
2742 err = crypto_wait_req(vecs->siggen_sigver_test ?
2743 /* Run asymmetric signature verification */
2744 crypto_akcipher_verify(req) :
2745 /* Run asymmetric encrypt */
2746 crypto_akcipher_encrypt(req), &wait);
2748 pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
2751 if (req->dst_len != c_size) {
2752 pr_err("alg: akcipher: %s test failed. Invalid output len\n",
2757 /* verify that the encrypted message is equal to the expected ciphertext */
2758 if (memcmp(c, outbuf_enc, c_size)) {
2759 pr_err("alg: akcipher: %s test failed. Invalid output\n", op);
2760 hexdump(outbuf_enc, c_size);
2766 * Don't invoke the decrypt or sign tests, which require a private
2767 * key, for vectors that carry only a public key.
2769 if (vecs->public_key_vec) {
2773 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
2779 op = vecs->siggen_sigver_test ? "sign" : "decrypt";
2780 if (WARN_ON(c_size > PAGE_SIZE))
2782 memcpy(xbuf[0], c, c_size);
2784 sg_init_one(&src, xbuf[0], c_size);
2785 sg_init_one(&dst, outbuf_dec, out_len_max);
2786 crypto_init_wait(&wait);
2787 akcipher_request_set_crypt(req, &src, &dst, c_size, out_len_max);
2789 err = crypto_wait_req(vecs->siggen_sigver_test ?
2790 /* Run asymmetric signature generation */
2791 crypto_akcipher_sign(req) :
2792 /* Run asymmetric decrypt */
2793 crypto_akcipher_decrypt(req), &wait);
2795 pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
2798 out_len = req->dst_len;
2799 if (out_len < m_size) {
2800 pr_err("alg: akcipher: %s test failed. Invalid output len %u\n",
2805 /* verify that the decrypted message is equal to the original message */
2806 if (memchr_inv(outbuf_dec, 0, out_len - m_size) ||
2807 memcmp(m, outbuf_dec + out_len - m_size, m_size)) {
2808 pr_err("alg: akcipher: %s test failed. Invalid output\n", op);
2809 hexdump(outbuf_dec, out_len);
2816 akcipher_request_free(req);
2818 testmgr_free_buf(xbuf);
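/*
 * Caller-side sketch of the one-shot akcipher encrypt path tested above.
 * The "rsa" algorithm name and the key/message buffer names are
 * illustrative assumptions only:
 *
 *	struct crypto_akcipher *tfm = crypto_alloc_akcipher("rsa", 0, 0);
 *	struct akcipher_request *req = akcipher_request_alloc(tfm, GFP_KERNEL);
 *	struct scatterlist src, dst;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	unsigned int dst_len;
 *
 *	crypto_akcipher_set_pub_key(tfm, key, keylen);
 *	dst_len = crypto_akcipher_maxsize(tfm);
 *	sg_init_one(&src, msg, msg_len);
 *	sg_init_one(&dst, out, dst_len);
 *	akcipher_request_set_crypt(req, &src, &dst, msg_len, dst_len);
 *	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_akcipher_encrypt(req), &wait);
 */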
2822 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
2823 const struct akcipher_testvec *vecs,
2824 unsigned int tcount)
2827 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
2830 for (i = 0; i < tcount; i++) {
2831 ret = test_akcipher_one(tfm, vecs++);
2835 pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
2842 static int alg_test_akcipher(const struct alg_test_desc *desc,
2843 const char *driver, u32 type, u32 mask)
2845 struct crypto_akcipher *tfm;
2848 tfm = crypto_alloc_akcipher(driver, type, mask);
2850 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2851 driver, PTR_ERR(tfm));
2852 return PTR_ERR(tfm);
2854 if (desc->suite.akcipher.vecs)
2855 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2856 desc->suite.akcipher.count);
2858 crypto_free_akcipher(tfm);
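/*
 * Trivial pass for algorithms that cannot or need not be tested here:
 * instantiations already covered through another entry in the table, and
 * "paes"-style ciphers whose keys live in hardware secure memory and so
 * cannot be driven from software test vectors.
 */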
2862 static int alg_test_null(const struct alg_test_desc *desc,
2863 const char *driver, u32 type, u32 mask)
2868 #define __VECS(tv) { .vecs = tv, .count = ARRAY_SIZE(tv) }
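/*
 * Example: ".hash = __VECS(md5_tv_template)" expands to
 * "{ .vecs = md5_tv_template, .count = ARRAY_SIZE(md5_tv_template) }",
 * so each suite carries both its vector array and that array's length.
 */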
2870 /* Please keep this list sorted by algorithm name. */
2871 static const struct alg_test_desc alg_test_descs[] = {
2873 .alg = "adiantum(xchacha12,aes)",
2874 .test = alg_test_skcipher,
2876 .cipher = __VECS(adiantum_xchacha12_aes_tv_template)
2879 .alg = "adiantum(xchacha20,aes)",
2880 .test = alg_test_skcipher,
2882 .cipher = __VECS(adiantum_xchacha20_aes_tv_template)
2886 .test = alg_test_aead,
2888 .aead = __VECS(aegis128_tv_template)
2892 .test = alg_test_aead,
2894 .aead = __VECS(aegis128l_tv_template)
2898 .test = alg_test_aead,
2900 .aead = __VECS(aegis256_tv_template)
2903 .alg = "ansi_cprng",
2904 .test = alg_test_cprng,
2906 .cprng = __VECS(ansi_cprng_aes_tv_template)
2909 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2910 .test = alg_test_aead,
2912 .aead = __VECS(hmac_md5_ecb_cipher_null_tv_template)
2915 .alg = "authenc(hmac(sha1),cbc(aes))",
2916 .test = alg_test_aead,
2919 .aead = __VECS(hmac_sha1_aes_cbc_tv_temp)
2922 .alg = "authenc(hmac(sha1),cbc(des))",
2923 .test = alg_test_aead,
2925 .aead = __VECS(hmac_sha1_des_cbc_tv_temp)
2928 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2929 .test = alg_test_aead,
2932 .aead = __VECS(hmac_sha1_des3_ede_cbc_tv_temp)
2935 .alg = "authenc(hmac(sha1),ctr(aes))",
2936 .test = alg_test_null,
2939 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2940 .test = alg_test_aead,
2942 .aead = __VECS(hmac_sha1_ecb_cipher_null_tv_temp)
2945 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2946 .test = alg_test_null,
2949 .alg = "authenc(hmac(sha224),cbc(des))",
2950 .test = alg_test_aead,
2952 .aead = __VECS(hmac_sha224_des_cbc_tv_temp)
2955 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2956 .test = alg_test_aead,
2959 .aead = __VECS(hmac_sha224_des3_ede_cbc_tv_temp)
2962 .alg = "authenc(hmac(sha256),cbc(aes))",
2963 .test = alg_test_aead,
2966 .aead = __VECS(hmac_sha256_aes_cbc_tv_temp)
2969 .alg = "authenc(hmac(sha256),cbc(des))",
2970 .test = alg_test_aead,
2972 .aead = __VECS(hmac_sha256_des_cbc_tv_temp)
2975 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2976 .test = alg_test_aead,
2979 .aead = __VECS(hmac_sha256_des3_ede_cbc_tv_temp)
2982 .alg = "authenc(hmac(sha256),ctr(aes))",
2983 .test = alg_test_null,
2986 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2987 .test = alg_test_null,
2990 .alg = "authenc(hmac(sha384),cbc(des))",
2991 .test = alg_test_aead,
2993 .aead = __VECS(hmac_sha384_des_cbc_tv_temp)
2996 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2997 .test = alg_test_aead,
3000 .aead = __VECS(hmac_sha384_des3_ede_cbc_tv_temp)
3003 .alg = "authenc(hmac(sha384),ctr(aes))",
3004 .test = alg_test_null,
3007 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
3008 .test = alg_test_null,
3011 .alg = "authenc(hmac(sha512),cbc(aes))",
3013 .test = alg_test_aead,
3015 .aead = __VECS(hmac_sha512_aes_cbc_tv_temp)
3018 .alg = "authenc(hmac(sha512),cbc(des))",
3019 .test = alg_test_aead,
3021 .aead = __VECS(hmac_sha512_des_cbc_tv_temp)
3024 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
3025 .test = alg_test_aead,
3028 .aead = __VECS(hmac_sha512_des3_ede_cbc_tv_temp)
3031 .alg = "authenc(hmac(sha512),ctr(aes))",
3032 .test = alg_test_null,
3035 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
3036 .test = alg_test_null,
3040 .test = alg_test_skcipher,
3043 .cipher = __VECS(aes_cbc_tv_template)
3046 .alg = "cbc(anubis)",
3047 .test = alg_test_skcipher,
3049 .cipher = __VECS(anubis_cbc_tv_template)
3052 .alg = "cbc(blowfish)",
3053 .test = alg_test_skcipher,
3055 .cipher = __VECS(bf_cbc_tv_template)
3058 .alg = "cbc(camellia)",
3059 .test = alg_test_skcipher,
3061 .cipher = __VECS(camellia_cbc_tv_template)
3064 .alg = "cbc(cast5)",
3065 .test = alg_test_skcipher,
3067 .cipher = __VECS(cast5_cbc_tv_template)
3070 .alg = "cbc(cast6)",
3071 .test = alg_test_skcipher,
3073 .cipher = __VECS(cast6_cbc_tv_template)
3077 .test = alg_test_skcipher,
3079 .cipher = __VECS(des_cbc_tv_template)
3082 .alg = "cbc(des3_ede)",
3083 .test = alg_test_skcipher,
3086 .cipher = __VECS(des3_ede_cbc_tv_template)
3089 /* Same as cbc(aes) except the key is stored in
3090 * hardware secure memory, which we reference by index
3093 .test = alg_test_null,
3096 .alg = "cbc(serpent)",
3097 .test = alg_test_skcipher,
3099 .cipher = __VECS(serpent_cbc_tv_template)
3103 .test = alg_test_skcipher,
3105 .cipher = __VECS(sm4_cbc_tv_template)
3108 .alg = "cbc(twofish)",
3109 .test = alg_test_skcipher,
3111 .cipher = __VECS(tf_cbc_tv_template)
3114 .alg = "cbcmac(aes)",
3116 .test = alg_test_hash,
3118 .hash = __VECS(aes_cbcmac_tv_template)
3122 .test = alg_test_aead,
3125 .aead = __VECS(aes_ccm_tv_template)
3129 .test = alg_test_skcipher,
3132 .cipher = __VECS(aes_cfb_tv_template)
3136 .test = alg_test_skcipher,
3138 .cipher = __VECS(chacha20_tv_template)
3143 .test = alg_test_hash,
3145 .hash = __VECS(aes_cmac128_tv_template)
3148 .alg = "cmac(des3_ede)",
3150 .test = alg_test_hash,
3152 .hash = __VECS(des3_ede_cmac64_tv_template)
3155 .alg = "compress_null",
3156 .test = alg_test_null,
3159 .test = alg_test_hash,
3162 .hash = __VECS(crc32_tv_template)
3166 .test = alg_test_crc32c,
3169 .hash = __VECS(crc32c_tv_template)
3173 .test = alg_test_hash,
3176 .hash = __VECS(crct10dif_tv_template)
3180 .test = alg_test_skcipher,
3183 .cipher = __VECS(aes_ctr_tv_template)
3186 .alg = "ctr(blowfish)",
3187 .test = alg_test_skcipher,
3189 .cipher = __VECS(bf_ctr_tv_template)
3192 .alg = "ctr(camellia)",
3193 .test = alg_test_skcipher,
3195 .cipher = __VECS(camellia_ctr_tv_template)
3198 .alg = "ctr(cast5)",
3199 .test = alg_test_skcipher,
3201 .cipher = __VECS(cast5_ctr_tv_template)
3204 .alg = "ctr(cast6)",
3205 .test = alg_test_skcipher,
3207 .cipher = __VECS(cast6_ctr_tv_template)
3211 .test = alg_test_skcipher,
3213 .cipher = __VECS(des_ctr_tv_template)
3216 .alg = "ctr(des3_ede)",
3217 .test = alg_test_skcipher,
3220 .cipher = __VECS(des3_ede_ctr_tv_template)
3223 /* Same as ctr(aes) except the key is stored in
3224 * hardware secure memory, which we reference by index
3227 .test = alg_test_null,
3230 .alg = "ctr(serpent)",
3231 .test = alg_test_skcipher,
3233 .cipher = __VECS(serpent_ctr_tv_template)
3237 .test = alg_test_skcipher,
3239 .cipher = __VECS(sm4_ctr_tv_template)
3242 .alg = "ctr(twofish)",
3243 .test = alg_test_skcipher,
3245 .cipher = __VECS(tf_ctr_tv_template)
3248 .alg = "cts(cbc(aes))",
3249 .test = alg_test_skcipher,
3252 .cipher = __VECS(cts_mode_tv_template)
3256 .test = alg_test_comp,
3260 .comp = __VECS(deflate_comp_tv_template),
3261 .decomp = __VECS(deflate_decomp_tv_template)
3266 .test = alg_test_kpp,
3269 .kpp = __VECS(dh_tv_template)
3272 .alg = "digest_null",
3273 .test = alg_test_null,
3275 .alg = "drbg_nopr_ctr_aes128",
3276 .test = alg_test_drbg,
3279 .drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
3282 .alg = "drbg_nopr_ctr_aes192",
3283 .test = alg_test_drbg,
3286 .drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
3289 .alg = "drbg_nopr_ctr_aes256",
3290 .test = alg_test_drbg,
3293 .drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
3297 * There is no need to specifically test the DRBG with every
3298 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
3300 .alg = "drbg_nopr_hmac_sha1",
3302 .test = alg_test_null,
3304 .alg = "drbg_nopr_hmac_sha256",
3305 .test = alg_test_drbg,
3308 .drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
3311 /* covered by drbg_nopr_hmac_sha256 test */
3312 .alg = "drbg_nopr_hmac_sha384",
3314 .test = alg_test_null,
3316 .alg = "drbg_nopr_hmac_sha512",
3317 .test = alg_test_null,
3320 .alg = "drbg_nopr_sha1",
3322 .test = alg_test_null,
3324 .alg = "drbg_nopr_sha256",
3325 .test = alg_test_drbg,
3328 .drbg = __VECS(drbg_nopr_sha256_tv_template)
3331 /* covered by drbg_nopr_sha256 test */
3332 .alg = "drbg_nopr_sha384",
3334 .test = alg_test_null,
3336 .alg = "drbg_nopr_sha512",
3338 .test = alg_test_null,
3340 .alg = "drbg_pr_ctr_aes128",
3341 .test = alg_test_drbg,
3344 .drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
3347 /* covered by drbg_pr_ctr_aes128 test */
3348 .alg = "drbg_pr_ctr_aes192",
3350 .test = alg_test_null,
3352 .alg = "drbg_pr_ctr_aes256",
3354 .test = alg_test_null,
3356 .alg = "drbg_pr_hmac_sha1",
3358 .test = alg_test_null,
3360 .alg = "drbg_pr_hmac_sha256",
3361 .test = alg_test_drbg,
3364 .drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
3367 /* covered by drbg_pr_hmac_sha256 test */
3368 .alg = "drbg_pr_hmac_sha384",
3370 .test = alg_test_null,
3372 .alg = "drbg_pr_hmac_sha512",
3373 .test = alg_test_null,
3376 .alg = "drbg_pr_sha1",
3378 .test = alg_test_null,
3380 .alg = "drbg_pr_sha256",
3381 .test = alg_test_drbg,
3384 .drbg = __VECS(drbg_pr_sha256_tv_template)
3387 /* covered by drbg_pr_sha256 test */
3388 .alg = "drbg_pr_sha384",
3390 .test = alg_test_null,
3392 .alg = "drbg_pr_sha512",
3394 .test = alg_test_null,
3397 .test = alg_test_skcipher,
3400 .cipher = __VECS(aes_tv_template)
3403 .alg = "ecb(anubis)",
3404 .test = alg_test_skcipher,
3406 .cipher = __VECS(anubis_tv_template)
3410 .test = alg_test_skcipher,
3412 .cipher = __VECS(arc4_tv_template)
3415 .alg = "ecb(blowfish)",
3416 .test = alg_test_skcipher,
3418 .cipher = __VECS(bf_tv_template)
3421 .alg = "ecb(camellia)",
3422 .test = alg_test_skcipher,
3424 .cipher = __VECS(camellia_tv_template)
3427 .alg = "ecb(cast5)",
3428 .test = alg_test_skcipher,
3430 .cipher = __VECS(cast5_tv_template)
3433 .alg = "ecb(cast6)",
3434 .test = alg_test_skcipher,
3436 .cipher = __VECS(cast6_tv_template)
3439 .alg = "ecb(cipher_null)",
3440 .test = alg_test_null,
3444 .test = alg_test_skcipher,
3446 .cipher = __VECS(des_tv_template)
3449 .alg = "ecb(des3_ede)",
3450 .test = alg_test_skcipher,
3453 .cipher = __VECS(des3_ede_tv_template)
3456 .alg = "ecb(fcrypt)",
3457 .test = alg_test_skcipher,
3460 .vecs = fcrypt_pcbc_tv_template,
3465 .alg = "ecb(khazad)",
3466 .test = alg_test_skcipher,
3468 .cipher = __VECS(khazad_tv_template)
3471 /* Same as ecb(aes) except the key is stored in
3472 * hardware secure memory, which we reference by index
3475 .test = alg_test_null,
3479 .test = alg_test_skcipher,
3481 .cipher = __VECS(seed_tv_template)
3484 .alg = "ecb(serpent)",
3485 .test = alg_test_skcipher,
3487 .cipher = __VECS(serpent_tv_template)
3491 .test = alg_test_skcipher,
3493 .cipher = __VECS(sm4_tv_template)
3497 .test = alg_test_skcipher,
3499 .cipher = __VECS(tea_tv_template)
3502 .alg = "ecb(tnepres)",
3503 .test = alg_test_skcipher,
3505 .cipher = __VECS(tnepres_tv_template)
3508 .alg = "ecb(twofish)",
3509 .test = alg_test_skcipher,
3511 .cipher = __VECS(tf_tv_template)
3515 .test = alg_test_skcipher,
3517 .cipher = __VECS(xeta_tv_template)
3521 .test = alg_test_skcipher,
3523 .cipher = __VECS(xtea_tv_template)
3527 .test = alg_test_kpp,
3530 .kpp = __VECS(ecdh_tv_template)
3534 .test = alg_test_aead,
3537 .aead = __VECS(aes_gcm_tv_template)
3541 .test = alg_test_hash,
3544 .hash = __VECS(ghash_tv_template)
3548 .test = alg_test_hash,
3550 .hash = __VECS(hmac_md5_tv_template)
3553 .alg = "hmac(rmd128)",
3554 .test = alg_test_hash,
3556 .hash = __VECS(hmac_rmd128_tv_template)
3559 .alg = "hmac(rmd160)",
3560 .test = alg_test_hash,
3562 .hash = __VECS(hmac_rmd160_tv_template)
3565 .alg = "hmac(sha1)",
3566 .test = alg_test_hash,
3569 .hash = __VECS(hmac_sha1_tv_template)
3572 .alg = "hmac(sha224)",
3573 .test = alg_test_hash,
3576 .hash = __VECS(hmac_sha224_tv_template)
3579 .alg = "hmac(sha256)",
3580 .test = alg_test_hash,
3583 .hash = __VECS(hmac_sha256_tv_template)
3586 .alg = "hmac(sha3-224)",
3587 .test = alg_test_hash,
3590 .hash = __VECS(hmac_sha3_224_tv_template)
3593 .alg = "hmac(sha3-256)",
3594 .test = alg_test_hash,
3597 .hash = __VECS(hmac_sha3_256_tv_template)
3600 .alg = "hmac(sha3-384)",
3601 .test = alg_test_hash,
3604 .hash = __VECS(hmac_sha3_384_tv_template)
3607 .alg = "hmac(sha3-512)",
3608 .test = alg_test_hash,
3611 .hash = __VECS(hmac_sha3_512_tv_template)
3614 .alg = "hmac(sha384)",
3615 .test = alg_test_hash,
3618 .hash = __VECS(hmac_sha384_tv_template)
3621 .alg = "hmac(sha512)",
3622 .test = alg_test_hash,
3625 .hash = __VECS(hmac_sha512_tv_template)
3628 .alg = "hmac(streebog256)",
3629 .test = alg_test_hash,
3631 .hash = __VECS(hmac_streebog256_tv_template)
3634 .alg = "hmac(streebog512)",
3635 .test = alg_test_hash,
3637 .hash = __VECS(hmac_streebog512_tv_template)
3640 .alg = "jitterentropy_rng",
3642 .test = alg_test_null,
3645 .test = alg_test_skcipher,
3648 .cipher = __VECS(aes_kw_tv_template)
3652 .test = alg_test_skcipher,
3654 .cipher = __VECS(aes_lrw_tv_template)
3657 .alg = "lrw(camellia)",
3658 .test = alg_test_skcipher,
3660 .cipher = __VECS(camellia_lrw_tv_template)
3663 .alg = "lrw(cast6)",
3664 .test = alg_test_skcipher,
3666 .cipher = __VECS(cast6_lrw_tv_template)
3669 .alg = "lrw(serpent)",
3670 .test = alg_test_skcipher,
3672 .cipher = __VECS(serpent_lrw_tv_template)
3675 .alg = "lrw(twofish)",
3676 .test = alg_test_skcipher,
3678 .cipher = __VECS(tf_lrw_tv_template)
3682 .test = alg_test_comp,
3686 .comp = __VECS(lz4_comp_tv_template),
3687 .decomp = __VECS(lz4_decomp_tv_template)
3692 .test = alg_test_comp,
3696 .comp = __VECS(lz4hc_comp_tv_template),
3697 .decomp = __VECS(lz4hc_decomp_tv_template)
3702 .test = alg_test_comp,
3706 .comp = __VECS(lzo_comp_tv_template),
3707 .decomp = __VECS(lzo_decomp_tv_template)
3712 .test = alg_test_hash,
3714 .hash = __VECS(md4_tv_template)
3718 .test = alg_test_hash,
3720 .hash = __VECS(md5_tv_template)
3723 .alg = "michael_mic",
3724 .test = alg_test_hash,
3726 .hash = __VECS(michael_mic_tv_template)
3730 .test = alg_test_aead,
3732 .aead = __VECS(morus1280_tv_template)
3736 .test = alg_test_aead,
3738 .aead = __VECS(morus640_tv_template)
3741 .alg = "nhpoly1305",
3742 .test = alg_test_hash,
3744 .hash = __VECS(nhpoly1305_tv_template)
3748 .test = alg_test_skcipher,
3751 .cipher = __VECS(aes_ofb_tv_template)
3754 /* Same as ofb(aes) except the key is stored in
3755 * hardware secure memory, which we reference by index
3758 .test = alg_test_null,
3761 .alg = "pcbc(fcrypt)",
3762 .test = alg_test_skcipher,
3764 .cipher = __VECS(fcrypt_pcbc_tv_template)
3767 .alg = "pkcs1pad(rsa,sha224)",
3768 .test = alg_test_null,
3771 .alg = "pkcs1pad(rsa,sha256)",
3772 .test = alg_test_akcipher,
3775 .akcipher = __VECS(pkcs1pad_rsa_tv_template)
3778 .alg = "pkcs1pad(rsa,sha384)",
3779 .test = alg_test_null,
3782 .alg = "pkcs1pad(rsa,sha512)",
3783 .test = alg_test_null,
3787 .test = alg_test_hash,
3789 .hash = __VECS(poly1305_tv_template)
3792 .alg = "rfc3686(ctr(aes))",
3793 .test = alg_test_skcipher,
3796 .cipher = __VECS(aes_ctr_rfc3686_tv_template)
3799 .alg = "rfc4106(gcm(aes))",
3800 .test = alg_test_aead,
3803 .aead = __VECS(aes_gcm_rfc4106_tv_template)
3806 .alg = "rfc4309(ccm(aes))",
3807 .test = alg_test_aead,
3810 .aead = __VECS(aes_ccm_rfc4309_tv_template)
3813 .alg = "rfc4543(gcm(aes))",
3814 .test = alg_test_aead,
3816 .aead = __VECS(aes_gcm_rfc4543_tv_template)
3819 .alg = "rfc7539(chacha20,poly1305)",
3820 .test = alg_test_aead,
3822 .aead = __VECS(rfc7539_tv_template)
3825 .alg = "rfc7539esp(chacha20,poly1305)",
3826 .test = alg_test_aead,
3828 .aead = __VECS(rfc7539esp_tv_template)
3832 .test = alg_test_hash,
3834 .hash = __VECS(rmd128_tv_template)
3838 .test = alg_test_hash,
3840 .hash = __VECS(rmd160_tv_template)
3844 .test = alg_test_hash,
3846 .hash = __VECS(rmd256_tv_template)
3850 .test = alg_test_hash,
3852 .hash = __VECS(rmd320_tv_template)
3856 .test = alg_test_akcipher,
3859 .akcipher = __VECS(rsa_tv_template)
3863 .test = alg_test_skcipher,
3865 .cipher = __VECS(salsa20_stream_tv_template)
3869 .test = alg_test_hash,
3872 .hash = __VECS(sha1_tv_template)
3876 .test = alg_test_hash,
3879 .hash = __VECS(sha224_tv_template)
3883 .test = alg_test_hash,
3886 .hash = __VECS(sha256_tv_template)
3890 .test = alg_test_hash,
3893 .hash = __VECS(sha3_224_tv_template)
3897 .test = alg_test_hash,
3900 .hash = __VECS(sha3_256_tv_template)
3904 .test = alg_test_hash,
3907 .hash = __VECS(sha3_384_tv_template)
3911 .test = alg_test_hash,
3914 .hash = __VECS(sha3_512_tv_template)
3918 .test = alg_test_hash,
3921 .hash = __VECS(sha384_tv_template)
3925 .test = alg_test_hash,
3928 .hash = __VECS(sha512_tv_template)
3932 .test = alg_test_hash,
3934 .hash = __VECS(sm3_tv_template)
3937 .alg = "streebog256",
3938 .test = alg_test_hash,
3940 .hash = __VECS(streebog256_tv_template)
3943 .alg = "streebog512",
3944 .test = alg_test_hash,
3946 .hash = __VECS(streebog512_tv_template)
3950 .test = alg_test_hash,
3952 .hash = __VECS(tgr128_tv_template)
3956 .test = alg_test_hash,
3958 .hash = __VECS(tgr160_tv_template)
3962 .test = alg_test_hash,
3964 .hash = __VECS(tgr192_tv_template)
3967 .alg = "vmac64(aes)",
3968 .test = alg_test_hash,
3970 .hash = __VECS(vmac64_aes_tv_template)
3974 .test = alg_test_hash,
3976 .hash = __VECS(wp256_tv_template)
3980 .test = alg_test_hash,
3982 .hash = __VECS(wp384_tv_template)
3986 .test = alg_test_hash,
3988 .hash = __VECS(wp512_tv_template)
3992 .test = alg_test_hash,
3994 .hash = __VECS(aes_xcbc128_tv_template)
3998 .test = alg_test_skcipher,
4000 .cipher = __VECS(xchacha12_tv_template)
4004 .test = alg_test_skcipher,
4006 .cipher = __VECS(xchacha20_tv_template)
4010 .test = alg_test_skcipher,
4013 .cipher = __VECS(aes_xts_tv_template)
4016 .alg = "xts(camellia)",
4017 .test = alg_test_skcipher,
4019 .cipher = __VECS(camellia_xts_tv_template)
4022 .alg = "xts(cast6)",
4023 .test = alg_test_skcipher,
4025 .cipher = __VECS(cast6_xts_tv_template)
4028 /* Same as xts(aes) except the key is stored in
4029 * hardware secure memory, which we reference by index
4032 .test = alg_test_null,
4035 .alg = "xts(serpent)",
4036 .test = alg_test_skcipher,
4038 .cipher = __VECS(serpent_xts_tv_template)
4041 .alg = "xts(twofish)",
4042 .test = alg_test_skcipher,
4044 .cipher = __VECS(tf_xts_tv_template)
4047 .alg = "xts4096(paes)",
4048 .test = alg_test_null,
4051 .alg = "xts512(paes)",
4052 .test = alg_test_null,
4055 .alg = "zlib-deflate",
4056 .test = alg_test_comp,
4060 .comp = __VECS(zlib_deflate_comp_tv_template),
4061 .decomp = __VECS(zlib_deflate_decomp_tv_template)
4066 .test = alg_test_comp,
4070 .comp = __VECS(zstd_comp_tv_template),
4071 .decomp = __VECS(zstd_decomp_tv_template)
4077 static void alg_check_test_descs_order(void)
4081 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
4082 int diff = strcmp(alg_test_descs[i - 1].alg,
4083 alg_test_descs[i].alg);
4085 if (WARN_ON(diff > 0)) {
4086 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
4087 alg_test_descs[i - 1].alg,
4088 alg_test_descs[i].alg);
4091 if (WARN_ON(diff == 0)) {
4092 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
4093 alg_test_descs[i].alg);
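/*
 * The strict ordering enforced above matters because alg_find_test()
 * below locates entries by binary search; an out-of-order or duplicated
 * entry would make lookups silently miss their tests.
 */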
4098 static void alg_check_testvec_configs(void)
4102 static void testmgr_onetime_init(void)
4104 alg_check_test_descs_order();
4105 alg_check_testvec_configs();
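/*
 * Binary-search the sorted alg_test_descs[] table for @alg.  Returns the
 * matching index, or a negative value if the algorithm has no entry.
 */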
4108 static int alg_find_test(const char *alg)
4111 int end = ARRAY_SIZE(alg_test_descs);
4113 while (start < end) {
4114 int i = (start + end) / 2;
4115 int diff = strcmp(alg_test_descs[i].alg, alg);
4133 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
4139 if (!fips_enabled && notests) {
4140 printk_once(KERN_INFO "alg: self-tests disabled\n");
4144 DO_ONCE(testmgr_onetime_init);
4146 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
4147 char nalg[CRYPTO_MAX_ALG_NAME];
4149 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
4151 return -ENAMETOOLONG;
4153 i = alg_find_test(nalg);
4157 if (fips_enabled && !alg_test_descs[i].fips_allowed)
4160 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
4164 i = alg_find_test(alg);
4165 j = alg_find_test(driver);
4169 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
4170 (j >= 0 && !alg_test_descs[j].fips_allowed)))
4175 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
4177 if (j >= 0 && j != i)
4178 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
4182 if (fips_enabled && rc)
4183 panic("%s: %s alg self-test failed in fips mode!\n", driver, alg);
4185 if (fips_enabled && !rc)
4186 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
4191 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
4197 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
4199 EXPORT_SYMBOL_GPL(alg_test);