]> git.proxmox.com Git - mirror_ubuntu-jammy-kernel.git/blob - crypto/testmgr.c
X.509: parse public key parameters from x509 for akcipher
[mirror_ubuntu-jammy-kernel.git] / crypto / testmgr.c
1 /*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 * Copyright (c) 2019 Google LLC
9 *
10 * Updated RFC4106 AES-GCM testing.
11 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
12 * Adrian Hoban <adrian.hoban@intel.com>
13 * Gabriele Paoloni <gabriele.paoloni@intel.com>
14 * Tadeusz Struk (tadeusz.struk@intel.com)
15 * Copyright (c) 2010, Intel Corporation.
16 *
17 * This program is free software; you can redistribute it and/or modify it
18 * under the terms of the GNU General Public License as published by the Free
19 * Software Foundation; either version 2 of the License, or (at your option)
20 * any later version.
21 *
22 */
23
24 #include <crypto/aead.h>
25 #include <crypto/hash.h>
26 #include <crypto/skcipher.h>
27 #include <linux/err.h>
28 #include <linux/fips.h>
29 #include <linux/module.h>
30 #include <linux/once.h>
31 #include <linux/random.h>
32 #include <linux/scatterlist.h>
33 #include <linux/slab.h>
34 #include <linux/string.h>
35 #include <crypto/rng.h>
36 #include <crypto/drbg.h>
37 #include <crypto/akcipher.h>
38 #include <crypto/kpp.h>
39 #include <crypto/acompress.h>
40 #include <crypto/internal/simd.h>
41
42 #include "internal.h"
43
/* "notests": when set, skip all crypto self-tests at algorithm registration. */
static bool notests;
module_param(notests, bool, 0644);
MODULE_PARM_DESC(notests, "disable crypto self-tests");

/* When set, a self-test failure panics the kernel instead of just warning. */
static bool panic_on_fail;
module_param(panic_on_fail, bool, 0444);

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/* Opt-out knob for the expensive randomized ("fuzz") tests only. */
static bool noextratests;
module_param(noextratests, bool, 0644);
MODULE_PARM_DESC(noextratests, "disable expensive crypto self-tests");

/* Number of random testvec_configs tried per fuzz test. */
static unsigned int fuzz_iterations = 100;
module_param(fuzz_iterations, uint, 0644);
MODULE_PARM_DESC(fuzz_iterations, "number of fuzz test iterations");

/* Per-CPU flag consumed by crypto SIMD helpers to simulate "no SIMD here". */
DEFINE_PER_CPU(bool, crypto_simd_disabled_for_test);
EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled_for_test);
#endif
63
64 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
65
66 /* a perfect nop */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;	/* tests compiled out: unconditionally report success */
}
71
72 #else
73
74 #include "testmgr.h"
75
/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE 8

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0

/* Test vectors for an AEAD algorithm */
struct aead_test_suite {
	const struct aead_testvec *vecs;
	unsigned int count;
};

/* Test vectors for a cipher or skcipher algorithm */
struct cipher_test_suite {
	const struct cipher_testvec *vecs;
	unsigned int count;
};

/* Test vectors for a compression algorithm, one set per direction */
struct comp_test_suite {
	struct {
		const struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

/* Test vectors for a hash (message digest) algorithm */
struct hash_test_suite {
	const struct hash_testvec *vecs;
	unsigned int count;
};

/* Test vectors for a crypto pseudo-random number generator */
struct cprng_test_suite {
	const struct cprng_testvec *vecs;
	unsigned int count;
};

/* Test vectors for a DRBG */
struct drbg_test_suite {
	const struct drbg_testvec *vecs;
	unsigned int count;
};

/* Test vectors for an asymmetric-key cipher */
struct akcipher_test_suite {
	const struct akcipher_testvec *vecs;
	unsigned int count;
};

/* Test vectors for a key-agreement protocol primitive */
struct kpp_test_suite {
	const struct kpp_testvec *vecs;
	unsigned int count;
};

/* Binds an algorithm name to its test function and its test vectors */
struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
		struct kpp_test_suite kpp;
	} suite;
};
146
147 static void hexdump(unsigned char *buf, unsigned int len)
148 {
149 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
150 16, 1,
151 buf, len, false);
152 }
153
154 static int __testmgr_alloc_buf(char *buf[XBUFSIZE], int order)
155 {
156 int i;
157
158 for (i = 0; i < XBUFSIZE; i++) {
159 buf[i] = (char *)__get_free_pages(GFP_KERNEL, order);
160 if (!buf[i])
161 goto err_free_buf;
162 }
163
164 return 0;
165
166 err_free_buf:
167 while (i-- > 0)
168 free_pages((unsigned long)buf[i], order);
169
170 return -ENOMEM;
171 }
172
/* Allocate the test buffers at one page each (order 0). */
static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	return __testmgr_alloc_buf(buf, 0);
}
177
178 static void __testmgr_free_buf(char *buf[XBUFSIZE], int order)
179 {
180 int i;
181
182 for (i = 0; i < XBUFSIZE; i++)
183 free_pages((unsigned long)buf[i], order);
184 }
185
/* Free single-page (order-0) buffers allocated by testmgr_alloc_buf(). */
static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	__testmgr_free_buf(buf, 0);
}
190
/* Byte value and trailer length used to detect overruns and unwritten bytes */
#define TESTMGR_POISON_BYTE 0xfe
#define TESTMGR_POISON_LEN 16

/* Fill a memory region with the poison byte. */
static inline void testmgr_poison(void *addr, size_t len)
{
	memset(addr, TESTMGR_POISON_BYTE, len);
}

/* Is the memory region still fully poisoned? */
static inline bool testmgr_is_poison(const void *addr, size_t len)
{
	return memchr_inv(addr, TESTMGR_POISON_BYTE, len) == NULL;
}
204
/* flush type for hash algorithms: what to do before hashing each sg chunk */
enum flush_type {
	/* merge with update of previous buffer(s) */
	FLUSH_TYPE_NONE = 0,

	/* update with previous buffer(s) before doing this one */
	FLUSH_TYPE_FLUSH,

	/* likewise, but also export and re-import the intermediate state */
	FLUSH_TYPE_REIMPORT,
};

/* finalization function for hash algorithms: how the digest is produced */
enum finalization_type {
	FINALIZATION_TYPE_FINAL,	/* use final() */
	FINALIZATION_TYPE_FINUP,	/* use finup() */
	FINALIZATION_TYPE_DIGEST,	/* use digest() */
};
223
/* All test_sg_division proportions are expressed out of this total */
#define TEST_SG_TOTAL 10000

/**
 * struct test_sg_division - description of a scatterlist entry
 *
 * This struct describes one entry of a scatterlist being constructed to check a
 * crypto test vector.
 *
 * @proportion_of_total: length of this chunk relative to the total length,
 *			 given as a proportion out of TEST_SG_TOTAL so that it
 *			 scales to fit any test vector
 * @offset: byte offset into a 2-page buffer at which this chunk will start
 * @offset_relative_to_alignmask: if true, add the algorithm's alignmask to the
 *				  @offset
 * @flush_type: for hashes, whether an update() should be done now vs.
 *		continuing to accumulate data
 * @nosimd: if doing the pending update(), do it with SIMD disabled?
 */
struct test_sg_division {
	unsigned int proportion_of_total;
	unsigned int offset;
	bool offset_relative_to_alignmask;
	enum flush_type flush_type;
	bool nosimd;
};

/**
 * struct testvec_config - configuration for testing a crypto test vector
 *
 * This struct describes the data layout and other parameters with which each
 * crypto test vector can be tested.
 *
 * @name: name of this config, logged for debugging purposes if a test fails
 * @inplace: operate on the data in-place, if applicable for the algorithm type?
 * @req_flags: extra request_flags, e.g. CRYPTO_TFM_REQ_MAY_SLEEP
 * @src_divs: description of how to arrange the source scatterlist
 * @dst_divs: description of how to arrange the dst scatterlist, if applicable
 *	      for the algorithm type.  Defaults to @src_divs if unset.
 * @iv_offset: misalignment of the IV in the range [0..MAX_ALGAPI_ALIGNMASK+1],
 *	       where 0 is aligned to a 2*(MAX_ALGAPI_ALIGNMASK+1) byte boundary
 * @iv_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
 *				     the @iv_offset
 * @finalization_type: what finalization function to use for hashes
 * @nosimd: execute with SIMD disabled?  Requires !CRYPTO_TFM_REQ_MAY_SLEEP.
 */
struct testvec_config {
	const char *name;
	bool inplace;
	u32 req_flags;
	struct test_sg_division src_divs[XBUFSIZE];
	struct test_sg_division dst_divs[XBUFSIZE];
	unsigned int iv_offset;
	bool iv_offset_relative_to_alignmask;
	enum finalization_type finalization_type;
	bool nosimd;
};

/* Max length (including NUL) of a testvec_config name buffer */
#define TESTVEC_CONFIG_NAMELEN 192
282
/*
 * The following are the lists of testvec_configs to test for each algorithm
 * type when the basic crypto self-tests are enabled, i.e. when
 * CONFIG_CRYPTO_MANAGER_DISABLE_TESTS is unset. They aim to provide good test
 * coverage, while keeping the test time much shorter than the full fuzz tests
 * so that the basic tests can be enabled in a wider range of circumstances.
 * Each entry's .name is logged if a test run with that config fails.
 */

/* Configs for skciphers and aeads */
static const struct testvec_config default_cipher_testvec_configs[] = {
	{
		.name = "in-place",
		.inplace = true,
		.src_divs = { { .proportion_of_total = 10000 } },
	}, {
		.name = "out-of-place",
		.src_divs = { { .proportion_of_total = 10000 } },
	}, {
		.name = "unaligned buffer, offset=1",
		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
		.iv_offset = 1,
	}, {
		.name = "buffer aligned only to alignmask",
		.src_divs = {
			{
				.proportion_of_total = 10000,
				.offset = 1,
				.offset_relative_to_alignmask = true,
			},
		},
		.iv_offset = 1,
		.iv_offset_relative_to_alignmask = true,
	}, {
		.name = "two even aligned splits",
		.src_divs = {
			{ .proportion_of_total = 5000 },
			{ .proportion_of_total = 5000 },
		},
	}, {
		.name = "uneven misaligned splits, may sleep",
		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
		.src_divs = {
			{ .proportion_of_total = 1900, .offset = 33 },
			{ .proportion_of_total = 3300, .offset = 7 },
			{ .proportion_of_total = 4800, .offset = 18 },
		},
		.iv_offset = 3,
	}, {
		.name = "misaligned splits crossing pages, inplace",
		.inplace = true,
		.src_divs = {
			{
				.proportion_of_total = 7500,
				.offset = PAGE_SIZE - 32
			}, {
				.proportion_of_total = 2500,
				.offset = PAGE_SIZE - 7
			},
		},
	}
};
344
/* Configs for hash algorithms; each exercises a different update/final path */
static const struct testvec_config default_hash_testvec_configs[] = {
	{
		.name = "init+update+final aligned buffer",
		.src_divs = { { .proportion_of_total = 10000 } },
		.finalization_type = FINALIZATION_TYPE_FINAL,
	}, {
		.name = "init+finup aligned buffer",
		.src_divs = { { .proportion_of_total = 10000 } },
		.finalization_type = FINALIZATION_TYPE_FINUP,
	}, {
		.name = "digest aligned buffer",
		.src_divs = { { .proportion_of_total = 10000 } },
		.finalization_type = FINALIZATION_TYPE_DIGEST,
	}, {
		.name = "init+update+final misaligned buffer",
		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
		.finalization_type = FINALIZATION_TYPE_FINAL,
	}, {
		.name = "digest buffer aligned only to alignmask",
		.src_divs = {
			{
				.proportion_of_total = 10000,
				.offset = 1,
				.offset_relative_to_alignmask = true,
			},
		},
		.finalization_type = FINALIZATION_TYPE_DIGEST,
	}, {
		.name = "init+update+update+final two even splits",
		.src_divs = {
			{ .proportion_of_total = 5000 },
			{
				.proportion_of_total = 5000,
				.flush_type = FLUSH_TYPE_FLUSH,
			},
		},
		.finalization_type = FINALIZATION_TYPE_FINAL,
	}, {
		.name = "digest uneven misaligned splits, may sleep",
		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
		.src_divs = {
			{ .proportion_of_total = 1900, .offset = 33 },
			{ .proportion_of_total = 3300, .offset = 7 },
			{ .proportion_of_total = 4800, .offset = 18 },
		},
		.finalization_type = FINALIZATION_TYPE_DIGEST,
	}, {
		.name = "digest misaligned splits crossing pages",
		.src_divs = {
			{
				.proportion_of_total = 7500,
				.offset = PAGE_SIZE - 32,
			}, {
				.proportion_of_total = 2500,
				.offset = PAGE_SIZE - 7,
			},
		},
		.finalization_type = FINALIZATION_TYPE_DIGEST,
	}, {
		.name = "import/export",
		.src_divs = {
			{
				.proportion_of_total = 6500,
				.flush_type = FLUSH_TYPE_REIMPORT,
			}, {
				.proportion_of_total = 3500,
				.flush_type = FLUSH_TYPE_REIMPORT,
			},
		},
		.finalization_type = FINALIZATION_TYPE_FINAL,
	}
};
417
418 static unsigned int count_test_sg_divisions(const struct test_sg_division *divs)
419 {
420 unsigned int remaining = TEST_SG_TOTAL;
421 unsigned int ndivs = 0;
422
423 do {
424 remaining -= divs[ndivs++].proportion_of_total;
425 } while (remaining);
426
427 return ndivs;
428 }
429
430 #define SGDIVS_HAVE_FLUSHES BIT(0)
431 #define SGDIVS_HAVE_NOSIMD BIT(1)
432
433 static bool valid_sg_divisions(const struct test_sg_division *divs,
434 unsigned int count, int *flags_ret)
435 {
436 unsigned int total = 0;
437 unsigned int i;
438
439 for (i = 0; i < count && total != TEST_SG_TOTAL; i++) {
440 if (divs[i].proportion_of_total <= 0 ||
441 divs[i].proportion_of_total > TEST_SG_TOTAL - total)
442 return false;
443 total += divs[i].proportion_of_total;
444 if (divs[i].flush_type != FLUSH_TYPE_NONE)
445 *flags_ret |= SGDIVS_HAVE_FLUSHES;
446 if (divs[i].nosimd)
447 *flags_ret |= SGDIVS_HAVE_NOSIMD;
448 }
449 return total == TEST_SG_TOTAL &&
450 memchr_inv(&divs[i], 0, (count - i) * sizeof(divs[0])) == NULL;
451 }
452
453 /*
454 * Check whether the given testvec_config is valid. This isn't strictly needed
455 * since every testvec_config should be valid, but check anyway so that people
456 * don't unknowingly add broken configs that don't do what they wanted.
457 */
458 static bool valid_testvec_config(const struct testvec_config *cfg)
459 {
460 int flags = 0;
461
462 if (cfg->name == NULL)
463 return false;
464
465 if (!valid_sg_divisions(cfg->src_divs, ARRAY_SIZE(cfg->src_divs),
466 &flags))
467 return false;
468
469 if (cfg->dst_divs[0].proportion_of_total) {
470 if (!valid_sg_divisions(cfg->dst_divs,
471 ARRAY_SIZE(cfg->dst_divs), &flags))
472 return false;
473 } else {
474 if (memchr_inv(cfg->dst_divs, 0, sizeof(cfg->dst_divs)))
475 return false;
476 /* defaults to dst_divs=src_divs */
477 }
478
479 if (cfg->iv_offset +
480 (cfg->iv_offset_relative_to_alignmask ? MAX_ALGAPI_ALIGNMASK : 0) >
481 MAX_ALGAPI_ALIGNMASK + 1)
482 return false;
483
484 if ((flags & (SGDIVS_HAVE_FLUSHES | SGDIVS_HAVE_NOSIMD)) &&
485 cfg->finalization_type == FINALIZATION_TYPE_DIGEST)
486 return false;
487
488 if ((cfg->nosimd || (flags & SGDIVS_HAVE_NOSIMD)) &&
489 (cfg->req_flags & CRYPTO_TFM_REQ_MAY_SLEEP))
490 return false;
491
492 return true;
493 }
494
/* A test scatterlist over 2-page buffers, plus a saved copy for corruption
 * detection.  @sgl_ptr is the list operations should use; it may alias
 * another test_sglist's @sgl (see build_cipher_test_sglists() for in-place).
 */
struct test_sglist {
	char *bufs[XBUFSIZE];
	struct scatterlist sgl[XBUFSIZE];
	struct scatterlist sgl_saved[XBUFSIZE];
	struct scatterlist *sgl_ptr;
	unsigned int nents;
};
502
/* Allocate the 2-page backing buffers for a test scatterlist. */
static int init_test_sglist(struct test_sglist *tsgl)
{
	return __testmgr_alloc_buf(tsgl->bufs, 1 /* two pages per buffer */);
}
507
508 static void destroy_test_sglist(struct test_sglist *tsgl)
509 {
510 return __testmgr_free_buf(tsgl->bufs, 1 /* two pages per buffer */);
511 }
512
/**
 * build_test_sglist() - build a scatterlist for a crypto test
 *
 * @tsgl: the scatterlist to build.  @tsgl->bufs[] contains an array of 2-page
 *	  buffers which the scatterlist @tsgl->sgl[] will be made to point into.
 * @divs: the layout specification on which the scatterlist will be based
 * @alignmask: the algorithm's alignmask
 * @total_len: the total length of the scatterlist to build in bytes
 * @data: if non-NULL, the buffers will be filled with this data until it ends.
 *	  Otherwise the buffers will be poisoned.  In both cases, some bytes
 *	  past the end of each buffer will be poisoned to help detect overruns.
 * @out_divs: if non-NULL, the test_sg_division to which each scatterlist entry
 *	      corresponds will be returned here.  This will match @divs except
 *	      that divisions resolving to a length of 0 are omitted as they are
 *	      not included in the scatterlist.
 *
 * Return: 0 or a -errno value
 */
static int build_test_sglist(struct test_sglist *tsgl,
			     const struct test_sg_division *divs,
			     const unsigned int alignmask,
			     const unsigned int total_len,
			     struct iov_iter *data,
			     const struct test_sg_division *out_divs[XBUFSIZE])
{
	struct {
		const struct test_sg_division *div;
		size_t length;
	} partitions[XBUFSIZE];
	const unsigned int ndivs = count_test_sg_divisions(divs);
	unsigned int len_remaining = total_len;
	unsigned int i;

	BUILD_BUG_ON(ARRAY_SIZE(partitions) != ARRAY_SIZE(tsgl->sgl));
	if (WARN_ON(ndivs > ARRAY_SIZE(partitions)))
		return -EINVAL;

	/* Calculate the (div, length) pairs */
	tsgl->nents = 0;
	for (i = 0; i < ndivs; i++) {
		/* Round to nearest: add half of TEST_SG_TOTAL before dividing */
		unsigned int len_this_sg =
			min(len_remaining,
			    (total_len * divs[i].proportion_of_total +
			     TEST_SG_TOTAL / 2) / TEST_SG_TOTAL);

		/* Zero-length divisions are dropped from the scatterlist */
		if (len_this_sg != 0) {
			partitions[tsgl->nents].div = &divs[i];
			partitions[tsgl->nents].length = len_this_sg;
			tsgl->nents++;
			len_remaining -= len_this_sg;
		}
	}
	if (tsgl->nents == 0) {
		/* Degenerate case: keep one zero-length entry */
		partitions[tsgl->nents].div = &divs[0];
		partitions[tsgl->nents].length = 0;
		tsgl->nents++;
	}
	/* Any rounding leftover goes into the last entry */
	partitions[tsgl->nents - 1].length += len_remaining;

	/* Set up the sgl entries and fill the data or poison */
	sg_init_table(tsgl->sgl, tsgl->nents);
	for (i = 0; i < tsgl->nents; i++) {
		unsigned int offset = partitions[i].div->offset;
		void *addr;

		if (partitions[i].div->offset_relative_to_alignmask)
			offset += alignmask;

		/*
		 * Halve the offset until the chunk plus its poison trailer
		 * fits within the 2-page buffer.
		 */
		while (offset + partitions[i].length + TESTMGR_POISON_LEN >
		       2 * PAGE_SIZE) {
			if (WARN_ON(offset <= 0))
				return -EINVAL;
			offset /= 2;
		}

		addr = &tsgl->bufs[i][offset];
		sg_set_buf(&tsgl->sgl[i], addr, partitions[i].length);

		if (out_divs)
			out_divs[i] = partitions[i].div;

		if (data) {
			size_t copy_len, copied;

			copy_len = min(partitions[i].length, data->count);
			copied = copy_from_iter(addr, copy_len, data);
			if (WARN_ON(copied != copy_len))
				return -EINVAL;
			/* Poison the remainder of the chunk and the trailer */
			testmgr_poison(addr + copy_len, partitions[i].length +
				       TESTMGR_POISON_LEN - copy_len);
		} else {
			testmgr_poison(addr, partitions[i].length +
				       TESTMGR_POISON_LEN);
		}
	}

	sg_mark_end(&tsgl->sgl[tsgl->nents - 1]);
	tsgl->sgl_ptr = tsgl->sgl;
	/* Save a copy so later modifications can be detected */
	memcpy(tsgl->sgl_saved, tsgl->sgl, tsgl->nents * sizeof(tsgl->sgl[0]));
	return 0;
}
614
/*
 * Verify that a scatterlist crypto operation produced the correct output.
 *
 * @tsgl: scatterlist containing the actual output
 * @expected_output: buffer containing the expected output
 * @len_to_check: length of @expected_output in bytes
 * @unchecked_prefix_len: number of ignored bytes in @tsgl prior to real result
 * @check_poison: verify that the poison bytes after each chunk are intact?
 *
 * Return: 0 if correct, -EINVAL if incorrect, -EOVERFLOW if buffer overrun.
 */
static int verify_correct_output(const struct test_sglist *tsgl,
				 const char *expected_output,
				 unsigned int len_to_check,
				 unsigned int unchecked_prefix_len,
				 bool check_poison)
{
	unsigned int i;

	for (i = 0; i < tsgl->nents; i++) {
		struct scatterlist *sg = &tsgl->sgl_ptr[i];
		unsigned int len = sg->length;
		unsigned int offset = sg->offset;
		const char *actual_output;

		if (unchecked_prefix_len) {
			if (unchecked_prefix_len >= len) {
				/* whole entry lies inside the ignored prefix */
				unchecked_prefix_len -= len;
				continue;
			}
			/* prefix ends partway into this entry */
			offset += unchecked_prefix_len;
			len -= unchecked_prefix_len;
			unchecked_prefix_len = 0;
		}
		len = min(len, len_to_check);
		actual_output = page_address(sg_page(sg)) + offset;
		if (memcmp(expected_output, actual_output, len) != 0)
			return -EINVAL;
		if (check_poison &&
		    !testmgr_is_poison(actual_output + len, TESTMGR_POISON_LEN))
			return -EOVERFLOW;
		len_to_check -= len;
		expected_output += len;
	}
	if (WARN_ON(len_to_check != 0))
		return -EINVAL;
	return 0;
}
663
664 static bool is_test_sglist_corrupted(const struct test_sglist *tsgl)
665 {
666 unsigned int i;
667
668 for (i = 0; i < tsgl->nents; i++) {
669 if (tsgl->sgl[i].page_link != tsgl->sgl_saved[i].page_link)
670 return true;
671 if (tsgl->sgl[i].offset != tsgl->sgl_saved[i].offset)
672 return true;
673 if (tsgl->sgl[i].length != tsgl->sgl_saved[i].length)
674 return true;
675 }
676 return false;
677 }
678
/* Source and destination scatterlists for an skcipher or AEAD test */
struct cipher_test_sglists {
	struct test_sglist src;
	struct test_sglist dst;
};
683
684 static struct cipher_test_sglists *alloc_cipher_test_sglists(void)
685 {
686 struct cipher_test_sglists *tsgls;
687
688 tsgls = kmalloc(sizeof(*tsgls), GFP_KERNEL);
689 if (!tsgls)
690 return NULL;
691
692 if (init_test_sglist(&tsgls->src) != 0)
693 goto fail_kfree;
694 if (init_test_sglist(&tsgls->dst) != 0)
695 goto fail_destroy_src;
696
697 return tsgls;
698
699 fail_destroy_src:
700 destroy_test_sglist(&tsgls->src);
701 fail_kfree:
702 kfree(tsgls);
703 return NULL;
704 }
705
706 static void free_cipher_test_sglists(struct cipher_test_sglists *tsgls)
707 {
708 if (tsgls) {
709 destroy_test_sglist(&tsgls->src);
710 destroy_test_sglist(&tsgls->dst);
711 kfree(tsgls);
712 }
713 }
714
/* Build the src and dst scatterlists for an skcipher or AEAD test */
static int build_cipher_test_sglists(struct cipher_test_sglists *tsgls,
				     const struct testvec_config *cfg,
				     unsigned int alignmask,
				     unsigned int src_total_len,
				     unsigned int dst_total_len,
				     const struct kvec *inputs,
				     unsigned int nr_inputs)
{
	struct iov_iter input;
	int err;

	iov_iter_kvec(&input, WRITE, inputs, nr_inputs, src_total_len);
	/*
	 * For in-place operation the shared buffer must be sized for the
	 * larger of the source and destination lengths.
	 */
	err = build_test_sglist(&tsgls->src, cfg->src_divs, alignmask,
				cfg->inplace ?
					max(dst_total_len, src_total_len) :
					src_total_len,
				&input, NULL);
	if (err)
		return err;

	if (cfg->inplace) {
		/* dst aliases src; no separate dst scatterlist is built */
		tsgls->dst.sgl_ptr = tsgls->src.sgl;
		tsgls->dst.nents = tsgls->src.nents;
		return 0;
	}
	/* dst_divs defaults to src_divs when unset */
	return build_test_sglist(&tsgls->dst,
				 cfg->dst_divs[0].proportion_of_total ?
					cfg->dst_divs : cfg->src_divs,
				 alignmask, dst_total_len, NULL, NULL);
}
746
#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/*
 * Fill @divs with a random sequence of divisions whose proportions sum to
 * TEST_SG_TOTAL, appending a human-readable description at *p (bounded by
 * @end).  Returns the updated write position in the description buffer.
 */
static char *generate_random_sgl_divisions(struct test_sg_division *divs,
					   size_t max_divs, char *p, char *end,
					   bool gen_flushes, u32 req_flags)
{
	struct test_sg_division *div = divs;
	unsigned int remaining = TEST_SG_TOTAL;

	do {
		unsigned int this_len;
		const char *flushtype_str;

		/* The last slot must absorb all that remains */
		if (div == &divs[max_divs - 1] || prandom_u32() % 2 == 0)
			this_len = remaining;
		else
			this_len = 1 + (prandom_u32() % remaining);
		div->proportion_of_total = this_len;

		/* Bias offsets towards the interesting edges of the buffer */
		if (prandom_u32() % 4 == 0)
			div->offset = (PAGE_SIZE - 128) + (prandom_u32() % 128);
		else if (prandom_u32() % 2 == 0)
			div->offset = prandom_u32() % 32;
		else
			div->offset = prandom_u32() % PAGE_SIZE;
		if (prandom_u32() % 8 == 0)
			div->offset_relative_to_alignmask = true;

		div->flush_type = FLUSH_TYPE_NONE;
		if (gen_flushes) {
			switch (prandom_u32() % 4) {
			case 0:
				div->flush_type = FLUSH_TYPE_REIMPORT;
				break;
			case 1:
				div->flush_type = FLUSH_TYPE_FLUSH;
				break;
			}
		}

		/* nosimd only when the request isn't allowed to sleep */
		if (div->flush_type != FLUSH_TYPE_NONE &&
		    !(req_flags & CRYPTO_TFM_REQ_MAY_SLEEP) &&
		    prandom_u32() % 2 == 0)
			div->nosimd = true;

		switch (div->flush_type) {
		case FLUSH_TYPE_FLUSH:
			if (div->nosimd)
				flushtype_str = "<flush,nosimd>";
			else
				flushtype_str = "<flush>";
			break;
		case FLUSH_TYPE_REIMPORT:
			if (div->nosimd)
				flushtype_str = "<reimport,nosimd>";
			else
				flushtype_str = "<reimport>";
			break;
		default:
			flushtype_str = "";
			break;
		}

		BUILD_BUG_ON(TEST_SG_TOTAL != 10000); /* for "%u.%u%%" */
		p += scnprintf(p, end - p, "%s%u.%u%%@%s+%u%s", flushtype_str,
			       this_len / 100, this_len % 100,
			       div->offset_relative_to_alignmask ?
					"alignmask" : "",
			       div->offset, this_len == remaining ? "" : ", ");
		remaining -= this_len;
		div++;
	} while (remaining);

	return p;
}
821
/* Generate a random testvec_config for fuzz testing; its description is
 * written into @name (at most @max_namelen bytes) and becomes cfg->name. */
static void generate_random_testvec_config(struct testvec_config *cfg,
					   char *name, size_t max_namelen)
{
	char *p = name;
	char * const end = name + max_namelen;

	memset(cfg, 0, sizeof(*cfg));

	cfg->name = name;

	p += scnprintf(p, end - p, "random:");

	if (prandom_u32() % 2 == 0) {
		cfg->inplace = true;
		p += scnprintf(p, end - p, " inplace");
	}

	if (prandom_u32() % 2 == 0) {
		cfg->req_flags |= CRYPTO_TFM_REQ_MAY_SLEEP;
		p += scnprintf(p, end - p, " may_sleep");
	}

	switch (prandom_u32() % 4) {
	case 0:
		cfg->finalization_type = FINALIZATION_TYPE_FINAL;
		p += scnprintf(p, end - p, " use_final");
		break;
	case 1:
		cfg->finalization_type = FINALIZATION_TYPE_FINUP;
		p += scnprintf(p, end - p, " use_finup");
		break;
	default:
		cfg->finalization_type = FINALIZATION_TYPE_DIGEST;
		p += scnprintf(p, end - p, " use_digest");
		break;
	}

	/* nosimd is only valid for requests that can't sleep */
	if (!(cfg->req_flags & CRYPTO_TFM_REQ_MAY_SLEEP) &&
	    prandom_u32() % 2 == 0) {
		cfg->nosimd = true;
		p += scnprintf(p, end - p, " nosimd");
	}

	/* Flushes only make sense when not using one-shot digest() */
	p += scnprintf(p, end - p, " src_divs=[");
	p = generate_random_sgl_divisions(cfg->src_divs,
					  ARRAY_SIZE(cfg->src_divs), p, end,
					  (cfg->finalization_type !=
					   FINALIZATION_TYPE_DIGEST),
					  cfg->req_flags);
	p += scnprintf(p, end - p, "]");

	if (!cfg->inplace && prandom_u32() % 2 == 0) {
		p += scnprintf(p, end - p, " dst_divs=[");
		p = generate_random_sgl_divisions(cfg->dst_divs,
						  ARRAY_SIZE(cfg->dst_divs),
						  p, end, false,
						  cfg->req_flags);
		p += scnprintf(p, end - p, "]");
	}

	if (prandom_u32() % 2 == 0) {
		cfg->iv_offset = 1 + (prandom_u32() % MAX_ALGAPI_ALIGNMASK);
		p += scnprintf(p, end - p, " iv_offset=%u", cfg->iv_offset);
	}

	/* Sanity-check that the generator only produces valid configs */
	WARN_ON_ONCE(!valid_testvec_config(cfg));
}
890
/* Make crypto SIMD helpers report SIMD unusable on this CPU (test only).
 * Pairs with crypto_reenable_simd_for_test(); holds off preemption so the
 * per-CPU flag stays on the same CPU. */
static void crypto_disable_simd_for_test(void)
{
	preempt_disable();
	__this_cpu_write(crypto_simd_disabled_for_test, true);
}

/* Undo crypto_disable_simd_for_test(). */
static void crypto_reenable_simd_for_test(void)
{
	__this_cpu_write(crypto_simd_disabled_for_test, false);
	preempt_enable();
}
#else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
/* No-op stubs used when the extra tests are compiled out */
static void crypto_disable_simd_for_test(void)
{
}

static void crypto_reenable_simd_for_test(void)
{
}
#endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
911
912 static int do_ahash_op(int (*op)(struct ahash_request *req),
913 struct ahash_request *req,
914 struct crypto_wait *wait, bool nosimd)
915 {
916 int err;
917
918 if (nosimd)
919 crypto_disable_simd_for_test();
920
921 err = op(req);
922
923 if (nosimd)
924 crypto_reenable_simd_for_test();
925
926 return crypto_wait_req(err, wait);
927 }
928
/*
 * Validate the outcome of a non-final hash step (init/update/export/import):
 * @err must be 0 and the step must not have touched the result buffer,
 * which is expected to still be fully poisoned.
 */
static int check_nonfinal_hash_op(const char *op, int err,
				  u8 *result, unsigned int digestsize,
				  const char *driver, unsigned int vec_num,
				  const struct testvec_config *cfg)
{
	if (err) {
		pr_err("alg: hash: %s %s() failed with err %d on test vector %u, cfg=\"%s\"\n",
		       driver, op, err, vec_num, cfg->name);
		return err;
	}
	if (!testmgr_is_poison(result, digestsize)) {
		pr_err("alg: hash: %s %s() used result buffer on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return -EINVAL;
	}
	return 0;
}
946
/*
 * Test one hash test vector against one testvec_config: set the key (if any),
 * build the source scatterlist per the config, run the hash using the
 * configured finalization path (digest(), or init/update.../final|finup with
 * optional export/import round-trips), then verify the digest and that no
 * buffer was overrun.  Returns 0 on success or a -errno value.
 */
static int test_hash_vec_cfg(const char *driver,
			     const struct hash_testvec *vec,
			     unsigned int vec_num,
			     const struct testvec_config *cfg,
			     struct ahash_request *req,
			     struct test_sglist *tsgl,
			     u8 *hashstate)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	const unsigned int alignmask = crypto_ahash_alignmask(tfm);
	const unsigned int digestsize = crypto_ahash_digestsize(tfm);
	const unsigned int statesize = crypto_ahash_statesize(tfm);
	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
	const struct test_sg_division *divs[XBUFSIZE];
	DECLARE_CRYPTO_WAIT(wait);
	struct kvec _input;
	struct iov_iter input;
	unsigned int i;
	struct scatterlist *pending_sgl;
	unsigned int pending_len;
	/* extra poisoned bytes after the digest catch result-buffer overruns */
	u8 result[HASH_MAX_DIGESTSIZE + TESTMGR_POISON_LEN];
	int err;

	/* Set the key, if specified */
	if (vec->ksize) {
		err = crypto_ahash_setkey(tfm, vec->key, vec->ksize);
		if (err) {
			pr_err("alg: hash: %s setkey failed with err %d on test vector %u; flags=%#x\n",
			       driver, err, vec_num,
			       crypto_ahash_get_flags(tfm));
			return err;
		}
	}

	/* Build the scatterlist for the source data */
	_input.iov_base = (void *)vec->plaintext;
	_input.iov_len = vec->psize;
	iov_iter_kvec(&input, WRITE, &_input, 1, vec->psize);
	err = build_test_sglist(tsgl, cfg->src_divs, alignmask, vec->psize,
				&input, divs);
	if (err) {
		pr_err("alg: hash: %s: error preparing scatterlist for test vector %u, cfg=\"%s\"\n",
		       driver, vec_num, cfg->name);
		return err;
	}

	/* Do the actual hashing */

	testmgr_poison(req->__ctx, crypto_ahash_reqsize(tfm));
	testmgr_poison(result, digestsize + TESTMGR_POISON_LEN);

	if (cfg->finalization_type == FINALIZATION_TYPE_DIGEST) {
		/* Just using digest() */
		ahash_request_set_callback(req, req_flags, crypto_req_done,
					   &wait);
		ahash_request_set_crypt(req, tsgl->sgl, result, vec->psize);
		err = do_ahash_op(crypto_ahash_digest, req, &wait, cfg->nosimd);
		if (err) {
			pr_err("alg: hash: %s digest() failed with err %d on test vector %u, cfg=\"%s\"\n",
			       driver, err, vec_num, cfg->name);
			return err;
		}
		goto result_ready;
	}

	/* Using init(), zero or more update(), then final() or finup() */

	ahash_request_set_callback(req, req_flags, crypto_req_done, &wait);
	ahash_request_set_crypt(req, NULL, result, 0);
	err = do_ahash_op(crypto_ahash_init, req, &wait, cfg->nosimd);
	err = check_nonfinal_hash_op("init", err, result, digestsize,
				     driver, vec_num, cfg);
	if (err)
		return err;

	/* Accumulate sg entries, flushing via update() where the config says */
	pending_sgl = NULL;
	pending_len = 0;
	for (i = 0; i < tsgl->nents; i++) {
		if (divs[i]->flush_type != FLUSH_TYPE_NONE &&
		    pending_sgl != NULL) {
			/* update() with the pending data */
			ahash_request_set_callback(req, req_flags,
						   crypto_req_done, &wait);
			ahash_request_set_crypt(req, pending_sgl, result,
						pending_len);
			err = do_ahash_op(crypto_ahash_update, req, &wait,
					  divs[i]->nosimd);
			err = check_nonfinal_hash_op("update", err,
						     result, digestsize,
						     driver, vec_num, cfg);
			if (err)
				return err;
			pending_sgl = NULL;
			pending_len = 0;
		}
		if (divs[i]->flush_type == FLUSH_TYPE_REIMPORT) {
			/* Test ->export() and ->import() */
			testmgr_poison(hashstate + statesize,
				       TESTMGR_POISON_LEN);
			err = crypto_ahash_export(req, hashstate);
			err = check_nonfinal_hash_op("export", err,
						     result, digestsize,
						     driver, vec_num, cfg);
			if (err)
				return err;
			if (!testmgr_is_poison(hashstate + statesize,
					       TESTMGR_POISON_LEN)) {
				pr_err("alg: hash: %s export() overran state buffer on test vector %u, cfg=\"%s\"\n",
				       driver, vec_num, cfg->name);
				return -EOVERFLOW;
			}

			/* Re-poison the request context to catch stale reads */
			testmgr_poison(req->__ctx, crypto_ahash_reqsize(tfm));
			err = crypto_ahash_import(req, hashstate);
			err = check_nonfinal_hash_op("import", err,
						     result, digestsize,
						     driver, vec_num, cfg);
			if (err)
				return err;
		}
		if (pending_sgl == NULL)
			pending_sgl = &tsgl->sgl[i];
		pending_len += tsgl->sgl[i].length;
	}

	ahash_request_set_callback(req, req_flags, crypto_req_done, &wait);
	ahash_request_set_crypt(req, pending_sgl, result, pending_len);
	if (cfg->finalization_type == FINALIZATION_TYPE_FINAL) {
		/* finish with update() and final() */
		err = do_ahash_op(crypto_ahash_update, req, &wait, cfg->nosimd);
		err = check_nonfinal_hash_op("update", err, result, digestsize,
					     driver, vec_num, cfg);
		if (err)
			return err;
		err = do_ahash_op(crypto_ahash_final, req, &wait, cfg->nosimd);
		if (err) {
			pr_err("alg: hash: %s final() failed with err %d on test vector %u, cfg=\"%s\"\n",
			       driver, err, vec_num, cfg->name);
			return err;
		}
	} else {
		/* finish with finup() */
		err = do_ahash_op(crypto_ahash_finup, req, &wait, cfg->nosimd);
		if (err) {
			pr_err("alg: hash: %s finup() failed with err %d on test vector %u, cfg=\"%s\"\n",
			       driver, err, vec_num, cfg->name);
			return err;
		}
	}

result_ready:
	/* Check that the algorithm produced the correct digest */
	if (memcmp(result, vec->digest, digestsize) != 0) {
		pr_err("alg: hash: %s test failed (wrong result) on test vector %u, cfg=\"%s\"\n",
		       driver, vec_num, cfg->name);
		return -EINVAL;
	}
	if (!testmgr_is_poison(&result[digestsize], TESTMGR_POISON_LEN)) {
		pr_err("alg: hash: %s overran result buffer on test vector %u, cfg=\"%s\"\n",
		       driver, vec_num, cfg->name);
		return -EOVERFLOW;
	}

	return 0;
}
1112
1113 static int test_hash_vec(const char *driver, const struct hash_testvec *vec,
1114 unsigned int vec_num, struct ahash_request *req,
1115 struct test_sglist *tsgl, u8 *hashstate)
1116 {
1117 unsigned int i;
1118 int err;
1119
1120 for (i = 0; i < ARRAY_SIZE(default_hash_testvec_configs); i++) {
1121 err = test_hash_vec_cfg(driver, vec, vec_num,
1122 &default_hash_testvec_configs[i],
1123 req, tsgl, hashstate);
1124 if (err)
1125 return err;
1126 }
1127
1128 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
1129 if (!noextratests) {
1130 struct testvec_config cfg;
1131 char cfgname[TESTVEC_CONFIG_NAMELEN];
1132
1133 for (i = 0; i < fuzz_iterations; i++) {
1134 generate_random_testvec_config(&cfg, cfgname,
1135 sizeof(cfgname));
1136 err = test_hash_vec_cfg(driver, vec, vec_num, &cfg,
1137 req, tsgl, hashstate);
1138 if (err)
1139 return err;
1140 }
1141 }
1142 #endif
1143 return 0;
1144 }
1145
/*
 * Test @num_vecs hash test vectors against the ahash algorithm instantiated
 * by @driver.  Allocates the transform, one request, a test scatterlist, and
 * a state buffer (with a poisoned tail so export() overruns can be detected
 * by the per-vector tests), then runs test_hash_vec() on each vector.  All
 * resources are released before returning.
 */
static int __alg_test_hash(const struct hash_testvec *vecs,
			   unsigned int num_vecs, const char *driver,
			   u32 type, u32 mask)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req = NULL;
	struct test_sglist *tsgl = NULL;
	u8 *hashstate = NULL;
	unsigned int i;
	int err;

	tfm = crypto_alloc_ahash(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: hash: failed to allocate transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: hash: failed to allocate request for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	tsgl = kmalloc(sizeof(*tsgl), GFP_KERNEL);
	if (!tsgl || init_test_sglist(tsgl) != 0) {
		pr_err("alg: hash: failed to allocate test buffers for %s\n",
		       driver);
		/*
		 * Free and NULL tsgl here: if init_test_sglist() failed, the
		 * common exit path must not call destroy_test_sglist() on a
		 * never-initialized structure.
		 */
		kfree(tsgl);
		tsgl = NULL;
		err = -ENOMEM;
		goto out;
	}

	/* Extra TESTMGR_POISON_LEN bytes let callers detect export() overruns */
	hashstate = kmalloc(crypto_ahash_statesize(tfm) + TESTMGR_POISON_LEN,
			    GFP_KERNEL);
	if (!hashstate) {
		pr_err("alg: hash: failed to allocate hash state buffer for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	for (i = 0; i < num_vecs; i++) {
		err = test_hash_vec(driver, &vecs[i], i, req, tsgl, hashstate);
		if (err)
			goto out;
	}
	err = 0;
out:
	kfree(hashstate);
	if (tsgl) {
		destroy_test_sglist(tsgl);
		kfree(tsgl);
	}
	ahash_request_free(req);
	crypto_free_ahash(tfm);
	return err;
}
1207
1208 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1209 u32 type, u32 mask)
1210 {
1211 const struct hash_testvec *template = desc->suite.hash.vecs;
1212 unsigned int tcount = desc->suite.hash.count;
1213 unsigned int nr_unkeyed, nr_keyed;
1214 int err;
1215
1216 /*
1217 * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests
1218 * first, before setting a key on the tfm. To make this easier, we
1219 * require that the unkeyed test vectors (if any) are listed first.
1220 */
1221
1222 for (nr_unkeyed = 0; nr_unkeyed < tcount; nr_unkeyed++) {
1223 if (template[nr_unkeyed].ksize)
1224 break;
1225 }
1226 for (nr_keyed = 0; nr_unkeyed + nr_keyed < tcount; nr_keyed++) {
1227 if (!template[nr_unkeyed + nr_keyed].ksize) {
1228 pr_err("alg: hash: test vectors for %s out of order, "
1229 "unkeyed ones must come first\n", desc->alg);
1230 return -EINVAL;
1231 }
1232 }
1233
1234 err = 0;
1235 if (nr_unkeyed) {
1236 err = __alg_test_hash(template, nr_unkeyed, driver, type, mask);
1237 template += nr_unkeyed;
1238 }
1239
1240 if (!err && nr_keyed)
1241 err = __alg_test_hash(template, nr_keyed, driver, type, mask);
1242
1243 return err;
1244 }
1245
/*
 * Run one AEAD test vector through one particular testvec configuration:
 * set the key (honoring ->wk weak-key vectors and ->fail expected-setkey
 * failures), set the authentication tag size, build the src/dst
 * scatterlists as described by @cfg, perform the encryption or decryption,
 * then verify that the algorithm neither corrupted the request struct or
 * the scatterlists nor produced the wrong output.
 *
 * @enc: nonzero to encrypt, zero to decrypt.
 * Returns 0 on success (including expected failures) or a negative errno.
 */
static int test_aead_vec_cfg(const char *driver, int enc,
			     const struct aead_testvec *vec,
			     unsigned int vec_num,
			     const struct testvec_config *cfg,
			     struct aead_request *req,
			     struct cipher_test_sglists *tsgls)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const unsigned int alignmask = crypto_aead_alignmask(tfm);
	const unsigned int ivsize = crypto_aead_ivsize(tfm);
	/* Tag length is the ciphertext/plaintext length difference */
	const unsigned int authsize = vec->clen - vec->plen;
	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
	const char *op = enc ? "encryption" : "decryption";
	DECLARE_CRYPTO_WAIT(wait);
	/* Oversized so the IV can be placed at cfg-controlled offsets/alignments */
	u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
	u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
		 cfg->iv_offset +
		 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
	struct kvec input[2];
	int err;

	/* Set the key */
	if (vec->wk)
		crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	else
		crypto_aead_clear_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	err = crypto_aead_setkey(tfm, vec->key, vec->klen);
	if (err) {
		if (vec->fail) /* expectedly failed to set key? */
			return 0;
		pr_err("alg: aead: %s setkey failed with err %d on test vector %u; flags=%#x\n",
		       driver, err, vec_num, crypto_aead_get_flags(tfm));
		return err;
	}
	if (vec->fail) {
		pr_err("alg: aead: %s setkey unexpectedly succeeded on test vector %u\n",
		       driver, vec_num);
		return -EINVAL;
	}

	/* Set the authentication tag size */
	err = crypto_aead_setauthsize(tfm, authsize);
	if (err) {
		pr_err("alg: aead: %s setauthsize failed with err %d on test vector %u\n",
		       driver, err, vec_num);
		return err;
	}

	/* The IV must be copied to a buffer, as the algorithm may modify it */
	if (WARN_ON(ivsize > MAX_IVLEN))
		return -EINVAL;
	if (vec->iv)
		memcpy(iv, vec->iv, ivsize);
	else
		memset(iv, 0, ivsize);

	/* Build the src/dst scatterlists */
	input[0].iov_base = (void *)vec->assoc;
	input[0].iov_len = vec->alen;
	input[1].iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
	input[1].iov_len = enc ? vec->plen : vec->clen;
	err = build_cipher_test_sglists(tsgls, cfg, alignmask,
					vec->alen + (enc ? vec->plen :
						     vec->clen),
					vec->alen + (enc ? vec->clen :
						     vec->plen),
					input, 2);
	if (err) {
		pr_err("alg: aead: %s %s: error preparing scatterlists for test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return err;
	}

	/* Do the actual encryption or decryption */
	/* Poison the request context first so stale-state reuse is caught */
	testmgr_poison(req->__ctx, crypto_aead_reqsize(tfm));
	aead_request_set_callback(req, req_flags, crypto_req_done, &wait);
	aead_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
			       enc ? vec->plen : vec->clen, iv);
	aead_request_set_ad(req, vec->alen);
	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = crypto_wait_req(err, &wait);
	if (err) {
		/* ->novrfy vectors are inauthentic; -EBADMSG is the expected result */
		if (err == -EBADMSG && vec->novrfy)
			return 0;
		pr_err("alg: aead: %s %s failed with err %d on test vector %u, cfg=\"%s\"\n",
		       driver, op, err, vec_num, cfg->name);
		return err;
	}
	if (vec->novrfy) {
		pr_err("alg: aead: %s %s unexpectedly succeeded on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return -EINVAL;
	}

	/* Check that the algorithm didn't overwrite things it shouldn't have */
	if (req->cryptlen != (enc ? vec->plen : vec->clen) ||
	    req->assoclen != vec->alen ||
	    req->iv != iv ||
	    req->src != tsgls->src.sgl_ptr ||
	    req->dst != tsgls->dst.sgl_ptr ||
	    crypto_aead_reqtfm(req) != tfm ||
	    req->base.complete != crypto_req_done ||
	    req->base.flags != req_flags ||
	    req->base.data != &wait) {
		pr_err("alg: aead: %s %s corrupted request struct on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		if (req->cryptlen != (enc ? vec->plen : vec->clen))
			pr_err("alg: aead: changed 'req->cryptlen'\n");
		if (req->assoclen != vec->alen)
			pr_err("alg: aead: changed 'req->assoclen'\n");
		if (req->iv != iv)
			pr_err("alg: aead: changed 'req->iv'\n");
		if (req->src != tsgls->src.sgl_ptr)
			pr_err("alg: aead: changed 'req->src'\n");
		if (req->dst != tsgls->dst.sgl_ptr)
			pr_err("alg: aead: changed 'req->dst'\n");
		if (crypto_aead_reqtfm(req) != tfm)
			pr_err("alg: aead: changed 'req->base.tfm'\n");
		if (req->base.complete != crypto_req_done)
			pr_err("alg: aead: changed 'req->base.complete'\n");
		if (req->base.flags != req_flags)
			pr_err("alg: aead: changed 'req->base.flags'\n");
		if (req->base.data != &wait)
			pr_err("alg: aead: changed 'req->base.data'\n");
		return -EINVAL;
	}
	if (is_test_sglist_corrupted(&tsgls->src)) {
		pr_err("alg: aead: %s %s corrupted src sgl on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return -EINVAL;
	}
	if (tsgls->dst.sgl_ptr != tsgls->src.sgl &&
	    is_test_sglist_corrupted(&tsgls->dst)) {
		pr_err("alg: aead: %s %s corrupted dst sgl on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return -EINVAL;
	}

	/* Check for the correct output (ciphertext or plaintext) */
	err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
				    enc ? vec->clen : vec->plen,
				    vec->alen, enc || !cfg->inplace);
	if (err == -EOVERFLOW) {
		pr_err("alg: aead: %s %s overran dst buffer on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return err;
	}
	if (err) {
		pr_err("alg: aead: %s %s test failed (wrong result) on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return err;
	}

	return 0;
}
1405
1406 static int test_aead_vec(const char *driver, int enc,
1407 const struct aead_testvec *vec, unsigned int vec_num,
1408 struct aead_request *req,
1409 struct cipher_test_sglists *tsgls)
1410 {
1411 unsigned int i;
1412 int err;
1413
1414 if (enc && vec->novrfy)
1415 return 0;
1416
1417 for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
1418 err = test_aead_vec_cfg(driver, enc, vec, vec_num,
1419 &default_cipher_testvec_configs[i],
1420 req, tsgls);
1421 if (err)
1422 return err;
1423 }
1424
1425 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
1426 if (!noextratests) {
1427 struct testvec_config cfg;
1428 char cfgname[TESTVEC_CONFIG_NAMELEN];
1429
1430 for (i = 0; i < fuzz_iterations; i++) {
1431 generate_random_testvec_config(&cfg, cfgname,
1432 sizeof(cfgname));
1433 err = test_aead_vec_cfg(driver, enc, vec, vec_num,
1434 &cfg, req, tsgls);
1435 if (err)
1436 return err;
1437 }
1438 }
1439 #endif
1440 return 0;
1441 }
1442
1443 static int test_aead(const char *driver, int enc,
1444 const struct aead_test_suite *suite,
1445 struct aead_request *req,
1446 struct cipher_test_sglists *tsgls)
1447 {
1448 unsigned int i;
1449 int err;
1450
1451 for (i = 0; i < suite->count; i++) {
1452 err = test_aead_vec(driver, enc, &suite->vecs[i], i, req,
1453 tsgls);
1454 if (err)
1455 return err;
1456 }
1457 return 0;
1458 }
1459
1460 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1461 u32 type, u32 mask)
1462 {
1463 const struct aead_test_suite *suite = &desc->suite.aead;
1464 struct crypto_aead *tfm;
1465 struct aead_request *req = NULL;
1466 struct cipher_test_sglists *tsgls = NULL;
1467 int err;
1468
1469 if (suite->count <= 0) {
1470 pr_err("alg: aead: empty test suite for %s\n", driver);
1471 return -EINVAL;
1472 }
1473
1474 tfm = crypto_alloc_aead(driver, type, mask);
1475 if (IS_ERR(tfm)) {
1476 pr_err("alg: aead: failed to allocate transform for %s: %ld\n",
1477 driver, PTR_ERR(tfm));
1478 return PTR_ERR(tfm);
1479 }
1480
1481 req = aead_request_alloc(tfm, GFP_KERNEL);
1482 if (!req) {
1483 pr_err("alg: aead: failed to allocate request for %s\n",
1484 driver);
1485 err = -ENOMEM;
1486 goto out;
1487 }
1488
1489 tsgls = alloc_cipher_test_sglists();
1490 if (!tsgls) {
1491 pr_err("alg: aead: failed to allocate test buffers for %s\n",
1492 driver);
1493 err = -ENOMEM;
1494 goto out;
1495 }
1496
1497 err = test_aead(driver, ENCRYPT, suite, req, tsgls);
1498 if (err)
1499 goto out;
1500
1501 err = test_aead(driver, DECRYPT, suite, req, tsgls);
1502 out:
1503 free_cipher_test_sglists(tsgls);
1504 aead_request_free(req);
1505 crypto_free_aead(tfm);
1506 return err;
1507 }
1508
/*
 * Test a single-block cipher (crypto_cipher) against @tcount vectors:
 * copy each vector's input into a scratch page, encrypt or decrypt it in
 * place one cipher block at a time, and compare the buffer against the
 * expected result.  Vectors flagged ->fips_skip are skipped when
 * fips_enabled.  Returns 0 on success or a negative errno.
 */
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       const struct cipher_testvec *template,
		       unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	const char *input, *result;
	void *data;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	/* j counts the vectors actually run (some may be FIPS-skipped) */
	j = 0;
	for (i = 0; i < tcount; i++) {

		if (fips_enabled && template[i].fips_skip)
			continue;

		input = enc ? template[i].ptext : template[i].ctext;
		result = enc ? template[i].ctext : template[i].ptext;
		j++;

		ret = -EINVAL;
		/* The in-place buffer is a single page */
		if (WARN_ON(template[i].len > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		memcpy(data, input, template[i].len);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		/*
		 * The setkey result must match the vector's expectation:
		 * ->fail set means setkey must fail, clear means it must
		 * succeed.  An expected failure just moves on to the next
		 * vector.
		 */
		if (template[i].fail == !ret) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", j,
			       algo, crypto_cipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		/* Process the whole input one cipher block at a time, in place */
		for (k = 0; k < template[i].len;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, result, template[i].len)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].len);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
1588
/*
 * Run one skcipher test vector through one particular testvec
 * configuration: set the key (honoring ->wk and expected setkey failures),
 * prepare the IV, build the src/dst scatterlists as described by @cfg,
 * perform the encryption or decryption, then verify that the algorithm
 * neither corrupted the request struct or scatterlists nor produced the
 * wrong output (and, for IV-generating algorithms, the wrong output IV).
 *
 * @enc: nonzero to encrypt, zero to decrypt.
 * Returns 0 on success (including expected failures) or a negative errno.
 */
static int test_skcipher_vec_cfg(const char *driver, int enc,
				 const struct cipher_testvec *vec,
				 unsigned int vec_num,
				 const struct testvec_config *cfg,
				 struct skcipher_request *req,
				 struct cipher_test_sglists *tsgls)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const unsigned int alignmask = crypto_skcipher_alignmask(tfm);
	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
	const char *op = enc ? "encryption" : "decryption";
	DECLARE_CRYPTO_WAIT(wait);
	/* Oversized so the IV can be placed at cfg-controlled offsets/alignments */
	u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
	u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
		 cfg->iv_offset +
		 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
	struct kvec input;
	int err;

	/* Set the key */
	if (vec->wk)
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	else
		crypto_skcipher_clear_flags(tfm,
					    CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	err = crypto_skcipher_setkey(tfm, vec->key, vec->klen);
	if (err) {
		if (vec->fail) /* expectedly failed to set key? */
			return 0;
		pr_err("alg: skcipher: %s setkey failed with err %d on test vector %u; flags=%#x\n",
		       driver, err, vec_num, crypto_skcipher_get_flags(tfm));
		return err;
	}
	if (vec->fail) {
		pr_err("alg: skcipher: %s setkey unexpectedly succeeded on test vector %u\n",
		       driver, vec_num);
		return -EINVAL;
	}

	/* The IV must be copied to a buffer, as the algorithm may modify it */
	if (ivsize) {
		if (WARN_ON(ivsize > MAX_IVLEN))
			return -EINVAL;
		/*
		 * IV-generating algorithms produce the IV on encryption; on
		 * decryption they must be fed the generated IV (->iv_out).
		 */
		if (vec->generates_iv && !enc)
			memcpy(iv, vec->iv_out, ivsize);
		else if (vec->iv)
			memcpy(iv, vec->iv, ivsize);
		else
			memset(iv, 0, ivsize);
	} else {
		if (vec->generates_iv) {
			pr_err("alg: skcipher: %s has ivsize=0 but test vector %u generates IV!\n",
			       driver, vec_num);
			return -EINVAL;
		}
		iv = NULL;
	}

	/* Build the src/dst scatterlists */
	input.iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
	input.iov_len = vec->len;
	err = build_cipher_test_sglists(tsgls, cfg, alignmask,
					vec->len, vec->len, &input, 1);
	if (err) {
		pr_err("alg: skcipher: %s %s: error preparing scatterlists for test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return err;
	}

	/* Do the actual encryption or decryption */
	/* Poison the request context first so stale-state reuse is caught */
	testmgr_poison(req->__ctx, crypto_skcipher_reqsize(tfm));
	skcipher_request_set_callback(req, req_flags, crypto_req_done, &wait);
	skcipher_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
				   vec->len, iv);
	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = enc ? crypto_skcipher_encrypt(req) : crypto_skcipher_decrypt(req);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = crypto_wait_req(err, &wait);
	if (err) {
		pr_err("alg: skcipher: %s %s failed with err %d on test vector %u, cfg=\"%s\"\n",
		       driver, op, err, vec_num, cfg->name);
		return err;
	}

	/* Check that the algorithm didn't overwrite things it shouldn't have */
	if (req->cryptlen != vec->len ||
	    req->iv != iv ||
	    req->src != tsgls->src.sgl_ptr ||
	    req->dst != tsgls->dst.sgl_ptr ||
	    crypto_skcipher_reqtfm(req) != tfm ||
	    req->base.complete != crypto_req_done ||
	    req->base.flags != req_flags ||
	    req->base.data != &wait) {
		pr_err("alg: skcipher: %s %s corrupted request struct on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		if (req->cryptlen != vec->len)
			pr_err("alg: skcipher: changed 'req->cryptlen'\n");
		if (req->iv != iv)
			pr_err("alg: skcipher: changed 'req->iv'\n");
		if (req->src != tsgls->src.sgl_ptr)
			pr_err("alg: skcipher: changed 'req->src'\n");
		if (req->dst != tsgls->dst.sgl_ptr)
			pr_err("alg: skcipher: changed 'req->dst'\n");
		if (crypto_skcipher_reqtfm(req) != tfm)
			pr_err("alg: skcipher: changed 'req->base.tfm'\n");
		if (req->base.complete != crypto_req_done)
			pr_err("alg: skcipher: changed 'req->base.complete'\n");
		if (req->base.flags != req_flags)
			pr_err("alg: skcipher: changed 'req->base.flags'\n");
		if (req->base.data != &wait)
			pr_err("alg: skcipher: changed 'req->base.data'\n");
		return -EINVAL;
	}
	if (is_test_sglist_corrupted(&tsgls->src)) {
		pr_err("alg: skcipher: %s %s corrupted src sgl on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return -EINVAL;
	}
	if (tsgls->dst.sgl_ptr != tsgls->src.sgl &&
	    is_test_sglist_corrupted(&tsgls->dst)) {
		pr_err("alg: skcipher: %s %s corrupted dst sgl on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return -EINVAL;
	}

	/* Check for the correct output (ciphertext or plaintext) */
	err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
				    vec->len, 0, true);
	if (err == -EOVERFLOW) {
		pr_err("alg: skcipher: %s %s overran dst buffer on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return err;
	}
	if (err) {
		pr_err("alg: skcipher: %s %s test failed (wrong result) on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return err;
	}

	/* If applicable, check that the algorithm generated the correct IV */
	if (vec->iv_out && memcmp(iv, vec->iv_out, ivsize) != 0) {
		pr_err("alg: skcipher: %s %s test failed (wrong output IV) on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		hexdump(iv, ivsize);
		return -EINVAL;
	}

	return 0;
}
1741
1742 static int test_skcipher_vec(const char *driver, int enc,
1743 const struct cipher_testvec *vec,
1744 unsigned int vec_num,
1745 struct skcipher_request *req,
1746 struct cipher_test_sglists *tsgls)
1747 {
1748 unsigned int i;
1749 int err;
1750
1751 if (fips_enabled && vec->fips_skip)
1752 return 0;
1753
1754 for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
1755 err = test_skcipher_vec_cfg(driver, enc, vec, vec_num,
1756 &default_cipher_testvec_configs[i],
1757 req, tsgls);
1758 if (err)
1759 return err;
1760 }
1761
1762 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
1763 if (!noextratests) {
1764 struct testvec_config cfg;
1765 char cfgname[TESTVEC_CONFIG_NAMELEN];
1766
1767 for (i = 0; i < fuzz_iterations; i++) {
1768 generate_random_testvec_config(&cfg, cfgname,
1769 sizeof(cfgname));
1770 err = test_skcipher_vec_cfg(driver, enc, vec, vec_num,
1771 &cfg, req, tsgls);
1772 if (err)
1773 return err;
1774 }
1775 }
1776 #endif
1777 return 0;
1778 }
1779
1780 static int test_skcipher(const char *driver, int enc,
1781 const struct cipher_test_suite *suite,
1782 struct skcipher_request *req,
1783 struct cipher_test_sglists *tsgls)
1784 {
1785 unsigned int i;
1786 int err;
1787
1788 for (i = 0; i < suite->count; i++) {
1789 err = test_skcipher_vec(driver, enc, &suite->vecs[i], i, req,
1790 tsgls);
1791 if (err)
1792 return err;
1793 }
1794 return 0;
1795 }
1796
1797 static int alg_test_skcipher(const struct alg_test_desc *desc,
1798 const char *driver, u32 type, u32 mask)
1799 {
1800 const struct cipher_test_suite *suite = &desc->suite.cipher;
1801 struct crypto_skcipher *tfm;
1802 struct skcipher_request *req = NULL;
1803 struct cipher_test_sglists *tsgls = NULL;
1804 int err;
1805
1806 if (suite->count <= 0) {
1807 pr_err("alg: skcipher: empty test suite for %s\n", driver);
1808 return -EINVAL;
1809 }
1810
1811 tfm = crypto_alloc_skcipher(driver, type, mask);
1812 if (IS_ERR(tfm)) {
1813 pr_err("alg: skcipher: failed to allocate transform for %s: %ld\n",
1814 driver, PTR_ERR(tfm));
1815 return PTR_ERR(tfm);
1816 }
1817
1818 req = skcipher_request_alloc(tfm, GFP_KERNEL);
1819 if (!req) {
1820 pr_err("alg: skcipher: failed to allocate request for %s\n",
1821 driver);
1822 err = -ENOMEM;
1823 goto out;
1824 }
1825
1826 tsgls = alloc_cipher_test_sglists();
1827 if (!tsgls) {
1828 pr_err("alg: skcipher: failed to allocate test buffers for %s\n",
1829 driver);
1830 err = -ENOMEM;
1831 goto out;
1832 }
1833
1834 err = test_skcipher(driver, ENCRYPT, suite, req, tsgls);
1835 if (err)
1836 goto out;
1837
1838 err = test_skcipher(driver, DECRYPT, suite, req, tsgls);
1839 out:
1840 free_cipher_test_sglists(tsgls);
1841 skcipher_request_free(req);
1842 crypto_free_skcipher(tfm);
1843 return err;
1844 }
1845
/*
 * Test a synchronous (crypto_comp) compression algorithm.
 *
 * For each vector in @ctemplate: compress the input, then decompress the
 * result and require it to round-trip back to the original input.  For
 * each vector in @dtemplate: decompress the input and compare against the
 * expected output.  Returns 0 on success or a negative errno.
 */
static int test_comp(struct crypto_comp *tfm,
		     const struct comp_testvec *ctemplate,
		     const struct comp_testvec *dtemplate,
		     int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	char *output, *decomp_output;
	unsigned int i;
	int ret;

	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!output)
		return -ENOMEM;

	decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!decomp_output) {
		kfree(output);
		return -ENOMEM;
	}

	for (i = 0; i < ctcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(output, 0, COMP_BUF_SIZE);
		memset(decomp_output, 0, COMP_BUF_SIZE);

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, output, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		/* Decompress what we just compressed; it must round-trip */
		ilen = dlen;
		dlen = COMP_BUF_SIZE;
		ret = crypto_comp_decompress(tfm, output,
					     ilen, decomp_output, &dlen);
		if (ret) {
			pr_err("alg: comp: compression failed: decompress: on test %d for %s failed: ret=%d\n",
			       i + 1, algo, -ret);
			goto out;
		}

		if (dlen != ctemplate[i].inlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(decomp_output, ctemplate[i].input,
			   ctemplate[i].inlen)) {
			pr_err("alg: comp: compression failed: output differs: on test %d for %s\n",
			       i + 1, algo);
			hexdump(decomp_output, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0; i < dtcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(decomp_output, 0, COMP_BUF_SIZE);

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, decomp_output, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(decomp_output, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(decomp_output, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	kfree(decomp_output);
	kfree(output);
	return ret;
}
1951
1952 static int test_acomp(struct crypto_acomp *tfm,
1953 const struct comp_testvec *ctemplate,
1954 const struct comp_testvec *dtemplate,
1955 int ctcount, int dtcount)
1956 {
1957 const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
1958 unsigned int i;
1959 char *output, *decomp_out;
1960 int ret;
1961 struct scatterlist src, dst;
1962 struct acomp_req *req;
1963 struct crypto_wait wait;
1964
1965 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1966 if (!output)
1967 return -ENOMEM;
1968
1969 decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1970 if (!decomp_out) {
1971 kfree(output);
1972 return -ENOMEM;
1973 }
1974
1975 for (i = 0; i < ctcount; i++) {
1976 unsigned int dlen = COMP_BUF_SIZE;
1977 int ilen = ctemplate[i].inlen;
1978 void *input_vec;
1979
1980 input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
1981 if (!input_vec) {
1982 ret = -ENOMEM;
1983 goto out;
1984 }
1985
1986 memset(output, 0, dlen);
1987 crypto_init_wait(&wait);
1988 sg_init_one(&src, input_vec, ilen);
1989 sg_init_one(&dst, output, dlen);
1990
1991 req = acomp_request_alloc(tfm);
1992 if (!req) {
1993 pr_err("alg: acomp: request alloc failed for %s\n",
1994 algo);
1995 kfree(input_vec);
1996 ret = -ENOMEM;
1997 goto out;
1998 }
1999
2000 acomp_request_set_params(req, &src, &dst, ilen, dlen);
2001 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2002 crypto_req_done, &wait);
2003
2004 ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
2005 if (ret) {
2006 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
2007 i + 1, algo, -ret);
2008 kfree(input_vec);
2009 acomp_request_free(req);
2010 goto out;
2011 }
2012
2013 ilen = req->dlen;
2014 dlen = COMP_BUF_SIZE;
2015 sg_init_one(&src, output, ilen);
2016 sg_init_one(&dst, decomp_out, dlen);
2017 crypto_init_wait(&wait);
2018 acomp_request_set_params(req, &src, &dst, ilen, dlen);
2019
2020 ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
2021 if (ret) {
2022 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
2023 i + 1, algo, -ret);
2024 kfree(input_vec);
2025 acomp_request_free(req);
2026 goto out;
2027 }
2028
2029 if (req->dlen != ctemplate[i].inlen) {
2030 pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
2031 i + 1, algo, req->dlen);
2032 ret = -EINVAL;
2033 kfree(input_vec);
2034 acomp_request_free(req);
2035 goto out;
2036 }
2037
2038 if (memcmp(input_vec, decomp_out, req->dlen)) {
2039 pr_err("alg: acomp: Compression test %d failed for %s\n",
2040 i + 1, algo);
2041 hexdump(output, req->dlen);
2042 ret = -EINVAL;
2043 kfree(input_vec);
2044 acomp_request_free(req);
2045 goto out;
2046 }
2047
2048 kfree(input_vec);
2049 acomp_request_free(req);
2050 }
2051
2052 for (i = 0; i < dtcount; i++) {
2053 unsigned int dlen = COMP_BUF_SIZE;
2054 int ilen = dtemplate[i].inlen;
2055 void *input_vec;
2056
2057 input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
2058 if (!input_vec) {
2059 ret = -ENOMEM;
2060 goto out;
2061 }
2062
2063 memset(output, 0, dlen);
2064 crypto_init_wait(&wait);
2065 sg_init_one(&src, input_vec, ilen);
2066 sg_init_one(&dst, output, dlen);
2067
2068 req = acomp_request_alloc(tfm);
2069 if (!req) {
2070 pr_err("alg: acomp: request alloc failed for %s\n",
2071 algo);
2072 kfree(input_vec);
2073 ret = -ENOMEM;
2074 goto out;
2075 }
2076
2077 acomp_request_set_params(req, &src, &dst, ilen, dlen);
2078 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2079 crypto_req_done, &wait);
2080
2081 ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
2082 if (ret) {
2083 pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
2084 i + 1, algo, -ret);
2085 kfree(input_vec);
2086 acomp_request_free(req);
2087 goto out;
2088 }
2089
2090 if (req->dlen != dtemplate[i].outlen) {
2091 pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
2092 i + 1, algo, req->dlen);
2093 ret = -EINVAL;
2094 kfree(input_vec);
2095 acomp_request_free(req);
2096 goto out;
2097 }
2098
2099 if (memcmp(output, dtemplate[i].output, req->dlen)) {
2100 pr_err("alg: acomp: Decompression test %d failed for %s\n",
2101 i + 1, algo);
2102 hexdump(output, req->dlen);
2103 ret = -EINVAL;
2104 kfree(input_vec);
2105 acomp_request_free(req);
2106 goto out;
2107 }
2108
2109 kfree(input_vec);
2110 acomp_request_free(req);
2111 }
2112
2113 ret = 0;
2114
2115 out:
2116 kfree(decomp_out);
2117 kfree(output);
2118 return ret;
2119 }
2120
2121 static int test_cprng(struct crypto_rng *tfm,
2122 const struct cprng_testvec *template,
2123 unsigned int tcount)
2124 {
2125 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
2126 int err = 0, i, j, seedsize;
2127 u8 *seed;
2128 char result[32];
2129
2130 seedsize = crypto_rng_seedsize(tfm);
2131
2132 seed = kmalloc(seedsize, GFP_KERNEL);
2133 if (!seed) {
2134 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
2135 "for %s\n", algo);
2136 return -ENOMEM;
2137 }
2138
2139 for (i = 0; i < tcount; i++) {
2140 memset(result, 0, 32);
2141
2142 memcpy(seed, template[i].v, template[i].vlen);
2143 memcpy(seed + template[i].vlen, template[i].key,
2144 template[i].klen);
2145 memcpy(seed + template[i].vlen + template[i].klen,
2146 template[i].dt, template[i].dtlen);
2147
2148 err = crypto_rng_reset(tfm, seed, seedsize);
2149 if (err) {
2150 printk(KERN_ERR "alg: cprng: Failed to reset rng "
2151 "for %s\n", algo);
2152 goto out;
2153 }
2154
2155 for (j = 0; j < template[i].loops; j++) {
2156 err = crypto_rng_get_bytes(tfm, result,
2157 template[i].rlen);
2158 if (err < 0) {
2159 printk(KERN_ERR "alg: cprng: Failed to obtain "
2160 "the correct amount of random data for "
2161 "%s (requested %d)\n", algo,
2162 template[i].rlen);
2163 goto out;
2164 }
2165 }
2166
2167 err = memcmp(result, template[i].result,
2168 template[i].rlen);
2169 if (err) {
2170 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
2171 i, algo);
2172 hexdump(result, template[i].rlen);
2173 err = -EINVAL;
2174 goto out;
2175 }
2176 }
2177
2178 out:
2179 kfree(seed);
2180 return err;
2181 }
2182
2183 static int alg_test_cipher(const struct alg_test_desc *desc,
2184 const char *driver, u32 type, u32 mask)
2185 {
2186 const struct cipher_test_suite *suite = &desc->suite.cipher;
2187 struct crypto_cipher *tfm;
2188 int err;
2189
2190 tfm = crypto_alloc_cipher(driver, type, mask);
2191 if (IS_ERR(tfm)) {
2192 printk(KERN_ERR "alg: cipher: Failed to load transform for "
2193 "%s: %ld\n", driver, PTR_ERR(tfm));
2194 return PTR_ERR(tfm);
2195 }
2196
2197 err = test_cipher(tfm, ENCRYPT, suite->vecs, suite->count);
2198 if (!err)
2199 err = test_cipher(tfm, DECRYPT, suite->vecs, suite->count);
2200
2201 crypto_free_cipher(tfm);
2202 return err;
2203 }
2204
2205 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
2206 u32 type, u32 mask)
2207 {
2208 struct crypto_comp *comp;
2209 struct crypto_acomp *acomp;
2210 int err;
2211 u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
2212
2213 if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
2214 acomp = crypto_alloc_acomp(driver, type, mask);
2215 if (IS_ERR(acomp)) {
2216 pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
2217 driver, PTR_ERR(acomp));
2218 return PTR_ERR(acomp);
2219 }
2220 err = test_acomp(acomp, desc->suite.comp.comp.vecs,
2221 desc->suite.comp.decomp.vecs,
2222 desc->suite.comp.comp.count,
2223 desc->suite.comp.decomp.count);
2224 crypto_free_acomp(acomp);
2225 } else {
2226 comp = crypto_alloc_comp(driver, type, mask);
2227 if (IS_ERR(comp)) {
2228 pr_err("alg: comp: Failed to load transform for %s: %ld\n",
2229 driver, PTR_ERR(comp));
2230 return PTR_ERR(comp);
2231 }
2232
2233 err = test_comp(comp, desc->suite.comp.comp.vecs,
2234 desc->suite.comp.decomp.vecs,
2235 desc->suite.comp.comp.count,
2236 desc->suite.comp.decomp.count);
2237
2238 crypto_free_comp(comp);
2239 }
2240 return err;
2241 }
2242
/*
 * Test crc32c: first run the generic hash vector suite, then additionally
 * verify that a caller-seeded intermediate state (the shash context) is
 * honoured by crypto_shash_final().
 */
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	__le32 val;
	int err;

	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		return err;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		if (PTR_ERR(tfm) == -ENOENT) {
			/*
			 * This crc32c implementation is only available through
			 * ahash API, not the shash API, so the remaining part
			 * of the test is not applicable to it.
			 */
			return 0;
		}
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	do {
		SHASH_DESC_ON_STACK(shash, tfm);
		/* crc32c's shash context is just the 32-bit CRC state word. */
		u32 *ctx = (u32 *)shash_desc_ctx(shash);

		shash->tfm = tfm;
		shash->flags = 0;

		/*
		 * Seed an arbitrary non-default state and finalize with no
		 * further data.  The expected output is the bitwise complement
		 * of the seed, matching crc32c's final inversion step.
		 */
		*ctx = 420553207;
		err = crypto_shash_final(shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		if (val != cpu_to_le32(~420553207)) {
			pr_err("alg: crc32c: Test failed for %s: %u\n",
			       driver, le32_to_cpu(val));
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);

	return err;
}
2295
2296 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
2297 u32 type, u32 mask)
2298 {
2299 struct crypto_rng *rng;
2300 int err;
2301
2302 rng = crypto_alloc_rng(driver, type, mask);
2303 if (IS_ERR(rng)) {
2304 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
2305 "%ld\n", driver, PTR_ERR(rng));
2306 return PTR_ERR(rng);
2307 }
2308
2309 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
2310
2311 crypto_free_rng(rng);
2312
2313 return err;
2314 }
2315
2316
/*
 * Run one DRBG CAVS (known-answer) test vector.
 *
 * @test:   vector holding entropy, personalization string, additional data
 *          and the expected output
 * @pr:     non-zero for prediction-resistance mode (fresh entropy is injected
 *          before each generate call)
 *
 * Returns 0 on success, a negative errno on setup/generate failure, or the
 * non-zero memcmp() result if the generated bytes do not match.
 */
static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
			  const char *driver, u32 type, u32 mask)
{
	int ret = -EAGAIN;
	struct crypto_rng *drng;
	struct drbg_test_data test_data;
	struct drbg_string addtl, pers, testentropy;
	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);

	if (!buf)
		return -ENOMEM;

	drng = crypto_alloc_rng(driver, type, mask);
	if (IS_ERR(drng)) {
		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
		       "%s\n", driver);
		kzfree(buf);
		return -ENOMEM;
	}

	/* Instantiate: seed with the vector's entropy + personalization. */
	test_data.testentropy = &testentropy;
	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
	drbg_string_fill(&pers, test->pers, test->perslen);
	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
	if (ret) {
		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
		goto outbuf;
	}

	/* First generate call; in PR mode feed the first reseed entropy. */
	drbg_string_fill(&addtl, test->addtla, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	/* Second generate call; only its output is compared (CAVS style). */
	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	/* Nonzero memcmp() result is propagated as the failure indication. */
	ret = memcmp(test->expected, buf, test->expectedlen);

outbuf:
	crypto_free_rng(drng);
	kzfree(buf);	/* buf held raw DRBG output; zeroize on free */
	return ret;
}
2383
2384
2385 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
2386 u32 type, u32 mask)
2387 {
2388 int err = 0;
2389 int pr = 0;
2390 int i = 0;
2391 const struct drbg_testvec *template = desc->suite.drbg.vecs;
2392 unsigned int tcount = desc->suite.drbg.count;
2393
2394 if (0 == memcmp(driver, "drbg_pr_", 8))
2395 pr = 1;
2396
2397 for (i = 0; i < tcount; i++) {
2398 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
2399 if (err) {
2400 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
2401 i, driver);
2402 err = -EINVAL;
2403 break;
2404 }
2405 }
2406 return err;
2407
2408 }
2409
/*
 * Run one key-agreement (KPP) test vector.
 *
 * Two modes, selected by vec->genkey:
 *  - genkey == 0: party A's public key and the shared secret are compared
 *    against the precomputed values in the vector;
 *  - genkey != 0: a full two-party exchange is simulated — A's generated
 *    public key is fed to B (using vec->b_secret) and both computed shared
 *    secrets must agree.
 *
 * Returns 0 on success, negative errno on failure.
 */
static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
		       const char *alg)
{
	struct kpp_request *req;
	void *input_buf = NULL;
	void *output_buf = NULL;
	void *a_public = NULL;	/* A's generated public key (genkey mode) */
	void *a_ss = NULL;	/* A's computed shared secret (genkey mode) */
	void *shared_secret = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max;
	int err = -ENOMEM;
	struct scatterlist src, dst;

	req = kpp_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return err;

	crypto_init_wait(&wait);

	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
	if (err < 0)
		goto free_req;

	out_len_max = crypto_kpp_maxsize(tfm);
	output_buf = kzalloc(out_len_max, GFP_KERNEL);
	if (!output_buf) {
		err = -ENOMEM;
		goto free_req;
	}

	/* Use appropriate parameter as base */
	kpp_request_set_input(req, NULL, 0);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);

	/* Compute party A's public key */
	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
		       alg, err);
		goto free_output;
	}

	if (vec->genkey) {
		/* Save party A's public key */
		a_public = kmemdup(sg_virt(req->dst), out_len_max, GFP_KERNEL);
		if (!a_public) {
			err = -ENOMEM;
			goto free_output;
		}
	} else {
		/* Verify calculated public key */
		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
			   vec->expected_a_public_size)) {
			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
			       alg);
			err = -EINVAL;
			goto free_output;
		}
	}

	/* Calculate shared secret key by using counter part (b) public key. */
	input_buf = kmemdup(vec->b_public, vec->b_public_size, GFP_KERNEL);
	if (!input_buf) {
		err = -ENOMEM;
		goto free_output;
	}

	sg_init_one(&src, input_buf, vec->b_public_size);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_input(req, &src, vec->b_public_size);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);
	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
		       alg, err);
		goto free_all;
	}

	if (vec->genkey) {
		/* Save the shared secret obtained by party A */
		a_ss = kmemdup(sg_virt(req->dst), vec->expected_ss_size, GFP_KERNEL);
		if (!a_ss) {
			err = -ENOMEM;
			goto free_all;
		}

		/*
		 * Calculate party B's shared secret by using party A's
		 * public key.
		 */
		err = crypto_kpp_set_secret(tfm, vec->b_secret,
					    vec->b_secret_size);
		if (err < 0)
			goto free_all;

		sg_init_one(&src, a_public, vec->expected_a_public_size);
		sg_init_one(&dst, output_buf, out_len_max);
		kpp_request_set_input(req, &src, vec->expected_a_public_size);
		kpp_request_set_output(req, &dst, out_len_max);
		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					 crypto_req_done, &wait);
		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
				      &wait);
		if (err) {
			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
			       alg, err);
			goto free_all;
		}

		/* In genkey mode, B's output must equal A's saved secret. */
		shared_secret = a_ss;
	} else {
		shared_secret = (void *)vec->expected_ss;
	}

	/*
	 * verify shared secret from which the user will derive
	 * secret key by executing whatever hash it has chosen
	 */
	if (memcmp(shared_secret, sg_virt(req->dst),
		   vec->expected_ss_size)) {
		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
		       alg);
		err = -EINVAL;
	}

free_all:
	kfree(a_ss);
	kfree(input_buf);
free_output:
	kfree(a_public);
	kfree(output_buf);
free_req:
	kpp_request_free(req);
	return err;
}
2551
2552 static int test_kpp(struct crypto_kpp *tfm, const char *alg,
2553 const struct kpp_testvec *vecs, unsigned int tcount)
2554 {
2555 int ret, i;
2556
2557 for (i = 0; i < tcount; i++) {
2558 ret = do_test_kpp(tfm, vecs++, alg);
2559 if (ret) {
2560 pr_err("alg: %s: test failed on vector %d, err=%d\n",
2561 alg, i + 1, ret);
2562 return ret;
2563 }
2564 }
2565 return 0;
2566 }
2567
2568 static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
2569 u32 type, u32 mask)
2570 {
2571 struct crypto_kpp *tfm;
2572 int err = 0;
2573
2574 tfm = crypto_alloc_kpp(driver, type, mask);
2575 if (IS_ERR(tfm)) {
2576 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
2577 driver, PTR_ERR(tfm));
2578 return PTR_ERR(tfm);
2579 }
2580 if (desc->suite.kpp.vecs)
2581 err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
2582 desc->suite.kpp.count);
2583
2584 crypto_free_kpp(tfm);
2585 return err;
2586 }
2587
/*
 * Store @val (native endianness) at @dst and return the address just past
 * it.  memcpy is used so @dst need not be u32-aligned.
 */
static u8 *test_pack_u32(u8 *dst, u32 val)
{
	memcpy(dst, &val, sizeof(val));
	return dst + sizeof(val);
}
2593
2594 static int test_akcipher_one(struct crypto_akcipher *tfm,
2595 const struct akcipher_testvec *vecs)
2596 {
2597 char *xbuf[XBUFSIZE];
2598 struct akcipher_request *req;
2599 void *outbuf_enc = NULL;
2600 void *outbuf_dec = NULL;
2601 struct crypto_wait wait;
2602 unsigned int out_len_max, out_len = 0;
2603 int err = -ENOMEM;
2604 struct scatterlist src, dst, src_tab[3];
2605 const char *m, *c;
2606 unsigned int m_size, c_size;
2607 const char *op;
2608 u8 *key, *ptr;
2609
2610 if (testmgr_alloc_buf(xbuf))
2611 return err;
2612
2613 req = akcipher_request_alloc(tfm, GFP_KERNEL);
2614 if (!req)
2615 goto free_xbuf;
2616
2617 crypto_init_wait(&wait);
2618
2619 key = kmalloc(vecs->key_len + sizeof(u32) * 2 + vecs->param_len,
2620 GFP_KERNEL);
2621 if (!key)
2622 goto free_xbuf;
2623 memcpy(key, vecs->key, vecs->key_len);
2624 ptr = key + vecs->key_len;
2625 ptr = test_pack_u32(ptr, vecs->algo);
2626 ptr = test_pack_u32(ptr, vecs->param_len);
2627 memcpy(ptr, vecs->params, vecs->param_len);
2628
2629 if (vecs->public_key_vec)
2630 err = crypto_akcipher_set_pub_key(tfm, key, vecs->key_len);
2631 else
2632 err = crypto_akcipher_set_priv_key(tfm, key, vecs->key_len);
2633 if (err)
2634 goto free_req;
2635
2636 /*
2637 * First run test which do not require a private key, such as
2638 * encrypt or verify.
2639 */
2640 err = -ENOMEM;
2641 out_len_max = crypto_akcipher_maxsize(tfm);
2642 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
2643 if (!outbuf_enc)
2644 goto free_req;
2645
2646 if (!vecs->siggen_sigver_test) {
2647 m = vecs->m;
2648 m_size = vecs->m_size;
2649 c = vecs->c;
2650 c_size = vecs->c_size;
2651 op = "encrypt";
2652 } else {
2653 /* Swap args so we could keep plaintext (digest)
2654 * in vecs->m, and cooked signature in vecs->c.
2655 */
2656 m = vecs->c; /* signature */
2657 m_size = vecs->c_size;
2658 c = vecs->m; /* digest */
2659 c_size = vecs->m_size;
2660 op = "verify";
2661 }
2662
2663 if (WARN_ON(m_size > PAGE_SIZE))
2664 goto free_all;
2665 memcpy(xbuf[0], m, m_size);
2666
2667 sg_init_table(src_tab, 3);
2668 sg_set_buf(&src_tab[0], xbuf[0], 8);
2669 sg_set_buf(&src_tab[1], xbuf[0] + 8, m_size - 8);
2670 if (vecs->siggen_sigver_test) {
2671 if (WARN_ON(c_size > PAGE_SIZE))
2672 goto free_all;
2673 memcpy(xbuf[1], c, c_size);
2674 sg_set_buf(&src_tab[2], xbuf[1], c_size);
2675 akcipher_request_set_crypt(req, src_tab, NULL, m_size, c_size);
2676 } else {
2677 sg_init_one(&dst, outbuf_enc, out_len_max);
2678 akcipher_request_set_crypt(req, src_tab, &dst, m_size,
2679 out_len_max);
2680 }
2681 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2682 crypto_req_done, &wait);
2683
2684 err = crypto_wait_req(vecs->siggen_sigver_test ?
2685 /* Run asymmetric signature verification */
2686 crypto_akcipher_verify(req) :
2687 /* Run asymmetric encrypt */
2688 crypto_akcipher_encrypt(req), &wait);
2689 if (err) {
2690 pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
2691 goto free_all;
2692 }
2693 if (!vecs->siggen_sigver_test) {
2694 if (req->dst_len != c_size) {
2695 pr_err("alg: akcipher: %s test failed. Invalid output len\n",
2696 op);
2697 err = -EINVAL;
2698 goto free_all;
2699 }
2700 /* verify that encrypted message is equal to expected */
2701 if (memcmp(c, outbuf_enc, c_size) != 0) {
2702 pr_err("alg: akcipher: %s test failed. Invalid output\n",
2703 op);
2704 hexdump(outbuf_enc, c_size);
2705 err = -EINVAL;
2706 goto free_all;
2707 }
2708 }
2709
2710 /*
2711 * Don't invoke (decrypt or sign) test which require a private key
2712 * for vectors with only a public key.
2713 */
2714 if (vecs->public_key_vec) {
2715 err = 0;
2716 goto free_all;
2717 }
2718 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
2719 if (!outbuf_dec) {
2720 err = -ENOMEM;
2721 goto free_all;
2722 }
2723
2724 op = vecs->siggen_sigver_test ? "sign" : "decrypt";
2725 if (WARN_ON(c_size > PAGE_SIZE))
2726 goto free_all;
2727 memcpy(xbuf[0], c, c_size);
2728
2729 sg_init_one(&src, xbuf[0], c_size);
2730 sg_init_one(&dst, outbuf_dec, out_len_max);
2731 crypto_init_wait(&wait);
2732 akcipher_request_set_crypt(req, &src, &dst, c_size, out_len_max);
2733
2734 err = crypto_wait_req(vecs->siggen_sigver_test ?
2735 /* Run asymmetric signature generation */
2736 crypto_akcipher_sign(req) :
2737 /* Run asymmetric decrypt */
2738 crypto_akcipher_decrypt(req), &wait);
2739 if (err) {
2740 pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
2741 goto free_all;
2742 }
2743 out_len = req->dst_len;
2744 if (out_len < m_size) {
2745 pr_err("alg: akcipher: %s test failed. Invalid output len %u\n",
2746 op, out_len);
2747 err = -EINVAL;
2748 goto free_all;
2749 }
2750 /* verify that decrypted message is equal to the original msg */
2751 if (memchr_inv(outbuf_dec, 0, out_len - m_size) ||
2752 memcmp(m, outbuf_dec + out_len - m_size, m_size)) {
2753 pr_err("alg: akcipher: %s test failed. Invalid output\n", op);
2754 hexdump(outbuf_dec, out_len);
2755 err = -EINVAL;
2756 }
2757 free_all:
2758 kfree(outbuf_dec);
2759 kfree(outbuf_enc);
2760 free_req:
2761 akcipher_request_free(req);
2762 kfree(key);
2763 free_xbuf:
2764 testmgr_free_buf(xbuf);
2765 return err;
2766 }
2767
2768 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
2769 const struct akcipher_testvec *vecs,
2770 unsigned int tcount)
2771 {
2772 const char *algo =
2773 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
2774 int ret, i;
2775
2776 for (i = 0; i < tcount; i++) {
2777 ret = test_akcipher_one(tfm, vecs++);
2778 if (!ret)
2779 continue;
2780
2781 pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
2782 i + 1, algo, ret);
2783 return ret;
2784 }
2785 return 0;
2786 }
2787
2788 static int alg_test_akcipher(const struct alg_test_desc *desc,
2789 const char *driver, u32 type, u32 mask)
2790 {
2791 struct crypto_akcipher *tfm;
2792 int err = 0;
2793
2794 tfm = crypto_alloc_akcipher(driver, type, mask);
2795 if (IS_ERR(tfm)) {
2796 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2797 driver, PTR_ERR(tfm));
2798 return PTR_ERR(tfm);
2799 }
2800 if (desc->suite.akcipher.vecs)
2801 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2802 desc->suite.akcipher.count);
2803
2804 crypto_free_akcipher(tfm);
2805 return err;
2806 }
2807
/*
 * No-op test: always reports success.  Used by alg_test_descs entries whose
 * algorithm is covered by another entry or needs no vectors (see e.g. the
 * drbg_nopr_* comments in the table).
 */
static int alg_test_null(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	return 0;
}
2813
/* Build a test-vector suite initializer from a static vector array. */
#define __VECS(tv) { .vecs = tv, .count = ARRAY_SIZE(tv) }
2815
2816 /* Please keep this list sorted by algorithm name. */
2817 static const struct alg_test_desc alg_test_descs[] = {
2818 {
2819 .alg = "adiantum(xchacha12,aes)",
2820 .test = alg_test_skcipher,
2821 .suite = {
2822 .cipher = __VECS(adiantum_xchacha12_aes_tv_template)
2823 },
2824 }, {
2825 .alg = "adiantum(xchacha20,aes)",
2826 .test = alg_test_skcipher,
2827 .suite = {
2828 .cipher = __VECS(adiantum_xchacha20_aes_tv_template)
2829 },
2830 }, {
2831 .alg = "aegis128",
2832 .test = alg_test_aead,
2833 .suite = {
2834 .aead = __VECS(aegis128_tv_template)
2835 }
2836 }, {
2837 .alg = "aegis128l",
2838 .test = alg_test_aead,
2839 .suite = {
2840 .aead = __VECS(aegis128l_tv_template)
2841 }
2842 }, {
2843 .alg = "aegis256",
2844 .test = alg_test_aead,
2845 .suite = {
2846 .aead = __VECS(aegis256_tv_template)
2847 }
2848 }, {
2849 .alg = "ansi_cprng",
2850 .test = alg_test_cprng,
2851 .suite = {
2852 .cprng = __VECS(ansi_cprng_aes_tv_template)
2853 }
2854 }, {
2855 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2856 .test = alg_test_aead,
2857 .suite = {
2858 .aead = __VECS(hmac_md5_ecb_cipher_null_tv_template)
2859 }
2860 }, {
2861 .alg = "authenc(hmac(sha1),cbc(aes))",
2862 .test = alg_test_aead,
2863 .fips_allowed = 1,
2864 .suite = {
2865 .aead = __VECS(hmac_sha1_aes_cbc_tv_temp)
2866 }
2867 }, {
2868 .alg = "authenc(hmac(sha1),cbc(des))",
2869 .test = alg_test_aead,
2870 .suite = {
2871 .aead = __VECS(hmac_sha1_des_cbc_tv_temp)
2872 }
2873 }, {
2874 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2875 .test = alg_test_aead,
2876 .fips_allowed = 1,
2877 .suite = {
2878 .aead = __VECS(hmac_sha1_des3_ede_cbc_tv_temp)
2879 }
2880 }, {
2881 .alg = "authenc(hmac(sha1),ctr(aes))",
2882 .test = alg_test_null,
2883 .fips_allowed = 1,
2884 }, {
2885 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2886 .test = alg_test_aead,
2887 .suite = {
2888 .aead = __VECS(hmac_sha1_ecb_cipher_null_tv_temp)
2889 }
2890 }, {
2891 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2892 .test = alg_test_null,
2893 .fips_allowed = 1,
2894 }, {
2895 .alg = "authenc(hmac(sha224),cbc(des))",
2896 .test = alg_test_aead,
2897 .suite = {
2898 .aead = __VECS(hmac_sha224_des_cbc_tv_temp)
2899 }
2900 }, {
2901 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2902 .test = alg_test_aead,
2903 .fips_allowed = 1,
2904 .suite = {
2905 .aead = __VECS(hmac_sha224_des3_ede_cbc_tv_temp)
2906 }
2907 }, {
2908 .alg = "authenc(hmac(sha256),cbc(aes))",
2909 .test = alg_test_aead,
2910 .fips_allowed = 1,
2911 .suite = {
2912 .aead = __VECS(hmac_sha256_aes_cbc_tv_temp)
2913 }
2914 }, {
2915 .alg = "authenc(hmac(sha256),cbc(des))",
2916 .test = alg_test_aead,
2917 .suite = {
2918 .aead = __VECS(hmac_sha256_des_cbc_tv_temp)
2919 }
2920 }, {
2921 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2922 .test = alg_test_aead,
2923 .fips_allowed = 1,
2924 .suite = {
2925 .aead = __VECS(hmac_sha256_des3_ede_cbc_tv_temp)
2926 }
2927 }, {
2928 .alg = "authenc(hmac(sha256),ctr(aes))",
2929 .test = alg_test_null,
2930 .fips_allowed = 1,
2931 }, {
2932 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2933 .test = alg_test_null,
2934 .fips_allowed = 1,
2935 }, {
2936 .alg = "authenc(hmac(sha384),cbc(des))",
2937 .test = alg_test_aead,
2938 .suite = {
2939 .aead = __VECS(hmac_sha384_des_cbc_tv_temp)
2940 }
2941 }, {
2942 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2943 .test = alg_test_aead,
2944 .fips_allowed = 1,
2945 .suite = {
2946 .aead = __VECS(hmac_sha384_des3_ede_cbc_tv_temp)
2947 }
2948 }, {
2949 .alg = "authenc(hmac(sha384),ctr(aes))",
2950 .test = alg_test_null,
2951 .fips_allowed = 1,
2952 }, {
2953 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2954 .test = alg_test_null,
2955 .fips_allowed = 1,
2956 }, {
2957 .alg = "authenc(hmac(sha512),cbc(aes))",
2958 .fips_allowed = 1,
2959 .test = alg_test_aead,
2960 .suite = {
2961 .aead = __VECS(hmac_sha512_aes_cbc_tv_temp)
2962 }
2963 }, {
2964 .alg = "authenc(hmac(sha512),cbc(des))",
2965 .test = alg_test_aead,
2966 .suite = {
2967 .aead = __VECS(hmac_sha512_des_cbc_tv_temp)
2968 }
2969 }, {
2970 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2971 .test = alg_test_aead,
2972 .fips_allowed = 1,
2973 .suite = {
2974 .aead = __VECS(hmac_sha512_des3_ede_cbc_tv_temp)
2975 }
2976 }, {
2977 .alg = "authenc(hmac(sha512),ctr(aes))",
2978 .test = alg_test_null,
2979 .fips_allowed = 1,
2980 }, {
2981 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2982 .test = alg_test_null,
2983 .fips_allowed = 1,
2984 }, {
2985 .alg = "cbc(aes)",
2986 .test = alg_test_skcipher,
2987 .fips_allowed = 1,
2988 .suite = {
2989 .cipher = __VECS(aes_cbc_tv_template)
2990 },
2991 }, {
2992 .alg = "cbc(anubis)",
2993 .test = alg_test_skcipher,
2994 .suite = {
2995 .cipher = __VECS(anubis_cbc_tv_template)
2996 },
2997 }, {
2998 .alg = "cbc(blowfish)",
2999 .test = alg_test_skcipher,
3000 .suite = {
3001 .cipher = __VECS(bf_cbc_tv_template)
3002 },
3003 }, {
3004 .alg = "cbc(camellia)",
3005 .test = alg_test_skcipher,
3006 .suite = {
3007 .cipher = __VECS(camellia_cbc_tv_template)
3008 },
3009 }, {
3010 .alg = "cbc(cast5)",
3011 .test = alg_test_skcipher,
3012 .suite = {
3013 .cipher = __VECS(cast5_cbc_tv_template)
3014 },
3015 }, {
3016 .alg = "cbc(cast6)",
3017 .test = alg_test_skcipher,
3018 .suite = {
3019 .cipher = __VECS(cast6_cbc_tv_template)
3020 },
3021 }, {
3022 .alg = "cbc(des)",
3023 .test = alg_test_skcipher,
3024 .suite = {
3025 .cipher = __VECS(des_cbc_tv_template)
3026 },
3027 }, {
3028 .alg = "cbc(des3_ede)",
3029 .test = alg_test_skcipher,
3030 .fips_allowed = 1,
3031 .suite = {
3032 .cipher = __VECS(des3_ede_cbc_tv_template)
3033 },
3034 }, {
3035 /* Same as cbc(aes) except the key is stored in
3036 * hardware secure memory which we reference by index
3037 */
3038 .alg = "cbc(paes)",
3039 .test = alg_test_null,
3040 .fips_allowed = 1,
3041 }, {
3042 .alg = "cbc(serpent)",
3043 .test = alg_test_skcipher,
3044 .suite = {
3045 .cipher = __VECS(serpent_cbc_tv_template)
3046 },
3047 }, {
3048 .alg = "cbc(sm4)",
3049 .test = alg_test_skcipher,
3050 .suite = {
3051 .cipher = __VECS(sm4_cbc_tv_template)
3052 }
3053 }, {
3054 .alg = "cbc(twofish)",
3055 .test = alg_test_skcipher,
3056 .suite = {
3057 .cipher = __VECS(tf_cbc_tv_template)
3058 },
3059 }, {
3060 .alg = "cbcmac(aes)",
3061 .fips_allowed = 1,
3062 .test = alg_test_hash,
3063 .suite = {
3064 .hash = __VECS(aes_cbcmac_tv_template)
3065 }
3066 }, {
3067 .alg = "ccm(aes)",
3068 .test = alg_test_aead,
3069 .fips_allowed = 1,
3070 .suite = {
3071 .aead = __VECS(aes_ccm_tv_template)
3072 }
3073 }, {
3074 .alg = "cfb(aes)",
3075 .test = alg_test_skcipher,
3076 .fips_allowed = 1,
3077 .suite = {
3078 .cipher = __VECS(aes_cfb_tv_template)
3079 },
3080 }, {
3081 .alg = "chacha20",
3082 .test = alg_test_skcipher,
3083 .suite = {
3084 .cipher = __VECS(chacha20_tv_template)
3085 },
3086 }, {
3087 .alg = "cmac(aes)",
3088 .fips_allowed = 1,
3089 .test = alg_test_hash,
3090 .suite = {
3091 .hash = __VECS(aes_cmac128_tv_template)
3092 }
3093 }, {
3094 .alg = "cmac(des3_ede)",
3095 .fips_allowed = 1,
3096 .test = alg_test_hash,
3097 .suite = {
3098 .hash = __VECS(des3_ede_cmac64_tv_template)
3099 }
3100 }, {
3101 .alg = "compress_null",
3102 .test = alg_test_null,
3103 }, {
3104 .alg = "crc32",
3105 .test = alg_test_hash,
3106 .fips_allowed = 1,
3107 .suite = {
3108 .hash = __VECS(crc32_tv_template)
3109 }
3110 }, {
3111 .alg = "crc32c",
3112 .test = alg_test_crc32c,
3113 .fips_allowed = 1,
3114 .suite = {
3115 .hash = __VECS(crc32c_tv_template)
3116 }
3117 }, {
3118 .alg = "crct10dif",
3119 .test = alg_test_hash,
3120 .fips_allowed = 1,
3121 .suite = {
3122 .hash = __VECS(crct10dif_tv_template)
3123 }
3124 }, {
3125 .alg = "ctr(aes)",
3126 .test = alg_test_skcipher,
3127 .fips_allowed = 1,
3128 .suite = {
3129 .cipher = __VECS(aes_ctr_tv_template)
3130 }
3131 }, {
3132 .alg = "ctr(blowfish)",
3133 .test = alg_test_skcipher,
3134 .suite = {
3135 .cipher = __VECS(bf_ctr_tv_template)
3136 }
3137 }, {
3138 .alg = "ctr(camellia)",
3139 .test = alg_test_skcipher,
3140 .suite = {
3141 .cipher = __VECS(camellia_ctr_tv_template)
3142 }
3143 }, {
3144 .alg = "ctr(cast5)",
3145 .test = alg_test_skcipher,
3146 .suite = {
3147 .cipher = __VECS(cast5_ctr_tv_template)
3148 }
3149 }, {
3150 .alg = "ctr(cast6)",
3151 .test = alg_test_skcipher,
3152 .suite = {
3153 .cipher = __VECS(cast6_ctr_tv_template)
3154 }
3155 }, {
3156 .alg = "ctr(des)",
3157 .test = alg_test_skcipher,
3158 .suite = {
3159 .cipher = __VECS(des_ctr_tv_template)
3160 }
3161 }, {
3162 .alg = "ctr(des3_ede)",
3163 .test = alg_test_skcipher,
3164 .fips_allowed = 1,
3165 .suite = {
3166 .cipher = __VECS(des3_ede_ctr_tv_template)
3167 }
3168 }, {
3169 /* Same as ctr(aes) except the key is stored in
3170 * hardware secure memory which we reference by index
3171 */
3172 .alg = "ctr(paes)",
3173 .test = alg_test_null,
3174 .fips_allowed = 1,
3175 }, {
3176 .alg = "ctr(serpent)",
3177 .test = alg_test_skcipher,
3178 .suite = {
3179 .cipher = __VECS(serpent_ctr_tv_template)
3180 }
3181 }, {
3182 .alg = "ctr(sm4)",
3183 .test = alg_test_skcipher,
3184 .suite = {
3185 .cipher = __VECS(sm4_ctr_tv_template)
3186 }
3187 }, {
3188 .alg = "ctr(twofish)",
3189 .test = alg_test_skcipher,
3190 .suite = {
3191 .cipher = __VECS(tf_ctr_tv_template)
3192 }
3193 }, {
3194 .alg = "cts(cbc(aes))",
3195 .test = alg_test_skcipher,
3196 .fips_allowed = 1,
3197 .suite = {
3198 .cipher = __VECS(cts_mode_tv_template)
3199 }
3200 }, {
3201 .alg = "deflate",
3202 .test = alg_test_comp,
3203 .fips_allowed = 1,
3204 .suite = {
3205 .comp = {
3206 .comp = __VECS(deflate_comp_tv_template),
3207 .decomp = __VECS(deflate_decomp_tv_template)
3208 }
3209 }
3210 }, {
3211 .alg = "dh",
3212 .test = alg_test_kpp,
3213 .fips_allowed = 1,
3214 .suite = {
3215 .kpp = __VECS(dh_tv_template)
3216 }
3217 }, {
3218 .alg = "digest_null",
3219 .test = alg_test_null,
3220 }, {
3221 .alg = "drbg_nopr_ctr_aes128",
3222 .test = alg_test_drbg,
3223 .fips_allowed = 1,
3224 .suite = {
3225 .drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
3226 }
3227 }, {
3228 .alg = "drbg_nopr_ctr_aes192",
3229 .test = alg_test_drbg,
3230 .fips_allowed = 1,
3231 .suite = {
3232 .drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
3233 }
3234 }, {
3235 .alg = "drbg_nopr_ctr_aes256",
3236 .test = alg_test_drbg,
3237 .fips_allowed = 1,
3238 .suite = {
3239 .drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
3240 }
3241 }, {
3242 /*
3243 * There is no need to specifically test the DRBG with every
3244 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
3245 */
3246 .alg = "drbg_nopr_hmac_sha1",
3247 .fips_allowed = 1,
3248 .test = alg_test_null,
3249 }, {
3250 .alg = "drbg_nopr_hmac_sha256",
3251 .test = alg_test_drbg,
3252 .fips_allowed = 1,
3253 .suite = {
3254 .drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
3255 }
3256 }, {
3257 /* covered by drbg_nopr_hmac_sha256 test */
3258 .alg = "drbg_nopr_hmac_sha384",
3259 .fips_allowed = 1,
3260 .test = alg_test_null,
3261 }, {
3262 .alg = "drbg_nopr_hmac_sha512",
3263 .test = alg_test_null,
3264 .fips_allowed = 1,
3265 }, {
3266 .alg = "drbg_nopr_sha1",
3267 .fips_allowed = 1,
3268 .test = alg_test_null,
3269 }, {
3270 .alg = "drbg_nopr_sha256",
3271 .test = alg_test_drbg,
3272 .fips_allowed = 1,
3273 .suite = {
3274 .drbg = __VECS(drbg_nopr_sha256_tv_template)
3275 }
3276 }, {
3277 /* covered by drbg_nopr_sha256 test */
3278 .alg = "drbg_nopr_sha384",
3279 .fips_allowed = 1,
3280 .test = alg_test_null,
3281 }, {
3282 .alg = "drbg_nopr_sha512",
3283 .fips_allowed = 1,
3284 .test = alg_test_null,
3285 }, {
3286 .alg = "drbg_pr_ctr_aes128",
3287 .test = alg_test_drbg,
3288 .fips_allowed = 1,
3289 .suite = {
3290 .drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
3291 }
3292 }, {
3293 /* covered by drbg_pr_ctr_aes128 test */
3294 .alg = "drbg_pr_ctr_aes192",
3295 .fips_allowed = 1,
3296 .test = alg_test_null,
3297 }, {
3298 .alg = "drbg_pr_ctr_aes256",
3299 .fips_allowed = 1,
3300 .test = alg_test_null,
3301 }, {
3302 .alg = "drbg_pr_hmac_sha1",
3303 .fips_allowed = 1,
3304 .test = alg_test_null,
3305 }, {
3306 .alg = "drbg_pr_hmac_sha256",
3307 .test = alg_test_drbg,
3308 .fips_allowed = 1,
3309 .suite = {
3310 .drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
3311 }
3312 }, {
3313 /* covered by drbg_pr_hmac_sha256 test */
3314 .alg = "drbg_pr_hmac_sha384",
3315 .fips_allowed = 1,
3316 .test = alg_test_null,
3317 }, {
3318 .alg = "drbg_pr_hmac_sha512",
3319 .test = alg_test_null,
3320 .fips_allowed = 1,
3321 }, {
3322 .alg = "drbg_pr_sha1",
3323 .fips_allowed = 1,
3324 .test = alg_test_null,
3325 }, {
3326 .alg = "drbg_pr_sha256",
3327 .test = alg_test_drbg,
3328 .fips_allowed = 1,
3329 .suite = {
3330 .drbg = __VECS(drbg_pr_sha256_tv_template)
3331 }
3332 }, {
3333 /* covered by drbg_pr_sha256 test */
3334 .alg = "drbg_pr_sha384",
3335 .fips_allowed = 1,
3336 .test = alg_test_null,
3337 }, {
3338 .alg = "drbg_pr_sha512",
3339 .fips_allowed = 1,
3340 .test = alg_test_null,
3341 }, {
3342 .alg = "ecb(aes)",
3343 .test = alg_test_skcipher,
3344 .fips_allowed = 1,
3345 .suite = {
3346 .cipher = __VECS(aes_tv_template)
3347 }
3348 }, {
3349 .alg = "ecb(anubis)",
3350 .test = alg_test_skcipher,
3351 .suite = {
3352 .cipher = __VECS(anubis_tv_template)
3353 }
3354 }, {
3355 .alg = "ecb(arc4)",
3356 .test = alg_test_skcipher,
3357 .suite = {
3358 .cipher = __VECS(arc4_tv_template)
3359 }
3360 }, {
3361 .alg = "ecb(blowfish)",
3362 .test = alg_test_skcipher,
3363 .suite = {
3364 .cipher = __VECS(bf_tv_template)
3365 }
3366 }, {
3367 .alg = "ecb(camellia)",
3368 .test = alg_test_skcipher,
3369 .suite = {
3370 .cipher = __VECS(camellia_tv_template)
3371 }
3372 }, {
3373 .alg = "ecb(cast5)",
3374 .test = alg_test_skcipher,
3375 .suite = {
3376 .cipher = __VECS(cast5_tv_template)
3377 }
3378 }, {
3379 .alg = "ecb(cast6)",
3380 .test = alg_test_skcipher,
3381 .suite = {
3382 .cipher = __VECS(cast6_tv_template)
3383 }
3384 }, {
3385 .alg = "ecb(cipher_null)",
3386 .test = alg_test_null,
3387 .fips_allowed = 1,
3388 }, {
3389 .alg = "ecb(des)",
3390 .test = alg_test_skcipher,
3391 .suite = {
3392 .cipher = __VECS(des_tv_template)
3393 }
3394 }, {
3395 .alg = "ecb(des3_ede)",
3396 .test = alg_test_skcipher,
3397 .fips_allowed = 1,
3398 .suite = {
3399 .cipher = __VECS(des3_ede_tv_template)
3400 }
3401 }, {
3402 .alg = "ecb(fcrypt)",
3403 .test = alg_test_skcipher,
3404 .suite = {
3405 .cipher = {
3406 .vecs = fcrypt_pcbc_tv_template,
3407 .count = 1
3408 }
3409 }
3410 }, {
3411 .alg = "ecb(khazad)",
3412 .test = alg_test_skcipher,
3413 .suite = {
3414 .cipher = __VECS(khazad_tv_template)
3415 }
3416 }, {
3417 /* Same as ecb(aes) except the key is stored in
3418 * hardware secure memory which we reference by index
3419 */
3420 .alg = "ecb(paes)",
3421 .test = alg_test_null,
3422 .fips_allowed = 1,
3423 }, {
3424 .alg = "ecb(seed)",
3425 .test = alg_test_skcipher,
3426 .suite = {
3427 .cipher = __VECS(seed_tv_template)
3428 }
3429 }, {
3430 .alg = "ecb(serpent)",
3431 .test = alg_test_skcipher,
3432 .suite = {
3433 .cipher = __VECS(serpent_tv_template)
3434 }
3435 }, {
3436 .alg = "ecb(sm4)",
3437 .test = alg_test_skcipher,
3438 .suite = {
3439 .cipher = __VECS(sm4_tv_template)
3440 }
3441 }, {
3442 .alg = "ecb(tea)",
3443 .test = alg_test_skcipher,
3444 .suite = {
3445 .cipher = __VECS(tea_tv_template)
3446 }
3447 }, {
3448 .alg = "ecb(tnepres)",
3449 .test = alg_test_skcipher,
3450 .suite = {
3451 .cipher = __VECS(tnepres_tv_template)
3452 }
3453 }, {
3454 .alg = "ecb(twofish)",
3455 .test = alg_test_skcipher,
3456 .suite = {
3457 .cipher = __VECS(tf_tv_template)
3458 }
3459 }, {
3460 .alg = "ecb(xeta)",
3461 .test = alg_test_skcipher,
3462 .suite = {
3463 .cipher = __VECS(xeta_tv_template)
3464 }
3465 }, {
3466 .alg = "ecb(xtea)",
3467 .test = alg_test_skcipher,
3468 .suite = {
3469 .cipher = __VECS(xtea_tv_template)
3470 }
3471 }, {
3472 .alg = "ecdh",
3473 .test = alg_test_kpp,
3474 .fips_allowed = 1,
3475 .suite = {
3476 .kpp = __VECS(ecdh_tv_template)
3477 }
3478 }, {
3479 .alg = "gcm(aes)",
3480 .test = alg_test_aead,
3481 .fips_allowed = 1,
3482 .suite = {
3483 .aead = __VECS(aes_gcm_tv_template)
3484 }
3485 }, {
3486 .alg = "ghash",
3487 .test = alg_test_hash,
3488 .fips_allowed = 1,
3489 .suite = {
3490 .hash = __VECS(ghash_tv_template)
3491 }
3492 }, {
3493 .alg = "hmac(md5)",
3494 .test = alg_test_hash,
3495 .suite = {
3496 .hash = __VECS(hmac_md5_tv_template)
3497 }
3498 }, {
3499 .alg = "hmac(rmd128)",
3500 .test = alg_test_hash,
3501 .suite = {
3502 .hash = __VECS(hmac_rmd128_tv_template)
3503 }
3504 }, {
3505 .alg = "hmac(rmd160)",
3506 .test = alg_test_hash,
3507 .suite = {
3508 .hash = __VECS(hmac_rmd160_tv_template)
3509 }
3510 }, {
3511 .alg = "hmac(sha1)",
3512 .test = alg_test_hash,
3513 .fips_allowed = 1,
3514 .suite = {
3515 .hash = __VECS(hmac_sha1_tv_template)
3516 }
3517 }, {
3518 .alg = "hmac(sha224)",
3519 .test = alg_test_hash,
3520 .fips_allowed = 1,
3521 .suite = {
3522 .hash = __VECS(hmac_sha224_tv_template)
3523 }
3524 }, {
3525 .alg = "hmac(sha256)",
3526 .test = alg_test_hash,
3527 .fips_allowed = 1,
3528 .suite = {
3529 .hash = __VECS(hmac_sha256_tv_template)
3530 }
3531 }, {
3532 .alg = "hmac(sha3-224)",
3533 .test = alg_test_hash,
3534 .fips_allowed = 1,
3535 .suite = {
3536 .hash = __VECS(hmac_sha3_224_tv_template)
3537 }
3538 }, {
3539 .alg = "hmac(sha3-256)",
3540 .test = alg_test_hash,
3541 .fips_allowed = 1,
3542 .suite = {
3543 .hash = __VECS(hmac_sha3_256_tv_template)
3544 }
3545 }, {
3546 .alg = "hmac(sha3-384)",
3547 .test = alg_test_hash,
3548 .fips_allowed = 1,
3549 .suite = {
3550 .hash = __VECS(hmac_sha3_384_tv_template)
3551 }
3552 }, {
3553 .alg = "hmac(sha3-512)",
3554 .test = alg_test_hash,
3555 .fips_allowed = 1,
3556 .suite = {
3557 .hash = __VECS(hmac_sha3_512_tv_template)
3558 }
3559 }, {
3560 .alg = "hmac(sha384)",
3561 .test = alg_test_hash,
3562 .fips_allowed = 1,
3563 .suite = {
3564 .hash = __VECS(hmac_sha384_tv_template)
3565 }
3566 }, {
3567 .alg = "hmac(sha512)",
3568 .test = alg_test_hash,
3569 .fips_allowed = 1,
3570 .suite = {
3571 .hash = __VECS(hmac_sha512_tv_template)
3572 }
3573 }, {
3574 .alg = "hmac(streebog256)",
3575 .test = alg_test_hash,
3576 .suite = {
3577 .hash = __VECS(hmac_streebog256_tv_template)
3578 }
3579 }, {
3580 .alg = "hmac(streebog512)",
3581 .test = alg_test_hash,
3582 .suite = {
3583 .hash = __VECS(hmac_streebog512_tv_template)
3584 }
3585 }, {
3586 .alg = "jitterentropy_rng",
3587 .fips_allowed = 1,
3588 .test = alg_test_null,
3589 }, {
3590 .alg = "kw(aes)",
3591 .test = alg_test_skcipher,
3592 .fips_allowed = 1,
3593 .suite = {
3594 .cipher = __VECS(aes_kw_tv_template)
3595 }
3596 }, {
3597 .alg = "lrw(aes)",
3598 .test = alg_test_skcipher,
3599 .suite = {
3600 .cipher = __VECS(aes_lrw_tv_template)
3601 }
3602 }, {
3603 .alg = "lrw(camellia)",
3604 .test = alg_test_skcipher,
3605 .suite = {
3606 .cipher = __VECS(camellia_lrw_tv_template)
3607 }
3608 }, {
3609 .alg = "lrw(cast6)",
3610 .test = alg_test_skcipher,
3611 .suite = {
3612 .cipher = __VECS(cast6_lrw_tv_template)
3613 }
3614 }, {
3615 .alg = "lrw(serpent)",
3616 .test = alg_test_skcipher,
3617 .suite = {
3618 .cipher = __VECS(serpent_lrw_tv_template)
3619 }
3620 }, {
3621 .alg = "lrw(twofish)",
3622 .test = alg_test_skcipher,
3623 .suite = {
3624 .cipher = __VECS(tf_lrw_tv_template)
3625 }
3626 }, {
3627 .alg = "lz4",
3628 .test = alg_test_comp,
3629 .fips_allowed = 1,
3630 .suite = {
3631 .comp = {
3632 .comp = __VECS(lz4_comp_tv_template),
3633 .decomp = __VECS(lz4_decomp_tv_template)
3634 }
3635 }
3636 }, {
3637 .alg = "lz4hc",
3638 .test = alg_test_comp,
3639 .fips_allowed = 1,
3640 .suite = {
3641 .comp = {
3642 .comp = __VECS(lz4hc_comp_tv_template),
3643 .decomp = __VECS(lz4hc_decomp_tv_template)
3644 }
3645 }
3646 }, {
3647 .alg = "lzo",
3648 .test = alg_test_comp,
3649 .fips_allowed = 1,
3650 .suite = {
3651 .comp = {
3652 .comp = __VECS(lzo_comp_tv_template),
3653 .decomp = __VECS(lzo_decomp_tv_template)
3654 }
3655 }
3656 }, {
3657 .alg = "md4",
3658 .test = alg_test_hash,
3659 .suite = {
3660 .hash = __VECS(md4_tv_template)
3661 }
3662 }, {
3663 .alg = "md5",
3664 .test = alg_test_hash,
3665 .suite = {
3666 .hash = __VECS(md5_tv_template)
3667 }
3668 }, {
3669 .alg = "michael_mic",
3670 .test = alg_test_hash,
3671 .suite = {
3672 .hash = __VECS(michael_mic_tv_template)
3673 }
3674 }, {
3675 .alg = "morus1280",
3676 .test = alg_test_aead,
3677 .suite = {
3678 .aead = __VECS(morus1280_tv_template)
3679 }
3680 }, {
3681 .alg = "morus640",
3682 .test = alg_test_aead,
3683 .suite = {
3684 .aead = __VECS(morus640_tv_template)
3685 }
3686 }, {
3687 .alg = "nhpoly1305",
3688 .test = alg_test_hash,
3689 .suite = {
3690 .hash = __VECS(nhpoly1305_tv_template)
3691 }
3692 }, {
3693 .alg = "ofb(aes)",
3694 .test = alg_test_skcipher,
3695 .fips_allowed = 1,
3696 .suite = {
3697 .cipher = __VECS(aes_ofb_tv_template)
3698 }
3699 }, {
3700 /* Same as ofb(aes) except the key is stored in
3701 * hardware secure memory which we reference by index
3702 */
3703 .alg = "ofb(paes)",
3704 .test = alg_test_null,
3705 .fips_allowed = 1,
3706 }, {
3707 .alg = "pcbc(fcrypt)",
3708 .test = alg_test_skcipher,
3709 .suite = {
3710 .cipher = __VECS(fcrypt_pcbc_tv_template)
3711 }
3712 }, {
3713 .alg = "pkcs1pad(rsa,sha224)",
3714 .test = alg_test_null,
3715 .fips_allowed = 1,
3716 }, {
3717 .alg = "pkcs1pad(rsa,sha256)",
3718 .test = alg_test_akcipher,
3719 .fips_allowed = 1,
3720 .suite = {
3721 .akcipher = __VECS(pkcs1pad_rsa_tv_template)
3722 }
3723 }, {
3724 .alg = "pkcs1pad(rsa,sha384)",
3725 .test = alg_test_null,
3726 .fips_allowed = 1,
3727 }, {
3728 .alg = "pkcs1pad(rsa,sha512)",
3729 .test = alg_test_null,
3730 .fips_allowed = 1,
3731 }, {
3732 .alg = "poly1305",
3733 .test = alg_test_hash,
3734 .suite = {
3735 .hash = __VECS(poly1305_tv_template)
3736 }
3737 }, {
3738 .alg = "rfc3686(ctr(aes))",
3739 .test = alg_test_skcipher,
3740 .fips_allowed = 1,
3741 .suite = {
3742 .cipher = __VECS(aes_ctr_rfc3686_tv_template)
3743 }
3744 }, {
3745 .alg = "rfc4106(gcm(aes))",
3746 .test = alg_test_aead,
3747 .fips_allowed = 1,
3748 .suite = {
3749 .aead = __VECS(aes_gcm_rfc4106_tv_template)
3750 }
3751 }, {
3752 .alg = "rfc4309(ccm(aes))",
3753 .test = alg_test_aead,
3754 .fips_allowed = 1,
3755 .suite = {
3756 .aead = __VECS(aes_ccm_rfc4309_tv_template)
3757 }
3758 }, {
3759 .alg = "rfc4543(gcm(aes))",
3760 .test = alg_test_aead,
3761 .suite = {
3762 .aead = __VECS(aes_gcm_rfc4543_tv_template)
3763 }
3764 }, {
3765 .alg = "rfc7539(chacha20,poly1305)",
3766 .test = alg_test_aead,
3767 .suite = {
3768 .aead = __VECS(rfc7539_tv_template)
3769 }
3770 }, {
3771 .alg = "rfc7539esp(chacha20,poly1305)",
3772 .test = alg_test_aead,
3773 .suite = {
3774 .aead = __VECS(rfc7539esp_tv_template)
3775 }
3776 }, {
3777 .alg = "rmd128",
3778 .test = alg_test_hash,
3779 .suite = {
3780 .hash = __VECS(rmd128_tv_template)
3781 }
3782 }, {
3783 .alg = "rmd160",
3784 .test = alg_test_hash,
3785 .suite = {
3786 .hash = __VECS(rmd160_tv_template)
3787 }
3788 }, {
3789 .alg = "rmd256",
3790 .test = alg_test_hash,
3791 .suite = {
3792 .hash = __VECS(rmd256_tv_template)
3793 }
3794 }, {
3795 .alg = "rmd320",
3796 .test = alg_test_hash,
3797 .suite = {
3798 .hash = __VECS(rmd320_tv_template)
3799 }
3800 }, {
3801 .alg = "rsa",
3802 .test = alg_test_akcipher,
3803 .fips_allowed = 1,
3804 .suite = {
3805 .akcipher = __VECS(rsa_tv_template)
3806 }
3807 }, {
3808 .alg = "salsa20",
3809 .test = alg_test_skcipher,
3810 .suite = {
3811 .cipher = __VECS(salsa20_stream_tv_template)
3812 }
3813 }, {
3814 .alg = "sha1",
3815 .test = alg_test_hash,
3816 .fips_allowed = 1,
3817 .suite = {
3818 .hash = __VECS(sha1_tv_template)
3819 }
3820 }, {
3821 .alg = "sha224",
3822 .test = alg_test_hash,
3823 .fips_allowed = 1,
3824 .suite = {
3825 .hash = __VECS(sha224_tv_template)
3826 }
3827 }, {
3828 .alg = "sha256",
3829 .test = alg_test_hash,
3830 .fips_allowed = 1,
3831 .suite = {
3832 .hash = __VECS(sha256_tv_template)
3833 }
3834 }, {
3835 .alg = "sha3-224",
3836 .test = alg_test_hash,
3837 .fips_allowed = 1,
3838 .suite = {
3839 .hash = __VECS(sha3_224_tv_template)
3840 }
3841 }, {
3842 .alg = "sha3-256",
3843 .test = alg_test_hash,
3844 .fips_allowed = 1,
3845 .suite = {
3846 .hash = __VECS(sha3_256_tv_template)
3847 }
3848 }, {
3849 .alg = "sha3-384",
3850 .test = alg_test_hash,
3851 .fips_allowed = 1,
3852 .suite = {
3853 .hash = __VECS(sha3_384_tv_template)
3854 }
3855 }, {
3856 .alg = "sha3-512",
3857 .test = alg_test_hash,
3858 .fips_allowed = 1,
3859 .suite = {
3860 .hash = __VECS(sha3_512_tv_template)
3861 }
3862 }, {
3863 .alg = "sha384",
3864 .test = alg_test_hash,
3865 .fips_allowed = 1,
3866 .suite = {
3867 .hash = __VECS(sha384_tv_template)
3868 }
3869 }, {
3870 .alg = "sha512",
3871 .test = alg_test_hash,
3872 .fips_allowed = 1,
3873 .suite = {
3874 .hash = __VECS(sha512_tv_template)
3875 }
3876 }, {
3877 .alg = "sm3",
3878 .test = alg_test_hash,
3879 .suite = {
3880 .hash = __VECS(sm3_tv_template)
3881 }
3882 }, {
3883 .alg = "streebog256",
3884 .test = alg_test_hash,
3885 .suite = {
3886 .hash = __VECS(streebog256_tv_template)
3887 }
3888 }, {
3889 .alg = "streebog512",
3890 .test = alg_test_hash,
3891 .suite = {
3892 .hash = __VECS(streebog512_tv_template)
3893 }
3894 }, {
3895 .alg = "tgr128",
3896 .test = alg_test_hash,
3897 .suite = {
3898 .hash = __VECS(tgr128_tv_template)
3899 }
3900 }, {
3901 .alg = "tgr160",
3902 .test = alg_test_hash,
3903 .suite = {
3904 .hash = __VECS(tgr160_tv_template)
3905 }
3906 }, {
3907 .alg = "tgr192",
3908 .test = alg_test_hash,
3909 .suite = {
3910 .hash = __VECS(tgr192_tv_template)
3911 }
3912 }, {
3913 .alg = "vmac64(aes)",
3914 .test = alg_test_hash,
3915 .suite = {
3916 .hash = __VECS(vmac64_aes_tv_template)
3917 }
3918 }, {
3919 .alg = "wp256",
3920 .test = alg_test_hash,
3921 .suite = {
3922 .hash = __VECS(wp256_tv_template)
3923 }
3924 }, {
3925 .alg = "wp384",
3926 .test = alg_test_hash,
3927 .suite = {
3928 .hash = __VECS(wp384_tv_template)
3929 }
3930 }, {
3931 .alg = "wp512",
3932 .test = alg_test_hash,
3933 .suite = {
3934 .hash = __VECS(wp512_tv_template)
3935 }
3936 }, {
3937 .alg = "xcbc(aes)",
3938 .test = alg_test_hash,
3939 .suite = {
3940 .hash = __VECS(aes_xcbc128_tv_template)
3941 }
3942 }, {
3943 .alg = "xchacha12",
3944 .test = alg_test_skcipher,
3945 .suite = {
3946 .cipher = __VECS(xchacha12_tv_template)
3947 },
3948 }, {
3949 .alg = "xchacha20",
3950 .test = alg_test_skcipher,
3951 .suite = {
3952 .cipher = __VECS(xchacha20_tv_template)
3953 },
3954 }, {
3955 .alg = "xts(aes)",
3956 .test = alg_test_skcipher,
3957 .fips_allowed = 1,
3958 .suite = {
3959 .cipher = __VECS(aes_xts_tv_template)
3960 }
3961 }, {
3962 .alg = "xts(camellia)",
3963 .test = alg_test_skcipher,
3964 .suite = {
3965 .cipher = __VECS(camellia_xts_tv_template)
3966 }
3967 }, {
3968 .alg = "xts(cast6)",
3969 .test = alg_test_skcipher,
3970 .suite = {
3971 .cipher = __VECS(cast6_xts_tv_template)
3972 }
3973 }, {
3974 /* Same as xts(aes) except the key is stored in
3975 * hardware secure memory which we reference by index
3976 */
3977 .alg = "xts(paes)",
3978 .test = alg_test_null,
3979 .fips_allowed = 1,
3980 }, {
3981 .alg = "xts(serpent)",
3982 .test = alg_test_skcipher,
3983 .suite = {
3984 .cipher = __VECS(serpent_xts_tv_template)
3985 }
3986 }, {
3987 .alg = "xts(twofish)",
3988 .test = alg_test_skcipher,
3989 .suite = {
3990 .cipher = __VECS(tf_xts_tv_template)
3991 }
3992 }, {
3993 .alg = "xts4096(paes)",
3994 .test = alg_test_null,
3995 .fips_allowed = 1,
3996 }, {
3997 .alg = "xts512(paes)",
3998 .test = alg_test_null,
3999 .fips_allowed = 1,
4000 }, {
4001 .alg = "zlib-deflate",
4002 .test = alg_test_comp,
4003 .fips_allowed = 1,
4004 .suite = {
4005 .comp = {
4006 .comp = __VECS(zlib_deflate_comp_tv_template),
4007 .decomp = __VECS(zlib_deflate_decomp_tv_template)
4008 }
4009 }
4010 }, {
4011 .alg = "zstd",
4012 .test = alg_test_comp,
4013 .fips_allowed = 1,
4014 .suite = {
4015 .comp = {
4016 .comp = __VECS(zstd_comp_tv_template),
4017 .decomp = __VECS(zstd_decomp_tv_template)
4018 }
4019 }
4020 }
4021 };
4022
4023 static void alg_check_test_descs_order(void)
4024 {
4025 int i;
4026
4027 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
4028 int diff = strcmp(alg_test_descs[i - 1].alg,
4029 alg_test_descs[i].alg);
4030
4031 if (WARN_ON(diff > 0)) {
4032 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
4033 alg_test_descs[i - 1].alg,
4034 alg_test_descs[i].alg);
4035 }
4036
4037 if (WARN_ON(diff == 0)) {
4038 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
4039 alg_test_descs[i].alg);
4040 }
4041 }
4042 }
4043
4044 static void alg_check_testvec_configs(void)
4045 {
4046 int i;
4047
4048 for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++)
4049 WARN_ON(!valid_testvec_config(
4050 &default_cipher_testvec_configs[i]));
4051
4052 for (i = 0; i < ARRAY_SIZE(default_hash_testvec_configs); i++)
4053 WARN_ON(!valid_testvec_config(
4054 &default_hash_testvec_configs[i]));
4055 }
4056
/*
 * One-time initialization, invoked lazily from alg_test() via DO_ONCE().
 * Runs internal consistency checks on the test descriptor table and the
 * default test-vector configurations before any self-test executes.
 */
static void testmgr_onetime_init(void)
{
	alg_check_test_descs_order();
	alg_check_testvec_configs();

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
	/* Loudly flag that the expensive fuzz tests are compiled in. */
	pr_warn("alg: extra crypto tests enabled. This is intended for developer use only.\n");
#endif
}
4066
4067 static int alg_find_test(const char *alg)
4068 {
4069 int start = 0;
4070 int end = ARRAY_SIZE(alg_test_descs);
4071
4072 while (start < end) {
4073 int i = (start + end) / 2;
4074 int diff = strcmp(alg_test_descs[i].alg, alg);
4075
4076 if (diff > 0) {
4077 end = i;
4078 continue;
4079 }
4080
4081 if (diff < 0) {
4082 start = i + 1;
4083 continue;
4084 }
4085
4086 return i;
4087 }
4088
4089 return -1;
4090 }
4091
/**
 * alg_test() - run the crypto self-tests for an algorithm implementation
 * @driver: driver-specific implementation name (e.g. "ctr-aes-aesni")
 * @alg: generic algorithm name (e.g. "ctr(aes)")
 * @type: algorithm type flags
 * @mask: mask applied to the type flags
 *
 * Return: 0 if the tests passed or were skipped (no test entry, or tests
 * disabled), -ENAMETOOLONG if the constructed "ecb(...)" name overflows,
 * -EINVAL for a non-approved algorithm in FIPS mode, or the test result
 * otherwise.  A test failure panics instead of returning when
 * fips_enabled or panic_on_fail is set.
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	/* The "notests" module parameter may disable self-tests, but never
	 * in FIPS mode, where they are mandatory. */
	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

	DO_ONCE(testmgr_onetime_init);

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		/* Bare single-block ciphers are tested via their "ecb(...)"
		 * entry in alg_test_descs[]. */
		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	/* Test entries may exist for the generic algorithm name, for the
	 * driver name, or for both; run whichever are present. */
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	/* OR the results so a failure in either run is reported. */
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	/* A failed self-test is fatal in FIPS mode or with panic_on_fail. */
	if (rc && (fips_enabled || panic_on_fail))
		panic("alg: self-tests for %s (%s) failed in %s mode!\n",
		      driver, alg, fips_enabled ? "fips" : "panic_on_fail");

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}
4156
4157 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
4158
4159 EXPORT_SYMBOL_GPL(alg_test);