Source: crypto/testmgr.c from the Ubuntu Bionic kernel mirror (mirror_ubuntu-bionic-kernel.git, hosted at git.proxmox.com), blob viewed at commit "crypto: api - Fix race condition in crypto_spawn_alg".
1 /*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
15 *
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
20 *
21 */
22
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <crypto/skcipher.h>
26 #include <linux/err.h>
27 #include <linux/fips.h>
28 #include <linux/module.h>
29 #include <linux/scatterlist.h>
30 #include <linux/slab.h>
31 #include <linux/string.h>
32 #include <crypto/rng.h>
33 #include <crypto/drbg.h>
34 #include <crypto/akcipher.h>
35 #include <crypto/kpp.h>
36 #include <crypto/acompress.h>
37
38 #include "internal.h"
39
/*
 * "notests" module parameter (writable at runtime via sysfs, 0644).
 * NOTE(review): not referenced in this chunk; presumably consulted by
 * alg_test() further down the file to skip all self-tests — confirm.
 */
40 static bool notests;
41 module_param(notests, bool, 0644);
42 MODULE_PARM_DESC(notests, "disable crypto self-tests");
43
44 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
45
46 /* a perfect nop */
/*
 * CONFIG_CRYPTO_MANAGER_DISABLE_TESTS build: the whole test machinery is
 * compiled out, so every algorithm trivially "passes" its self-test.
 */
47 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
48 {
49 return 0;
50 }
51
52 #else
53
54 #include "testmgr.h"
55
56 /*
57 * Need slab memory for testing (size in number of pages).
58 */
59 #define XBUFSIZE 8
60 
61 /*
62 * Indexes into the xbuf to simulate cross-page access.
63 */
/*
 * Each IDX value is split as (IDX >> PAGE_SHIFT) -> xbuf page index and
 * offset_in_page(IDX) -> offset within that page (see the sg_set_buf()
 * call sites below), so chunked vectors land on different pages at odd
 * offsets.
 */
64 #define IDX1 32
65 #define IDX2 32400
66 #define IDX3 1511
67 #define IDX4 8193
68 #define IDX5 22222
69 #define IDX6 17101
70 #define IDX7 27333
71 #define IDX8 3000
72 
73 /*
74 * Used by test_cipher()
75 */
/* Direction selector passed as the "enc" argument of the test helpers. */
76 #define ENCRYPT 1
77 #define DECRYPT 0
78
/*
 * Per-algorithm-type containers pairing a test-vector array with its
 * element count.  AEAD/cipher keep separate encryption and decryption
 * sets; compression keeps separate comp/decomp sets.
 */
79 struct aead_test_suite {
80 struct {
81 const struct aead_testvec *vecs;
82 unsigned int count;
83 } enc, dec;
84 };
85 
86 struct cipher_test_suite {
87 struct {
88 const struct cipher_testvec *vecs;
89 unsigned int count;
90 } enc, dec;
91 };
92 
93 struct comp_test_suite {
94 struct {
95 const struct comp_testvec *vecs;
96 unsigned int count;
97 } comp, decomp;
98 };
99 
100 struct hash_test_suite {
101 const struct hash_testvec *vecs;
102 unsigned int count;
103 };
104 
105 struct cprng_test_suite {
106 const struct cprng_testvec *vecs;
107 unsigned int count;
108 };
109 
110 struct drbg_test_suite {
111 const struct drbg_testvec *vecs;
112 unsigned int count;
113 };
114 
115 struct akcipher_test_suite {
116 const struct akcipher_testvec *vecs;
117 unsigned int count;
118 };
119 
120 struct kpp_test_suite {
121 const struct kpp_testvec *vecs;
122 unsigned int count;
123 };
124 
/*
 * One table entry per tested algorithm: the algorithm name, the test
 * callback to run, whether the algorithm is permitted in FIPS mode, and
 * the type-specific vector suite (the union discriminant is implied by
 * the "test" callback chosen).
 */
125 struct alg_test_desc {
126 const char *alg;
127 int (*test)(const struct alg_test_desc *desc, const char *driver,
128 u32 type, u32 mask);
129 int fips_allowed; /* set if alg is allowed in fips mode */
130 
131 union {
132 struct aead_test_suite aead;
133 struct cipher_test_suite cipher;
134 struct comp_test_suite comp;
135 struct hash_test_suite hash;
136 struct cprng_test_suite cprng;
137 struct drbg_test_suite drbg;
138 struct akcipher_test_suite akcipher;
139 struct kpp_test_suite kpp;
140 } suite;
141 };
142 
/* The cross-page offsets above, in array form for indexed access. */
143 static const unsigned int IDX[8] = {
144 IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
145
146 static void hexdump(unsigned char *buf, unsigned int len)
147 {
148 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
149 16, 1,
150 buf, len, false);
151 }
152
153 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
154 {
155 int i;
156
157 for (i = 0; i < XBUFSIZE; i++) {
158 buf[i] = (void *)__get_free_page(GFP_KERNEL);
159 if (!buf[i])
160 goto err_free_buf;
161 }
162
163 return 0;
164
165 err_free_buf:
166 while (i-- > 0)
167 free_page((unsigned long)buf[i]);
168
169 return -ENOMEM;
170 }
171
172 static void testmgr_free_buf(char *buf[XBUFSIZE])
173 {
174 int i;
175
176 for (i = 0; i < XBUFSIZE; i++)
177 free_page((unsigned long)buf[i]);
178 }
179
/*
 * Exercise the export()/import() path of an ahash: snapshot the running
 * hash state, destroy the request, allocate a brand-new one, restore the
 * state into it and feed it the next plaintext chunk (template->tap[k],
 * starting at offset "temp").
 *
 * Ownership: on success *preq is updated to the new request; on any
 * failure the request has already been freed and the caller must NOT
 * free it again (it should jump past its ahash_request_free()).
 */
180 static int ahash_partial_update(struct ahash_request **preq,
181 struct crypto_ahash *tfm, const struct hash_testvec *template,
182 void *hash_buff, int k, int temp, struct scatterlist *sg,
183 const char *algo, char *result, struct crypto_wait *wait)
184 {
185 char *state;
186 struct ahash_request *req;
187 int statesize, ret = -EINVAL;
/* Guard bytes appended to the export buffer to catch state overruns. */
188 const char guard[] = { 0x00, 0xba, 0xad, 0x00 };
189 
190 req = *preq;
191 statesize = crypto_ahash_statesize(
192 crypto_ahash_reqtfm(req));
193 state = kmalloc(statesize + sizeof(guard), GFP_KERNEL);
194 if (!state) {
195 pr_err("alg: hash: Failed to alloc state for %s\n", algo);
196 goto out_nostate;
197 }
198 memcpy(state + statesize, guard, sizeof(guard));
199 ret = crypto_ahash_export(req, state);
/* Export must not write past the advertised statesize. */
200 WARN_ON(memcmp(state + statesize, guard, sizeof(guard)));
201 if (ret) {
202 pr_err("alg: hash: Failed to export() for %s\n", algo);
203 goto out;
204 }
/* Replace the request entirely to prove the state is self-contained. */
205 ahash_request_free(req);
206 req = ahash_request_alloc(tfm, GFP_KERNEL);
207 if (!req) {
208 pr_err("alg: hash: Failed to alloc request for %s\n", algo);
209 goto out_noreq;
210 }
211 ahash_request_set_callback(req,
212 CRYPTO_TFM_REQ_MAY_BACKLOG,
213 crypto_req_done, wait);
214 
215 memcpy(hash_buff, template->plaintext + temp,
216 template->tap[k]);
217 sg_init_one(&sg[0], hash_buff, template->tap[k]);
218 ahash_request_set_crypt(req, sg, result, template->tap[k]);
219 ret = crypto_ahash_import(req, state);
220 if (ret) {
221 pr_err("alg: hash: Failed to import() for %s\n", algo);
222 goto out;
223 }
224 ret = crypto_wait_req(crypto_ahash_update(req), wait);
225 if (ret)
226 goto out;
/* Success: hand the fresh request back to the caller. */
227 *preq = req;
228 ret = 0;
229 goto out_noreq;
230 out:
231 ahash_request_free(req);
232 out_noreq:
233 kfree(state);
234 out_nostate:
235 return ret;
236 }
237
/*
 * Core hash test: run every applicable vector in "template" against the
 * transform in three passes:
 *   1) linear buffer at xbuf[0] + align_offset (digest() or
 *      init/update/final depending on use_digest);
 *   2) chunked scatterlist spread across pages per tap[] (digest only);
 *   3) export/import partial-update exercise via ahash_partial_update().
 * Passes 2 and 3 are skipped when align_offset != 0 — the alignment
 * variants only make sense on a contiguous buffer.
 * Returns 0 on success or a negative errno on the first failure.
 */
238 static int __test_hash(struct crypto_ahash *tfm,
239 const struct hash_testvec *template, unsigned int tcount,
240 bool use_digest, const int align_offset)
241 {
242 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
243 size_t digest_size = crypto_ahash_digestsize(tfm);
244 unsigned int i, j, k, temp;
245 struct scatterlist sg[8];
246 char *result;
247 char *key;
248 struct ahash_request *req;
249 struct crypto_wait wait;
250 void *hash_buff;
251 char *xbuf[XBUFSIZE];
252 int ret = -ENOMEM;
253 
254 result = kmalloc(digest_size, GFP_KERNEL);
255 if (!result)
256 return ret;
257 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
258 if (!key)
259 goto out_nobuf;
260 if (testmgr_alloc_buf(xbuf))
261 goto out_nobuf;
262 
263 crypto_init_wait(&wait);
264 
265 req = ahash_request_alloc(tfm, GFP_KERNEL);
266 if (!req) {
267 printk(KERN_ERR "alg: hash: Failed to allocate request for "
268 "%s\n", algo);
269 goto out_noreq;
270 }
271 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
272 crypto_req_done, &wait);
273 
/* Pass 1: contiguous-buffer vectors (np == 0). */
274 j = 0;
275 for (i = 0; i < tcount; i++) {
276 if (template[i].np)
277 continue;
278 
279 ret = -EINVAL;
280 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
281 goto out;
282 
283 j++;
284 memset(result, 0, digest_size);
285 
286 hash_buff = xbuf[0];
287 hash_buff += align_offset;
288 
289 memcpy(hash_buff, template[i].plaintext, template[i].psize);
290 sg_init_one(&sg[0], hash_buff, template[i].psize);
291 
292 if (template[i].ksize) {
293 crypto_ahash_clear_flags(tfm, ~0);
294 if (template[i].ksize > MAX_KEYLEN) {
295 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
296 j, algo, template[i].ksize, MAX_KEYLEN);
297 ret = -EINVAL;
298 goto out;
299 }
300 memcpy(key, template[i].key, template[i].ksize);
301 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
302 if (ret) {
303 printk(KERN_ERR "alg: hash: setkey failed on "
304 "test %d for %s: ret=%d\n", j, algo,
305 -ret);
306 goto out;
307 }
308 }
309 
310 ahash_request_set_crypt(req, sg, result, template[i].psize);
311 if (use_digest) {
312 ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
313 if (ret) {
314 pr_err("alg: hash: digest failed on test %d "
315 "for %s: ret=%d\n", j, algo, -ret);
316 goto out;
317 }
318 } else {
/* Exercise the three-step init/update/final interface instead. */
319 ret = crypto_wait_req(crypto_ahash_init(req), &wait);
320 if (ret) {
321 pr_err("alg: hash: init failed on test %d "
322 "for %s: ret=%d\n", j, algo, -ret);
323 goto out;
324 }
325 ret = crypto_wait_req(crypto_ahash_update(req), &wait);
326 if (ret) {
327 pr_err("alg: hash: update failed on test %d "
328 "for %s: ret=%d\n", j, algo, -ret);
329 goto out;
330 }
331 ret = crypto_wait_req(crypto_ahash_final(req), &wait);
332 if (ret) {
333 pr_err("alg: hash: final failed on test %d "
334 "for %s: ret=%d\n", j, algo, -ret);
335 goto out;
336 }
337 }
338 
339 if (memcmp(result, template[i].digest,
340 crypto_ahash_digestsize(tfm))) {
341 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
342 j, algo);
343 hexdump(result, crypto_ahash_digestsize(tfm));
344 ret = -EINVAL;
345 goto out;
346 }
347 }
348 
/* Pass 2: chunked vectors — plaintext scattered across pages per tap[]. */
349 j = 0;
350 for (i = 0; i < tcount; i++) {
351 /* alignment tests are only done with continuous buffers */
352 if (align_offset != 0)
353 break;
354 
355 if (!template[i].np)
356 continue;
357 
358 j++;
359 memset(result, 0, digest_size);
360 
361 temp = 0;
362 sg_init_table(sg, template[i].np);
363 ret = -EINVAL;
364 for (k = 0; k < template[i].np; k++) {
365 if (WARN_ON(offset_in_page(IDX[k]) +
366 template[i].tap[k] > PAGE_SIZE))
367 goto out;
/* Copy chunk k to page IDX[k]>>PAGE_SHIFT at offset IDX[k]%PAGE_SIZE. */
368 sg_set_buf(&sg[k],
369 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
370 offset_in_page(IDX[k]),
371 template[i].plaintext + temp,
372 template[i].tap[k]),
373 template[i].tap[k]);
374 temp += template[i].tap[k];
375 }
376 
377 if (template[i].ksize) {
378 if (template[i].ksize > MAX_KEYLEN) {
379 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
380 j, algo, template[i].ksize, MAX_KEYLEN);
381 ret = -EINVAL;
382 goto out;
383 }
384 crypto_ahash_clear_flags(tfm, ~0);
385 memcpy(key, template[i].key, template[i].ksize);
386 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
387 
388 if (ret) {
389 printk(KERN_ERR "alg: hash: setkey "
390 "failed on chunking test %d "
391 "for %s: ret=%d\n", j, algo, -ret);
392 goto out;
393 }
394 }
395 
396 ahash_request_set_crypt(req, sg, result, template[i].psize);
397 ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
398 if (ret) {
399 pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n",
400 j, algo, -ret);
401 goto out;
402 }
403 
404 if (memcmp(result, template[i].digest,
405 crypto_ahash_digestsize(tfm))) {
406 printk(KERN_ERR "alg: hash: Chunking test %d "
407 "failed for %s\n", j, algo);
408 hexdump(result, crypto_ahash_digestsize(tfm));
409 ret = -EINVAL;
410 goto out;
411 }
412 }
413 
414 /* partial update exercise */
/* Pass 3: vectors with >= 2 chunks drive the export/import path. */
415 j = 0;
416 for (i = 0; i < tcount; i++) {
417 /* alignment tests are only done with continuous buffers */
418 if (align_offset != 0)
419 break;
420 
421 if (template[i].np < 2)
422 continue;
423 
424 j++;
425 memset(result, 0, digest_size);
426 
427 ret = -EINVAL;
428 hash_buff = xbuf[0];
429 memcpy(hash_buff, template[i].plaintext,
430 template[i].tap[0]);
431 sg_init_one(&sg[0], hash_buff, template[i].tap[0]);
432 
433 if (template[i].ksize) {
434 crypto_ahash_clear_flags(tfm, ~0);
435 if (template[i].ksize > MAX_KEYLEN) {
436 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
437 j, algo, template[i].ksize, MAX_KEYLEN);
438 ret = -EINVAL;
439 goto out;
440 }
441 memcpy(key, template[i].key, template[i].ksize);
442 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
443 if (ret) {
444 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
445 j, algo, -ret);
446 goto out;
447 }
448 }
449 
450 ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
451 ret = crypto_wait_req(crypto_ahash_init(req), &wait);
452 if (ret) {
453 pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
454 j, algo, -ret);
455 goto out;
456 }
457 ret = crypto_wait_req(crypto_ahash_update(req), &wait);
458 if (ret) {
459 pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
460 j, algo, -ret);
461 goto out;
462 }
463 
464 temp = template[i].tap[0];
465 for (k = 1; k < template[i].np; k++) {
/* On failure the helper has already freed req — skip out:'s free. */
466 ret = ahash_partial_update(&req, tfm, &template[i],
467 hash_buff, k, temp, &sg[0], algo, result,
468 &wait);
469 if (ret) {
470 pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n",
471 j, algo, -ret);
472 goto out_noreq;
473 }
474 temp += template[i].tap[k];
475 }
476 ret = crypto_wait_req(crypto_ahash_final(req), &wait);
477 if (ret) {
478 pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
479 j, algo, -ret);
480 goto out;
481 }
482 if (memcmp(result, template[i].digest,
483 crypto_ahash_digestsize(tfm))) {
484 pr_err("alg: hash: Partial Test %d failed for %s\n",
485 j, algo);
486 hexdump(result, crypto_ahash_digestsize(tfm));
487 ret = -EINVAL;
488 goto out;
489 }
490 }
491 
492 ret = 0;
493 
494 out:
495 ahash_request_free(req);
496 out_noreq:
497 testmgr_free_buf(xbuf);
498 out_nobuf:
499 kfree(key);
500 kfree(result);
501 return ret;
502 }
503
504 static int test_hash(struct crypto_ahash *tfm,
505 const struct hash_testvec *template,
506 unsigned int tcount, bool use_digest)
507 {
508 unsigned int alignmask;
509 int ret;
510
511 ret = __test_hash(tfm, template, tcount, use_digest, 0);
512 if (ret)
513 return ret;
514
515 /* test unaligned buffers, check with one byte offset */
516 ret = __test_hash(tfm, template, tcount, use_digest, 1);
517 if (ret)
518 return ret;
519
520 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
521 if (alignmask) {
522 /* Check if alignment mask for tfm is correctly set. */
523 ret = __test_hash(tfm, template, tcount, use_digest,
524 alignmask + 1);
525 if (ret)
526 return ret;
527 }
528
529 return 0;
530 }
531
/*
 * Core AEAD test.  Two passes over "template":
 *   1) contiguous buffers (np == 0), optionally with a distinct output
 *      buffer (diff_dst) and/or a byte offset (align_offset);
 *   2) chunked scatterlists (np != 0): assoc data split per atap[] and
 *      payload split per tap[] across different xbuf pages.
 * The expected authsize is inferred as |rlen - ilen|; vectors with
 * "novrfy" set must fail decryption with -EBADMSG.  Pass 2 is skipped
 * when align_offset != 0.  Returns 0 or a negative errno.
 */
532 static int __test_aead(struct crypto_aead *tfm, int enc,
533 const struct aead_testvec *template, unsigned int tcount,
534 const bool diff_dst, const int align_offset)
535 {
536 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
537 unsigned int i, j, k, n, temp;
538 int ret = -ENOMEM;
539 char *q;
540 char *key;
541 struct aead_request *req;
542 struct scatterlist *sg;
543 struct scatterlist *sgout;
544 const char *e, *d;
545 struct crypto_wait wait;
546 unsigned int authsize, iv_len;
547 void *input;
548 void *output;
549 void *assoc;
550 char *iv;
551 char *xbuf[XBUFSIZE];
552 char *xoutbuf[XBUFSIZE];
553 char *axbuf[XBUFSIZE];
554 
555 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
556 if (!iv)
557 return ret;
558 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
559 if (!key)
560 goto out_noxbuf;
561 if (testmgr_alloc_buf(xbuf))
562 goto out_noxbuf;
563 if (testmgr_alloc_buf(axbuf))
564 goto out_noaxbuf;
565 if (diff_dst && testmgr_alloc_buf(xoutbuf))
566 goto out_nooutbuf;
567 
568 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
569 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
570 if (!sg)
571 goto out_nosg;
/* Destination list lives in the second half of the allocation. */
572 sgout = &sg[16];
573 
/* "d"/"e" only decorate log messages. */
574 if (diff_dst)
575 d = "-ddst";
576 else
577 d = "";
578 
579 if (enc == ENCRYPT)
580 e = "encryption";
581 else
582 e = "decryption";
583 
584 crypto_init_wait(&wait);
585 
586 req = aead_request_alloc(tfm, GFP_KERNEL);
587 if (!req) {
588 pr_err("alg: aead%s: Failed to allocate request for %s\n",
589 d, algo);
590 goto out;
591 }
592 
593 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
594 crypto_req_done, &wait);
595 
596 iv_len = crypto_aead_ivsize(tfm);
597 
/* Pass 1: contiguous-buffer vectors. */
598 for (i = 0, j = 0; i < tcount; i++) {
599 if (template[i].np)
600 continue;
601 
602 j++;
603 
604 /* some templates have no input data but they will
605 * touch input
606 */
607 input = xbuf[0];
608 input += align_offset;
609 assoc = axbuf[0];
610 
611 ret = -EINVAL;
612 if (WARN_ON(align_offset + template[i].ilen >
613 PAGE_SIZE || template[i].alen > PAGE_SIZE))
614 goto out;
615 
616 memcpy(input, template[i].input, template[i].ilen);
617 memcpy(assoc, template[i].assoc, template[i].alen);
618 if (template[i].iv)
619 memcpy(iv, template[i].iv, iv_len);
620 else
621 memset(iv, 0, iv_len);
622 
623 crypto_aead_clear_flags(tfm, ~0);
624 if (template[i].wk)
625 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
626 
627 if (template[i].klen > MAX_KEYLEN) {
628 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
629 d, j, algo, template[i].klen,
630 MAX_KEYLEN);
631 ret = -EINVAL;
632 goto out;
633 }
634 memcpy(key, template[i].key, template[i].klen);
635 
/* template[i].fail marks keys that MUST be rejected by setkey(). */
636 ret = crypto_aead_setkey(tfm, key, template[i].klen);
637 if (template[i].fail == !ret) {
638 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
639 d, j, algo, crypto_aead_get_flags(tfm));
640 goto out;
641 } else if (ret)
642 continue;
643 
/* Tag length is the difference between result and input lengths. */
644 authsize = abs(template[i].rlen - template[i].ilen);
645 ret = crypto_aead_setauthsize(tfm, authsize);
646 if (ret) {
647 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
648 d, authsize, j, algo);
649 goto out;
650 }
651 
/* sg[0] = assoc (if any), sg[k] = payload (+ room for the tag on enc). */
652 k = !!template[i].alen;
653 sg_init_table(sg, k + 1);
654 sg_set_buf(&sg[0], assoc, template[i].alen);
655 sg_set_buf(&sg[k], input,
656 template[i].ilen + (enc ? authsize : 0));
657 output = input;
658 
659 if (diff_dst) {
660 sg_init_table(sgout, k + 1);
661 sg_set_buf(&sgout[0], assoc, template[i].alen);
662 
663 output = xoutbuf[0];
664 output += align_offset;
665 sg_set_buf(&sgout[k], output,
666 template[i].rlen + (enc ? 0 : authsize));
667 }
668 
669 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
670 template[i].ilen, iv);
671 
672 aead_request_set_ad(req, template[i].alen);
673 
674 ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
675 : crypto_aead_decrypt(req), &wait);
676 
677 switch (ret) {
678 case 0:
679 if (template[i].novrfy) {
680 /* verification was supposed to fail */
681 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
682 d, e, j, algo);
683 /* so really, we got a bad message */
684 ret = -EBADMSG;
685 goto out;
686 }
687 break;
688 case -EBADMSG:
689 if (template[i].novrfy)
690 /* verification failure was expected */
691 continue;
692 /* fall through */
693 default:
694 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
695 d, e, j, algo, -ret);
696 goto out;
697 }
698 
699 q = output;
700 if (memcmp(q, template[i].result, template[i].rlen)) {
701 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
702 d, j, e, algo);
703 hexdump(q, template[i].rlen);
704 ret = -EINVAL;
705 goto out;
706 }
707 }
708 
/* Pass 2: chunked vectors (cross-page scatterlists). */
709 for (i = 0, j = 0; i < tcount; i++) {
710 /* alignment tests are only done with continuous buffers */
711 if (align_offset != 0)
712 break;
713 
714 if (!template[i].np)
715 continue;
716 
717 j++;
718 
719 if (template[i].iv)
720 memcpy(iv, template[i].iv, iv_len);
721 else
722 memset(iv, 0, MAX_IVLEN);
723 
724 crypto_aead_clear_flags(tfm, ~0);
725 if (template[i].wk)
726 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
727 if (template[i].klen > MAX_KEYLEN) {
728 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
729 d, j, algo, template[i].klen, MAX_KEYLEN);
730 ret = -EINVAL;
731 goto out;
732 }
733 memcpy(key, template[i].key, template[i].klen);
734 
735 ret = crypto_aead_setkey(tfm, key, template[i].klen);
736 if (template[i].fail == !ret) {
737 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
738 d, j, algo, crypto_aead_get_flags(tfm));
739 goto out;
740 } else if (ret)
741 continue;
742 
743 authsize = abs(template[i].rlen - template[i].ilen);
744 
745 ret = -EINVAL;
746 sg_init_table(sg, template[i].anp + template[i].np);
747 if (diff_dst)
748 sg_init_table(sgout, template[i].anp + template[i].np);
749 
750 ret = -EINVAL;
/* Scatter the associated data per atap[] across axbuf pages. */
751 for (k = 0, temp = 0; k < template[i].anp; k++) {
752 if (WARN_ON(offset_in_page(IDX[k]) +
753 template[i].atap[k] > PAGE_SIZE))
754 goto out;
755 sg_set_buf(&sg[k],
756 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
757 offset_in_page(IDX[k]),
758 template[i].assoc + temp,
759 template[i].atap[k]),
760 template[i].atap[k]);
761 if (diff_dst)
762 sg_set_buf(&sgout[k],
763 axbuf[IDX[k] >> PAGE_SHIFT] +
764 offset_in_page(IDX[k]),
765 template[i].atap[k]);
766 temp += template[i].atap[k];
767 }
768 
/* Scatter the payload per tap[] across xbuf (and xoutbuf) pages. */
769 for (k = 0, temp = 0; k < template[i].np; k++) {
770 if (WARN_ON(offset_in_page(IDX[k]) +
771 template[i].tap[k] > PAGE_SIZE))
772 goto out;
773 
774 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
775 memcpy(q, template[i].input + temp, template[i].tap[k]);
776 sg_set_buf(&sg[template[i].anp + k],
777 q, template[i].tap[k]);
778 
779 if (diff_dst) {
780 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
781 offset_in_page(IDX[k]);
782 
783 memset(q, 0, template[i].tap[k]);
784 
785 sg_set_buf(&sgout[template[i].anp + k],
786 q, template[i].tap[k]);
787 }
788 
/* Zero-terminate past the chunk so the corruption scan below works. */
789 n = template[i].tap[k];
790 if (k == template[i].np - 1 && enc)
791 n += authsize;
792 if (offset_in_page(q) + n < PAGE_SIZE)
793 q[n] = 0;
794 
795 temp += template[i].tap[k];
796 }
797 
798 ret = crypto_aead_setauthsize(tfm, authsize);
799 if (ret) {
800 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
801 d, authsize, j, algo);
802 goto out;
803 }
804 
/* On encryption the last chunk must also hold the generated tag. */
805 if (enc) {
806 if (WARN_ON(sg[template[i].anp + k - 1].offset +
807 sg[template[i].anp + k - 1].length +
808 authsize > PAGE_SIZE)) {
809 ret = -EINVAL;
810 goto out;
811 }
812 
813 if (diff_dst)
814 sgout[template[i].anp + k - 1].length +=
815 authsize;
816 sg[template[i].anp + k - 1].length += authsize;
817 }
818 
819 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
820 template[i].ilen,
821 iv);
822 
823 aead_request_set_ad(req, template[i].alen);
824 
825 ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
826 : crypto_aead_decrypt(req), &wait);
827 
828 switch (ret) {
829 case 0:
830 if (template[i].novrfy) {
831 /* verification was supposed to fail */
832 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
833 d, e, j, algo);
834 /* so really, we got a bad message */
835 ret = -EBADMSG;
836 goto out;
837 }
838 break;
839 case -EBADMSG:
840 if (template[i].novrfy)
841 /* verification failure was expected */
842 continue;
843 /* fall through */
844 default:
845 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
846 d, e, j, algo, -ret);
847 goto out;
848 }
849 
/* Verify each chunk and scan for writes past the expected data. */
850 ret = -EINVAL;
851 for (k = 0, temp = 0; k < template[i].np; k++) {
852 if (diff_dst)
853 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
854 offset_in_page(IDX[k]);
855 else
856 q = xbuf[IDX[k] >> PAGE_SHIFT] +
857 offset_in_page(IDX[k]);
858 
859 n = template[i].tap[k];
860 if (k == template[i].np - 1)
861 n += enc ? authsize : -authsize;
862 
863 if (memcmp(q, template[i].result + temp, n)) {
864 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
865 d, j, e, k, algo);
866 hexdump(q, n);
867 goto out;
868 }
869 
870 q += n;
871 if (k == template[i].np - 1 && !enc) {
/* In-place decrypt: the (now stale) tag may legitimately remain. */
872 if (!diff_dst &&
873 memcmp(q, template[i].input +
874 temp + n, authsize))
875 n = authsize;
876 else
877 n = 0;
878 } else {
879 for (n = 0; offset_in_page(q + n) && q[n]; n++)
880 ;
881 }
882 if (n) {
883 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
884 d, j, e, k, algo, n);
885 hexdump(q, n);
886 goto out;
887 }
888 
889 temp += template[i].tap[k];
890 }
891 }
892 
893 ret = 0;
894 
895 out:
896 aead_request_free(req);
897 kfree(sg);
898 out_nosg:
899 if (diff_dst)
900 testmgr_free_buf(xoutbuf);
901 out_nooutbuf:
902 testmgr_free_buf(axbuf);
903 out_noaxbuf:
904 testmgr_free_buf(xbuf);
905 out_noxbuf:
906 kfree(key);
907 kfree(iv);
908 return ret;
909 }
910
911 static int test_aead(struct crypto_aead *tfm, int enc,
912 const struct aead_testvec *template, unsigned int tcount)
913 {
914 unsigned int alignmask;
915 int ret;
916
917 /* test 'dst == src' case */
918 ret = __test_aead(tfm, enc, template, tcount, false, 0);
919 if (ret)
920 return ret;
921
922 /* test 'dst != src' case */
923 ret = __test_aead(tfm, enc, template, tcount, true, 0);
924 if (ret)
925 return ret;
926
927 /* test unaligned buffers, check with one byte offset */
928 ret = __test_aead(tfm, enc, template, tcount, true, 1);
929 if (ret)
930 return ret;
931
932 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
933 if (alignmask) {
934 /* Check if alignment mask for tfm is correctly set. */
935 ret = __test_aead(tfm, enc, template, tcount, true,
936 alignmask + 1);
937 if (ret)
938 return ret;
939 }
940
941 return 0;
942 }
943
944 static int test_cipher(struct crypto_cipher *tfm, int enc,
945 const struct cipher_testvec *template,
946 unsigned int tcount)
947 {
948 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
949 unsigned int i, j, k;
950 char *q;
951 const char *e;
952 void *data;
953 char *xbuf[XBUFSIZE];
954 int ret = -ENOMEM;
955
956 if (testmgr_alloc_buf(xbuf))
957 goto out_nobuf;
958
959 if (enc == ENCRYPT)
960 e = "encryption";
961 else
962 e = "decryption";
963
964 j = 0;
965 for (i = 0; i < tcount; i++) {
966 if (template[i].np)
967 continue;
968
969 if (fips_enabled && template[i].fips_skip)
970 continue;
971
972 j++;
973
974 ret = -EINVAL;
975 if (WARN_ON(template[i].ilen > PAGE_SIZE))
976 goto out;
977
978 data = xbuf[0];
979 memcpy(data, template[i].input, template[i].ilen);
980
981 crypto_cipher_clear_flags(tfm, ~0);
982 if (template[i].wk)
983 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
984
985 ret = crypto_cipher_setkey(tfm, template[i].key,
986 template[i].klen);
987 if (template[i].fail == !ret) {
988 printk(KERN_ERR "alg: cipher: setkey failed "
989 "on test %d for %s: flags=%x\n", j,
990 algo, crypto_cipher_get_flags(tfm));
991 goto out;
992 } else if (ret)
993 continue;
994
995 for (k = 0; k < template[i].ilen;
996 k += crypto_cipher_blocksize(tfm)) {
997 if (enc)
998 crypto_cipher_encrypt_one(tfm, data + k,
999 data + k);
1000 else
1001 crypto_cipher_decrypt_one(tfm, data + k,
1002 data + k);
1003 }
1004
1005 q = data;
1006 if (memcmp(q, template[i].result, template[i].rlen)) {
1007 printk(KERN_ERR "alg: cipher: Test %d failed "
1008 "on %s for %s\n", j, e, algo);
1009 hexdump(q, template[i].rlen);
1010 ret = -EINVAL;
1011 goto out;
1012 }
1013 }
1014
1015 ret = 0;
1016
1017 out:
1018 testmgr_free_buf(xbuf);
1019 out_nobuf:
1020 return ret;
1021 }
1022
/*
 * Core skcipher test.  Two passes over "template":
 *   1) linear vectors (np == 0, or np != 0 with also_non_np set) in a
 *      single buffer at align_offset, checking both the data result and
 *      the output IV when the vector supplies one;
 *   2) chunked vectors scattered across pages per tap[], followed by a
 *      scan for writes beyond each expected chunk.
 * Pass 2 is skipped when align_offset != 0.  Returns 0 or -errno.
 */
1023 static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
1024 const struct cipher_testvec *template,
1025 unsigned int tcount,
1026 const bool diff_dst, const int align_offset)
1027 {
1028 const char *algo =
1029 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
1030 unsigned int i, j, k, n, temp;
1031 char *q;
1032 struct skcipher_request *req;
1033 struct scatterlist sg[8];
1034 struct scatterlist sgout[8];
1035 const char *e, *d;
1036 struct crypto_wait wait;
1037 void *data;
1038 char iv[MAX_IVLEN];
1039 char *xbuf[XBUFSIZE];
1040 char *xoutbuf[XBUFSIZE];
1041 int ret = -ENOMEM;
1042 unsigned int ivsize = crypto_skcipher_ivsize(tfm);
1043 
1044 if (testmgr_alloc_buf(xbuf))
1045 goto out_nobuf;
1046 
1047 if (diff_dst && testmgr_alloc_buf(xoutbuf))
1048 goto out_nooutbuf;
1049 
/* "d"/"e" only decorate log messages. */
1050 if (diff_dst)
1051 d = "-ddst";
1052 else
1053 d = "";
1054 
1055 if (enc == ENCRYPT)
1056 e = "encryption";
1057 else
1058 e = "decryption";
1059 
1060 crypto_init_wait(&wait);
1061 
1062 req = skcipher_request_alloc(tfm, GFP_KERNEL);
1063 if (!req) {
1064 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
1065 d, algo);
1066 goto out;
1067 }
1068 
1069 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1070 crypto_req_done, &wait);
1071 
/* Pass 1: linear-buffer vectors. */
1072 j = 0;
1073 for (i = 0; i < tcount; i++) {
1074 if (template[i].np && !template[i].also_non_np)
1075 continue;
1076 
1077 if (fips_enabled && template[i].fips_skip)
1078 continue;
1079 
1080 if (template[i].iv)
1081 memcpy(iv, template[i].iv, ivsize);
1082 else
1083 memset(iv, 0, MAX_IVLEN);
1084 
1085 j++;
1086 ret = -EINVAL;
1087 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
1088 goto out;
1089 
1090 data = xbuf[0];
1091 data += align_offset;
1092 memcpy(data, template[i].input, template[i].ilen);
1093 
1094 crypto_skcipher_clear_flags(tfm, ~0);
1095 if (template[i].wk)
1096 crypto_skcipher_set_flags(tfm,
1097 CRYPTO_TFM_REQ_WEAK_KEY);
1098 
/* template[i].fail marks keys that setkey() must reject. */
1099 ret = crypto_skcipher_setkey(tfm, template[i].key,
1100 template[i].klen);
1101 if (template[i].fail == !ret) {
1102 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1103 d, j, algo, crypto_skcipher_get_flags(tfm));
1104 goto out;
1105 } else if (ret)
1106 continue;
1107 
1108 sg_init_one(&sg[0], data, template[i].ilen);
1109 if (diff_dst) {
1110 data = xoutbuf[0];
1111 data += align_offset;
1112 sg_init_one(&sgout[0], data, template[i].ilen);
1113 }
1114 
1115 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1116 template[i].ilen, iv);
1117 ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
1118 crypto_skcipher_decrypt(req), &wait);
1119 
1120 if (ret) {
1121 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1122 d, e, j, algo, -ret);
1123 goto out;
1124 }
1125 
/* "data" now points at the destination buffer in either mode. */
1126 q = data;
1127 if (memcmp(q, template[i].result, template[i].rlen)) {
1128 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1129 d, j, e, algo);
1130 hexdump(q, template[i].rlen);
1131 ret = -EINVAL;
1132 goto out;
1133 }
1134 
/* Some vectors also pin the IV the transform must leave behind. */
1135 if (template[i].iv_out &&
1136 memcmp(iv, template[i].iv_out,
1137 crypto_skcipher_ivsize(tfm))) {
1138 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1139 d, j, e, algo);
1140 hexdump(iv, crypto_skcipher_ivsize(tfm));
1141 ret = -EINVAL;
1142 goto out;
1143 }
1144 }
1145 
/* Pass 2: chunked vectors (cross-page scatterlists). */
1146 j = 0;
1147 for (i = 0; i < tcount; i++) {
1148 /* alignment tests are only done with continuous buffers */
1149 if (align_offset != 0)
1150 break;
1151 
1152 if (!template[i].np)
1153 continue;
1154 
1155 if (fips_enabled && template[i].fips_skip)
1156 continue;
1157 
1158 if (template[i].iv)
1159 memcpy(iv, template[i].iv, ivsize);
1160 else
1161 memset(iv, 0, MAX_IVLEN);
1162 
1163 j++;
1164 crypto_skcipher_clear_flags(tfm, ~0);
1165 if (template[i].wk)
1166 crypto_skcipher_set_flags(tfm,
1167 CRYPTO_TFM_REQ_WEAK_KEY);
1168 
1169 ret = crypto_skcipher_setkey(tfm, template[i].key,
1170 template[i].klen);
1171 if (template[i].fail == !ret) {
1172 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1173 d, j, algo, crypto_skcipher_get_flags(tfm));
1174 goto out;
1175 } else if (ret)
1176 continue;
1177 
1178 temp = 0;
1179 ret = -EINVAL;
1180 sg_init_table(sg, template[i].np);
1181 if (diff_dst)
1182 sg_init_table(sgout, template[i].np);
/* Scatter chunk k to page IDX[k]>>PAGE_SHIFT at offset IDX[k]%PAGE_SIZE. */
1183 for (k = 0; k < template[i].np; k++) {
1184 if (WARN_ON(offset_in_page(IDX[k]) +
1185 template[i].tap[k] > PAGE_SIZE))
1186 goto out;
1187 
1188 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1189 
1190 memcpy(q, template[i].input + temp, template[i].tap[k]);
1191 
/* Zero-terminate past each chunk so the corruption scan below works. */
1192 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1193 q[template[i].tap[k]] = 0;
1194 
1195 sg_set_buf(&sg[k], q, template[i].tap[k]);
1196 if (diff_dst) {
1197 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1198 offset_in_page(IDX[k]);
1199 
1200 sg_set_buf(&sgout[k], q, template[i].tap[k]);
1201 
1202 memset(q, 0, template[i].tap[k]);
1203 if (offset_in_page(q) +
1204 template[i].tap[k] < PAGE_SIZE)
1205 q[template[i].tap[k]] = 0;
1206 }
1207 
1208 temp += template[i].tap[k];
1209 }
1210 
1211 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1212 template[i].ilen, iv);
1213 
1214 ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
1215 crypto_skcipher_decrypt(req), &wait);
1216 
1217 if (ret) {
1218 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1219 d, e, j, algo, -ret);
1220 goto out;
1221 }
1222 
/* Verify each chunk, then scan for writes beyond its end. */
1223 temp = 0;
1224 ret = -EINVAL;
1225 for (k = 0; k < template[i].np; k++) {
1226 if (diff_dst)
1227 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1228 offset_in_page(IDX[k]);
1229 else
1230 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1231 offset_in_page(IDX[k]);
1232 
1233 if (memcmp(q, template[i].result + temp,
1234 template[i].tap[k])) {
1235 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1236 d, j, e, k, algo);
1237 hexdump(q, template[i].tap[k]);
1238 goto out;
1239 }
1240 
1241 q += template[i].tap[k];
1242 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1243 ;
1244 if (n) {
1245 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1246 d, j, e, k, algo, n);
1247 hexdump(q, n);
1248 goto out;
1249 }
1250 temp += template[i].tap[k];
1251 }
1252 }
1253 
1254 ret = 0;
1255 
1256 out:
1257 skcipher_request_free(req);
1258 if (diff_dst)
1259 testmgr_free_buf(xoutbuf);
1260 out_nooutbuf:
1261 testmgr_free_buf(xbuf);
1262 out_nobuf:
1263 return ret;
1264 }
1265
1266 static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1267 const struct cipher_testvec *template,
1268 unsigned int tcount)
1269 {
1270 unsigned int alignmask;
1271 int ret;
1272
1273 /* test 'dst == src' case */
1274 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1275 if (ret)
1276 return ret;
1277
1278 /* test 'dst != src' case */
1279 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1280 if (ret)
1281 return ret;
1282
1283 /* test unaligned buffers, check with one byte offset */
1284 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1285 if (ret)
1286 return ret;
1287
1288 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1289 if (alignmask) {
1290 /* Check if alignment mask for tfm is correctly set. */
1291 ret = __test_skcipher(tfm, enc, template, tcount, true,
1292 alignmask + 1);
1293 if (ret)
1294 return ret;
1295 }
1296
1297 return 0;
1298 }
1299
1300 static int test_comp(struct crypto_comp *tfm,
1301 const struct comp_testvec *ctemplate,
1302 const struct comp_testvec *dtemplate,
1303 int ctcount, int dtcount)
1304 {
1305 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1306 unsigned int i;
1307 char result[COMP_BUF_SIZE];
1308 int ret;
1309
1310 for (i = 0; i < ctcount; i++) {
1311 int ilen;
1312 unsigned int dlen = COMP_BUF_SIZE;
1313
1314 memset(result, 0, sizeof (result));
1315
1316 ilen = ctemplate[i].inlen;
1317 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1318 ilen, result, &dlen);
1319 if (ret) {
1320 printk(KERN_ERR "alg: comp: compression failed "
1321 "on test %d for %s: ret=%d\n", i + 1, algo,
1322 -ret);
1323 goto out;
1324 }
1325
1326 if (dlen != ctemplate[i].outlen) {
1327 printk(KERN_ERR "alg: comp: Compression test %d "
1328 "failed for %s: output len = %d\n", i + 1, algo,
1329 dlen);
1330 ret = -EINVAL;
1331 goto out;
1332 }
1333
1334 if (memcmp(result, ctemplate[i].output, dlen)) {
1335 printk(KERN_ERR "alg: comp: Compression test %d "
1336 "failed for %s\n", i + 1, algo);
1337 hexdump(result, dlen);
1338 ret = -EINVAL;
1339 goto out;
1340 }
1341 }
1342
1343 for (i = 0; i < dtcount; i++) {
1344 int ilen;
1345 unsigned int dlen = COMP_BUF_SIZE;
1346
1347 memset(result, 0, sizeof (result));
1348
1349 ilen = dtemplate[i].inlen;
1350 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1351 ilen, result, &dlen);
1352 if (ret) {
1353 printk(KERN_ERR "alg: comp: decompression failed "
1354 "on test %d for %s: ret=%d\n", i + 1, algo,
1355 -ret);
1356 goto out;
1357 }
1358
1359 if (dlen != dtemplate[i].outlen) {
1360 printk(KERN_ERR "alg: comp: Decompression test %d "
1361 "failed for %s: output len = %d\n", i + 1, algo,
1362 dlen);
1363 ret = -EINVAL;
1364 goto out;
1365 }
1366
1367 if (memcmp(result, dtemplate[i].output, dlen)) {
1368 printk(KERN_ERR "alg: comp: Decompression test %d "
1369 "failed for %s\n", i + 1, algo);
1370 hexdump(result, dlen);
1371 ret = -EINVAL;
1372 goto out;
1373 }
1374 }
1375
1376 ret = 0;
1377
1378 out:
1379 return ret;
1380 }
1381
1382 static int test_acomp(struct crypto_acomp *tfm,
1383 const struct comp_testvec *ctemplate,
1384 const struct comp_testvec *dtemplate,
1385 int ctcount, int dtcount)
1386 {
1387 const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
1388 unsigned int i;
1389 char *output, *decomp_out;
1390 int ret;
1391 struct scatterlist src, dst;
1392 struct acomp_req *req;
1393 struct crypto_wait wait;
1394
1395 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1396 if (!output)
1397 return -ENOMEM;
1398
1399 decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1400 if (!decomp_out) {
1401 kfree(output);
1402 return -ENOMEM;
1403 }
1404
1405 for (i = 0; i < ctcount; i++) {
1406 unsigned int dlen = COMP_BUF_SIZE;
1407 int ilen = ctemplate[i].inlen;
1408 void *input_vec;
1409
1410 input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
1411 if (!input_vec) {
1412 ret = -ENOMEM;
1413 goto out;
1414 }
1415
1416 memset(output, 0, dlen);
1417 crypto_init_wait(&wait);
1418 sg_init_one(&src, input_vec, ilen);
1419 sg_init_one(&dst, output, dlen);
1420
1421 req = acomp_request_alloc(tfm);
1422 if (!req) {
1423 pr_err("alg: acomp: request alloc failed for %s\n",
1424 algo);
1425 kfree(input_vec);
1426 ret = -ENOMEM;
1427 goto out;
1428 }
1429
1430 acomp_request_set_params(req, &src, &dst, ilen, dlen);
1431 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1432 crypto_req_done, &wait);
1433
1434 ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
1435 if (ret) {
1436 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
1437 i + 1, algo, -ret);
1438 kfree(input_vec);
1439 acomp_request_free(req);
1440 goto out;
1441 }
1442
1443 ilen = req->dlen;
1444 dlen = COMP_BUF_SIZE;
1445 sg_init_one(&src, output, ilen);
1446 sg_init_one(&dst, decomp_out, dlen);
1447 crypto_init_wait(&wait);
1448 acomp_request_set_params(req, &src, &dst, ilen, dlen);
1449
1450 ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
1451 if (ret) {
1452 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
1453 i + 1, algo, -ret);
1454 kfree(input_vec);
1455 acomp_request_free(req);
1456 goto out;
1457 }
1458
1459 if (req->dlen != ctemplate[i].inlen) {
1460 pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
1461 i + 1, algo, req->dlen);
1462 ret = -EINVAL;
1463 kfree(input_vec);
1464 acomp_request_free(req);
1465 goto out;
1466 }
1467
1468 if (memcmp(input_vec, decomp_out, req->dlen)) {
1469 pr_err("alg: acomp: Compression test %d failed for %s\n",
1470 i + 1, algo);
1471 hexdump(output, req->dlen);
1472 ret = -EINVAL;
1473 kfree(input_vec);
1474 acomp_request_free(req);
1475 goto out;
1476 }
1477
1478 kfree(input_vec);
1479 acomp_request_free(req);
1480 }
1481
1482 for (i = 0; i < dtcount; i++) {
1483 unsigned int dlen = COMP_BUF_SIZE;
1484 int ilen = dtemplate[i].inlen;
1485 void *input_vec;
1486
1487 input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
1488 if (!input_vec) {
1489 ret = -ENOMEM;
1490 goto out;
1491 }
1492
1493 memset(output, 0, dlen);
1494 crypto_init_wait(&wait);
1495 sg_init_one(&src, input_vec, ilen);
1496 sg_init_one(&dst, output, dlen);
1497
1498 req = acomp_request_alloc(tfm);
1499 if (!req) {
1500 pr_err("alg: acomp: request alloc failed for %s\n",
1501 algo);
1502 kfree(input_vec);
1503 ret = -ENOMEM;
1504 goto out;
1505 }
1506
1507 acomp_request_set_params(req, &src, &dst, ilen, dlen);
1508 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1509 crypto_req_done, &wait);
1510
1511 ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
1512 if (ret) {
1513 pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
1514 i + 1, algo, -ret);
1515 kfree(input_vec);
1516 acomp_request_free(req);
1517 goto out;
1518 }
1519
1520 if (req->dlen != dtemplate[i].outlen) {
1521 pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
1522 i + 1, algo, req->dlen);
1523 ret = -EINVAL;
1524 kfree(input_vec);
1525 acomp_request_free(req);
1526 goto out;
1527 }
1528
1529 if (memcmp(output, dtemplate[i].output, req->dlen)) {
1530 pr_err("alg: acomp: Decompression test %d failed for %s\n",
1531 i + 1, algo);
1532 hexdump(output, req->dlen);
1533 ret = -EINVAL;
1534 kfree(input_vec);
1535 acomp_request_free(req);
1536 goto out;
1537 }
1538
1539 kfree(input_vec);
1540 acomp_request_free(req);
1541 }
1542
1543 ret = 0;
1544
1545 out:
1546 kfree(decomp_out);
1547 kfree(output);
1548 return ret;
1549 }
1550
1551 static int test_cprng(struct crypto_rng *tfm,
1552 const struct cprng_testvec *template,
1553 unsigned int tcount)
1554 {
1555 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1556 int err = 0, i, j, seedsize;
1557 u8 *seed;
1558 char result[32];
1559
1560 seedsize = crypto_rng_seedsize(tfm);
1561
1562 seed = kmalloc(seedsize, GFP_KERNEL);
1563 if (!seed) {
1564 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1565 "for %s\n", algo);
1566 return -ENOMEM;
1567 }
1568
1569 for (i = 0; i < tcount; i++) {
1570 memset(result, 0, 32);
1571
1572 memcpy(seed, template[i].v, template[i].vlen);
1573 memcpy(seed + template[i].vlen, template[i].key,
1574 template[i].klen);
1575 memcpy(seed + template[i].vlen + template[i].klen,
1576 template[i].dt, template[i].dtlen);
1577
1578 err = crypto_rng_reset(tfm, seed, seedsize);
1579 if (err) {
1580 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1581 "for %s\n", algo);
1582 goto out;
1583 }
1584
1585 for (j = 0; j < template[i].loops; j++) {
1586 err = crypto_rng_get_bytes(tfm, result,
1587 template[i].rlen);
1588 if (err < 0) {
1589 printk(KERN_ERR "alg: cprng: Failed to obtain "
1590 "the correct amount of random data for "
1591 "%s (requested %d)\n", algo,
1592 template[i].rlen);
1593 goto out;
1594 }
1595 }
1596
1597 err = memcmp(result, template[i].result,
1598 template[i].rlen);
1599 if (err) {
1600 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1601 i, algo);
1602 hexdump(result, template[i].rlen);
1603 err = -EINVAL;
1604 goto out;
1605 }
1606 }
1607
1608 out:
1609 kfree(seed);
1610 return err;
1611 }
1612
1613 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1614 u32 type, u32 mask)
1615 {
1616 struct crypto_aead *tfm;
1617 int err = 0;
1618
1619 tfm = crypto_alloc_aead(driver, type, mask);
1620 if (IS_ERR(tfm)) {
1621 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1622 "%ld\n", driver, PTR_ERR(tfm));
1623 return PTR_ERR(tfm);
1624 }
1625
1626 if (desc->suite.aead.enc.vecs) {
1627 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1628 desc->suite.aead.enc.count);
1629 if (err)
1630 goto out;
1631 }
1632
1633 if (!err && desc->suite.aead.dec.vecs)
1634 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1635 desc->suite.aead.dec.count);
1636
1637 out:
1638 crypto_free_aead(tfm);
1639 return err;
1640 }
1641
1642 static int alg_test_cipher(const struct alg_test_desc *desc,
1643 const char *driver, u32 type, u32 mask)
1644 {
1645 struct crypto_cipher *tfm;
1646 int err = 0;
1647
1648 tfm = crypto_alloc_cipher(driver, type, mask);
1649 if (IS_ERR(tfm)) {
1650 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1651 "%s: %ld\n", driver, PTR_ERR(tfm));
1652 return PTR_ERR(tfm);
1653 }
1654
1655 if (desc->suite.cipher.enc.vecs) {
1656 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1657 desc->suite.cipher.enc.count);
1658 if (err)
1659 goto out;
1660 }
1661
1662 if (desc->suite.cipher.dec.vecs)
1663 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1664 desc->suite.cipher.dec.count);
1665
1666 out:
1667 crypto_free_cipher(tfm);
1668 return err;
1669 }
1670
1671 static int alg_test_skcipher(const struct alg_test_desc *desc,
1672 const char *driver, u32 type, u32 mask)
1673 {
1674 struct crypto_skcipher *tfm;
1675 int err = 0;
1676
1677 tfm = crypto_alloc_skcipher(driver, type, mask);
1678 if (IS_ERR(tfm)) {
1679 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1680 "%s: %ld\n", driver, PTR_ERR(tfm));
1681 return PTR_ERR(tfm);
1682 }
1683
1684 if (desc->suite.cipher.enc.vecs) {
1685 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1686 desc->suite.cipher.enc.count);
1687 if (err)
1688 goto out;
1689 }
1690
1691 if (desc->suite.cipher.dec.vecs)
1692 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1693 desc->suite.cipher.dec.count);
1694
1695 out:
1696 crypto_free_skcipher(tfm);
1697 return err;
1698 }
1699
1700 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1701 u32 type, u32 mask)
1702 {
1703 struct crypto_comp *comp;
1704 struct crypto_acomp *acomp;
1705 int err;
1706 u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
1707
1708 if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
1709 acomp = crypto_alloc_acomp(driver, type, mask);
1710 if (IS_ERR(acomp)) {
1711 pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
1712 driver, PTR_ERR(acomp));
1713 return PTR_ERR(acomp);
1714 }
1715 err = test_acomp(acomp, desc->suite.comp.comp.vecs,
1716 desc->suite.comp.decomp.vecs,
1717 desc->suite.comp.comp.count,
1718 desc->suite.comp.decomp.count);
1719 crypto_free_acomp(acomp);
1720 } else {
1721 comp = crypto_alloc_comp(driver, type, mask);
1722 if (IS_ERR(comp)) {
1723 pr_err("alg: comp: Failed to load transform for %s: %ld\n",
1724 driver, PTR_ERR(comp));
1725 return PTR_ERR(comp);
1726 }
1727
1728 err = test_comp(comp, desc->suite.comp.comp.vecs,
1729 desc->suite.comp.decomp.vecs,
1730 desc->suite.comp.comp.count,
1731 desc->suite.comp.decomp.count);
1732
1733 crypto_free_comp(comp);
1734 }
1735 return err;
1736 }
1737
1738 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1739 u32 type, u32 mask)
1740 {
1741 struct crypto_ahash *tfm;
1742 int err;
1743
1744 tfm = crypto_alloc_ahash(driver, type, mask);
1745 if (IS_ERR(tfm)) {
1746 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1747 "%ld\n", driver, PTR_ERR(tfm));
1748 return PTR_ERR(tfm);
1749 }
1750
1751 err = test_hash(tfm, desc->suite.hash.vecs,
1752 desc->suite.hash.count, true);
1753 if (!err)
1754 err = test_hash(tfm, desc->suite.hash.vecs,
1755 desc->suite.hash.count, false);
1756
1757 crypto_free_ahash(tfm);
1758 return err;
1759 }
1760
/*
 * Test a crc32c implementation: run the generic hash tests first, then
 * additionally exercise the shash API directly by seeding the descriptor
 * context with a known CRC state and finalizing with no further data.
 */
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	u32 val;
	int err;

	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		return err;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		if (PTR_ERR(tfm) == -ENOENT) {
			/*
			 * This crc32c implementation is only available through
			 * ahash API, not the shash API, so the remaining part
			 * of the test is not applicable to it.
			 */
			return 0;
		}
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	do {
		SHASH_DESC_ON_STACK(shash, tfm);
		u32 *ctx = (u32 *)shash_desc_ctx(shash);

		shash->tfm = tfm;
		shash->flags = 0;

		/*
		 * Seed the context with an arbitrary CRC state and finalize
		 * immediately; the test then expects the bitwise NOT of the
		 * seed.
		 * NOTE(review): the seed goes through le32_to_cpu() but the
		 * expectation below uses a plain host-order ~ -- this looks
		 * endian-asymmetric; confirm against the crc32c shash ctx
		 * byte order on big-endian machines.
		 */
		*ctx = le32_to_cpu(420553207);
		err = crypto_shash_final(shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		if (val != ~420553207) {
			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
			       "%d\n", driver, val);
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);

	return err;
}
1813
1814 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1815 u32 type, u32 mask)
1816 {
1817 struct crypto_rng *rng;
1818 int err;
1819
1820 rng = crypto_alloc_rng(driver, type, mask);
1821 if (IS_ERR(rng)) {
1822 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1823 "%ld\n", driver, PTR_ERR(rng));
1824 return PTR_ERR(rng);
1825 }
1826
1827 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1828
1829 crypto_free_rng(rng);
1830
1831 return err;
1832 }
1833
1834
/*
 * Run a single DRBG CAVS test vector against @driver.
 *
 * @test: vector supplying entropy, personalization string, additional
 *        input, and the expected output
 * @pr:   non-zero for prediction-resistance DRBGs; fresh test entropy is
 *        then injected before each generate call
 *
 * Returns 0 on match, a negative errno on setup/generate failure, or the
 * non-zero memcmp() result when the generated bytes differ from the
 * expectation.
 */
static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
			  const char *driver, u32 type, u32 mask)
{
	int ret = -EAGAIN;
	struct crypto_rng *drng;
	struct drbg_test_data test_data;
	struct drbg_string addtl, pers, testentropy;
	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);

	if (!buf)
		return -ENOMEM;

	drng = crypto_alloc_rng(driver, type, mask);
	if (IS_ERR(drng)) {
		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
		       "%s\n", driver);
		kzfree(buf);
		return -ENOMEM;
	}

	/* Instantiate with the test entropy and personalization string. */
	test_data.testentropy = &testentropy;
	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
	drbg_string_fill(&pers, test->pers, test->perslen);
	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
	if (ret) {
		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
		goto outbuf;
	}

	/* First generate call, with additional input A. */
	drbg_string_fill(&addtl, test->addtla, test->addtllen);
	if (pr) {
		/* prediction resistance: reseed from fresh test entropy */
		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	/*
	 * Second generate call, with additional input B.  Only the output of
	 * this call is compared against the vector's expected bytes.
	 */
	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	ret = memcmp(test->expected, buf, test->expectedlen);

outbuf:
	crypto_free_rng(drng);
	kzfree(buf);
	return ret;
}
1901
1902
1903 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1904 u32 type, u32 mask)
1905 {
1906 int err = 0;
1907 int pr = 0;
1908 int i = 0;
1909 const struct drbg_testvec *template = desc->suite.drbg.vecs;
1910 unsigned int tcount = desc->suite.drbg.count;
1911
1912 if (0 == memcmp(driver, "drbg_pr_", 8))
1913 pr = 1;
1914
1915 for (i = 0; i < tcount; i++) {
1916 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1917 if (err) {
1918 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1919 i, driver);
1920 err = -EINVAL;
1921 break;
1922 }
1923 }
1924 return err;
1925
1926 }
1927
/*
 * Run one KPP (key protocol primitive, e.g. DH/ECDH) test vector.
 *
 * Flow: set party A's secret, generate A's public key, then compute the
 * shared secret from party B's public key.  When @vec->genkey is set, the
 * exchange is additionally run from party B's side using A's generated
 * public key, and the two shared secrets must match; otherwise A's shared
 * secret is compared against the vector's expected value.
 *
 * Returns 0 on success or a negative errno.
 */
static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
		       const char *alg)
{
	struct kpp_request *req;
	void *input_buf = NULL;
	void *output_buf = NULL;
	void *a_public = NULL;
	void *a_ss = NULL;
	void *shared_secret = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max;
	int err = -ENOMEM;
	struct scatterlist src, dst;

	req = kpp_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return err;

	crypto_init_wait(&wait);

	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
	if (err < 0)
		goto free_req;

	out_len_max = crypto_kpp_maxsize(tfm);
	output_buf = kzalloc(out_len_max, GFP_KERNEL);
	if (!output_buf) {
		err = -ENOMEM;
		goto free_req;
	}

	/* Use appropriate parameter as base */
	kpp_request_set_input(req, NULL, 0);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);

	/* Compute party A's public key */
	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
		       alg, err);
		goto free_output;
	}

	if (vec->genkey) {
		/* Save party A's public key */
		a_public = kzalloc(out_len_max, GFP_KERNEL);
		if (!a_public) {
			err = -ENOMEM;
			goto free_output;
		}
		memcpy(a_public, sg_virt(req->dst), out_len_max);
	} else {
		/* Verify calculated public key */
		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
			   vec->expected_a_public_size)) {
			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
			       alg);
			err = -EINVAL;
			goto free_output;
		}
	}

	/* Calculate shared secret key by using counter part (b) public key. */
	input_buf = kzalloc(vec->b_public_size, GFP_KERNEL);
	if (!input_buf) {
		err = -ENOMEM;
		goto free_output;
	}

	memcpy(input_buf, vec->b_public, vec->b_public_size);
	sg_init_one(&src, input_buf, vec->b_public_size);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_input(req, &src, vec->b_public_size);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);
	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
		       alg, err);
		goto free_all;
	}

	if (vec->genkey) {
		/* Save the shared secret obtained by party A */
		a_ss = kzalloc(vec->expected_ss_size, GFP_KERNEL);
		if (!a_ss) {
			err = -ENOMEM;
			goto free_all;
		}
		memcpy(a_ss, sg_virt(req->dst), vec->expected_ss_size);

		/*
		 * Calculate party B's shared secret by using party A's
		 * public key.
		 */
		err = crypto_kpp_set_secret(tfm, vec->b_secret,
					    vec->b_secret_size);
		if (err < 0)
			goto free_all;

		sg_init_one(&src, a_public, vec->expected_a_public_size);
		sg_init_one(&dst, output_buf, out_len_max);
		kpp_request_set_input(req, &src, vec->expected_a_public_size);
		kpp_request_set_output(req, &dst, out_len_max);
		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					 crypto_req_done, &wait);
		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
				      &wait);
		if (err) {
			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
			       alg, err);
			goto free_all;
		}

		/* B's result is in req->dst; compare it against A's copy. */
		shared_secret = a_ss;
	} else {
		shared_secret = (void *)vec->expected_ss;
	}

	/*
	 * verify shared secret from which the user will derive
	 * secret key by executing whatever hash it has chosen
	 */
	if (memcmp(shared_secret, sg_virt(req->dst),
		   vec->expected_ss_size)) {
		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
		       alg);
		err = -EINVAL;
	}

free_all:
	kfree(a_ss);
	kfree(input_buf);
free_output:
	kfree(a_public);
	kfree(output_buf);
free_req:
	kpp_request_free(req);
	return err;
}
2072
2073 static int test_kpp(struct crypto_kpp *tfm, const char *alg,
2074 const struct kpp_testvec *vecs, unsigned int tcount)
2075 {
2076 int ret, i;
2077
2078 for (i = 0; i < tcount; i++) {
2079 ret = do_test_kpp(tfm, vecs++, alg);
2080 if (ret) {
2081 pr_err("alg: %s: test failed on vector %d, err=%d\n",
2082 alg, i + 1, ret);
2083 return ret;
2084 }
2085 }
2086 return 0;
2087 }
2088
2089 static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
2090 u32 type, u32 mask)
2091 {
2092 struct crypto_kpp *tfm;
2093 int err = 0;
2094
2095 tfm = crypto_alloc_kpp(driver, type, mask);
2096 if (IS_ERR(tfm)) {
2097 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
2098 driver, PTR_ERR(tfm));
2099 return PTR_ERR(tfm);
2100 }
2101 if (desc->suite.kpp.vecs)
2102 err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
2103 desc->suite.kpp.count);
2104
2105 crypto_free_kpp(tfm);
2106 return err;
2107 }
2108
/*
 * Run one akcipher (RSA-style) test vector.
 *
 * Encrypts (or signs, for siggen_sigver_test vectors) the message and
 * compares against the expected ciphertext, then -- for private-key
 * vectors -- decrypts (or verifies) the ciphertext back and checks that it
 * reproduces the original message, allowing for leading zero padding in
 * the output.
 */
static int test_akcipher_one(struct crypto_akcipher *tfm,
			     const struct akcipher_testvec *vecs)
{
	char *xbuf[XBUFSIZE];
	struct akcipher_request *req;
	void *outbuf_enc = NULL;
	void *outbuf_dec = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max, out_len = 0;
	int err = -ENOMEM;
	struct scatterlist src, dst, src_tab[2];

	if (testmgr_alloc_buf(xbuf))
		return err;

	req = akcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		goto free_xbuf;

	crypto_init_wait(&wait);

	if (vecs->public_key_vec)
		err = crypto_akcipher_set_pub_key(tfm, vecs->key,
						  vecs->key_len);
	else
		err = crypto_akcipher_set_priv_key(tfm, vecs->key,
						   vecs->key_len);
	if (err)
		goto free_req;

	err = -ENOMEM;
	out_len_max = crypto_akcipher_maxsize(tfm);
	outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
	if (!outbuf_enc)
		goto free_req;

	if (WARN_ON(vecs->m_size > PAGE_SIZE))
		goto free_all;

	memcpy(xbuf[0], vecs->m, vecs->m_size);

	/*
	 * Split the input across two SG entries (8 bytes + the rest) to
	 * exercise multi-entry scatterlist handling.
	 * NOTE(review): this assumes vecs->m_size > 8; a shorter message
	 * would make the second entry's length underflow -- confirm all
	 * akcipher test vectors satisfy this.
	 */
	sg_init_table(src_tab, 2);
	sg_set_buf(&src_tab[0], xbuf[0], 8);
	sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
	sg_init_one(&dst, outbuf_enc, out_len_max);
	akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
				   out_len_max);
	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	err = crypto_wait_req(vecs->siggen_sigver_test ?
			      /* Run asymmetric signature generation */
			      crypto_akcipher_sign(req) :
			      /* Run asymmetric encrypt */
			      crypto_akcipher_encrypt(req), &wait);
	if (err) {
		pr_err("alg: akcipher: encrypt test failed. err %d\n", err);
		goto free_all;
	}
	if (req->dst_len != vecs->c_size) {
		pr_err("alg: akcipher: encrypt test failed. Invalid output len\n");
		err = -EINVAL;
		goto free_all;
	}
	/* verify that encrypted message is equal to expected */
	if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
		pr_err("alg: akcipher: encrypt test failed. Invalid output\n");
		hexdump(outbuf_enc, vecs->c_size);
		err = -EINVAL;
		goto free_all;
	}
	/* Don't invoke decrypt for vectors with public key */
	if (vecs->public_key_vec) {
		err = 0;
		goto free_all;
	}
	outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
	if (!outbuf_dec) {
		err = -ENOMEM;
		goto free_all;
	}

	/*
	 * NOTE(review): err is still 0 here, so if this WARN_ON fires the
	 * function reports success while skipping the decrypt half.
	 */
	if (WARN_ON(vecs->c_size > PAGE_SIZE))
		goto free_all;

	memcpy(xbuf[0], vecs->c, vecs->c_size);

	sg_init_one(&src, xbuf[0], vecs->c_size);
	sg_init_one(&dst, outbuf_dec, out_len_max);
	crypto_init_wait(&wait);
	akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);

	err = crypto_wait_req(vecs->siggen_sigver_test ?
			      /* Run asymmetric signature verification */
			      crypto_akcipher_verify(req) :
			      /* Run asymmetric decrypt */
			      crypto_akcipher_decrypt(req), &wait);
	if (err) {
		pr_err("alg: akcipher: decrypt test failed. err %d\n", err);
		goto free_all;
	}
	out_len = req->dst_len;
	if (out_len < vecs->m_size) {
		pr_err("alg: akcipher: decrypt test failed. "
		       "Invalid output len %u\n", out_len);
		err = -EINVAL;
		goto free_all;
	}
	/* verify that decrypted message is equal to the original msg */
	if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) ||
	    memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size,
		   vecs->m_size)) {
		pr_err("alg: akcipher: decrypt test failed. Invalid output\n");
		hexdump(outbuf_dec, out_len);
		err = -EINVAL;
	}
free_all:
	kfree(outbuf_dec);
	kfree(outbuf_enc);
free_req:
	akcipher_request_free(req);
free_xbuf:
	testmgr_free_buf(xbuf);
	return err;
}
2234
2235 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
2236 const struct akcipher_testvec *vecs,
2237 unsigned int tcount)
2238 {
2239 const char *algo =
2240 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
2241 int ret, i;
2242
2243 for (i = 0; i < tcount; i++) {
2244 ret = test_akcipher_one(tfm, vecs++);
2245 if (!ret)
2246 continue;
2247
2248 pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
2249 i + 1, algo, ret);
2250 return ret;
2251 }
2252 return 0;
2253 }
2254
2255 static int alg_test_akcipher(const struct alg_test_desc *desc,
2256 const char *driver, u32 type, u32 mask)
2257 {
2258 struct crypto_akcipher *tfm;
2259 int err = 0;
2260
2261 tfm = crypto_alloc_akcipher(driver, type, mask);
2262 if (IS_ERR(tfm)) {
2263 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2264 driver, PTR_ERR(tfm));
2265 return PTR_ERR(tfm);
2266 }
2267 if (desc->suite.akcipher.vecs)
2268 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2269 desc->suite.akcipher.count);
2270
2271 crypto_free_akcipher(tfm);
2272 return err;
2273 }
2274
2275 static int alg_test_null(const struct alg_test_desc *desc,
2276 const char *driver, u32 type, u32 mask)
2277 {
2278 return 0;
2279 }
2280
2281 #define __VECS(tv) { .vecs = tv, .count = ARRAY_SIZE(tv) }
2282
2283 /* Please keep this list sorted by algorithm name. */
2284 static const struct alg_test_desc alg_test_descs[] = {
2285 {
2286 .alg = "ansi_cprng",
2287 .test = alg_test_cprng,
2288 .suite = {
2289 .cprng = __VECS(ansi_cprng_aes_tv_template)
2290 }
2291 }, {
2292 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2293 .test = alg_test_aead,
2294 .suite = {
2295 .aead = {
2296 .enc = __VECS(hmac_md5_ecb_cipher_null_enc_tv_template),
2297 .dec = __VECS(hmac_md5_ecb_cipher_null_dec_tv_template)
2298 }
2299 }
2300 }, {
2301 .alg = "authenc(hmac(sha1),cbc(aes))",
2302 .test = alg_test_aead,
2303 .fips_allowed = 1,
2304 .suite = {
2305 .aead = {
2306 .enc = __VECS(hmac_sha1_aes_cbc_enc_tv_temp)
2307 }
2308 }
2309 }, {
2310 .alg = "authenc(hmac(sha1),cbc(des))",
2311 .test = alg_test_aead,
2312 .suite = {
2313 .aead = {
2314 .enc = __VECS(hmac_sha1_des_cbc_enc_tv_temp)
2315 }
2316 }
2317 }, {
2318 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2319 .test = alg_test_aead,
2320 .fips_allowed = 1,
2321 .suite = {
2322 .aead = {
2323 .enc = __VECS(hmac_sha1_des3_ede_cbc_enc_tv_temp)
2324 }
2325 }
2326 }, {
2327 .alg = "authenc(hmac(sha1),ctr(aes))",
2328 .test = alg_test_null,
2329 .fips_allowed = 1,
2330 }, {
2331 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2332 .test = alg_test_aead,
2333 .suite = {
2334 .aead = {
2335 .enc = __VECS(hmac_sha1_ecb_cipher_null_enc_tv_temp),
2336 .dec = __VECS(hmac_sha1_ecb_cipher_null_dec_tv_temp)
2337 }
2338 }
2339 }, {
2340 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2341 .test = alg_test_null,
2342 .fips_allowed = 1,
2343 }, {
2344 .alg = "authenc(hmac(sha224),cbc(des))",
2345 .test = alg_test_aead,
2346 .suite = {
2347 .aead = {
2348 .enc = __VECS(hmac_sha224_des_cbc_enc_tv_temp)
2349 }
2350 }
2351 }, {
2352 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2353 .test = alg_test_aead,
2354 .fips_allowed = 1,
2355 .suite = {
2356 .aead = {
2357 .enc = __VECS(hmac_sha224_des3_ede_cbc_enc_tv_temp)
2358 }
2359 }
2360 }, {
2361 .alg = "authenc(hmac(sha256),cbc(aes))",
2362 .test = alg_test_aead,
2363 .fips_allowed = 1,
2364 .suite = {
2365 .aead = {
2366 .enc = __VECS(hmac_sha256_aes_cbc_enc_tv_temp)
2367 }
2368 }
2369 }, {
2370 .alg = "authenc(hmac(sha256),cbc(des))",
2371 .test = alg_test_aead,
2372 .suite = {
2373 .aead = {
2374 .enc = __VECS(hmac_sha256_des_cbc_enc_tv_temp)
2375 }
2376 }
2377 }, {
2378 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2379 .test = alg_test_aead,
2380 .fips_allowed = 1,
2381 .suite = {
2382 .aead = {
2383 .enc = __VECS(hmac_sha256_des3_ede_cbc_enc_tv_temp)
2384 }
2385 }
2386 }, {
2387 .alg = "authenc(hmac(sha256),ctr(aes))",
2388 .test = alg_test_null,
2389 .fips_allowed = 1,
2390 }, {
2391 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2392 .test = alg_test_null,
2393 .fips_allowed = 1,
2394 }, {
2395 .alg = "authenc(hmac(sha384),cbc(des))",
2396 .test = alg_test_aead,
2397 .suite = {
2398 .aead = {
2399 .enc = __VECS(hmac_sha384_des_cbc_enc_tv_temp)
2400 }
2401 }
2402 }, {
2403 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2404 .test = alg_test_aead,
2405 .fips_allowed = 1,
2406 .suite = {
2407 .aead = {
2408 .enc = __VECS(hmac_sha384_des3_ede_cbc_enc_tv_temp)
2409 }
2410 }
2411 }, {
2412 .alg = "authenc(hmac(sha384),ctr(aes))",
2413 .test = alg_test_null,
2414 .fips_allowed = 1,
2415 }, {
2416 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2417 .test = alg_test_null,
2418 .fips_allowed = 1,
2419 }, {
2420 .alg = "authenc(hmac(sha512),cbc(aes))",
2421 .fips_allowed = 1,
2422 .test = alg_test_aead,
2423 .suite = {
2424 .aead = {
2425 .enc = __VECS(hmac_sha512_aes_cbc_enc_tv_temp)
2426 }
2427 }
2428 }, {
2429 .alg = "authenc(hmac(sha512),cbc(des))",
2430 .test = alg_test_aead,
2431 .suite = {
2432 .aead = {
2433 .enc = __VECS(hmac_sha512_des_cbc_enc_tv_temp)
2434 }
2435 }
2436 }, {
2437 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2438 .test = alg_test_aead,
2439 .fips_allowed = 1,
2440 .suite = {
2441 .aead = {
2442 .enc = __VECS(hmac_sha512_des3_ede_cbc_enc_tv_temp)
2443 }
2444 }
2445 }, {
2446 .alg = "authenc(hmac(sha512),ctr(aes))",
2447 .test = alg_test_null,
2448 .fips_allowed = 1,
2449 }, {
2450 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2451 .test = alg_test_null,
2452 .fips_allowed = 1,
2453 }, {
2454 .alg = "cbc(aes)",
2455 .test = alg_test_skcipher,
2456 .fips_allowed = 1,
2457 .suite = {
2458 .cipher = {
2459 .enc = __VECS(aes_cbc_enc_tv_template),
2460 .dec = __VECS(aes_cbc_dec_tv_template)
2461 }
2462 }
2463 }, {
2464 .alg = "cbc(anubis)",
2465 .test = alg_test_skcipher,
2466 .suite = {
2467 .cipher = {
2468 .enc = __VECS(anubis_cbc_enc_tv_template),
2469 .dec = __VECS(anubis_cbc_dec_tv_template)
2470 }
2471 }
2472 }, {
2473 .alg = "cbc(blowfish)",
2474 .test = alg_test_skcipher,
2475 .suite = {
2476 .cipher = {
2477 .enc = __VECS(bf_cbc_enc_tv_template),
2478 .dec = __VECS(bf_cbc_dec_tv_template)
2479 }
2480 }
2481 }, {
2482 .alg = "cbc(camellia)",
2483 .test = alg_test_skcipher,
2484 .suite = {
2485 .cipher = {
2486 .enc = __VECS(camellia_cbc_enc_tv_template),
2487 .dec = __VECS(camellia_cbc_dec_tv_template)
2488 }
2489 }
2490 }, {
2491 .alg = "cbc(cast5)",
2492 .test = alg_test_skcipher,
2493 .suite = {
2494 .cipher = {
2495 .enc = __VECS(cast5_cbc_enc_tv_template),
2496 .dec = __VECS(cast5_cbc_dec_tv_template)
2497 }
2498 }
2499 }, {
2500 .alg = "cbc(cast6)",
2501 .test = alg_test_skcipher,
2502 .suite = {
2503 .cipher = {
2504 .enc = __VECS(cast6_cbc_enc_tv_template),
2505 .dec = __VECS(cast6_cbc_dec_tv_template)
2506 }
2507 }
2508 }, {
2509 .alg = "cbc(des)",
2510 .test = alg_test_skcipher,
2511 .suite = {
2512 .cipher = {
2513 .enc = __VECS(des_cbc_enc_tv_template),
2514 .dec = __VECS(des_cbc_dec_tv_template)
2515 }
2516 }
2517 }, {
2518 .alg = "cbc(des3_ede)",
2519 .test = alg_test_skcipher,
2520 .fips_allowed = 1,
2521 .suite = {
2522 .cipher = {
2523 .enc = __VECS(des3_ede_cbc_enc_tv_template),
2524 .dec = __VECS(des3_ede_cbc_dec_tv_template)
2525 }
2526 }
2527 }, {
2528 .alg = "cbc(serpent)",
2529 .test = alg_test_skcipher,
2530 .suite = {
2531 .cipher = {
2532 .enc = __VECS(serpent_cbc_enc_tv_template),
2533 .dec = __VECS(serpent_cbc_dec_tv_template)
2534 }
2535 }
2536 }, {
2537 .alg = "cbc(twofish)",
2538 .test = alg_test_skcipher,
2539 .suite = {
2540 .cipher = {
2541 .enc = __VECS(tf_cbc_enc_tv_template),
2542 .dec = __VECS(tf_cbc_dec_tv_template)
2543 }
2544 }
2545 }, {
2546 .alg = "cbcmac(aes)",
2547 .fips_allowed = 1,
2548 .test = alg_test_hash,
2549 .suite = {
2550 .hash = __VECS(aes_cbcmac_tv_template)
2551 }
2552 }, {
2553 .alg = "ccm(aes)",
2554 .test = alg_test_aead,
2555 .fips_allowed = 1,
2556 .suite = {
2557 .aead = {
2558 .enc = __VECS(aes_ccm_enc_tv_template),
2559 .dec = __VECS(aes_ccm_dec_tv_template)
2560 }
2561 }
2562 }, {
2563 .alg = "chacha20",
2564 .test = alg_test_skcipher,
2565 .suite = {
2566 .cipher = {
2567 .enc = __VECS(chacha20_enc_tv_template),
2568 .dec = __VECS(chacha20_enc_tv_template),
2569 }
2570 }
2571 }, {
2572 .alg = "cmac(aes)",
2573 .fips_allowed = 1,
2574 .test = alg_test_hash,
2575 .suite = {
2576 .hash = __VECS(aes_cmac128_tv_template)
2577 }
2578 }, {
2579 .alg = "cmac(des3_ede)",
2580 .fips_allowed = 1,
2581 .test = alg_test_hash,
2582 .suite = {
2583 .hash = __VECS(des3_ede_cmac64_tv_template)
2584 }
2585 }, {
2586 .alg = "compress_null",
2587 .test = alg_test_null,
2588 }, {
2589 .alg = "crc32",
2590 .test = alg_test_hash,
2591 .suite = {
2592 .hash = __VECS(crc32_tv_template)
2593 }
2594 }, {
2595 .alg = "crc32c",
2596 .test = alg_test_crc32c,
2597 .fips_allowed = 1,
2598 .suite = {
2599 .hash = __VECS(crc32c_tv_template)
2600 }
2601 }, {
2602 .alg = "crct10dif",
2603 .test = alg_test_hash,
2604 .fips_allowed = 1,
2605 .suite = {
2606 .hash = __VECS(crct10dif_tv_template)
2607 }
2608 }, {
2609 .alg = "ctr(aes)",
2610 .test = alg_test_skcipher,
2611 .fips_allowed = 1,
2612 .suite = {
2613 .cipher = {
2614 .enc = __VECS(aes_ctr_enc_tv_template),
2615 .dec = __VECS(aes_ctr_dec_tv_template)
2616 }
2617 }
2618 }, {
2619 .alg = "ctr(blowfish)",
2620 .test = alg_test_skcipher,
2621 .suite = {
2622 .cipher = {
2623 .enc = __VECS(bf_ctr_enc_tv_template),
2624 .dec = __VECS(bf_ctr_dec_tv_template)
2625 }
2626 }
2627 }, {
2628 .alg = "ctr(camellia)",
2629 .test = alg_test_skcipher,
2630 .suite = {
2631 .cipher = {
2632 .enc = __VECS(camellia_ctr_enc_tv_template),
2633 .dec = __VECS(camellia_ctr_dec_tv_template)
2634 }
2635 }
2636 }, {
2637 .alg = "ctr(cast5)",
2638 .test = alg_test_skcipher,
2639 .suite = {
2640 .cipher = {
2641 .enc = __VECS(cast5_ctr_enc_tv_template),
2642 .dec = __VECS(cast5_ctr_dec_tv_template)
2643 }
2644 }
2645 }, {
2646 .alg = "ctr(cast6)",
2647 .test = alg_test_skcipher,
2648 .suite = {
2649 .cipher = {
2650 .enc = __VECS(cast6_ctr_enc_tv_template),
2651 .dec = __VECS(cast6_ctr_dec_tv_template)
2652 }
2653 }
2654 }, {
2655 .alg = "ctr(des)",
2656 .test = alg_test_skcipher,
2657 .suite = {
2658 .cipher = {
2659 .enc = __VECS(des_ctr_enc_tv_template),
2660 .dec = __VECS(des_ctr_dec_tv_template)
2661 }
2662 }
2663 }, {
2664 .alg = "ctr(des3_ede)",
2665 .test = alg_test_skcipher,
2666 .fips_allowed = 1,
2667 .suite = {
2668 .cipher = {
2669 .enc = __VECS(des3_ede_ctr_enc_tv_template),
2670 .dec = __VECS(des3_ede_ctr_dec_tv_template)
2671 }
2672 }
2673 }, {
2674 .alg = "ctr(serpent)",
2675 .test = alg_test_skcipher,
2676 .suite = {
2677 .cipher = {
2678 .enc = __VECS(serpent_ctr_enc_tv_template),
2679 .dec = __VECS(serpent_ctr_dec_tv_template)
2680 }
2681 }
2682 }, {
2683 .alg = "ctr(twofish)",
2684 .test = alg_test_skcipher,
2685 .suite = {
2686 .cipher = {
2687 .enc = __VECS(tf_ctr_enc_tv_template),
2688 .dec = __VECS(tf_ctr_dec_tv_template)
2689 }
2690 }
2691 }, {
2692 .alg = "cts(cbc(aes))",
2693 .test = alg_test_skcipher,
2694 .suite = {
2695 .cipher = {
2696 .enc = __VECS(cts_mode_enc_tv_template),
2697 .dec = __VECS(cts_mode_dec_tv_template)
2698 }
2699 }
2700 }, {
2701 .alg = "deflate",
2702 .test = alg_test_comp,
2703 .fips_allowed = 1,
2704 .suite = {
2705 .comp = {
2706 .comp = __VECS(deflate_comp_tv_template),
2707 .decomp = __VECS(deflate_decomp_tv_template)
2708 }
2709 }
2710 }, {
2711 .alg = "dh",
2712 .test = alg_test_kpp,
2713 .fips_allowed = 1,
2714 .suite = {
2715 .kpp = __VECS(dh_tv_template)
2716 }
2717 }, {
2718 .alg = "digest_null",
2719 .test = alg_test_null,
2720 }, {
2721 .alg = "drbg_nopr_ctr_aes128",
2722 .test = alg_test_drbg,
2723 .fips_allowed = 1,
2724 .suite = {
2725 .drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
2726 }
2727 }, {
2728 .alg = "drbg_nopr_ctr_aes192",
2729 .test = alg_test_drbg,
2730 .fips_allowed = 1,
2731 .suite = {
2732 .drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
2733 }
2734 }, {
2735 .alg = "drbg_nopr_ctr_aes256",
2736 .test = alg_test_drbg,
2737 .fips_allowed = 1,
2738 .suite = {
2739 .drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
2740 }
2741 }, {
2742 /*
2743 * There is no need to specifically test the DRBG with every
2744 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2745 */
2746 .alg = "drbg_nopr_hmac_sha1",
2747 .fips_allowed = 1,
2748 .test = alg_test_null,
2749 }, {
2750 .alg = "drbg_nopr_hmac_sha256",
2751 .test = alg_test_drbg,
2752 .fips_allowed = 1,
2753 .suite = {
2754 .drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
2755 }
2756 }, {
2757 /* covered by drbg_nopr_hmac_sha256 test */
2758 .alg = "drbg_nopr_hmac_sha384",
2759 .fips_allowed = 1,
2760 .test = alg_test_null,
2761 }, {
2762 .alg = "drbg_nopr_hmac_sha512",
2763 .test = alg_test_null,
2764 .fips_allowed = 1,
2765 }, {
2766 .alg = "drbg_nopr_sha1",
2767 .fips_allowed = 1,
2768 .test = alg_test_null,
2769 }, {
2770 .alg = "drbg_nopr_sha256",
2771 .test = alg_test_drbg,
2772 .fips_allowed = 1,
2773 .suite = {
2774 .drbg = __VECS(drbg_nopr_sha256_tv_template)
2775 }
2776 }, {
2777 /* covered by drbg_nopr_sha256 test */
2778 .alg = "drbg_nopr_sha384",
2779 .fips_allowed = 1,
2780 .test = alg_test_null,
2781 }, {
2782 .alg = "drbg_nopr_sha512",
2783 .fips_allowed = 1,
2784 .test = alg_test_null,
2785 }, {
2786 .alg = "drbg_pr_ctr_aes128",
2787 .test = alg_test_drbg,
2788 .fips_allowed = 1,
2789 .suite = {
2790 .drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
2791 }
2792 }, {
2793 /* covered by drbg_pr_ctr_aes128 test */
2794 .alg = "drbg_pr_ctr_aes192",
2795 .fips_allowed = 1,
2796 .test = alg_test_null,
2797 }, {
2798 .alg = "drbg_pr_ctr_aes256",
2799 .fips_allowed = 1,
2800 .test = alg_test_null,
2801 }, {
2802 .alg = "drbg_pr_hmac_sha1",
2803 .fips_allowed = 1,
2804 .test = alg_test_null,
2805 }, {
2806 .alg = "drbg_pr_hmac_sha256",
2807 .test = alg_test_drbg,
2808 .fips_allowed = 1,
2809 .suite = {
2810 .drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
2811 }
2812 }, {
2813 /* covered by drbg_pr_hmac_sha256 test */
2814 .alg = "drbg_pr_hmac_sha384",
2815 .fips_allowed = 1,
2816 .test = alg_test_null,
2817 }, {
2818 .alg = "drbg_pr_hmac_sha512",
2819 .test = alg_test_null,
2820 .fips_allowed = 1,
2821 }, {
2822 .alg = "drbg_pr_sha1",
2823 .fips_allowed = 1,
2824 .test = alg_test_null,
2825 }, {
2826 .alg = "drbg_pr_sha256",
2827 .test = alg_test_drbg,
2828 .fips_allowed = 1,
2829 .suite = {
2830 .drbg = __VECS(drbg_pr_sha256_tv_template)
2831 }
2832 }, {
2833 /* covered by drbg_pr_sha256 test */
2834 .alg = "drbg_pr_sha384",
2835 .fips_allowed = 1,
2836 .test = alg_test_null,
2837 }, {
2838 .alg = "drbg_pr_sha512",
2839 .fips_allowed = 1,
2840 .test = alg_test_null,
2841 }, {
2842 .alg = "ecb(aes)",
2843 .test = alg_test_skcipher,
2844 .fips_allowed = 1,
2845 .suite = {
2846 .cipher = {
2847 .enc = __VECS(aes_enc_tv_template),
2848 .dec = __VECS(aes_dec_tv_template)
2849 }
2850 }
2851 }, {
2852 .alg = "ecb(anubis)",
2853 .test = alg_test_skcipher,
2854 .suite = {
2855 .cipher = {
2856 .enc = __VECS(anubis_enc_tv_template),
2857 .dec = __VECS(anubis_dec_tv_template)
2858 }
2859 }
2860 }, {
2861 .alg = "ecb(arc4)",
2862 .test = alg_test_skcipher,
2863 .suite = {
2864 .cipher = {
2865 .enc = __VECS(arc4_enc_tv_template),
2866 .dec = __VECS(arc4_dec_tv_template)
2867 }
2868 }
2869 }, {
2870 .alg = "ecb(blowfish)",
2871 .test = alg_test_skcipher,
2872 .suite = {
2873 .cipher = {
2874 .enc = __VECS(bf_enc_tv_template),
2875 .dec = __VECS(bf_dec_tv_template)
2876 }
2877 }
2878 }, {
2879 .alg = "ecb(camellia)",
2880 .test = alg_test_skcipher,
2881 .suite = {
2882 .cipher = {
2883 .enc = __VECS(camellia_enc_tv_template),
2884 .dec = __VECS(camellia_dec_tv_template)
2885 }
2886 }
2887 }, {
2888 .alg = "ecb(cast5)",
2889 .test = alg_test_skcipher,
2890 .suite = {
2891 .cipher = {
2892 .enc = __VECS(cast5_enc_tv_template),
2893 .dec = __VECS(cast5_dec_tv_template)
2894 }
2895 }
2896 }, {
2897 .alg = "ecb(cast6)",
2898 .test = alg_test_skcipher,
2899 .suite = {
2900 .cipher = {
2901 .enc = __VECS(cast6_enc_tv_template),
2902 .dec = __VECS(cast6_dec_tv_template)
2903 }
2904 }
2905 }, {
2906 .alg = "ecb(cipher_null)",
2907 .test = alg_test_null,
2908 .fips_allowed = 1,
2909 }, {
2910 .alg = "ecb(des)",
2911 .test = alg_test_skcipher,
2912 .suite = {
2913 .cipher = {
2914 .enc = __VECS(des_enc_tv_template),
2915 .dec = __VECS(des_dec_tv_template)
2916 }
2917 }
2918 }, {
2919 .alg = "ecb(des3_ede)",
2920 .test = alg_test_skcipher,
2921 .fips_allowed = 1,
2922 .suite = {
2923 .cipher = {
2924 .enc = __VECS(des3_ede_enc_tv_template),
2925 .dec = __VECS(des3_ede_dec_tv_template)
2926 }
2927 }
2928 }, {
2929 .alg = "ecb(fcrypt)",
2930 .test = alg_test_skcipher,
2931 .suite = {
2932 .cipher = {
2933 .enc = {
2934 .vecs = fcrypt_pcbc_enc_tv_template,
2935 .count = 1
2936 },
2937 .dec = {
2938 .vecs = fcrypt_pcbc_dec_tv_template,
2939 .count = 1
2940 }
2941 }
2942 }
2943 }, {
2944 .alg = "ecb(khazad)",
2945 .test = alg_test_skcipher,
2946 .suite = {
2947 .cipher = {
2948 .enc = __VECS(khazad_enc_tv_template),
2949 .dec = __VECS(khazad_dec_tv_template)
2950 }
2951 }
2952 }, {
2953 .alg = "ecb(seed)",
2954 .test = alg_test_skcipher,
2955 .suite = {
2956 .cipher = {
2957 .enc = __VECS(seed_enc_tv_template),
2958 .dec = __VECS(seed_dec_tv_template)
2959 }
2960 }
2961 }, {
2962 .alg = "ecb(serpent)",
2963 .test = alg_test_skcipher,
2964 .suite = {
2965 .cipher = {
2966 .enc = __VECS(serpent_enc_tv_template),
2967 .dec = __VECS(serpent_dec_tv_template)
2968 }
2969 }
2970 }, {
2971 .alg = "ecb(tea)",
2972 .test = alg_test_skcipher,
2973 .suite = {
2974 .cipher = {
2975 .enc = __VECS(tea_enc_tv_template),
2976 .dec = __VECS(tea_dec_tv_template)
2977 }
2978 }
2979 }, {
2980 .alg = "ecb(tnepres)",
2981 .test = alg_test_skcipher,
2982 .suite = {
2983 .cipher = {
2984 .enc = __VECS(tnepres_enc_tv_template),
2985 .dec = __VECS(tnepres_dec_tv_template)
2986 }
2987 }
2988 }, {
2989 .alg = "ecb(twofish)",
2990 .test = alg_test_skcipher,
2991 .suite = {
2992 .cipher = {
2993 .enc = __VECS(tf_enc_tv_template),
2994 .dec = __VECS(tf_dec_tv_template)
2995 }
2996 }
2997 }, {
2998 .alg = "ecb(xeta)",
2999 .test = alg_test_skcipher,
3000 .suite = {
3001 .cipher = {
3002 .enc = __VECS(xeta_enc_tv_template),
3003 .dec = __VECS(xeta_dec_tv_template)
3004 }
3005 }
3006 }, {
3007 .alg = "ecb(xtea)",
3008 .test = alg_test_skcipher,
3009 .suite = {
3010 .cipher = {
3011 .enc = __VECS(xtea_enc_tv_template),
3012 .dec = __VECS(xtea_dec_tv_template)
3013 }
3014 }
3015 }, {
3016 .alg = "ecdh",
3017 .test = alg_test_kpp,
3018 .fips_allowed = 1,
3019 .suite = {
3020 .kpp = __VECS(ecdh_tv_template)
3021 }
3022 }, {
3023 .alg = "gcm(aes)",
3024 .test = alg_test_aead,
3025 .fips_allowed = 1,
3026 .suite = {
3027 .aead = {
3028 .enc = __VECS(aes_gcm_enc_tv_template),
3029 .dec = __VECS(aes_gcm_dec_tv_template)
3030 }
3031 }
3032 }, {
3033 .alg = "ghash",
3034 .test = alg_test_hash,
3035 .fips_allowed = 1,
3036 .suite = {
3037 .hash = __VECS(ghash_tv_template)
3038 }
3039 }, {
3040 .alg = "hmac(crc32)",
3041 .test = alg_test_hash,
3042 .suite = {
3043 .hash = __VECS(bfin_crc_tv_template)
3044 }
3045 }, {
3046 .alg = "hmac(md5)",
3047 .test = alg_test_hash,
3048 .suite = {
3049 .hash = __VECS(hmac_md5_tv_template)
3050 }
3051 }, {
3052 .alg = "hmac(rmd128)",
3053 .test = alg_test_hash,
3054 .suite = {
3055 .hash = __VECS(hmac_rmd128_tv_template)
3056 }
3057 }, {
3058 .alg = "hmac(rmd160)",
3059 .test = alg_test_hash,
3060 .suite = {
3061 .hash = __VECS(hmac_rmd160_tv_template)
3062 }
3063 }, {
3064 .alg = "hmac(sha1)",
3065 .test = alg_test_hash,
3066 .fips_allowed = 1,
3067 .suite = {
3068 .hash = __VECS(hmac_sha1_tv_template)
3069 }
3070 }, {
3071 .alg = "hmac(sha224)",
3072 .test = alg_test_hash,
3073 .fips_allowed = 1,
3074 .suite = {
3075 .hash = __VECS(hmac_sha224_tv_template)
3076 }
3077 }, {
3078 .alg = "hmac(sha256)",
3079 .test = alg_test_hash,
3080 .fips_allowed = 1,
3081 .suite = {
3082 .hash = __VECS(hmac_sha256_tv_template)
3083 }
3084 }, {
3085 .alg = "hmac(sha3-224)",
3086 .test = alg_test_hash,
3087 .fips_allowed = 1,
3088 .suite = {
3089 .hash = __VECS(hmac_sha3_224_tv_template)
3090 }
3091 }, {
3092 .alg = "hmac(sha3-256)",
3093 .test = alg_test_hash,
3094 .fips_allowed = 1,
3095 .suite = {
3096 .hash = __VECS(hmac_sha3_256_tv_template)
3097 }
3098 }, {
3099 .alg = "hmac(sha3-384)",
3100 .test = alg_test_hash,
3101 .fips_allowed = 1,
3102 .suite = {
3103 .hash = __VECS(hmac_sha3_384_tv_template)
3104 }
3105 }, {
3106 .alg = "hmac(sha3-512)",
3107 .test = alg_test_hash,
3108 .fips_allowed = 1,
3109 .suite = {
3110 .hash = __VECS(hmac_sha3_512_tv_template)
3111 }
3112 }, {
3113 .alg = "hmac(sha384)",
3114 .test = alg_test_hash,
3115 .fips_allowed = 1,
3116 .suite = {
3117 .hash = __VECS(hmac_sha384_tv_template)
3118 }
3119 }, {
3120 .alg = "hmac(sha512)",
3121 .test = alg_test_hash,
3122 .fips_allowed = 1,
3123 .suite = {
3124 .hash = __VECS(hmac_sha512_tv_template)
3125 }
3126 }, {
3127 .alg = "jitterentropy_rng",
3128 .fips_allowed = 1,
3129 .test = alg_test_null,
3130 }, {
3131 .alg = "kw(aes)",
3132 .test = alg_test_skcipher,
3133 .fips_allowed = 1,
3134 .suite = {
3135 .cipher = {
3136 .enc = __VECS(aes_kw_enc_tv_template),
3137 .dec = __VECS(aes_kw_dec_tv_template)
3138 }
3139 }
3140 }, {
3141 .alg = "lrw(aes)",
3142 .test = alg_test_skcipher,
3143 .suite = {
3144 .cipher = {
3145 .enc = __VECS(aes_lrw_enc_tv_template),
3146 .dec = __VECS(aes_lrw_dec_tv_template)
3147 }
3148 }
3149 }, {
3150 .alg = "lrw(camellia)",
3151 .test = alg_test_skcipher,
3152 .suite = {
3153 .cipher = {
3154 .enc = __VECS(camellia_lrw_enc_tv_template),
3155 .dec = __VECS(camellia_lrw_dec_tv_template)
3156 }
3157 }
3158 }, {
3159 .alg = "lrw(cast6)",
3160 .test = alg_test_skcipher,
3161 .suite = {
3162 .cipher = {
3163 .enc = __VECS(cast6_lrw_enc_tv_template),
3164 .dec = __VECS(cast6_lrw_dec_tv_template)
3165 }
3166 }
3167 }, {
3168 .alg = "lrw(serpent)",
3169 .test = alg_test_skcipher,
3170 .suite = {
3171 .cipher = {
3172 .enc = __VECS(serpent_lrw_enc_tv_template),
3173 .dec = __VECS(serpent_lrw_dec_tv_template)
3174 }
3175 }
3176 }, {
3177 .alg = "lrw(twofish)",
3178 .test = alg_test_skcipher,
3179 .suite = {
3180 .cipher = {
3181 .enc = __VECS(tf_lrw_enc_tv_template),
3182 .dec = __VECS(tf_lrw_dec_tv_template)
3183 }
3184 }
3185 }, {
3186 .alg = "lz4",
3187 .test = alg_test_comp,
3188 .fips_allowed = 1,
3189 .suite = {
3190 .comp = {
3191 .comp = __VECS(lz4_comp_tv_template),
3192 .decomp = __VECS(lz4_decomp_tv_template)
3193 }
3194 }
3195 }, {
3196 .alg = "lz4hc",
3197 .test = alg_test_comp,
3198 .fips_allowed = 1,
3199 .suite = {
3200 .comp = {
3201 .comp = __VECS(lz4hc_comp_tv_template),
3202 .decomp = __VECS(lz4hc_decomp_tv_template)
3203 }
3204 }
3205 }, {
3206 .alg = "lzo",
3207 .test = alg_test_comp,
3208 .fips_allowed = 1,
3209 .suite = {
3210 .comp = {
3211 .comp = __VECS(lzo_comp_tv_template),
3212 .decomp = __VECS(lzo_decomp_tv_template)
3213 }
3214 }
3215 }, {
3216 .alg = "md4",
3217 .test = alg_test_hash,
3218 .suite = {
3219 .hash = __VECS(md4_tv_template)
3220 }
3221 }, {
3222 .alg = "md5",
3223 .test = alg_test_hash,
3224 .suite = {
3225 .hash = __VECS(md5_tv_template)
3226 }
3227 }, {
3228 .alg = "michael_mic",
3229 .test = alg_test_hash,
3230 .suite = {
3231 .hash = __VECS(michael_mic_tv_template)
3232 }
3233 }, {
3234 .alg = "ofb(aes)",
3235 .test = alg_test_skcipher,
3236 .fips_allowed = 1,
3237 .suite = {
3238 .cipher = {
3239 .enc = __VECS(aes_ofb_enc_tv_template),
3240 .dec = __VECS(aes_ofb_dec_tv_template)
3241 }
3242 }
3243 }, {
3244 .alg = "pcbc(fcrypt)",
3245 .test = alg_test_skcipher,
3246 .suite = {
3247 .cipher = {
3248 .enc = __VECS(fcrypt_pcbc_enc_tv_template),
3249 .dec = __VECS(fcrypt_pcbc_dec_tv_template)
3250 }
3251 }
3252 }, {
3253 .alg = "pkcs1pad(rsa,sha224)",
3254 .test = alg_test_null,
3255 .fips_allowed = 1,
3256 }, {
3257 .alg = "pkcs1pad(rsa,sha256)",
3258 .test = alg_test_akcipher,
3259 .fips_allowed = 1,
3260 .suite = {
3261 .akcipher = __VECS(pkcs1pad_rsa_tv_template)
3262 }
3263 }, {
3264 .alg = "pkcs1pad(rsa,sha384)",
3265 .test = alg_test_null,
3266 .fips_allowed = 1,
3267 }, {
3268 .alg = "pkcs1pad(rsa,sha512)",
3269 .test = alg_test_null,
3270 .fips_allowed = 1,
3271 }, {
3272 .alg = "poly1305",
3273 .test = alg_test_hash,
3274 .suite = {
3275 .hash = __VECS(poly1305_tv_template)
3276 }
3277 }, {
3278 .alg = "rfc3686(ctr(aes))",
3279 .test = alg_test_skcipher,
3280 .fips_allowed = 1,
3281 .suite = {
3282 .cipher = {
3283 .enc = __VECS(aes_ctr_rfc3686_enc_tv_template),
3284 .dec = __VECS(aes_ctr_rfc3686_dec_tv_template)
3285 }
3286 }
3287 }, {
3288 .alg = "rfc4106(gcm(aes))",
3289 .test = alg_test_aead,
3290 .fips_allowed = 1,
3291 .suite = {
3292 .aead = {
3293 .enc = __VECS(aes_gcm_rfc4106_enc_tv_template),
3294 .dec = __VECS(aes_gcm_rfc4106_dec_tv_template)
3295 }
3296 }
3297 }, {
3298 .alg = "rfc4309(ccm(aes))",
3299 .test = alg_test_aead,
3300 .fips_allowed = 1,
3301 .suite = {
3302 .aead = {
3303 .enc = __VECS(aes_ccm_rfc4309_enc_tv_template),
3304 .dec = __VECS(aes_ccm_rfc4309_dec_tv_template)
3305 }
3306 }
3307 }, {
3308 .alg = "rfc4543(gcm(aes))",
3309 .test = alg_test_aead,
3310 .suite = {
3311 .aead = {
3312 .enc = __VECS(aes_gcm_rfc4543_enc_tv_template),
3313 .dec = __VECS(aes_gcm_rfc4543_dec_tv_template),
3314 }
3315 }
3316 }, {
3317 .alg = "rfc7539(chacha20,poly1305)",
3318 .test = alg_test_aead,
3319 .suite = {
3320 .aead = {
3321 .enc = __VECS(rfc7539_enc_tv_template),
3322 .dec = __VECS(rfc7539_dec_tv_template),
3323 }
3324 }
3325 }, {
3326 .alg = "rfc7539esp(chacha20,poly1305)",
3327 .test = alg_test_aead,
3328 .suite = {
3329 .aead = {
3330 .enc = __VECS(rfc7539esp_enc_tv_template),
3331 .dec = __VECS(rfc7539esp_dec_tv_template),
3332 }
3333 }
3334 }, {
3335 .alg = "rmd128",
3336 .test = alg_test_hash,
3337 .suite = {
3338 .hash = __VECS(rmd128_tv_template)
3339 }
3340 }, {
3341 .alg = "rmd160",
3342 .test = alg_test_hash,
3343 .suite = {
3344 .hash = __VECS(rmd160_tv_template)
3345 }
3346 }, {
3347 .alg = "rmd256",
3348 .test = alg_test_hash,
3349 .suite = {
3350 .hash = __VECS(rmd256_tv_template)
3351 }
3352 }, {
3353 .alg = "rmd320",
3354 .test = alg_test_hash,
3355 .suite = {
3356 .hash = __VECS(rmd320_tv_template)
3357 }
3358 }, {
3359 .alg = "rsa",
3360 .test = alg_test_akcipher,
3361 .fips_allowed = 1,
3362 .suite = {
3363 .akcipher = __VECS(rsa_tv_template)
3364 }
3365 }, {
3366 .alg = "salsa20",
3367 .test = alg_test_skcipher,
3368 .suite = {
3369 .cipher = {
3370 .enc = __VECS(salsa20_stream_enc_tv_template)
3371 }
3372 }
3373 }, {
3374 .alg = "sha1",
3375 .test = alg_test_hash,
3376 .fips_allowed = 1,
3377 .suite = {
3378 .hash = __VECS(sha1_tv_template)
3379 }
3380 }, {
3381 .alg = "sha224",
3382 .test = alg_test_hash,
3383 .fips_allowed = 1,
3384 .suite = {
3385 .hash = __VECS(sha224_tv_template)
3386 }
3387 }, {
3388 .alg = "sha256",
3389 .test = alg_test_hash,
3390 .fips_allowed = 1,
3391 .suite = {
3392 .hash = __VECS(sha256_tv_template)
3393 }
3394 }, {
3395 .alg = "sha3-224",
3396 .test = alg_test_hash,
3397 .fips_allowed = 1,
3398 .suite = {
3399 .hash = __VECS(sha3_224_tv_template)
3400 }
3401 }, {
3402 .alg = "sha3-256",
3403 .test = alg_test_hash,
3404 .fips_allowed = 1,
3405 .suite = {
3406 .hash = __VECS(sha3_256_tv_template)
3407 }
3408 }, {
3409 .alg = "sha3-384",
3410 .test = alg_test_hash,
3411 .fips_allowed = 1,
3412 .suite = {
3413 .hash = __VECS(sha3_384_tv_template)
3414 }
3415 }, {
3416 .alg = "sha3-512",
3417 .test = alg_test_hash,
3418 .fips_allowed = 1,
3419 .suite = {
3420 .hash = __VECS(sha3_512_tv_template)
3421 }
3422 }, {
3423 .alg = "sha384",
3424 .test = alg_test_hash,
3425 .fips_allowed = 1,
3426 .suite = {
3427 .hash = __VECS(sha384_tv_template)
3428 }
3429 }, {
3430 .alg = "sha512",
3431 .test = alg_test_hash,
3432 .fips_allowed = 1,
3433 .suite = {
3434 .hash = __VECS(sha512_tv_template)
3435 }
3436 }, {
3437 .alg = "sm3",
3438 .test = alg_test_hash,
3439 .suite = {
3440 .hash = __VECS(sm3_tv_template)
3441 }
3442 }, {
3443 .alg = "tgr128",
3444 .test = alg_test_hash,
3445 .suite = {
3446 .hash = __VECS(tgr128_tv_template)
3447 }
3448 }, {
3449 .alg = "tgr160",
3450 .test = alg_test_hash,
3451 .suite = {
3452 .hash = __VECS(tgr160_tv_template)
3453 }
3454 }, {
3455 .alg = "tgr192",
3456 .test = alg_test_hash,
3457 .suite = {
3458 .hash = __VECS(tgr192_tv_template)
3459 }
3460 }, {
3461 .alg = "vmac(aes)",
3462 .test = alg_test_hash,
3463 .suite = {
3464 .hash = __VECS(aes_vmac128_tv_template)
3465 }
3466 }, {
3467 .alg = "wp256",
3468 .test = alg_test_hash,
3469 .suite = {
3470 .hash = __VECS(wp256_tv_template)
3471 }
3472 }, {
3473 .alg = "wp384",
3474 .test = alg_test_hash,
3475 .suite = {
3476 .hash = __VECS(wp384_tv_template)
3477 }
3478 }, {
3479 .alg = "wp512",
3480 .test = alg_test_hash,
3481 .suite = {
3482 .hash = __VECS(wp512_tv_template)
3483 }
3484 }, {
3485 .alg = "xcbc(aes)",
3486 .test = alg_test_hash,
3487 .suite = {
3488 .hash = __VECS(aes_xcbc128_tv_template)
3489 }
3490 }, {
3491 .alg = "xts(aes)",
3492 .test = alg_test_skcipher,
3493 .fips_allowed = 1,
3494 .suite = {
3495 .cipher = {
3496 .enc = __VECS(aes_xts_enc_tv_template),
3497 .dec = __VECS(aes_xts_dec_tv_template)
3498 }
3499 }
3500 }, {
3501 .alg = "xts(camellia)",
3502 .test = alg_test_skcipher,
3503 .suite = {
3504 .cipher = {
3505 .enc = __VECS(camellia_xts_enc_tv_template),
3506 .dec = __VECS(camellia_xts_dec_tv_template)
3507 }
3508 }
3509 }, {
3510 .alg = "xts(cast6)",
3511 .test = alg_test_skcipher,
3512 .suite = {
3513 .cipher = {
3514 .enc = __VECS(cast6_xts_enc_tv_template),
3515 .dec = __VECS(cast6_xts_dec_tv_template)
3516 }
3517 }
3518 }, {
3519 .alg = "xts(serpent)",
3520 .test = alg_test_skcipher,
3521 .suite = {
3522 .cipher = {
3523 .enc = __VECS(serpent_xts_enc_tv_template),
3524 .dec = __VECS(serpent_xts_dec_tv_template)
3525 }
3526 }
3527 }, {
3528 .alg = "xts(twofish)",
3529 .test = alg_test_skcipher,
3530 .suite = {
3531 .cipher = {
3532 .enc = __VECS(tf_xts_enc_tv_template),
3533 .dec = __VECS(tf_xts_dec_tv_template)
3534 }
3535 }
3536 }, {
3537 .alg = "zlib-deflate",
3538 .test = alg_test_comp,
3539 .fips_allowed = 1,
3540 .suite = {
3541 .comp = {
3542 .comp = __VECS(zlib_deflate_comp_tv_template),
3543 .decomp = __VECS(zlib_deflate_decomp_tv_template)
3544 }
3545 }
3546 }
3547 };
3548
/* Set once alg_test_descs_check_order() has validated the table order. */
static bool alg_test_descs_checked;
3550
3551 static void alg_test_descs_check_order(void)
3552 {
3553 int i;
3554
3555 /* only check once */
3556 if (alg_test_descs_checked)
3557 return;
3558
3559 alg_test_descs_checked = true;
3560
3561 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3562 int diff = strcmp(alg_test_descs[i - 1].alg,
3563 alg_test_descs[i].alg);
3564
3565 if (WARN_ON(diff > 0)) {
3566 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3567 alg_test_descs[i - 1].alg,
3568 alg_test_descs[i].alg);
3569 }
3570
3571 if (WARN_ON(diff == 0)) {
3572 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3573 alg_test_descs[i].alg);
3574 }
3575 }
3576 }
3577
3578 static int alg_find_test(const char *alg)
3579 {
3580 int start = 0;
3581 int end = ARRAY_SIZE(alg_test_descs);
3582
3583 while (start < end) {
3584 int i = (start + end) / 2;
3585 int diff = strcmp(alg_test_descs[i].alg, alg);
3586
3587 if (diff > 0) {
3588 end = i;
3589 continue;
3590 }
3591
3592 if (diff < 0) {
3593 start = i + 1;
3594 continue;
3595 }
3596
3597 return i;
3598 }
3599
3600 return -1;
3601 }
3602
/**
 * alg_test - run the registered self-tests for an algorithm instance
 * @driver: name of the specific implementation (e.g. "aes-generic")
 * @alg: generic algorithm name (e.g. "aes")
 * @type: algorithm type flags (CRYPTO_ALG_TYPE_* etc.)
 * @mask: algorithm type mask used when allocating the tfm for testing
 *
 * Looks up test vectors for both @alg and @driver and runs whichever are
 * found.  Returns 0 on success or if no tests exist, a negative errno on
 * test failure, or -EINVAL for an algorithm not allowed in FIPS mode.
 * In FIPS mode a test failure panics the kernel.
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	/* "notests" module parameter skips testing, except in FIPS mode. */
	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

	alg_test_descs_check_order();

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		/*
		 * Bare (single-block) ciphers have no test entries of their
		 * own; they are exercised via the "ecb(...)" vectors.
		 */
		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	/*
	 * Test entries may be keyed by either the generic name or the
	 * driver name; look up both and run each set that exists.
	 */
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	/* Avoid running the same entry twice when both lookups matched. */
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	/* FIPS requires a hard failure on any self-test error. */
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}
3666
3667 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3668
3669 EXPORT_SYMBOL_GPL(alg_test);