crypto/testmgr.c
1 /*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
15 *
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
20 *
21 */
22
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <linux/err.h>
26 #include <linux/fips.h>
27 #include <linux/module.h>
28 #include <linux/scatterlist.h>
29 #include <linux/slab.h>
30 #include <linux/string.h>
31 #include <crypto/rng.h>
32 #include <crypto/drbg.h>
33 #include <crypto/akcipher.h>
34
35 #include "internal.h"
36
37 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
38
39 /* a perfect nop */
40 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
41 {
42 return 0;
43 }
44
45 #else
46
47 #include "testmgr.h"
48
49 /*
50 * Need slab memory for testing (size in number of pages).
51 */
52 #define XBUFSIZE 8
53
54 /*
55 * Indexes into the xbuf to simulate cross-page access.
56 */
57 #define IDX1 32
58 #define IDX2 32400
59 #define IDX3 1
60 #define IDX4 8193
61 #define IDX5 22222
62 #define IDX6 17101
63 #define IDX7 27333
64 #define IDX8 3000
65
66 /*
67 * Used by test_cipher()
68 */
69 #define ENCRYPT 1
70 #define DECRYPT 0
71
72 struct tcrypt_result {
73 struct completion completion;
74 int err;
75 };
76
77 struct aead_test_suite {
78 struct {
79 struct aead_testvec *vecs;
80 unsigned int count;
81 } enc, dec;
82 };
83
84 struct cipher_test_suite {
85 struct {
86 struct cipher_testvec *vecs;
87 unsigned int count;
88 } enc, dec;
89 };
90
91 struct comp_test_suite {
92 struct {
93 struct comp_testvec *vecs;
94 unsigned int count;
95 } comp, decomp;
96 };
97
98 struct pcomp_test_suite {
99 struct {
100 struct pcomp_testvec *vecs;
101 unsigned int count;
102 } comp, decomp;
103 };
104
105 struct hash_test_suite {
106 struct hash_testvec *vecs;
107 unsigned int count;
108 };
109
110 struct cprng_test_suite {
111 struct cprng_testvec *vecs;
112 unsigned int count;
113 };
114
115 struct drbg_test_suite {
116 struct drbg_testvec *vecs;
117 unsigned int count;
118 };
119
120 struct akcipher_test_suite {
121 struct akcipher_testvec *vecs;
122 unsigned int count;
123 };
124
125 struct alg_test_desc {
126 const char *alg;
127 int (*test)(const struct alg_test_desc *desc, const char *driver,
128 u32 type, u32 mask);
129 int fips_allowed; /* set if alg is allowed in fips mode */
130
131 union {
132 struct aead_test_suite aead;
133 struct cipher_test_suite cipher;
134 struct comp_test_suite comp;
135 struct pcomp_test_suite pcomp;
136 struct hash_test_suite hash;
137 struct cprng_test_suite cprng;
138 struct drbg_test_suite drbg;
139 struct akcipher_test_suite akcipher;
140 } suite;
141 };
142
143 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
144
145 static void hexdump(unsigned char *buf, unsigned int len)
146 {
147 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
148 16, 1,
149 buf, len, false);
150 }
151
152 static void tcrypt_complete(struct crypto_async_request *req, int err)
153 {
154 struct tcrypt_result *res = req->data;
155
156 if (err == -EINPROGRESS)
157 return;
158
159 res->err = err;
160 complete(&res->completion);
161 }
162
163 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
164 {
165 int i;
166
167 for (i = 0; i < XBUFSIZE; i++) {
168 buf[i] = (void *)__get_free_page(GFP_KERNEL);
169 if (!buf[i])
170 goto err_free_buf;
171 }
172
173 return 0;
174
175 err_free_buf:
176 while (i-- > 0)
177 free_page((unsigned long)buf[i]);
178
179 return -ENOMEM;
180 }
181
182 static void testmgr_free_buf(char *buf[XBUFSIZE])
183 {
184 int i;
185
186 for (i = 0; i < XBUFSIZE; i++)
187 free_page((unsigned long)buf[i]);
188 }
189
190 static int wait_async_op(struct tcrypt_result *tr, int ret)
191 {
192 if (ret == -EINPROGRESS || ret == -EBUSY) {
193 wait_for_completion(&tr->completion);
194 reinit_completion(&tr->completion);
195 ret = tr->err;
196 }
197 return ret;
198 }
199
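/*
 * Core hash test routine.  Vectors without a scatter pattern (np == 0)
 * are hashed from a single linear buffer placed at @align_offset within
 * the first xbuf page, either via ->digest() or via an init/update/final
 * sequence depending on @use_digest.  Vectors with a scatter pattern are
 * run as "chunking" tests instead, with the input split across the xbuf
 * pages according to template[i].tap[] (these are skipped for non-zero
 * @align_offset).
 */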
200 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
201 unsigned int tcount, bool use_digest,
202 const int align_offset)
203 {
204 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
205 unsigned int i, j, k, temp;
206 struct scatterlist sg[8];
207 char *result;
208 char *key;
209 struct ahash_request *req;
210 struct tcrypt_result tresult;
211 void *hash_buff;
212 char *xbuf[XBUFSIZE];
213 int ret = -ENOMEM;
214
215 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
216 if (!result)
217 return ret;
218 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
219 if (!key)
220 goto out_nobuf;
221 if (testmgr_alloc_buf(xbuf))
222 goto out_nobuf;
223
224 init_completion(&tresult.completion);
225
226 req = ahash_request_alloc(tfm, GFP_KERNEL);
227 if (!req) {
228 printk(KERN_ERR "alg: hash: Failed to allocate request for "
229 "%s\n", algo);
230 goto out_noreq;
231 }
232 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
233 tcrypt_complete, &tresult);
234
235 j = 0;
236 for (i = 0; i < tcount; i++) {
237 if (template[i].np)
238 continue;
239
240 ret = -EINVAL;
241 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
242 goto out;
243
244 j++;
245 memset(result, 0, MAX_DIGEST_SIZE);
246
247 hash_buff = xbuf[0];
248 hash_buff += align_offset;
249
250 memcpy(hash_buff, template[i].plaintext, template[i].psize);
251 sg_init_one(&sg[0], hash_buff, template[i].psize);
252
253 if (template[i].ksize) {
254 crypto_ahash_clear_flags(tfm, ~0);
255 if (template[i].ksize > MAX_KEYLEN) {
256 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
257 j, algo, template[i].ksize, MAX_KEYLEN);
258 ret = -EINVAL;
259 goto out;
260 }
261 memcpy(key, template[i].key, template[i].ksize);
262 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
263 if (ret) {
264 printk(KERN_ERR "alg: hash: setkey failed on "
265 "test %d for %s: ret=%d\n", j, algo,
266 -ret);
267 goto out;
268 }
269 }
270
271 ahash_request_set_crypt(req, sg, result, template[i].psize);
272 if (use_digest) {
273 ret = wait_async_op(&tresult, crypto_ahash_digest(req));
274 if (ret) {
275 pr_err("alg: hash: digest failed on test %d "
276 "for %s: ret=%d\n", j, algo, -ret);
277 goto out;
278 }
279 } else {
280 ret = wait_async_op(&tresult, crypto_ahash_init(req));
281 if (ret) {
282 pr_err("alt: hash: init failed on test %d "
283 "for %s: ret=%d\n", j, algo, -ret);
284 goto out;
285 }
286 ret = wait_async_op(&tresult, crypto_ahash_update(req));
287 if (ret) {
288 pr_err("alt: hash: update failed on test %d "
289 "for %s: ret=%d\n", j, algo, -ret);
290 goto out;
291 }
292 ret = wait_async_op(&tresult, crypto_ahash_final(req));
293 if (ret) {
294 pr_err("alt: hash: final failed on test %d "
295 "for %s: ret=%d\n", j, algo, -ret);
296 goto out;
297 }
298 }
299
300 if (memcmp(result, template[i].digest,
301 crypto_ahash_digestsize(tfm))) {
302 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
303 j, algo);
304 hexdump(result, crypto_ahash_digestsize(tfm));
305 ret = -EINVAL;
306 goto out;
307 }
308 }
309
310 j = 0;
311 for (i = 0; i < tcount; i++) {
312 /* alignment tests are only done with contiguous buffers */
313 if (align_offset != 0)
314 break;
315
316 if (!template[i].np)
317 continue;
318
319 j++;
320 memset(result, 0, MAX_DIGEST_SIZE);
321
322 temp = 0;
323 sg_init_table(sg, template[i].np);
324 ret = -EINVAL;
325 for (k = 0; k < template[i].np; k++) {
326 if (WARN_ON(offset_in_page(IDX[k]) +
327 template[i].tap[k] > PAGE_SIZE))
328 goto out;
329 sg_set_buf(&sg[k],
330 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
331 offset_in_page(IDX[k]),
332 template[i].plaintext + temp,
333 template[i].tap[k]),
334 template[i].tap[k]);
335 temp += template[i].tap[k];
336 }
337
338 if (template[i].ksize) {
339 if (template[i].ksize > MAX_KEYLEN) {
340 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
341 j, algo, template[i].ksize, MAX_KEYLEN);
342 ret = -EINVAL;
343 goto out;
344 }
345 crypto_ahash_clear_flags(tfm, ~0);
346 memcpy(key, template[i].key, template[i].ksize);
347 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
348
349 if (ret) {
350 printk(KERN_ERR "alg: hash: setkey "
351 "failed on chunking test %d "
352 "for %s: ret=%d\n", j, algo, -ret);
353 goto out;
354 }
355 }
356
357 ahash_request_set_crypt(req, sg, result, template[i].psize);
358 ret = crypto_ahash_digest(req);
359 switch (ret) {
360 case 0:
361 break;
362 case -EINPROGRESS:
363 case -EBUSY:
364 wait_for_completion(&tresult.completion);
365 reinit_completion(&tresult.completion);
366 ret = tresult.err;
367 if (!ret)
368 break;
369 /* fall through */
370 default:
371 printk(KERN_ERR "alg: hash: digest failed "
372 "on chunking test %d for %s: "
373 "ret=%d\n", j, algo, -ret);
374 goto out;
375 }
376
377 if (memcmp(result, template[i].digest,
378 crypto_ahash_digestsize(tfm))) {
379 printk(KERN_ERR "alg: hash: Chunking test %d "
380 "failed for %s\n", j, algo);
381 hexdump(result, crypto_ahash_digestsize(tfm));
382 ret = -EINVAL;
383 goto out;
384 }
385 }
386
387 ret = 0;
388
389 out:
390 ahash_request_free(req);
391 out_noreq:
392 testmgr_free_buf(xbuf);
393 out_nobuf:
394 kfree(key);
395 kfree(result);
396 return ret;
397 }
398
399 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
400 unsigned int tcount, bool use_digest)
401 {
402 unsigned int alignmask;
403 int ret;
404
405 ret = __test_hash(tfm, template, tcount, use_digest, 0);
406 if (ret)
407 return ret;
408
409 /* test unaligned buffers, check with one byte offset */
410 ret = __test_hash(tfm, template, tcount, use_digest, 1);
411 if (ret)
412 return ret;
413
414 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
415 if (alignmask) {
416 /* Check if alignment mask for tfm is correctly set. */
417 ret = __test_hash(tfm, template, tcount, use_digest,
418 alignmask + 1);
419 if (ret)
420 return ret;
421 }
422
423 return 0;
424 }
425
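/*
 * Core AEAD test routine.  Each vector goes through setkey(),
 * setauthsize() and then encrypt or decrypt: first with the associated
 * data and text in linear buffers (optionally offset by @align_offset
 * and, when @diff_dst is set, written to a separate destination), then
 * again with the buffers scattered across pages according to
 * template[i].atap[] and template[i].tap[].  Vectors marked novrfy are
 * expected to fail verification with -EBADMSG.
 */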
426 static int __test_aead(struct crypto_aead *tfm, int enc,
427 struct aead_testvec *template, unsigned int tcount,
428 const bool diff_dst, const int align_offset)
429 {
430 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
431 unsigned int i, j, k, n, temp;
432 int ret = -ENOMEM;
433 char *q;
434 char *key;
435 struct aead_request *req;
436 struct scatterlist *sg;
437 struct scatterlist *sgout;
438 const char *e, *d;
439 struct tcrypt_result result;
440 unsigned int authsize, iv_len;
441 void *input;
442 void *output;
443 void *assoc;
444 char *iv;
445 char *xbuf[XBUFSIZE];
446 char *xoutbuf[XBUFSIZE];
447 char *axbuf[XBUFSIZE];
448
449 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
450 if (!iv)
451 return ret;
452 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
453 if (!key)
454 goto out_noxbuf;
455 if (testmgr_alloc_buf(xbuf))
456 goto out_noxbuf;
457 if (testmgr_alloc_buf(axbuf))
458 goto out_noaxbuf;
459 if (diff_dst && testmgr_alloc_buf(xoutbuf))
460 goto out_nooutbuf;
461
462 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
463 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
464 if (!sg)
465 goto out_nosg;
466 sgout = &sg[16];
467
468 if (diff_dst)
469 d = "-ddst";
470 else
471 d = "";
472
473 if (enc == ENCRYPT)
474 e = "encryption";
475 else
476 e = "decryption";
477
478 init_completion(&result.completion);
479
480 req = aead_request_alloc(tfm, GFP_KERNEL);
481 if (!req) {
482 pr_err("alg: aead%s: Failed to allocate request for %s\n",
483 d, algo);
484 goto out;
485 }
486
487 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
488 tcrypt_complete, &result);
489
490 for (i = 0, j = 0; i < tcount; i++) {
491 if (template[i].np)
492 continue;
493
494 j++;
495
496 /* some templates have no input data, but they still
497 * touch the input buffer
498 */
499 input = xbuf[0];
500 input += align_offset;
501 assoc = axbuf[0];
502
503 ret = -EINVAL;
504 if (WARN_ON(align_offset + template[i].ilen >
505 PAGE_SIZE || template[i].alen > PAGE_SIZE))
506 goto out;
507
508 memcpy(input, template[i].input, template[i].ilen);
509 memcpy(assoc, template[i].assoc, template[i].alen);
510 iv_len = crypto_aead_ivsize(tfm);
511 if (template[i].iv)
512 memcpy(iv, template[i].iv, iv_len);
513 else
514 memset(iv, 0, iv_len);
515
516 crypto_aead_clear_flags(tfm, ~0);
517 if (template[i].wk)
518 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
519
520 if (template[i].klen > MAX_KEYLEN) {
521 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
522 d, j, algo, template[i].klen,
523 MAX_KEYLEN);
524 ret = -EINVAL;
525 goto out;
526 }
527 memcpy(key, template[i].key, template[i].klen);
528
529 ret = crypto_aead_setkey(tfm, key, template[i].klen);
530 if (!ret == template[i].fail) {
531 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
532 d, j, algo, crypto_aead_get_flags(tfm));
533 goto out;
534 } else if (ret)
535 continue;
536
537 authsize = abs(template[i].rlen - template[i].ilen);
538 ret = crypto_aead_setauthsize(tfm, authsize);
539 if (ret) {
540 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
541 d, authsize, j, algo);
542 goto out;
543 }
544
545 k = !!template[i].alen;
546 sg_init_table(sg, k + 1);
547 sg_set_buf(&sg[0], assoc, template[i].alen);
548 sg_set_buf(&sg[k], input,
549 template[i].ilen + (enc ? authsize : 0));
550 output = input;
551
552 if (diff_dst) {
553 sg_init_table(sgout, k + 1);
554 sg_set_buf(&sgout[0], assoc, template[i].alen);
555
556 output = xoutbuf[0];
557 output += align_offset;
558 sg_set_buf(&sgout[k], output,
559 template[i].rlen + (enc ? 0 : authsize));
560 }
561
562 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
563 template[i].ilen, iv);
564
565 aead_request_set_ad(req, template[i].alen);
566
567 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
568
569 switch (ret) {
570 case 0:
571 if (template[i].novrfy) {
572 /* verification was supposed to fail */
573 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
574 d, e, j, algo);
575 /* so really, we got a bad message */
576 ret = -EBADMSG;
577 goto out;
578 }
579 break;
580 case -EINPROGRESS:
581 case -EBUSY:
582 wait_for_completion(&result.completion);
583 reinit_completion(&result.completion);
584 ret = result.err;
585 if (!ret)
586 break;
587 case -EBADMSG:
588 if (template[i].novrfy)
589 /* verification failure was expected */
590 continue;
591 /* fall through */
592 default:
593 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
594 d, e, j, algo, -ret);
595 goto out;
596 }
597
598 q = output;
599 if (memcmp(q, template[i].result, template[i].rlen)) {
600 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
601 d, j, e, algo);
602 hexdump(q, template[i].rlen);
603 ret = -EINVAL;
604 goto out;
605 }
606 }
607
608 for (i = 0, j = 0; i < tcount; i++) {
609 /* alignment tests are only done with contiguous buffers */
610 if (align_offset != 0)
611 break;
612
613 if (!template[i].np)
614 continue;
615
616 j++;
617
618 if (template[i].iv)
619 memcpy(iv, template[i].iv, MAX_IVLEN);
620 else
621 memset(iv, 0, MAX_IVLEN);
622
623 crypto_aead_clear_flags(tfm, ~0);
624 if (template[i].wk)
625 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
626 if (template[i].klen > MAX_KEYLEN) {
627 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
628 d, j, algo, template[i].klen, MAX_KEYLEN);
629 ret = -EINVAL;
630 goto out;
631 }
632 memcpy(key, template[i].key, template[i].klen);
633
634 ret = crypto_aead_setkey(tfm, key, template[i].klen);
635 if (!ret == template[i].fail) {
636 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
637 d, j, algo, crypto_aead_get_flags(tfm));
638 goto out;
639 } else if (ret)
640 continue;
641
642 authsize = abs(template[i].rlen - template[i].ilen);
643
644 ret = -EINVAL;
645 sg_init_table(sg, template[i].anp + template[i].np);
646 if (diff_dst)
647 sg_init_table(sgout, template[i].anp + template[i].np);
648
649 ret = -EINVAL;
650 for (k = 0, temp = 0; k < template[i].anp; k++) {
651 if (WARN_ON(offset_in_page(IDX[k]) +
652 template[i].atap[k] > PAGE_SIZE))
653 goto out;
654 sg_set_buf(&sg[k],
655 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
656 offset_in_page(IDX[k]),
657 template[i].assoc + temp,
658 template[i].atap[k]),
659 template[i].atap[k]);
660 if (diff_dst)
661 sg_set_buf(&sgout[k],
662 axbuf[IDX[k] >> PAGE_SHIFT] +
663 offset_in_page(IDX[k]),
664 template[i].atap[k]);
665 temp += template[i].atap[k];
666 }
667
668 for (k = 0, temp = 0; k < template[i].np; k++) {
669 if (WARN_ON(offset_in_page(IDX[k]) +
670 template[i].tap[k] > PAGE_SIZE))
671 goto out;
672
673 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
674 memcpy(q, template[i].input + temp, template[i].tap[k]);
675 sg_set_buf(&sg[template[i].anp + k],
676 q, template[i].tap[k]);
677
678 if (diff_dst) {
679 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
680 offset_in_page(IDX[k]);
681
682 memset(q, 0, template[i].tap[k]);
683
684 sg_set_buf(&sgout[template[i].anp + k],
685 q, template[i].tap[k]);
686 }
687
688 n = template[i].tap[k];
689 if (k == template[i].np - 1 && enc)
690 n += authsize;
691 if (offset_in_page(q) + n < PAGE_SIZE)
692 q[n] = 0;
693
694 temp += template[i].tap[k];
695 }
696
697 ret = crypto_aead_setauthsize(tfm, authsize);
698 if (ret) {
699 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
700 d, authsize, j, algo);
701 goto out;
702 }
703
704 if (enc) {
705 if (WARN_ON(sg[template[i].anp + k - 1].offset +
706 sg[template[i].anp + k - 1].length +
707 authsize > PAGE_SIZE)) {
708 ret = -EINVAL;
709 goto out;
710 }
711
712 if (diff_dst)
713 sgout[template[i].anp + k - 1].length +=
714 authsize;
715 sg[template[i].anp + k - 1].length += authsize;
716 }
717
718 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
719 template[i].ilen,
720 iv);
721
722 aead_request_set_ad(req, template[i].alen);
723
724 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
725
726 switch (ret) {
727 case 0:
728 if (template[i].novrfy) {
729 /* verification was supposed to fail */
730 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
731 d, e, j, algo);
732 /* so really, we got a bad message */
733 ret = -EBADMSG;
734 goto out;
735 }
736 break;
737 case -EINPROGRESS:
738 case -EBUSY:
739 wait_for_completion(&result.completion);
740 reinit_completion(&result.completion);
741 ret = result.err;
742 if (!ret)
743 break;
744 case -EBADMSG:
745 if (template[i].novrfy)
746 /* verification failure was expected */
747 continue;
748 /* fall through */
749 default:
750 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
751 d, e, j, algo, -ret);
752 goto out;
753 }
754
755 ret = -EINVAL;
756 for (k = 0, temp = 0; k < template[i].np; k++) {
757 if (diff_dst)
758 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
759 offset_in_page(IDX[k]);
760 else
761 q = xbuf[IDX[k] >> PAGE_SHIFT] +
762 offset_in_page(IDX[k]);
763
764 n = template[i].tap[k];
765 if (k == template[i].np - 1)
766 n += enc ? authsize : -authsize;
767
768 if (memcmp(q, template[i].result + temp, n)) {
769 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
770 d, j, e, k, algo);
771 hexdump(q, n);
772 goto out;
773 }
774
775 q += n;
776 if (k == template[i].np - 1 && !enc) {
777 if (!diff_dst &&
778 memcmp(q, template[i].input +
779 temp + n, authsize))
780 n = authsize;
781 else
782 n = 0;
783 } else {
784 for (n = 0; offset_in_page(q + n) && q[n]; n++)
785 ;
786 }
787 if (n) {
788 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
789 d, j, e, k, algo, n);
790 hexdump(q, n);
791 goto out;
792 }
793
794 temp += template[i].tap[k];
795 }
796 }
797
798 ret = 0;
799
800 out:
801 aead_request_free(req);
802 kfree(sg);
803 out_nosg:
804 if (diff_dst)
805 testmgr_free_buf(xoutbuf);
806 out_nooutbuf:
807 testmgr_free_buf(axbuf);
808 out_noaxbuf:
809 testmgr_free_buf(xbuf);
810 out_noxbuf:
811 kfree(key);
812 kfree(iv);
813 return ret;
814 }
815
816 static int test_aead(struct crypto_aead *tfm, int enc,
817 struct aead_testvec *template, unsigned int tcount)
818 {
819 unsigned int alignmask;
820 int ret;
821
822 /* test 'dst == src' case */
823 ret = __test_aead(tfm, enc, template, tcount, false, 0);
824 if (ret)
825 return ret;
826
827 /* test 'dst != src' case */
828 ret = __test_aead(tfm, enc, template, tcount, true, 0);
829 if (ret)
830 return ret;
831
832 /* test unaligned buffers, check with one byte offset */
833 ret = __test_aead(tfm, enc, template, tcount, true, 1);
834 if (ret)
835 return ret;
836
837 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
838 if (alignmask) {
839 /* Check if alignment mask for tfm is correctly set. */
840 ret = __test_aead(tfm, enc, template, tcount, true,
841 alignmask + 1);
842 if (ret)
843 return ret;
844 }
845
846 return 0;
847 }
848
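/*
 * Test the single-block cipher interface: each vector is encrypted or
 * decrypted in place, one block at a time, and the result is compared
 * against the expected output.
 */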
849 static int test_cipher(struct crypto_cipher *tfm, int enc,
850 struct cipher_testvec *template, unsigned int tcount)
851 {
852 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
853 unsigned int i, j, k;
854 char *q;
855 const char *e;
856 void *data;
857 char *xbuf[XBUFSIZE];
858 int ret = -ENOMEM;
859
860 if (testmgr_alloc_buf(xbuf))
861 goto out_nobuf;
862
863 if (enc == ENCRYPT)
864 e = "encryption";
865 else
866 e = "decryption";
867
868 j = 0;
869 for (i = 0; i < tcount; i++) {
870 if (template[i].np)
871 continue;
872
873 j++;
874
875 ret = -EINVAL;
876 if (WARN_ON(template[i].ilen > PAGE_SIZE))
877 goto out;
878
879 data = xbuf[0];
880 memcpy(data, template[i].input, template[i].ilen);
881
882 crypto_cipher_clear_flags(tfm, ~0);
883 if (template[i].wk)
884 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
885
886 ret = crypto_cipher_setkey(tfm, template[i].key,
887 template[i].klen);
888 if (!ret == template[i].fail) {
889 printk(KERN_ERR "alg: cipher: setkey failed "
890 "on test %d for %s: flags=%x\n", j,
891 algo, crypto_cipher_get_flags(tfm));
892 goto out;
893 } else if (ret)
894 continue;
895
896 for (k = 0; k < template[i].ilen;
897 k += crypto_cipher_blocksize(tfm)) {
898 if (enc)
899 crypto_cipher_encrypt_one(tfm, data + k,
900 data + k);
901 else
902 crypto_cipher_decrypt_one(tfm, data + k,
903 data + k);
904 }
905
906 q = data;
907 if (memcmp(q, template[i].result, template[i].rlen)) {
908 printk(KERN_ERR "alg: cipher: Test %d failed "
909 "on %s for %s\n", j, e, algo);
910 hexdump(q, template[i].rlen);
911 ret = -EINVAL;
912 goto out;
913 }
914 }
915
916 ret = 0;
917
918 out:
919 testmgr_free_buf(xbuf);
920 out_nobuf:
921 return ret;
922 }
923
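/*
 * Core skcipher test routine, mirroring __test_aead(): a linear pass
 * (optionally misaligned by @align_offset and/or using a separate
 * destination buffer when @diff_dst is set) followed by a chunked pass
 * that scatters the input across pages according to template[i].tap[].
 */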
924 static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
925 struct cipher_testvec *template, unsigned int tcount,
926 const bool diff_dst, const int align_offset)
927 {
928 const char *algo =
929 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
930 unsigned int i, j, k, n, temp;
931 char *q;
932 struct ablkcipher_request *req;
933 struct scatterlist sg[8];
934 struct scatterlist sgout[8];
935 const char *e, *d;
936 struct tcrypt_result result;
937 void *data;
938 char iv[MAX_IVLEN];
939 char *xbuf[XBUFSIZE];
940 char *xoutbuf[XBUFSIZE];
941 int ret = -ENOMEM;
942
943 if (testmgr_alloc_buf(xbuf))
944 goto out_nobuf;
945
946 if (diff_dst && testmgr_alloc_buf(xoutbuf))
947 goto out_nooutbuf;
948
949 if (diff_dst)
950 d = "-ddst";
951 else
952 d = "";
953
954 if (enc == ENCRYPT)
955 e = "encryption";
956 else
957 e = "decryption";
958
959 init_completion(&result.completion);
960
961 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
962 if (!req) {
963 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
964 d, algo);
965 goto out;
966 }
967
968 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
969 tcrypt_complete, &result);
970
971 j = 0;
972 for (i = 0; i < tcount; i++) {
973 if (template[i].np && !template[i].also_non_np)
974 continue;
975
976 if (template[i].iv)
977 memcpy(iv, template[i].iv, MAX_IVLEN);
978 else
979 memset(iv, 0, MAX_IVLEN);
980
981 j++;
982 ret = -EINVAL;
983 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
984 goto out;
985
986 data = xbuf[0];
987 data += align_offset;
988 memcpy(data, template[i].input, template[i].ilen);
989
990 crypto_ablkcipher_clear_flags(tfm, ~0);
991 if (template[i].wk)
992 crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
993
994 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
995 template[i].klen);
996 if (!ret == template[i].fail) {
997 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
998 d, j, algo, crypto_ablkcipher_get_flags(tfm));
999 goto out;
1000 } else if (ret)
1001 continue;
1002
1003 sg_init_one(&sg[0], data, template[i].ilen);
1004 if (diff_dst) {
1005 data = xoutbuf[0];
1006 data += align_offset;
1007 sg_init_one(&sgout[0], data, template[i].ilen);
1008 }
1009
1010 ablkcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1011 template[i].ilen, iv);
1012 ret = enc ? crypto_ablkcipher_encrypt(req) :
1013 crypto_ablkcipher_decrypt(req);
1014
1015 switch (ret) {
1016 case 0:
1017 break;
1018 case -EINPROGRESS:
1019 case -EBUSY:
1020 wait_for_completion(&result.completion);
1021 reinit_completion(&result.completion);
1022 ret = result.err;
1023 if (!ret)
1024 break;
1025 /* fall through */
1026 default:
1027 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1028 d, e, j, algo, -ret);
1029 goto out;
1030 }
1031
1032 q = data;
1033 if (memcmp(q, template[i].result, template[i].rlen)) {
1034 pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
1035 d, j, e, algo);
1036 hexdump(q, template[i].rlen);
1037 ret = -EINVAL;
1038 goto out;
1039 }
1040 }
1041
1042 j = 0;
1043 for (i = 0; i < tcount; i++) {
1044 /* alignment tests are only done with contiguous buffers */
1045 if (align_offset != 0)
1046 break;
1047
1048 if (!template[i].np)
1049 continue;
1050
1051 if (template[i].iv)
1052 memcpy(iv, template[i].iv, MAX_IVLEN);
1053 else
1054 memset(iv, 0, MAX_IVLEN);
1055
1056 j++;
1057 crypto_ablkcipher_clear_flags(tfm, ~0);
1058 if (template[i].wk)
1059 crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1060
1061 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
1062 template[i].klen);
1063 if (!ret == template[i].fail) {
1064 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1065 d, j, algo, crypto_ablkcipher_get_flags(tfm));
1066 goto out;
1067 } else if (ret)
1068 continue;
1069
1070 temp = 0;
1071 ret = -EINVAL;
1072 sg_init_table(sg, template[i].np);
1073 if (diff_dst)
1074 sg_init_table(sgout, template[i].np);
1075 for (k = 0; k < template[i].np; k++) {
1076 if (WARN_ON(offset_in_page(IDX[k]) +
1077 template[i].tap[k] > PAGE_SIZE))
1078 goto out;
1079
1080 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1081
1082 memcpy(q, template[i].input + temp, template[i].tap[k]);
1083
1084 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1085 q[template[i].tap[k]] = 0;
1086
1087 sg_set_buf(&sg[k], q, template[i].tap[k]);
1088 if (diff_dst) {
1089 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1090 offset_in_page(IDX[k]);
1091
1092 sg_set_buf(&sgout[k], q, template[i].tap[k]);
1093
1094 memset(q, 0, template[i].tap[k]);
1095 if (offset_in_page(q) +
1096 template[i].tap[k] < PAGE_SIZE)
1097 q[template[i].tap[k]] = 0;
1098 }
1099
1100 temp += template[i].tap[k];
1101 }
1102
1103 ablkcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1104 template[i].ilen, iv);
1105
1106 ret = enc ? crypto_ablkcipher_encrypt(req) :
1107 crypto_ablkcipher_decrypt(req);
1108
1109 switch (ret) {
1110 case 0:
1111 break;
1112 case -EINPROGRESS:
1113 case -EBUSY:
1114 wait_for_completion(&result.completion);
1115 reinit_completion(&result.completion);
1116 ret = result.err;
1117 if (!ret)
1118 break;
1119 /* fall through */
1120 default:
1121 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1122 d, e, j, algo, -ret);
1123 goto out;
1124 }
1125
1126 temp = 0;
1127 ret = -EINVAL;
1128 for (k = 0; k < template[i].np; k++) {
1129 if (diff_dst)
1130 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1131 offset_in_page(IDX[k]);
1132 else
1133 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1134 offset_in_page(IDX[k]);
1135
1136 if (memcmp(q, template[i].result + temp,
1137 template[i].tap[k])) {
1138 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1139 d, j, e, k, algo);
1140 hexdump(q, template[i].tap[k]);
1141 goto out;
1142 }
1143
1144 q += template[i].tap[k];
1145 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1146 ;
1147 if (n) {
1148 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1149 d, j, e, k, algo, n);
1150 hexdump(q, n);
1151 goto out;
1152 }
1153 temp += template[i].tap[k];
1154 }
1155 }
1156
1157 ret = 0;
1158
1159 out:
1160 ablkcipher_request_free(req);
1161 if (diff_dst)
1162 testmgr_free_buf(xoutbuf);
1163 out_nooutbuf:
1164 testmgr_free_buf(xbuf);
1165 out_nobuf:
1166 return ret;
1167 }
1168
1169 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
1170 struct cipher_testvec *template, unsigned int tcount)
1171 {
1172 unsigned int alignmask;
1173 int ret;
1174
1175 /* test 'dst == src' case */
1176 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1177 if (ret)
1178 return ret;
1179
1180 /* test 'dst != src' case */
1181 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1182 if (ret)
1183 return ret;
1184
1185 /* test unaligned buffers, check with one byte offset */
1186 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1187 if (ret)
1188 return ret;
1189
1190 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1191 if (alignmask) {
1192 /* Check if alignment mask for tfm is correctly set. */
1193 ret = __test_skcipher(tfm, enc, template, tcount, true,
1194 alignmask + 1);
1195 if (ret)
1196 return ret;
1197 }
1198
1199 return 0;
1200 }
1201
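/*
 * Test the synchronous compression interface: each vector is compressed
 * or decompressed in one call and both the output length and the output
 * contents are checked.
 */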
1202 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1203 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1204 {
1205 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1206 unsigned int i;
1207 char result[COMP_BUF_SIZE];
1208 int ret;
1209
1210 for (i = 0; i < ctcount; i++) {
1211 int ilen;
1212 unsigned int dlen = COMP_BUF_SIZE;
1213
1214 memset(result, 0, sizeof (result));
1215
1216 ilen = ctemplate[i].inlen;
1217 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1218 ilen, result, &dlen);
1219 if (ret) {
1220 printk(KERN_ERR "alg: comp: compression failed "
1221 "on test %d for %s: ret=%d\n", i + 1, algo,
1222 -ret);
1223 goto out;
1224 }
1225
1226 if (dlen != ctemplate[i].outlen) {
1227 printk(KERN_ERR "alg: comp: Compression test %d "
1228 "failed for %s: output len = %d\n", i + 1, algo,
1229 dlen);
1230 ret = -EINVAL;
1231 goto out;
1232 }
1233
1234 if (memcmp(result, ctemplate[i].output, dlen)) {
1235 printk(KERN_ERR "alg: comp: Compression test %d "
1236 "failed for %s\n", i + 1, algo);
1237 hexdump(result, dlen);
1238 ret = -EINVAL;
1239 goto out;
1240 }
1241 }
1242
1243 for (i = 0; i < dtcount; i++) {
1244 int ilen;
1245 unsigned int dlen = COMP_BUF_SIZE;
1246
1247 memset(result, 0, sizeof (result));
1248
1249 ilen = dtemplate[i].inlen;
1250 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1251 ilen, result, &dlen);
1252 if (ret) {
1253 printk(KERN_ERR "alg: comp: decompression failed "
1254 "on test %d for %s: ret=%d\n", i + 1, algo,
1255 -ret);
1256 goto out;
1257 }
1258
1259 if (dlen != dtemplate[i].outlen) {
1260 printk(KERN_ERR "alg: comp: Decompression test %d "
1261 "failed for %s: output len = %d\n", i + 1, algo,
1262 dlen);
1263 ret = -EINVAL;
1264 goto out;
1265 }
1266
1267 if (memcmp(result, dtemplate[i].output, dlen)) {
1268 printk(KERN_ERR "alg: comp: Decompression test %d "
1269 "failed for %s\n", i + 1, algo);
1270 hexdump(result, dlen);
1271 ret = -EINVAL;
1272 goto out;
1273 }
1274 }
1275
1276 ret = 0;
1277
1278 out:
1279 return ret;
1280 }
1281
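/*
 * Test the partial (streaming) compression interface: the input is fed
 * in two halves and the output space is likewise handed out in two
 * installments, so both ->update() and ->final() are exercised.  The
 * produced length is checked both via the output buffer consumption and
 * via the return values of the update/final calls.
 */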
1282 static int test_pcomp(struct crypto_pcomp *tfm,
1283 struct pcomp_testvec *ctemplate,
1284 struct pcomp_testvec *dtemplate, int ctcount,
1285 int dtcount)
1286 {
1287 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1288 unsigned int i;
1289 char result[COMP_BUF_SIZE];
1290 int res;
1291
1292 for (i = 0; i < ctcount; i++) {
1293 struct comp_request req;
1294 unsigned int produced = 0;
1295
1296 res = crypto_compress_setup(tfm, ctemplate[i].params,
1297 ctemplate[i].paramsize);
1298 if (res) {
1299 pr_err("alg: pcomp: compression setup failed on test "
1300 "%d for %s: error=%d\n", i + 1, algo, res);
1301 return res;
1302 }
1303
1304 res = crypto_compress_init(tfm);
1305 if (res) {
1306 pr_err("alg: pcomp: compression init failed on test "
1307 "%d for %s: error=%d\n", i + 1, algo, res);
1308 return res;
1309 }
1310
1311 memset(result, 0, sizeof(result));
1312
1313 req.next_in = ctemplate[i].input;
1314 req.avail_in = ctemplate[i].inlen / 2;
1315 req.next_out = result;
1316 req.avail_out = ctemplate[i].outlen / 2;
1317
1318 res = crypto_compress_update(tfm, &req);
1319 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1320 pr_err("alg: pcomp: compression update failed on test "
1321 "%d for %s: error=%d\n", i + 1, algo, res);
1322 return res;
1323 }
1324 if (res > 0)
1325 produced += res;
1326
1327 /* Add remaining input data */
1328 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1329
1330 res = crypto_compress_update(tfm, &req);
1331 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1332 pr_err("alg: pcomp: compression update failed on test "
1333 "%d for %s: error=%d\n", i + 1, algo, res);
1334 return res;
1335 }
1336 if (res > 0)
1337 produced += res;
1338
1339 /* Provide remaining output space */
1340 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1341
1342 res = crypto_compress_final(tfm, &req);
1343 if (res < 0) {
1344 pr_err("alg: pcomp: compression final failed on test "
1345 "%d for %s: error=%d\n", i + 1, algo, res);
1346 return res;
1347 }
1348 produced += res;
1349
1350 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1351 pr_err("alg: comp: Compression test %d failed for %s: "
1352 "output len = %d (expected %d)\n", i + 1, algo,
1353 COMP_BUF_SIZE - req.avail_out,
1354 ctemplate[i].outlen);
1355 return -EINVAL;
1356 }
1357
1358 if (produced != ctemplate[i].outlen) {
1359 pr_err("alg: comp: Compression test %d failed for %s: "
1360 "returned len = %u (expected %d)\n", i + 1,
1361 algo, produced, ctemplate[i].outlen);
1362 return -EINVAL;
1363 }
1364
1365 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1366 pr_err("alg: pcomp: Compression test %d failed for "
1367 "%s\n", i + 1, algo);
1368 hexdump(result, ctemplate[i].outlen);
1369 return -EINVAL;
1370 }
1371 }
1372
1373 for (i = 0; i < dtcount; i++) {
1374 struct comp_request req;
1375 unsigned int produced = 0;
1376
1377 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1378 dtemplate[i].paramsize);
1379 if (res) {
1380 pr_err("alg: pcomp: decompression setup failed on "
1381 "test %d for %s: error=%d\n", i + 1, algo, res);
1382 return res;
1383 }
1384
1385 res = crypto_decompress_init(tfm);
1386 if (res) {
1387 pr_err("alg: pcomp: decompression init failed on test "
1388 "%d for %s: error=%d\n", i + 1, algo, res);
1389 return res;
1390 }
1391
1392 memset(result, 0, sizeof(result));
1393
1394 req.next_in = dtemplate[i].input;
1395 req.avail_in = dtemplate[i].inlen / 2;
1396 req.next_out = result;
1397 req.avail_out = dtemplate[i].outlen / 2;
1398
1399 res = crypto_decompress_update(tfm, &req);
1400 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1401 pr_err("alg: pcomp: decompression update failed on "
1402 "test %d for %s: error=%d\n", i + 1, algo, res);
1403 return res;
1404 }
1405 if (res > 0)
1406 produced += res;
1407
1408 /* Add remaining input data */
1409 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1410
1411 res = crypto_decompress_update(tfm, &req);
1412 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1413 pr_err("alg: pcomp: decompression update failed on "
1414 "test %d for %s: error=%d\n", i + 1, algo, res);
1415 return res;
1416 }
1417 if (res > 0)
1418 produced += res;
1419
1420 /* Provide remaining output space */
1421 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1422
1423 res = crypto_decompress_final(tfm, &req);
1424 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1425 pr_err("alg: pcomp: decompression final failed on "
1426 "test %d for %s: error=%d\n", i + 1, algo, res);
1427 return res;
1428 }
1429 if (res > 0)
1430 produced += res;
1431
1432 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1433 pr_err("alg: comp: Decompression test %d failed for "
1434 "%s: output len = %d (expected %d)\n", i + 1,
1435 algo, COMP_BUF_SIZE - req.avail_out,
1436 dtemplate[i].outlen);
1437 return -EINVAL;
1438 }
1439
1440 if (produced != dtemplate[i].outlen) {
1441 pr_err("alg: comp: Decompression test %d failed for "
1442 "%s: returned len = %u (expected %d)\n", i + 1,
1443 algo, produced, dtemplate[i].outlen);
1444 return -EINVAL;
1445 }
1446
1447 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1448 pr_err("alg: pcomp: Decompression test %d failed for "
1449 "%s\n", i + 1, algo);
1450 hexdump(result, dtemplate[i].outlen);
1451 return -EINVAL;
1452 }
1453 }
1454
1455 return 0;
1456 }
1457
1458
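/*
 * Test a continuous PRNG: the RNG is seeded with V || key || DT from the
 * vector, template[i].loops blocks of output are generated, and the last
 * block is compared against the expected result.
 */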
1459 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1460 unsigned int tcount)
1461 {
1462 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1463 int err = 0, i, j, seedsize;
1464 u8 *seed;
1465 char result[32];
1466
1467 seedsize = crypto_rng_seedsize(tfm);
1468
1469 seed = kmalloc(seedsize, GFP_KERNEL);
1470 if (!seed) {
1471 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1472 "for %s\n", algo);
1473 return -ENOMEM;
1474 }
1475
1476 for (i = 0; i < tcount; i++) {
1477 memset(result, 0, 32);
1478
1479 memcpy(seed, template[i].v, template[i].vlen);
1480 memcpy(seed + template[i].vlen, template[i].key,
1481 template[i].klen);
1482 memcpy(seed + template[i].vlen + template[i].klen,
1483 template[i].dt, template[i].dtlen);
1484
1485 err = crypto_rng_reset(tfm, seed, seedsize);
1486 if (err) {
1487 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1488 "for %s\n", algo);
1489 goto out;
1490 }
1491
1492 for (j = 0; j < template[i].loops; j++) {
1493 err = crypto_rng_get_bytes(tfm, result,
1494 template[i].rlen);
1495 if (err < 0) {
1496 printk(KERN_ERR "alg: cprng: Failed to obtain "
1497 "the correct amount of random data for "
1498 "%s (requested %d)\n", algo,
1499 template[i].rlen);
1500 goto out;
1501 }
1502 }
1503
1504 err = memcmp(result, template[i].result,
1505 template[i].rlen);
1506 if (err) {
1507 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1508 i, algo);
1509 hexdump(result, template[i].rlen);
1510 err = -EINVAL;
1511 goto out;
1512 }
1513 }
1514
1515 out:
1516 kfree(seed);
1517 return err;
1518 }
1519
1520 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1521 u32 type, u32 mask)
1522 {
1523 struct crypto_aead *tfm;
1524 int err = 0;
1525
1526 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask);
1527 if (IS_ERR(tfm)) {
1528 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1529 "%ld\n", driver, PTR_ERR(tfm));
1530 return PTR_ERR(tfm);
1531 }
1532
1533 if (desc->suite.aead.enc.vecs) {
1534 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1535 desc->suite.aead.enc.count);
1536 if (err)
1537 goto out;
1538 }
1539
1540 if (!err && desc->suite.aead.dec.vecs)
1541 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1542 desc->suite.aead.dec.count);
1543
1544 out:
1545 crypto_free_aead(tfm);
1546 return err;
1547 }
1548
1549 static int alg_test_cipher(const struct alg_test_desc *desc,
1550 const char *driver, u32 type, u32 mask)
1551 {
1552 struct crypto_cipher *tfm;
1553 int err = 0;
1554
1555 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1556 if (IS_ERR(tfm)) {
1557 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1558 "%s: %ld\n", driver, PTR_ERR(tfm));
1559 return PTR_ERR(tfm);
1560 }
1561
1562 if (desc->suite.cipher.enc.vecs) {
1563 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1564 desc->suite.cipher.enc.count);
1565 if (err)
1566 goto out;
1567 }
1568
1569 if (desc->suite.cipher.dec.vecs)
1570 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1571 desc->suite.cipher.dec.count);
1572
1573 out:
1574 crypto_free_cipher(tfm);
1575 return err;
1576 }
1577
1578 static int alg_test_skcipher(const struct alg_test_desc *desc,
1579 const char *driver, u32 type, u32 mask)
1580 {
1581 struct crypto_ablkcipher *tfm;
1582 int err = 0;
1583
1584 tfm = crypto_alloc_ablkcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1585 if (IS_ERR(tfm)) {
1586 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1587 "%s: %ld\n", driver, PTR_ERR(tfm));
1588 return PTR_ERR(tfm);
1589 }
1590
1591 if (desc->suite.cipher.enc.vecs) {
1592 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1593 desc->suite.cipher.enc.count);
1594 if (err)
1595 goto out;
1596 }
1597
1598 if (desc->suite.cipher.dec.vecs)
1599 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1600 desc->suite.cipher.dec.count);
1601
1602 out:
1603 crypto_free_ablkcipher(tfm);
1604 return err;
1605 }
1606
1607 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1608 u32 type, u32 mask)
1609 {
1610 struct crypto_comp *tfm;
1611 int err;
1612
1613 tfm = crypto_alloc_comp(driver, type, mask);
1614 if (IS_ERR(tfm)) {
1615 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1616 "%ld\n", driver, PTR_ERR(tfm));
1617 return PTR_ERR(tfm);
1618 }
1619
1620 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1621 desc->suite.comp.decomp.vecs,
1622 desc->suite.comp.comp.count,
1623 desc->suite.comp.decomp.count);
1624
1625 crypto_free_comp(tfm);
1626 return err;
1627 }
1628
1629 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1630 u32 type, u32 mask)
1631 {
1632 struct crypto_pcomp *tfm;
1633 int err;
1634
1635 tfm = crypto_alloc_pcomp(driver, type, mask);
1636 if (IS_ERR(tfm)) {
1637 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1638 driver, PTR_ERR(tfm));
1639 return PTR_ERR(tfm);
1640 }
1641
1642 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1643 desc->suite.pcomp.decomp.vecs,
1644 desc->suite.pcomp.comp.count,
1645 desc->suite.pcomp.decomp.count);
1646
1647 crypto_free_pcomp(tfm);
1648 return err;
1649 }
1650
1651 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1652 u32 type, u32 mask)
1653 {
1654 struct crypto_ahash *tfm;
1655 int err;
1656
1657 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1658 if (IS_ERR(tfm)) {
1659 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1660 "%ld\n", driver, PTR_ERR(tfm));
1661 return PTR_ERR(tfm);
1662 }
1663
1664 err = test_hash(tfm, desc->suite.hash.vecs,
1665 desc->suite.hash.count, true);
1666 if (!err)
1667 err = test_hash(tfm, desc->suite.hash.vecs,
1668 desc->suite.hash.count, false);
1669
1670 crypto_free_ahash(tfm);
1671 return err;
1672 }
1673
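/*
 * crc32c gets the normal hash tests plus an extra sanity check: a known
 * value is written directly into the shash descriptor context and
 * ->final() must return its bitwise complement, i.e. the driver must keep
 * the raw crc32c state in the descriptor context.
 */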
1674 static int alg_test_crc32c(const struct alg_test_desc *desc,
1675 const char *driver, u32 type, u32 mask)
1676 {
1677 struct crypto_shash *tfm;
1678 u32 val;
1679 int err;
1680
1681 err = alg_test_hash(desc, driver, type, mask);
1682 if (err)
1683 goto out;
1684
1685 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1686 if (IS_ERR(tfm)) {
1687 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1688 "%ld\n", driver, PTR_ERR(tfm));
1689 err = PTR_ERR(tfm);
1690 goto out;
1691 }
1692
1693 do {
1694 SHASH_DESC_ON_STACK(shash, tfm);
1695 u32 *ctx = (u32 *)shash_desc_ctx(shash);
1696
1697 shash->tfm = tfm;
1698 shash->flags = 0;
1699
1700 *ctx = le32_to_cpu(420553207);
1701 err = crypto_shash_final(shash, (u8 *)&val);
1702 if (err) {
1703 printk(KERN_ERR "alg: crc32c: Operation failed for "
1704 "%s: %d\n", driver, err);
1705 break;
1706 }
1707
1708 if (val != ~420553207) {
1709 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1710 "%d\n", driver, val);
1711 err = -EINVAL;
1712 }
1713 } while (0);
1714
1715 crypto_free_shash(tfm);
1716
1717 out:
1718 return err;
1719 }
1720
1721 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1722 u32 type, u32 mask)
1723 {
1724 struct crypto_rng *rng;
1725 int err;
1726
1727 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1728 if (IS_ERR(rng)) {
1729 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1730 "%ld\n", driver, PTR_ERR(rng));
1731 return PTR_ERR(rng);
1732 }
1733
1734 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1735
1736 crypto_free_rng(rng);
1737
1738 return err;
1739 }
1740
1741
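/*
 * Run one DRBG CAVS vector: instantiate the DRBG with the test entropy
 * and personalization string, generate two blocks of output (supplying
 * fresh test entropy per request when prediction resistance is enabled)
 * and compare the second block against the expected data.
 */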
1742 static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1743 const char *driver, u32 type, u32 mask)
1744 {
1745 int ret = -EAGAIN;
1746 struct crypto_rng *drng;
1747 struct drbg_test_data test_data;
1748 struct drbg_string addtl, pers, testentropy;
1749 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1750
1751 if (!buf)
1752 return -ENOMEM;
1753
1754 drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1755 if (IS_ERR(drng)) {
1756 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1757 "%s\n", driver);
1758 kzfree(buf);
1759 return -ENOMEM;
1760 }
1761
1762 test_data.testentropy = &testentropy;
1763 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1764 drbg_string_fill(&pers, test->pers, test->perslen);
1765 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1766 if (ret) {
1767 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1768 goto outbuf;
1769 }
1770
1771 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1772 if (pr) {
1773 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1774 ret = crypto_drbg_get_bytes_addtl_test(drng,
1775 buf, test->expectedlen, &addtl, &test_data);
1776 } else {
1777 ret = crypto_drbg_get_bytes_addtl(drng,
1778 buf, test->expectedlen, &addtl);
1779 }
1780 if (ret < 0) {
1781 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1782 "driver %s\n", driver);
1783 goto outbuf;
1784 }
1785
1786 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1787 if (pr) {
1788 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1789 ret = crypto_drbg_get_bytes_addtl_test(drng,
1790 buf, test->expectedlen, &addtl, &test_data);
1791 } else {
1792 ret = crypto_drbg_get_bytes_addtl(drng,
1793 buf, test->expectedlen, &addtl);
1794 }
1795 if (ret < 0) {
1796 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1797 "driver %s\n", driver);
1798 goto outbuf;
1799 }
1800
1801 ret = memcmp(test->expected, buf, test->expectedlen);
1802
1803 outbuf:
1804 crypto_free_rng(drng);
1805 kzfree(buf);
1806 return ret;
1807 }
1808
1809
1810 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1811 u32 type, u32 mask)
1812 {
1813 int err = 0;
1814 int pr = 0;
1815 int i = 0;
1816 struct drbg_testvec *template = desc->suite.drbg.vecs;
1817 unsigned int tcount = desc->suite.drbg.count;
1818
1819 if (0 == memcmp(driver, "drbg_pr_", 8))
1820 pr = 1;
1821
1822 for (i = 0; i < tcount; i++) {
1823 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1824 if (err) {
1825 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1826 i, driver);
1827 err = -EINVAL;
1828 break;
1829 }
1830 }
1831 return err;
1832
1833 }
1834
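/*
 * Run a single RSA vector: set the key, issue a probe encrypt with no
 * destination buffer to learn the required output length, then check
 * that encrypting m yields the expected ciphertext and, unless the
 * vector only carries a public key, that decrypting the ciphertext
 * recovers the original message.
 */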
1835 static int do_test_rsa(struct crypto_akcipher *tfm,
1836 struct akcipher_testvec *vecs)
1837 {
1838 struct akcipher_request *req;
1839 void *outbuf_enc = NULL;
1840 void *outbuf_dec = NULL;
1841 struct tcrypt_result result;
1842 unsigned int out_len_max, out_len = 0;
1843 int err = -ENOMEM;
1844
1845 req = akcipher_request_alloc(tfm, GFP_KERNEL);
1846 if (!req)
1847 return err;
1848
1849 init_completion(&result.completion);
1850 err = crypto_akcipher_setkey(tfm, vecs->key, vecs->key_len);
1851 if (err)
1852 goto free_req;
1853
1854 akcipher_request_set_crypt(req, vecs->m, outbuf_enc, vecs->m_size,
1855 out_len);
1856 /* expect this to fail, and update the required buf len */
1857 crypto_akcipher_encrypt(req);
1858 out_len = req->dst_len;
1859 if (!out_len) {
1860 err = -EINVAL;
1861 goto free_req;
1862 }
1863
1864 out_len_max = out_len;
1865 err = -ENOMEM;
1866 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
1867 if (!outbuf_enc)
1868 goto free_req;
1869
1870 akcipher_request_set_crypt(req, vecs->m, outbuf_enc, vecs->m_size,
1871 out_len);
1872 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1873 tcrypt_complete, &result);
1874
1875 /* Run RSA encrypt - c = m^e mod n */
1876 err = wait_async_op(&result, crypto_akcipher_encrypt(req));
1877 if (err) {
1878 pr_err("alg: rsa: encrypt test failed. err %d\n", err);
1879 goto free_all;
1880 }
1881 if (out_len != vecs->c_size) {
1882 pr_err("alg: rsa: encrypt test failed. Invalid output len\n");
1883 err = -EINVAL;
1884 goto free_all;
1885 }
1886 /* verify that encrypted message is equal to expected */
1887 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
1888 pr_err("alg: rsa: encrypt test failed. Invalid output\n");
1889 err = -EINVAL;
1890 goto free_all;
1891 }
1892 /* Don't invoke decrypt for vectors with public key */
1893 if (vecs->public_key_vec) {
1894 err = 0;
1895 goto free_all;
1896 }
1897 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
1898 if (!outbuf_dec) {
1899 err = -ENOMEM;
1900 goto free_all;
1901 }
1902 init_completion(&result.completion);
1903 akcipher_request_set_crypt(req, outbuf_enc, outbuf_dec, vecs->c_size,
1904 out_len);
1905
1906 /* Run RSA decrypt - m = c^d mod n */
1907 err = wait_async_op(&result, crypto_akcipher_decrypt(req));
1908 if (err) {
1909 pr_err("alg: rsa: decrypt test failed. err %d\n", err);
1910 goto free_all;
1911 }
1912 out_len = req->dst_len;
1913 if (out_len != vecs->m_size) {
1914 pr_err("alg: rsa: decrypt test failed. Invalid output len\n");
1915 err = -EINVAL;
1916 goto free_all;
1917 }
1918 /* verify that decrypted message is equal to the original msg */
1919 if (memcmp(vecs->m, outbuf_dec, vecs->m_size)) {
1920 pr_err("alg: rsa: decrypt test failed. Invalid output\n");
1921 err = -EINVAL;
1922 }
1923 free_all:
1924 kfree(outbuf_dec);
1925 kfree(outbuf_enc);
1926 free_req:
1927 akcipher_request_free(req);
1928 return err;
1929 }
1930
1931 static int test_rsa(struct crypto_akcipher *tfm, struct akcipher_testvec *vecs,
1932 unsigned int tcount)
1933 {
1934 int ret, i;
1935
1936 for (i = 0; i < tcount; i++) {
1937 ret = do_test_rsa(tfm, vecs++);
1938 if (ret) {
1939 pr_err("alg: rsa: test failed on vector %d, err=%d\n",
1940 i + 1, ret);
1941 return ret;
1942 }
1943 }
1944 return 0;
1945 }
1946
1947 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
1948 struct akcipher_testvec *vecs, unsigned int tcount)
1949 {
1950 if (strncmp(alg, "rsa", 3) == 0)
1951 return test_rsa(tfm, vecs, tcount);
1952
1953 return 0;
1954 }
1955
1956 static int alg_test_akcipher(const struct alg_test_desc *desc,
1957 const char *driver, u32 type, u32 mask)
1958 {
1959 struct crypto_akcipher *tfm;
1960 int err = 0;
1961
1962 tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1963 if (IS_ERR(tfm)) {
1964 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
1965 driver, PTR_ERR(tfm));
1966 return PTR_ERR(tfm);
1967 }
1968 if (desc->suite.akcipher.vecs)
1969 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
1970 desc->suite.akcipher.count);
1971
1972 crypto_free_akcipher(tfm);
1973 return err;
1974 }
1975
1976 static int alg_test_null(const struct alg_test_desc *desc,
1977 const char *driver, u32 type, u32 mask)
1978 {
1979 return 0;
1980 }
1981
1982 /* Please keep this list sorted by algorithm name. */
1983 static const struct alg_test_desc alg_test_descs[] = {
1984 {
1985 .alg = "__cbc-cast5-avx",
1986 .test = alg_test_null,
1987 }, {
1988 .alg = "__cbc-cast6-avx",
1989 .test = alg_test_null,
1990 }, {
1991 .alg = "__cbc-serpent-avx",
1992 .test = alg_test_null,
1993 }, {
1994 .alg = "__cbc-serpent-avx2",
1995 .test = alg_test_null,
1996 }, {
1997 .alg = "__cbc-serpent-sse2",
1998 .test = alg_test_null,
1999 }, {
2000 .alg = "__cbc-twofish-avx",
2001 .test = alg_test_null,
2002 }, {
2003 .alg = "__driver-cbc-aes-aesni",
2004 .test = alg_test_null,
2005 .fips_allowed = 1,
2006 }, {
2007 .alg = "__driver-cbc-camellia-aesni",
2008 .test = alg_test_null,
2009 }, {
2010 .alg = "__driver-cbc-camellia-aesni-avx2",
2011 .test = alg_test_null,
2012 }, {
2013 .alg = "__driver-cbc-cast5-avx",
2014 .test = alg_test_null,
2015 }, {
2016 .alg = "__driver-cbc-cast6-avx",
2017 .test = alg_test_null,
2018 }, {
2019 .alg = "__driver-cbc-serpent-avx",
2020 .test = alg_test_null,
2021 }, {
2022 .alg = "__driver-cbc-serpent-avx2",
2023 .test = alg_test_null,
2024 }, {
2025 .alg = "__driver-cbc-serpent-sse2",
2026 .test = alg_test_null,
2027 }, {
2028 .alg = "__driver-cbc-twofish-avx",
2029 .test = alg_test_null,
2030 }, {
2031 .alg = "__driver-ecb-aes-aesni",
2032 .test = alg_test_null,
2033 .fips_allowed = 1,
2034 }, {
2035 .alg = "__driver-ecb-camellia-aesni",
2036 .test = alg_test_null,
2037 }, {
2038 .alg = "__driver-ecb-camellia-aesni-avx2",
2039 .test = alg_test_null,
2040 }, {
2041 .alg = "__driver-ecb-cast5-avx",
2042 .test = alg_test_null,
2043 }, {
2044 .alg = "__driver-ecb-cast6-avx",
2045 .test = alg_test_null,
2046 }, {
2047 .alg = "__driver-ecb-serpent-avx",
2048 .test = alg_test_null,
2049 }, {
2050 .alg = "__driver-ecb-serpent-avx2",
2051 .test = alg_test_null,
2052 }, {
2053 .alg = "__driver-ecb-serpent-sse2",
2054 .test = alg_test_null,
2055 }, {
2056 .alg = "__driver-ecb-twofish-avx",
2057 .test = alg_test_null,
2058 }, {
2059 .alg = "__ghash-pclmulqdqni",
2060 .test = alg_test_null,
2061 .fips_allowed = 1,
2062 }, {
2063 .alg = "ansi_cprng",
2064 .test = alg_test_cprng,
2065 .fips_allowed = 1,
2066 .suite = {
2067 .cprng = {
2068 .vecs = ansi_cprng_aes_tv_template,
2069 .count = ANSI_CPRNG_AES_TEST_VECTORS
2070 }
2071 }
2072 }, {
2073 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2074 .test = alg_test_aead,
2075 .fips_allowed = 1,
2076 .suite = {
2077 .aead = {
2078 .enc = {
2079 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
2080 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
2081 },
2082 .dec = {
2083 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
2084 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
2085 }
2086 }
2087 }
2088 }, {
2089 .alg = "authenc(hmac(sha1),cbc(aes))",
2090 .test = alg_test_aead,
2091 .fips_allowed = 1,
2092 .suite = {
2093 .aead = {
2094 .enc = {
2095 .vecs =
2096 hmac_sha1_aes_cbc_enc_tv_temp,
2097 .count =
2098 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
2099 }
2100 }
2101 }
2102 }, {
2103 .alg = "authenc(hmac(sha1),cbc(des))",
2104 .test = alg_test_aead,
2105 .fips_allowed = 1,
2106 .suite = {
2107 .aead = {
2108 .enc = {
2109 .vecs =
2110 hmac_sha1_des_cbc_enc_tv_temp,
2111 .count =
2112 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2113 }
2114 }
2115 }
2116 }, {
2117 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2118 .test = alg_test_aead,
2119 .fips_allowed = 1,
2120 .suite = {
2121 .aead = {
2122 .enc = {
2123 .vecs =
2124 hmac_sha1_des3_ede_cbc_enc_tv_temp,
2125 .count =
2126 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
2127 }
2128 }
2129 }
2130 }, {
2131 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2132 .test = alg_test_aead,
2133 .fips_allowed = 1,
2134 .suite = {
2135 .aead = {
2136 .enc = {
2137 .vecs =
2138 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2139 .count =
2140 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
2141 },
2142 .dec = {
2143 .vecs =
2144 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2145 .count =
2146 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2147 }
2148 }
2149 }
2150 }, {
2151 .alg = "authenc(hmac(sha224),cbc(des))",
2152 .test = alg_test_aead,
2153 .fips_allowed = 1,
2154 .suite = {
2155 .aead = {
2156 .enc = {
2157 .vecs =
2158 hmac_sha224_des_cbc_enc_tv_temp,
2159 .count =
2160 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2161 }
2162 }
2163 }
2164 }, {
2165 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2166 .test = alg_test_aead,
2167 .fips_allowed = 1,
2168 .suite = {
2169 .aead = {
2170 .enc = {
2171 .vecs =
2172 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2173 .count =
2174 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2175 }
2176 }
2177 }
2178 }, {
2179 .alg = "authenc(hmac(sha256),cbc(aes))",
2180 .test = alg_test_aead,
2181 .fips_allowed = 1,
2182 .suite = {
2183 .aead = {
2184 .enc = {
2185 .vecs =
2186 hmac_sha256_aes_cbc_enc_tv_temp,
2187 .count =
2188 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2189 }
2190 }
2191 }
2192 }, {
2193 .alg = "authenc(hmac(sha256),cbc(des))",
2194 .test = alg_test_aead,
2195 .fips_allowed = 1,
2196 .suite = {
2197 .aead = {
2198 .enc = {
2199 .vecs =
2200 hmac_sha256_des_cbc_enc_tv_temp,
2201 .count =
2202 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2203 }
2204 }
2205 }
2206 }, {
2207 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2208 .test = alg_test_aead,
2209 .fips_allowed = 1,
2210 .suite = {
2211 .aead = {
2212 .enc = {
2213 .vecs =
2214 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2215 .count =
2216 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2217 }
2218 }
2219 }
2220 }, {
2221 .alg = "authenc(hmac(sha384),cbc(des))",
2222 .test = alg_test_aead,
2223 .fips_allowed = 1,
2224 .suite = {
2225 .aead = {
2226 .enc = {
2227 .vecs =
2228 hmac_sha384_des_cbc_enc_tv_temp,
2229 .count =
2230 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2231 }
2232 }
2233 }
2234 }, {
2235 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2236 .test = alg_test_aead,
2237 .fips_allowed = 1,
2238 .suite = {
2239 .aead = {
2240 .enc = {
2241 .vecs =
2242 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2243 .count =
2244 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2245 }
2246 }
2247 }
2248 }, {
2249 .alg = "authenc(hmac(sha512),cbc(aes))",
2250 .test = alg_test_aead,
2251 .fips_allowed = 1,
2252 .suite = {
2253 .aead = {
2254 .enc = {
2255 .vecs =
2256 hmac_sha512_aes_cbc_enc_tv_temp,
2257 .count =
2258 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2259 }
2260 }
2261 }
2262 }, {
2263 .alg = "authenc(hmac(sha512),cbc(des))",
2264 .test = alg_test_aead,
2265 .fips_allowed = 1,
2266 .suite = {
2267 .aead = {
2268 .enc = {
2269 .vecs =
2270 hmac_sha512_des_cbc_enc_tv_temp,
2271 .count =
2272 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2273 }
2274 }
2275 }
2276 }, {
2277 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2278 .test = alg_test_aead,
2279 .fips_allowed = 1,
2280 .suite = {
2281 .aead = {
2282 .enc = {
2283 .vecs =
2284 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2285 .count =
2286 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2287 }
2288 }
2289 }
2290 }, {
2291 .alg = "cbc(aes)",
2292 .test = alg_test_skcipher,
2293 .fips_allowed = 1,
2294 .suite = {
2295 .cipher = {
2296 .enc = {
2297 .vecs = aes_cbc_enc_tv_template,
2298 .count = AES_CBC_ENC_TEST_VECTORS
2299 },
2300 .dec = {
2301 .vecs = aes_cbc_dec_tv_template,
2302 .count = AES_CBC_DEC_TEST_VECTORS
2303 }
2304 }
2305 }
2306 }, {
2307 .alg = "cbc(anubis)",
2308 .test = alg_test_skcipher,
2309 .suite = {
2310 .cipher = {
2311 .enc = {
2312 .vecs = anubis_cbc_enc_tv_template,
2313 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2314 },
2315 .dec = {
2316 .vecs = anubis_cbc_dec_tv_template,
2317 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2318 }
2319 }
2320 }
2321 }, {
2322 .alg = "cbc(blowfish)",
2323 .test = alg_test_skcipher,
2324 .suite = {
2325 .cipher = {
2326 .enc = {
2327 .vecs = bf_cbc_enc_tv_template,
2328 .count = BF_CBC_ENC_TEST_VECTORS
2329 },
2330 .dec = {
2331 .vecs = bf_cbc_dec_tv_template,
2332 .count = BF_CBC_DEC_TEST_VECTORS
2333 }
2334 }
2335 }
2336 }, {
2337 .alg = "cbc(camellia)",
2338 .test = alg_test_skcipher,
2339 .suite = {
2340 .cipher = {
2341 .enc = {
2342 .vecs = camellia_cbc_enc_tv_template,
2343 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2344 },
2345 .dec = {
2346 .vecs = camellia_cbc_dec_tv_template,
2347 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2348 }
2349 }
2350 }
2351 }, {
2352 .alg = "cbc(cast5)",
2353 .test = alg_test_skcipher,
2354 .suite = {
2355 .cipher = {
2356 .enc = {
2357 .vecs = cast5_cbc_enc_tv_template,
2358 .count = CAST5_CBC_ENC_TEST_VECTORS
2359 },
2360 .dec = {
2361 .vecs = cast5_cbc_dec_tv_template,
2362 .count = CAST5_CBC_DEC_TEST_VECTORS
2363 }
2364 }
2365 }
2366 }, {
2367 .alg = "cbc(cast6)",
2368 .test = alg_test_skcipher,
2369 .suite = {
2370 .cipher = {
2371 .enc = {
2372 .vecs = cast6_cbc_enc_tv_template,
2373 .count = CAST6_CBC_ENC_TEST_VECTORS
2374 },
2375 .dec = {
2376 .vecs = cast6_cbc_dec_tv_template,
2377 .count = CAST6_CBC_DEC_TEST_VECTORS
2378 }
2379 }
2380 }
2381 }, {
2382 .alg = "cbc(des)",
2383 .test = alg_test_skcipher,
2384 .suite = {
2385 .cipher = {
2386 .enc = {
2387 .vecs = des_cbc_enc_tv_template,
2388 .count = DES_CBC_ENC_TEST_VECTORS
2389 },
2390 .dec = {
2391 .vecs = des_cbc_dec_tv_template,
2392 .count = DES_CBC_DEC_TEST_VECTORS
2393 }
2394 }
2395 }
2396 }, {
2397 .alg = "cbc(des3_ede)",
2398 .test = alg_test_skcipher,
2399 .fips_allowed = 1,
2400 .suite = {
2401 .cipher = {
2402 .enc = {
2403 .vecs = des3_ede_cbc_enc_tv_template,
2404 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2405 },
2406 .dec = {
2407 .vecs = des3_ede_cbc_dec_tv_template,
2408 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2409 }
2410 }
2411 }
2412 }, {
2413 .alg = "cbc(serpent)",
2414 .test = alg_test_skcipher,
2415 .suite = {
2416 .cipher = {
2417 .enc = {
2418 .vecs = serpent_cbc_enc_tv_template,
2419 .count = SERPENT_CBC_ENC_TEST_VECTORS
2420 },
2421 .dec = {
2422 .vecs = serpent_cbc_dec_tv_template,
2423 .count = SERPENT_CBC_DEC_TEST_VECTORS
2424 }
2425 }
2426 }
2427 }, {
2428 .alg = "cbc(twofish)",
2429 .test = alg_test_skcipher,
2430 .suite = {
2431 .cipher = {
2432 .enc = {
2433 .vecs = tf_cbc_enc_tv_template,
2434 .count = TF_CBC_ENC_TEST_VECTORS
2435 },
2436 .dec = {
2437 .vecs = tf_cbc_dec_tv_template,
2438 .count = TF_CBC_DEC_TEST_VECTORS
2439 }
2440 }
2441 }
2442 }, {
2443 .alg = "ccm(aes)",
2444 .test = alg_test_aead,
2445 .fips_allowed = 1,
2446 .suite = {
2447 .aead = {
2448 .enc = {
2449 .vecs = aes_ccm_enc_tv_template,
2450 .count = AES_CCM_ENC_TEST_VECTORS
2451 },
2452 .dec = {
2453 .vecs = aes_ccm_dec_tv_template,
2454 .count = AES_CCM_DEC_TEST_VECTORS
2455 }
2456 }
2457 }
2458 }, {
2459 .alg = "chacha20",
2460 .test = alg_test_skcipher,
2461 .suite = {
2462 .cipher = {
2463 .enc = {
2464 .vecs = chacha20_enc_tv_template,
2465 .count = CHACHA20_ENC_TEST_VECTORS
2466 },
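/*
 * ChaCha20 is a stream cipher: decryption applies the same keystream,
 * so the encryption vectors are simply reused for the decryption test
 * below.
 */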
2467 .dec = {
2468 .vecs = chacha20_enc_tv_template,
2469 .count = CHACHA20_ENC_TEST_VECTORS
2470 },
2471 }
2472 }
2473 }, {
2474 .alg = "cmac(aes)",
2475 .test = alg_test_hash,
2476 .suite = {
2477 .hash = {
2478 .vecs = aes_cmac128_tv_template,
2479 .count = CMAC_AES_TEST_VECTORS
2480 }
2481 }
2482 }, {
2483 .alg = "cmac(des3_ede)",
2484 .test = alg_test_hash,
2485 .suite = {
2486 .hash = {
2487 .vecs = des3_ede_cmac64_tv_template,
2488 .count = CMAC_DES3_EDE_TEST_VECTORS
2489 }
2490 }
2491 }, {
2492 .alg = "compress_null",
2493 .test = alg_test_null,
2494 }, {
2495 .alg = "crc32",
2496 .test = alg_test_hash,
2497 .suite = {
2498 .hash = {
2499 .vecs = crc32_tv_template,
2500 .count = CRC32_TEST_VECTORS
2501 }
2502 }
2503 }, {
2504 .alg = "crc32c",
2505 .test = alg_test_crc32c,
2506 .fips_allowed = 1,
2507 .suite = {
2508 .hash = {
2509 .vecs = crc32c_tv_template,
2510 .count = CRC32C_TEST_VECTORS
2511 }
2512 }
2513 }, {
2514 .alg = "crct10dif",
2515 .test = alg_test_hash,
2516 .fips_allowed = 1,
2517 .suite = {
2518 .hash = {
2519 .vecs = crct10dif_tv_template,
2520 .count = CRCT10DIF_TEST_VECTORS
2521 }
2522 }
2523 }, {
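/*
 * The "__driver-*" and cryptd(...) entries are internal helper
 * implementations that are only reachable through their top-level
 * wrappers (tested elsewhere in this table), so they only get the
 * no-op alg_test_null here.
 */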
2524 .alg = "cryptd(__driver-cbc-aes-aesni)",
2525 .test = alg_test_null,
2526 .fips_allowed = 1,
2527 }, {
2528 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2529 .test = alg_test_null,
2530 }, {
2531 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2532 .test = alg_test_null,
2533 }, {
2534 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2535 .test = alg_test_null,
2536 }, {
2537 .alg = "cryptd(__driver-ecb-aes-aesni)",
2538 .test = alg_test_null,
2539 .fips_allowed = 1,
2540 }, {
2541 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2542 .test = alg_test_null,
2543 }, {
2544 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2545 .test = alg_test_null,
2546 }, {
2547 .alg = "cryptd(__driver-ecb-cast5-avx)",
2548 .test = alg_test_null,
2549 }, {
2550 .alg = "cryptd(__driver-ecb-cast6-avx)",
2551 .test = alg_test_null,
2552 }, {
2553 .alg = "cryptd(__driver-ecb-serpent-avx)",
2554 .test = alg_test_null,
2555 }, {
2556 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2557 .test = alg_test_null,
2558 }, {
2559 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2560 .test = alg_test_null,
2561 }, {
2562 .alg = "cryptd(__driver-ecb-twofish-avx)",
2563 .test = alg_test_null,
2564 }, {
2565 .alg = "cryptd(__driver-gcm-aes-aesni)",
2566 .test = alg_test_null,
2567 .fips_allowed = 1,
2568 }, {
2569 .alg = "cryptd(__ghash-pclmulqdqni)",
2570 .test = alg_test_null,
2571 .fips_allowed = 1,
2572 }, {
2573 .alg = "ctr(aes)",
2574 .test = alg_test_skcipher,
2575 .fips_allowed = 1,
2576 .suite = {
2577 .cipher = {
2578 .enc = {
2579 .vecs = aes_ctr_enc_tv_template,
2580 .count = AES_CTR_ENC_TEST_VECTORS
2581 },
2582 .dec = {
2583 .vecs = aes_ctr_dec_tv_template,
2584 .count = AES_CTR_DEC_TEST_VECTORS
2585 }
2586 }
2587 }
2588 }, {
2589 .alg = "ctr(blowfish)",
2590 .test = alg_test_skcipher,
2591 .suite = {
2592 .cipher = {
2593 .enc = {
2594 .vecs = bf_ctr_enc_tv_template,
2595 .count = BF_CTR_ENC_TEST_VECTORS
2596 },
2597 .dec = {
2598 .vecs = bf_ctr_dec_tv_template,
2599 .count = BF_CTR_DEC_TEST_VECTORS
2600 }
2601 }
2602 }
2603 }, {
2604 .alg = "ctr(camellia)",
2605 .test = alg_test_skcipher,
2606 .suite = {
2607 .cipher = {
2608 .enc = {
2609 .vecs = camellia_ctr_enc_tv_template,
2610 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2611 },
2612 .dec = {
2613 .vecs = camellia_ctr_dec_tv_template,
2614 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2615 }
2616 }
2617 }
2618 }, {
2619 .alg = "ctr(cast5)",
2620 .test = alg_test_skcipher,
2621 .suite = {
2622 .cipher = {
2623 .enc = {
2624 .vecs = cast5_ctr_enc_tv_template,
2625 .count = CAST5_CTR_ENC_TEST_VECTORS
2626 },
2627 .dec = {
2628 .vecs = cast5_ctr_dec_tv_template,
2629 .count = CAST5_CTR_DEC_TEST_VECTORS
2630 }
2631 }
2632 }
2633 }, {
2634 .alg = "ctr(cast6)",
2635 .test = alg_test_skcipher,
2636 .suite = {
2637 .cipher = {
2638 .enc = {
2639 .vecs = cast6_ctr_enc_tv_template,
2640 .count = CAST6_CTR_ENC_TEST_VECTORS
2641 },
2642 .dec = {
2643 .vecs = cast6_ctr_dec_tv_template,
2644 .count = CAST6_CTR_DEC_TEST_VECTORS
2645 }
2646 }
2647 }
2648 }, {
2649 .alg = "ctr(des)",
2650 .test = alg_test_skcipher,
2651 .suite = {
2652 .cipher = {
2653 .enc = {
2654 .vecs = des_ctr_enc_tv_template,
2655 .count = DES_CTR_ENC_TEST_VECTORS
2656 },
2657 .dec = {
2658 .vecs = des_ctr_dec_tv_template,
2659 .count = DES_CTR_DEC_TEST_VECTORS
2660 }
2661 }
2662 }
2663 }, {
2664 .alg = "ctr(des3_ede)",
2665 .test = alg_test_skcipher,
2666 .suite = {
2667 .cipher = {
2668 .enc = {
2669 .vecs = des3_ede_ctr_enc_tv_template,
2670 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2671 },
2672 .dec = {
2673 .vecs = des3_ede_ctr_dec_tv_template,
2674 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2675 }
2676 }
2677 }
2678 }, {
2679 .alg = "ctr(serpent)",
2680 .test = alg_test_skcipher,
2681 .suite = {
2682 .cipher = {
2683 .enc = {
2684 .vecs = serpent_ctr_enc_tv_template,
2685 .count = SERPENT_CTR_ENC_TEST_VECTORS
2686 },
2687 .dec = {
2688 .vecs = serpent_ctr_dec_tv_template,
2689 .count = SERPENT_CTR_DEC_TEST_VECTORS
2690 }
2691 }
2692 }
2693 }, {
2694 .alg = "ctr(twofish)",
2695 .test = alg_test_skcipher,
2696 .suite = {
2697 .cipher = {
2698 .enc = {
2699 .vecs = tf_ctr_enc_tv_template,
2700 .count = TF_CTR_ENC_TEST_VECTORS
2701 },
2702 .dec = {
2703 .vecs = tf_ctr_dec_tv_template,
2704 .count = TF_CTR_DEC_TEST_VECTORS
2705 }
2706 }
2707 }
2708 }, {
2709 .alg = "cts(cbc(aes))",
2710 .test = alg_test_skcipher,
2711 .suite = {
2712 .cipher = {
2713 .enc = {
2714 .vecs = cts_mode_enc_tv_template,
2715 .count = CTS_MODE_ENC_TEST_VECTORS
2716 },
2717 .dec = {
2718 .vecs = cts_mode_dec_tv_template,
2719 .count = CTS_MODE_DEC_TEST_VECTORS
2720 }
2721 }
2722 }
2723 }, {
2724 .alg = "deflate",
2725 .test = alg_test_comp,
2726 .fips_allowed = 1,
2727 .suite = {
2728 .comp = {
2729 .comp = {
2730 .vecs = deflate_comp_tv_template,
2731 .count = DEFLATE_COMP_TEST_VECTORS
2732 },
2733 .decomp = {
2734 .vecs = deflate_decomp_tv_template,
2735 .count = DEFLATE_DECOMP_TEST_VECTORS
2736 }
2737 }
2738 }
2739 }, {
2740 .alg = "digest_null",
2741 .test = alg_test_null,
2742 }, {
2743 .alg = "drbg_nopr_ctr_aes128",
2744 .test = alg_test_drbg,
2745 .fips_allowed = 1,
2746 .suite = {
2747 .drbg = {
2748 .vecs = drbg_nopr_ctr_aes128_tv_template,
2749 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2750 }
2751 }
2752 }, {
2753 .alg = "drbg_nopr_ctr_aes192",
2754 .test = alg_test_drbg,
2755 .fips_allowed = 1,
2756 .suite = {
2757 .drbg = {
2758 .vecs = drbg_nopr_ctr_aes192_tv_template,
2759 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2760 }
2761 }
2762 }, {
2763 .alg = "drbg_nopr_ctr_aes256",
2764 .test = alg_test_drbg,
2765 .fips_allowed = 1,
2766 .suite = {
2767 .drbg = {
2768 .vecs = drbg_nopr_ctr_aes256_tv_template,
2769 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2770 }
2771 }
2772 }, {
2773 /*
2774 * There is no need to specifically test the DRBG with every
2775 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2776 */
2777 .alg = "drbg_nopr_hmac_sha1",
2778 .fips_allowed = 1,
2779 .test = alg_test_null,
2780 }, {
2781 .alg = "drbg_nopr_hmac_sha256",
2782 .test = alg_test_drbg,
2783 .fips_allowed = 1,
2784 .suite = {
2785 .drbg = {
2786 .vecs = drbg_nopr_hmac_sha256_tv_template,
2787 .count =
2788 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2789 }
2790 }
2791 }, {
2792 /* covered by drbg_nopr_hmac_sha256 test */
2793 .alg = "drbg_nopr_hmac_sha384",
2794 .fips_allowed = 1,
2795 .test = alg_test_null,
2796 }, {
2797 .alg = "drbg_nopr_hmac_sha512",
2798 .test = alg_test_null,
2799 .fips_allowed = 1,
2800 }, {
2801 .alg = "drbg_nopr_sha1",
2802 .fips_allowed = 1,
2803 .test = alg_test_null,
2804 }, {
2805 .alg = "drbg_nopr_sha256",
2806 .test = alg_test_drbg,
2807 .fips_allowed = 1,
2808 .suite = {
2809 .drbg = {
2810 .vecs = drbg_nopr_sha256_tv_template,
2811 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2812 }
2813 }
2814 }, {
2815 /* covered by drbg_nopr_sha256 test */
2816 .alg = "drbg_nopr_sha384",
2817 .fips_allowed = 1,
2818 .test = alg_test_null,
2819 }, {
2820 .alg = "drbg_nopr_sha512",
2821 .fips_allowed = 1,
2822 .test = alg_test_null,
2823 }, {
2824 .alg = "drbg_pr_ctr_aes128",
2825 .test = alg_test_drbg,
2826 .fips_allowed = 1,
2827 .suite = {
2828 .drbg = {
2829 .vecs = drbg_pr_ctr_aes128_tv_template,
2830 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2831 }
2832 }
2833 }, {
2834 /* covered by drbg_pr_ctr_aes128 test */
2835 .alg = "drbg_pr_ctr_aes192",
2836 .fips_allowed = 1,
2837 .test = alg_test_null,
2838 }, {
2839 .alg = "drbg_pr_ctr_aes256",
2840 .fips_allowed = 1,
2841 .test = alg_test_null,
2842 }, {
2843 .alg = "drbg_pr_hmac_sha1",
2844 .fips_allowed = 1,
2845 .test = alg_test_null,
2846 }, {
2847 .alg = "drbg_pr_hmac_sha256",
2848 .test = alg_test_drbg,
2849 .fips_allowed = 1,
2850 .suite = {
2851 .drbg = {
2852 .vecs = drbg_pr_hmac_sha256_tv_template,
2853 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2854 }
2855 }
2856 }, {
2857 /* covered by drbg_pr_hmac_sha256 test */
2858 .alg = "drbg_pr_hmac_sha384",
2859 .fips_allowed = 1,
2860 .test = alg_test_null,
2861 }, {
2862 .alg = "drbg_pr_hmac_sha512",
2863 .test = alg_test_null,
2864 .fips_allowed = 1,
2865 }, {
2866 .alg = "drbg_pr_sha1",
2867 .fips_allowed = 1,
2868 .test = alg_test_null,
2869 }, {
2870 .alg = "drbg_pr_sha256",
2871 .test = alg_test_drbg,
2872 .fips_allowed = 1,
2873 .suite = {
2874 .drbg = {
2875 .vecs = drbg_pr_sha256_tv_template,
2876 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
2877 }
2878 }
2879 }, {
2880 /* covered by drbg_pr_sha256 test */
2881 .alg = "drbg_pr_sha384",
2882 .fips_allowed = 1,
2883 .test = alg_test_null,
2884 }, {
2885 .alg = "drbg_pr_sha512",
2886 .fips_allowed = 1,
2887 .test = alg_test_null,
2888 }, {
2889 .alg = "ecb(__aes-aesni)",
2890 .test = alg_test_null,
2891 .fips_allowed = 1,
2892 }, {
2893 .alg = "ecb(aes)",
2894 .test = alg_test_skcipher,
2895 .fips_allowed = 1,
2896 .suite = {
2897 .cipher = {
2898 .enc = {
2899 .vecs = aes_enc_tv_template,
2900 .count = AES_ENC_TEST_VECTORS
2901 },
2902 .dec = {
2903 .vecs = aes_dec_tv_template,
2904 .count = AES_DEC_TEST_VECTORS
2905 }
2906 }
2907 }
2908 }, {
2909 .alg = "ecb(anubis)",
2910 .test = alg_test_skcipher,
2911 .suite = {
2912 .cipher = {
2913 .enc = {
2914 .vecs = anubis_enc_tv_template,
2915 .count = ANUBIS_ENC_TEST_VECTORS
2916 },
2917 .dec = {
2918 .vecs = anubis_dec_tv_template,
2919 .count = ANUBIS_DEC_TEST_VECTORS
2920 }
2921 }
2922 }
2923 }, {
2924 .alg = "ecb(arc4)",
2925 .test = alg_test_skcipher,
2926 .suite = {
2927 .cipher = {
2928 .enc = {
2929 .vecs = arc4_enc_tv_template,
2930 .count = ARC4_ENC_TEST_VECTORS
2931 },
2932 .dec = {
2933 .vecs = arc4_dec_tv_template,
2934 .count = ARC4_DEC_TEST_VECTORS
2935 }
2936 }
2937 }
2938 }, {
2939 .alg = "ecb(blowfish)",
2940 .test = alg_test_skcipher,
2941 .suite = {
2942 .cipher = {
2943 .enc = {
2944 .vecs = bf_enc_tv_template,
2945 .count = BF_ENC_TEST_VECTORS
2946 },
2947 .dec = {
2948 .vecs = bf_dec_tv_template,
2949 .count = BF_DEC_TEST_VECTORS
2950 }
2951 }
2952 }
2953 }, {
2954 .alg = "ecb(camellia)",
2955 .test = alg_test_skcipher,
2956 .suite = {
2957 .cipher = {
2958 .enc = {
2959 .vecs = camellia_enc_tv_template,
2960 .count = CAMELLIA_ENC_TEST_VECTORS
2961 },
2962 .dec = {
2963 .vecs = camellia_dec_tv_template,
2964 .count = CAMELLIA_DEC_TEST_VECTORS
2965 }
2966 }
2967 }
2968 }, {
2969 .alg = "ecb(cast5)",
2970 .test = alg_test_skcipher,
2971 .suite = {
2972 .cipher = {
2973 .enc = {
2974 .vecs = cast5_enc_tv_template,
2975 .count = CAST5_ENC_TEST_VECTORS
2976 },
2977 .dec = {
2978 .vecs = cast5_dec_tv_template,
2979 .count = CAST5_DEC_TEST_VECTORS
2980 }
2981 }
2982 }
2983 }, {
2984 .alg = "ecb(cast6)",
2985 .test = alg_test_skcipher,
2986 .suite = {
2987 .cipher = {
2988 .enc = {
2989 .vecs = cast6_enc_tv_template,
2990 .count = CAST6_ENC_TEST_VECTORS
2991 },
2992 .dec = {
2993 .vecs = cast6_dec_tv_template,
2994 .count = CAST6_DEC_TEST_VECTORS
2995 }
2996 }
2997 }
2998 }, {
2999 .alg = "ecb(cipher_null)",
3000 .test = alg_test_null,
3001 }, {
3002 .alg = "ecb(des)",
3003 .test = alg_test_skcipher,
3004 .fips_allowed = 1,
3005 .suite = {
3006 .cipher = {
3007 .enc = {
3008 .vecs = des_enc_tv_template,
3009 .count = DES_ENC_TEST_VECTORS
3010 },
3011 .dec = {
3012 .vecs = des_dec_tv_template,
3013 .count = DES_DEC_TEST_VECTORS
3014 }
3015 }
3016 }
3017 }, {
3018 .alg = "ecb(des3_ede)",
3019 .test = alg_test_skcipher,
3020 .fips_allowed = 1,
3021 .suite = {
3022 .cipher = {
3023 .enc = {
3024 .vecs = des3_ede_enc_tv_template,
3025 .count = DES3_EDE_ENC_TEST_VECTORS
3026 },
3027 .dec = {
3028 .vecs = des3_ede_dec_tv_template,
3029 .count = DES3_EDE_DEC_TEST_VECTORS
3030 }
3031 }
3032 }
3033 }, {
3034 .alg = "ecb(fcrypt)",
3035 .test = alg_test_skcipher,
3036 .suite = {
3037 .cipher = {
3038 .enc = {
3039 .vecs = fcrypt_pcbc_enc_tv_template,
3040 .count = 1
3041 },
3042 .dec = {
3043 .vecs = fcrypt_pcbc_dec_tv_template,
3044 .count = 1
3045 }
3046 }
3047 }
3048 }, {
3049 .alg = "ecb(khazad)",
3050 .test = alg_test_skcipher,
3051 .suite = {
3052 .cipher = {
3053 .enc = {
3054 .vecs = khazad_enc_tv_template,
3055 .count = KHAZAD_ENC_TEST_VECTORS
3056 },
3057 .dec = {
3058 .vecs = khazad_dec_tv_template,
3059 .count = KHAZAD_DEC_TEST_VECTORS
3060 }
3061 }
3062 }
3063 }, {
3064 .alg = "ecb(seed)",
3065 .test = alg_test_skcipher,
3066 .suite = {
3067 .cipher = {
3068 .enc = {
3069 .vecs = seed_enc_tv_template,
3070 .count = SEED_ENC_TEST_VECTORS
3071 },
3072 .dec = {
3073 .vecs = seed_dec_tv_template,
3074 .count = SEED_DEC_TEST_VECTORS
3075 }
3076 }
3077 }
3078 }, {
3079 .alg = "ecb(serpent)",
3080 .test = alg_test_skcipher,
3081 .suite = {
3082 .cipher = {
3083 .enc = {
3084 .vecs = serpent_enc_tv_template,
3085 .count = SERPENT_ENC_TEST_VECTORS
3086 },
3087 .dec = {
3088 .vecs = serpent_dec_tv_template,
3089 .count = SERPENT_DEC_TEST_VECTORS
3090 }
3091 }
3092 }
3093 }, {
3094 .alg = "ecb(tea)",
3095 .test = alg_test_skcipher,
3096 .suite = {
3097 .cipher = {
3098 .enc = {
3099 .vecs = tea_enc_tv_template,
3100 .count = TEA_ENC_TEST_VECTORS
3101 },
3102 .dec = {
3103 .vecs = tea_dec_tv_template,
3104 .count = TEA_DEC_TEST_VECTORS
3105 }
3106 }
3107 }
3108 }, {
3109 .alg = "ecb(tnepres)",
3110 .test = alg_test_skcipher,
3111 .suite = {
3112 .cipher = {
3113 .enc = {
3114 .vecs = tnepres_enc_tv_template,
3115 .count = TNEPRES_ENC_TEST_VECTORS
3116 },
3117 .dec = {
3118 .vecs = tnepres_dec_tv_template,
3119 .count = TNEPRES_DEC_TEST_VECTORS
3120 }
3121 }
3122 }
3123 }, {
3124 .alg = "ecb(twofish)",
3125 .test = alg_test_skcipher,
3126 .suite = {
3127 .cipher = {
3128 .enc = {
3129 .vecs = tf_enc_tv_template,
3130 .count = TF_ENC_TEST_VECTORS
3131 },
3132 .dec = {
3133 .vecs = tf_dec_tv_template,
3134 .count = TF_DEC_TEST_VECTORS
3135 }
3136 }
3137 }
3138 }, {
3139 .alg = "ecb(xeta)",
3140 .test = alg_test_skcipher,
3141 .suite = {
3142 .cipher = {
3143 .enc = {
3144 .vecs = xeta_enc_tv_template,
3145 .count = XETA_ENC_TEST_VECTORS
3146 },
3147 .dec = {
3148 .vecs = xeta_dec_tv_template,
3149 .count = XETA_DEC_TEST_VECTORS
3150 }
3151 }
3152 }
3153 }, {
3154 .alg = "ecb(xtea)",
3155 .test = alg_test_skcipher,
3156 .suite = {
3157 .cipher = {
3158 .enc = {
3159 .vecs = xtea_enc_tv_template,
3160 .count = XTEA_ENC_TEST_VECTORS
3161 },
3162 .dec = {
3163 .vecs = xtea_dec_tv_template,
3164 .count = XTEA_DEC_TEST_VECTORS
3165 }
3166 }
3167 }
3168 }, {
3169 .alg = "gcm(aes)",
3170 .test = alg_test_aead,
3171 .fips_allowed = 1,
3172 .suite = {
3173 .aead = {
3174 .enc = {
3175 .vecs = aes_gcm_enc_tv_template,
3176 .count = AES_GCM_ENC_TEST_VECTORS
3177 },
3178 .dec = {
3179 .vecs = aes_gcm_dec_tv_template,
3180 .count = AES_GCM_DEC_TEST_VECTORS
3181 }
3182 }
3183 }
3184 }, {
3185 .alg = "ghash",
3186 .test = alg_test_hash,
3187 .fips_allowed = 1,
3188 .suite = {
3189 .hash = {
3190 .vecs = ghash_tv_template,
3191 .count = GHASH_TEST_VECTORS
3192 }
3193 }
3194 }, {
3195 .alg = "hmac(crc32)",
3196 .test = alg_test_hash,
3197 .suite = {
3198 .hash = {
3199 .vecs = bfin_crc_tv_template,
3200 .count = BFIN_CRC_TEST_VECTORS
3201 }
3202 }
3203 }, {
3204 .alg = "hmac(md5)",
3205 .test = alg_test_hash,
3206 .suite = {
3207 .hash = {
3208 .vecs = hmac_md5_tv_template,
3209 .count = HMAC_MD5_TEST_VECTORS
3210 }
3211 }
3212 }, {
3213 .alg = "hmac(rmd128)",
3214 .test = alg_test_hash,
3215 .suite = {
3216 .hash = {
3217 .vecs = hmac_rmd128_tv_template,
3218 .count = HMAC_RMD128_TEST_VECTORS
3219 }
3220 }
3221 }, {
3222 .alg = "hmac(rmd160)",
3223 .test = alg_test_hash,
3224 .suite = {
3225 .hash = {
3226 .vecs = hmac_rmd160_tv_template,
3227 .count = HMAC_RMD160_TEST_VECTORS
3228 }
3229 }
3230 }, {
3231 .alg = "hmac(sha1)",
3232 .test = alg_test_hash,
3233 .fips_allowed = 1,
3234 .suite = {
3235 .hash = {
3236 .vecs = hmac_sha1_tv_template,
3237 .count = HMAC_SHA1_TEST_VECTORS
3238 }
3239 }
3240 }, {
3241 .alg = "hmac(sha224)",
3242 .test = alg_test_hash,
3243 .fips_allowed = 1,
3244 .suite = {
3245 .hash = {
3246 .vecs = hmac_sha224_tv_template,
3247 .count = HMAC_SHA224_TEST_VECTORS
3248 }
3249 }
3250 }, {
3251 .alg = "hmac(sha256)",
3252 .test = alg_test_hash,
3253 .fips_allowed = 1,
3254 .suite = {
3255 .hash = {
3256 .vecs = hmac_sha256_tv_template,
3257 .count = HMAC_SHA256_TEST_VECTORS
3258 }
3259 }
3260 }, {
3261 .alg = "hmac(sha384)",
3262 .test = alg_test_hash,
3263 .fips_allowed = 1,
3264 .suite = {
3265 .hash = {
3266 .vecs = hmac_sha384_tv_template,
3267 .count = HMAC_SHA384_TEST_VECTORS
3268 }
3269 }
3270 }, {
3271 .alg = "hmac(sha512)",
3272 .test = alg_test_hash,
3273 .fips_allowed = 1,
3274 .suite = {
3275 .hash = {
3276 .vecs = hmac_sha512_tv_template,
3277 .count = HMAC_SHA512_TEST_VECTORS
3278 }
3279 }
3280 }, {
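/*
 * jitterentropy is a non-deterministic noise source, so there are no
 * known-answer vectors to check; only the null test is run.
 */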
3281 .alg = "jitterentropy_rng",
3282 .fips_allowed = 1,
3283 .test = alg_test_null,
3284 }, {
3285 .alg = "lrw(aes)",
3286 .test = alg_test_skcipher,
3287 .suite = {
3288 .cipher = {
3289 .enc = {
3290 .vecs = aes_lrw_enc_tv_template,
3291 .count = AES_LRW_ENC_TEST_VECTORS
3292 },
3293 .dec = {
3294 .vecs = aes_lrw_dec_tv_template,
3295 .count = AES_LRW_DEC_TEST_VECTORS
3296 }
3297 }
3298 }
3299 }, {
3300 .alg = "lrw(camellia)",
3301 .test = alg_test_skcipher,
3302 .suite = {
3303 .cipher = {
3304 .enc = {
3305 .vecs = camellia_lrw_enc_tv_template,
3306 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
3307 },
3308 .dec = {
3309 .vecs = camellia_lrw_dec_tv_template,
3310 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
3311 }
3312 }
3313 }
3314 }, {
3315 .alg = "lrw(cast6)",
3316 .test = alg_test_skcipher,
3317 .suite = {
3318 .cipher = {
3319 .enc = {
3320 .vecs = cast6_lrw_enc_tv_template,
3321 .count = CAST6_LRW_ENC_TEST_VECTORS
3322 },
3323 .dec = {
3324 .vecs = cast6_lrw_dec_tv_template,
3325 .count = CAST6_LRW_DEC_TEST_VECTORS
3326 }
3327 }
3328 }
3329 }, {
3330 .alg = "lrw(serpent)",
3331 .test = alg_test_skcipher,
3332 .suite = {
3333 .cipher = {
3334 .enc = {
3335 .vecs = serpent_lrw_enc_tv_template,
3336 .count = SERPENT_LRW_ENC_TEST_VECTORS
3337 },
3338 .dec = {
3339 .vecs = serpent_lrw_dec_tv_template,
3340 .count = SERPENT_LRW_DEC_TEST_VECTORS
3341 }
3342 }
3343 }
3344 }, {
3345 .alg = "lrw(twofish)",
3346 .test = alg_test_skcipher,
3347 .suite = {
3348 .cipher = {
3349 .enc = {
3350 .vecs = tf_lrw_enc_tv_template,
3351 .count = TF_LRW_ENC_TEST_VECTORS
3352 },
3353 .dec = {
3354 .vecs = tf_lrw_dec_tv_template,
3355 .count = TF_LRW_DEC_TEST_VECTORS
3356 }
3357 }
3358 }
3359 }, {
3360 .alg = "lz4",
3361 .test = alg_test_comp,
3362 .fips_allowed = 1,
3363 .suite = {
3364 .comp = {
3365 .comp = {
3366 .vecs = lz4_comp_tv_template,
3367 .count = LZ4_COMP_TEST_VECTORS
3368 },
3369 .decomp = {
3370 .vecs = lz4_decomp_tv_template,
3371 .count = LZ4_DECOMP_TEST_VECTORS
3372 }
3373 }
3374 }
3375 }, {
3376 .alg = "lz4hc",
3377 .test = alg_test_comp,
3378 .fips_allowed = 1,
3379 .suite = {
3380 .comp = {
3381 .comp = {
3382 .vecs = lz4hc_comp_tv_template,
3383 .count = LZ4HC_COMP_TEST_VECTORS
3384 },
3385 .decomp = {
3386 .vecs = lz4hc_decomp_tv_template,
3387 .count = LZ4HC_DECOMP_TEST_VECTORS
3388 }
3389 }
3390 }
3391 }, {
3392 .alg = "lzo",
3393 .test = alg_test_comp,
3394 .fips_allowed = 1,
3395 .suite = {
3396 .comp = {
3397 .comp = {
3398 .vecs = lzo_comp_tv_template,
3399 .count = LZO_COMP_TEST_VECTORS
3400 },
3401 .decomp = {
3402 .vecs = lzo_decomp_tv_template,
3403 .count = LZO_DECOMP_TEST_VECTORS
3404 }
3405 }
3406 }
3407 }, {
3408 .alg = "md4",
3409 .test = alg_test_hash,
3410 .suite = {
3411 .hash = {
3412 .vecs = md4_tv_template,
3413 .count = MD4_TEST_VECTORS
3414 }
3415 }
3416 }, {
3417 .alg = "md5",
3418 .test = alg_test_hash,
3419 .suite = {
3420 .hash = {
3421 .vecs = md5_tv_template,
3422 .count = MD5_TEST_VECTORS
3423 }
3424 }
3425 }, {
3426 .alg = "michael_mic",
3427 .test = alg_test_hash,
3428 .suite = {
3429 .hash = {
3430 .vecs = michael_mic_tv_template,
3431 .count = MICHAEL_MIC_TEST_VECTORS
3432 }
3433 }
3434 }, {
3435 .alg = "ofb(aes)",
3436 .test = alg_test_skcipher,
3437 .fips_allowed = 1,
3438 .suite = {
3439 .cipher = {
3440 .enc = {
3441 .vecs = aes_ofb_enc_tv_template,
3442 .count = AES_OFB_ENC_TEST_VECTORS
3443 },
3444 .dec = {
3445 .vecs = aes_ofb_dec_tv_template,
3446 .count = AES_OFB_DEC_TEST_VECTORS
3447 }
3448 }
3449 }
3450 }, {
3451 .alg = "pcbc(fcrypt)",
3452 .test = alg_test_skcipher,
3453 .suite = {
3454 .cipher = {
3455 .enc = {
3456 .vecs = fcrypt_pcbc_enc_tv_template,
3457 .count = FCRYPT_ENC_TEST_VECTORS
3458 },
3459 .dec = {
3460 .vecs = fcrypt_pcbc_dec_tv_template,
3461 .count = FCRYPT_DEC_TEST_VECTORS
3462 }
3463 }
3464 }
3465 }, {
3466 .alg = "poly1305",
3467 .test = alg_test_hash,
3468 .suite = {
3469 .hash = {
3470 .vecs = poly1305_tv_template,
3471 .count = POLY1305_TEST_VECTORS
3472 }
3473 }
3474 }, {
3475 .alg = "rfc3686(ctr(aes))",
3476 .test = alg_test_skcipher,
3477 .fips_allowed = 1,
3478 .suite = {
3479 .cipher = {
3480 .enc = {
3481 .vecs = aes_ctr_rfc3686_enc_tv_template,
3482 .count = AES_CTR_3686_ENC_TEST_VECTORS
3483 },
3484 .dec = {
3485 .vecs = aes_ctr_rfc3686_dec_tv_template,
3486 .count = AES_CTR_3686_DEC_TEST_VECTORS
3487 }
3488 }
3489 }
3490 }, {
3491 .alg = "rfc4106(gcm(aes))",
3492 .test = alg_test_aead,
3493 .fips_allowed = 1,
3494 .suite = {
3495 .aead = {
3496 .enc = {
3497 .vecs = aes_gcm_rfc4106_enc_tv_template,
3498 .count = AES_GCM_4106_ENC_TEST_VECTORS
3499 },
3500 .dec = {
3501 .vecs = aes_gcm_rfc4106_dec_tv_template,
3502 .count = AES_GCM_4106_DEC_TEST_VECTORS
3503 }
3504 }
3505 }
3506 }, {
3507 .alg = "rfc4309(ccm(aes))",
3508 .test = alg_test_aead,
3509 .fips_allowed = 1,
3510 .suite = {
3511 .aead = {
3512 .enc = {
3513 .vecs = aes_ccm_rfc4309_enc_tv_template,
3514 .count = AES_CCM_4309_ENC_TEST_VECTORS
3515 },
3516 .dec = {
3517 .vecs = aes_ccm_rfc4309_dec_tv_template,
3518 .count = AES_CCM_4309_DEC_TEST_VECTORS
3519 }
3520 }
3521 }
3522 }, {
3523 .alg = "rfc4543(gcm(aes))",
3524 .test = alg_test_aead,
3525 .suite = {
3526 .aead = {
3527 .enc = {
3528 .vecs = aes_gcm_rfc4543_enc_tv_template,
3529 .count = AES_GCM_4543_ENC_TEST_VECTORS
3530 },
3531 .dec = {
3532 .vecs = aes_gcm_rfc4543_dec_tv_template,
3533 .count = AES_GCM_4543_DEC_TEST_VECTORS
3534 },
3535 }
3536 }
3537 }, {
3538 .alg = "rfc7539(chacha20,poly1305)",
3539 .test = alg_test_aead,
3540 .suite = {
3541 .aead = {
3542 .enc = {
3543 .vecs = rfc7539_enc_tv_template,
3544 .count = RFC7539_ENC_TEST_VECTORS
3545 },
3546 .dec = {
3547 .vecs = rfc7539_dec_tv_template,
3548 .count = RFC7539_DEC_TEST_VECTORS
3549 },
3550 }
3551 }
3552 }, {
3553 .alg = "rfc7539esp(chacha20,poly1305)",
3554 .test = alg_test_aead,
3555 .suite = {
3556 .aead = {
3557 .enc = {
3558 .vecs = rfc7539esp_enc_tv_template,
3559 .count = RFC7539ESP_ENC_TEST_VECTORS
3560 },
3561 .dec = {
3562 .vecs = rfc7539esp_dec_tv_template,
3563 .count = RFC7539ESP_DEC_TEST_VECTORS
3564 },
3565 }
3566 }
3567 }, {
3568 .alg = "rmd128",
3569 .test = alg_test_hash,
3570 .suite = {
3571 .hash = {
3572 .vecs = rmd128_tv_template,
3573 .count = RMD128_TEST_VECTORS
3574 }
3575 }
3576 }, {
3577 .alg = "rmd160",
3578 .test = alg_test_hash,
3579 .suite = {
3580 .hash = {
3581 .vecs = rmd160_tv_template,
3582 .count = RMD160_TEST_VECTORS
3583 }
3584 }
3585 }, {
3586 .alg = "rmd256",
3587 .test = alg_test_hash,
3588 .suite = {
3589 .hash = {
3590 .vecs = rmd256_tv_template,
3591 .count = RMD256_TEST_VECTORS
3592 }
3593 }
3594 }, {
3595 .alg = "rmd320",
3596 .test = alg_test_hash,
3597 .suite = {
3598 .hash = {
3599 .vecs = rmd320_tv_template,
3600 .count = RMD320_TEST_VECTORS
3601 }
3602 }
3603 }, {
3604 .alg = "rsa",
3605 .test = alg_test_akcipher,
3606 .fips_allowed = 1,
3607 .suite = {
3608 .akcipher = {
3609 .vecs = rsa_tv_template,
3610 .count = RSA_TEST_VECTORS
3611 }
3612 }
3613 }, {
3614 .alg = "salsa20",
3615 .test = alg_test_skcipher,
3616 .suite = {
3617 .cipher = {
3618 .enc = {
3619 .vecs = salsa20_stream_enc_tv_template,
3620 .count = SALSA20_STREAM_ENC_TEST_VECTORS
3621 }
3622 }
3623 }
3624 }, {
3625 .alg = "sha1",
3626 .test = alg_test_hash,
3627 .fips_allowed = 1,
3628 .suite = {
3629 .hash = {
3630 .vecs = sha1_tv_template,
3631 .count = SHA1_TEST_VECTORS
3632 }
3633 }
3634 }, {
3635 .alg = "sha224",
3636 .test = alg_test_hash,
3637 .fips_allowed = 1,
3638 .suite = {
3639 .hash = {
3640 .vecs = sha224_tv_template,
3641 .count = SHA224_TEST_VECTORS
3642 }
3643 }
3644 }, {
3645 .alg = "sha256",
3646 .test = alg_test_hash,
3647 .fips_allowed = 1,
3648 .suite = {
3649 .hash = {
3650 .vecs = sha256_tv_template,
3651 .count = SHA256_TEST_VECTORS
3652 }
3653 }
3654 }, {
3655 .alg = "sha384",
3656 .test = alg_test_hash,
3657 .fips_allowed = 1,
3658 .suite = {
3659 .hash = {
3660 .vecs = sha384_tv_template,
3661 .count = SHA384_TEST_VECTORS
3662 }
3663 }
3664 }, {
3665 .alg = "sha512",
3666 .test = alg_test_hash,
3667 .fips_allowed = 1,
3668 .suite = {
3669 .hash = {
3670 .vecs = sha512_tv_template,
3671 .count = SHA512_TEST_VECTORS
3672 }
3673 }
3674 }, {
3675 .alg = "tgr128",
3676 .test = alg_test_hash,
3677 .suite = {
3678 .hash = {
3679 .vecs = tgr128_tv_template,
3680 .count = TGR128_TEST_VECTORS
3681 }
3682 }
3683 }, {
3684 .alg = "tgr160",
3685 .test = alg_test_hash,
3686 .suite = {
3687 .hash = {
3688 .vecs = tgr160_tv_template,
3689 .count = TGR160_TEST_VECTORS
3690 }
3691 }
3692 }, {
3693 .alg = "tgr192",
3694 .test = alg_test_hash,
3695 .suite = {
3696 .hash = {
3697 .vecs = tgr192_tv_template,
3698 .count = TGR192_TEST_VECTORS
3699 }
3700 }
3701 }, {
3702 .alg = "vmac(aes)",
3703 .test = alg_test_hash,
3704 .suite = {
3705 .hash = {
3706 .vecs = aes_vmac128_tv_template,
3707 .count = VMAC_AES_TEST_VECTORS
3708 }
3709 }
3710 }, {
3711 .alg = "wp256",
3712 .test = alg_test_hash,
3713 .suite = {
3714 .hash = {
3715 .vecs = wp256_tv_template,
3716 .count = WP256_TEST_VECTORS
3717 }
3718 }
3719 }, {
3720 .alg = "wp384",
3721 .test = alg_test_hash,
3722 .suite = {
3723 .hash = {
3724 .vecs = wp384_tv_template,
3725 .count = WP384_TEST_VECTORS
3726 }
3727 }
3728 }, {
3729 .alg = "wp512",
3730 .test = alg_test_hash,
3731 .suite = {
3732 .hash = {
3733 .vecs = wp512_tv_template,
3734 .count = WP512_TEST_VECTORS
3735 }
3736 }
3737 }, {
3738 .alg = "xcbc(aes)",
3739 .test = alg_test_hash,
3740 .suite = {
3741 .hash = {
3742 .vecs = aes_xcbc128_tv_template,
3743 .count = XCBC_AES_TEST_VECTORS
3744 }
3745 }
3746 }, {
3747 .alg = "xts(aes)",
3748 .test = alg_test_skcipher,
3749 .fips_allowed = 1,
3750 .suite = {
3751 .cipher = {
3752 .enc = {
3753 .vecs = aes_xts_enc_tv_template,
3754 .count = AES_XTS_ENC_TEST_VECTORS
3755 },
3756 .dec = {
3757 .vecs = aes_xts_dec_tv_template,
3758 .count = AES_XTS_DEC_TEST_VECTORS
3759 }
3760 }
3761 }
3762 }, {
3763 .alg = "xts(camellia)",
3764 .test = alg_test_skcipher,
3765 .suite = {
3766 .cipher = {
3767 .enc = {
3768 .vecs = camellia_xts_enc_tv_template,
3769 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
3770 },
3771 .dec = {
3772 .vecs = camellia_xts_dec_tv_template,
3773 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
3774 }
3775 }
3776 }
3777 }, {
3778 .alg = "xts(cast6)",
3779 .test = alg_test_skcipher,
3780 .suite = {
3781 .cipher = {
3782 .enc = {
3783 .vecs = cast6_xts_enc_tv_template,
3784 .count = CAST6_XTS_ENC_TEST_VECTORS
3785 },
3786 .dec = {
3787 .vecs = cast6_xts_dec_tv_template,
3788 .count = CAST6_XTS_DEC_TEST_VECTORS
3789 }
3790 }
3791 }
3792 }, {
3793 .alg = "xts(serpent)",
3794 .test = alg_test_skcipher,
3795 .suite = {
3796 .cipher = {
3797 .enc = {
3798 .vecs = serpent_xts_enc_tv_template,
3799 .count = SERPENT_XTS_ENC_TEST_VECTORS
3800 },
3801 .dec = {
3802 .vecs = serpent_xts_dec_tv_template,
3803 .count = SERPENT_XTS_DEC_TEST_VECTORS
3804 }
3805 }
3806 }
3807 }, {
3808 .alg = "xts(twofish)",
3809 .test = alg_test_skcipher,
3810 .suite = {
3811 .cipher = {
3812 .enc = {
3813 .vecs = tf_xts_enc_tv_template,
3814 .count = TF_XTS_ENC_TEST_VECTORS
3815 },
3816 .dec = {
3817 .vecs = tf_xts_dec_tv_template,
3818 .count = TF_XTS_DEC_TEST_VECTORS
3819 }
3820 }
3821 }
3822 }, {
3823 .alg = "zlib",
3824 .test = alg_test_pcomp,
3825 .fips_allowed = 1,
3826 .suite = {
3827 .pcomp = {
3828 .comp = {
3829 .vecs = zlib_comp_tv_template,
3830 .count = ZLIB_COMP_TEST_VECTORS
3831 },
3832 .decomp = {
3833 .vecs = zlib_decomp_tv_template,
3834 .count = ZLIB_DECOMP_TEST_VECTORS
3835 }
3836 }
3837 }
3838 }
3839 };
3840
3841 static bool alg_test_descs_checked;
3842
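/*
 * alg_test_descs[] above must stay sorted in ascending strcmp() order
 * of .alg, since alg_find_test() below relies on a binary search.  The
 * check below runs once, on the first call to alg_test(), and warns
 * about any out-of-order or duplicate entries.
 */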
3843 static void alg_test_descs_check_order(void)
3844 {
3845 int i;
3846
3847 /* only check once */
3848 if (alg_test_descs_checked)
3849 return;
3850
3851 alg_test_descs_checked = true;
3852
3853 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3854 int diff = strcmp(alg_test_descs[i - 1].alg,
3855 alg_test_descs[i].alg);
3856
3857 if (WARN_ON(diff > 0)) {
3858 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3859 alg_test_descs[i - 1].alg,
3860 alg_test_descs[i].alg);
3861 }
3862
3863 if (WARN_ON(diff == 0)) {
3864 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3865 alg_test_descs[i].alg);
3866 }
3867 }
3868 }
3869
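/*
 * Binary search over the sorted alg_test_descs[] table.  Returns the
 * index of the matching entry, or -1 if no self-test is known for @alg.
 */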
3870 static int alg_find_test(const char *alg)
3871 {
3872 int start = 0;
3873 int end = ARRAY_SIZE(alg_test_descs);
3874
3875 while (start < end) {
3876 int i = (start + end) / 2;
3877 int diff = strcmp(alg_test_descs[i].alg, alg);
3878
3879 if (diff > 0) {
3880 end = i;
3881 continue;
3882 }
3883
3884 if (diff < 0) {
3885 start = i + 1;
3886 continue;
3887 }
3888
3889 return i;
3890 }
3891
3892 return -1;
3893 }
3894
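/*
 * Main self-test entry point.  Bare block ciphers are rewritten to
 * "ecb(<alg>)" before lookup; otherwise both the algorithm name and the
 * driver name are looked up so that generic and driver-specific
 * descriptors each get a chance to run.  In FIPS mode, algorithms not
 * marked fips_allowed are rejected and any test failure panics.
 */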
3895 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3896 {
3897 int i;
3898 int j;
3899 int rc;
3900
3901 alg_test_descs_check_order();
3902
3903 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3904 char nalg[CRYPTO_MAX_ALG_NAME];
3905
3906 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3907 sizeof(nalg))
3908 return -ENAMETOOLONG;
3909
3910 i = alg_find_test(nalg);
3911 if (i < 0)
3912 goto notest;
3913
3914 if (fips_enabled && !alg_test_descs[i].fips_allowed)
3915 goto non_fips_alg;
3916
3917 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3918 goto test_done;
3919 }
3920
3921 i = alg_find_test(alg);
3922 j = alg_find_test(driver);
3923 if (i < 0 && j < 0)
3924 goto notest;
3925
3926 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3927 (j >= 0 && !alg_test_descs[j].fips_allowed)))
3928 goto non_fips_alg;
3929
3930 rc = 0;
3931 if (i >= 0)
3932 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3933 type, mask);
3934 if (j >= 0 && j != i)
3935 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3936 type, mask);
3937
3938 test_done:
3939 if (fips_enabled && rc)
3940 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3941
3942 if (fips_enabled && !rc)
3943 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
3944
3945 return rc;
3946
3947 notest:
3948 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3949 return 0;
3950 non_fips_alg:
3951 return -EINVAL;
3952 }
3953
3954 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3955
3956 EXPORT_SYMBOL_GPL(alg_test);
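/*
 * Usage sketch (illustrative only, not part of this file): the crypto
 * manager normally invokes alg_test() when an algorithm instance is
 * registered, but a hypothetical caller could run the self-tests for a
 * specific driver directly, e.g.:
 *
 *	if (alg_test("aes-generic", "aes", CRYPTO_ALG_TYPE_CIPHER, 0))
 *		pr_err("alg: aes self-tests failed\n");
 *
 * For a bare cipher like this, alg_test() looks up the "ecb(aes)" entry
 * in alg_test_descs[] and runs its encryption/decryption vectors.
 */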