1 /*
2 * Quick & dirty crypto testing module.
3 *
4 * This will only exist until we have a better testing mechanism
5 * (e.g. a char device).
6 *
7 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
8 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
9 * Copyright (c) 2007 Nokia Siemens Networks
10 *
11 * Updated RFC4106 AES-GCM testing.
12 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
13 * Adrian Hoban <adrian.hoban@intel.com>
14 * Gabriele Paoloni <gabriele.paoloni@intel.com>
15 * Tadeusz Struk (tadeusz.struk@intel.com)
16 * Copyright (c) 2010, Intel Corporation.
17 *
18 * This program is free software; you can redistribute it and/or modify it
19 * under the terms of the GNU General Public License as published by the Free
20 * Software Foundation; either version 2 of the License, or (at your option)
21 * any later version.
22 *
23 */
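/*
 * Typical usage (a sketch; the exact invocation depends on how the module
 * is built and installed):
 *
 *   modprobe tcrypt mode=0          # run all numbered correctness tests
 *   modprobe tcrypt mode=200 sec=1  # AES cipher speed tests, 1 second each
 *   modprobe tcrypt alg="cbc(aes)"  # only check that "cbc(aes)" is available
 *
 * Results are written to the kernel log.  Outside fips mode the init
 * function intentionally returns -EAGAIN, so the module never stays loaded.
 */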
24
25 #include <crypto/hash.h>
26 #include <linux/err.h>
27 #include <linux/init.h>
28 #include <linux/gfp.h>
29 #include <linux/module.h>
30 #include <linux/scatterlist.h>
31 #include <linux/string.h>
32 #include <linux/moduleparam.h>
33 #include <linux/jiffies.h>
34 #include <linux/timex.h>
35 #include <linux/interrupt.h>
36 #include "tcrypt.h"
37 #include "internal.h"
38
39 /*
40 * Need slab memory for testing (size in number of pages).
41 */
42 #define TVMEMSIZE 4
43
44 /*
45 * Used by test_cipher_speed()
46 */
47 #define ENCRYPT 1
48 #define DECRYPT 0
49
50 /*
51 * Used by test_cipher_speed()
52 */
53 static unsigned int sec;
54
55 static char *alg = NULL;
56 static u32 type;
57 static u32 mask;
58 static int mode;
59 static char *tvmem[TVMEMSIZE];
60
61 static char *check[] = {
62 "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
63 "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
64 "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
65 "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
66 "camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
67 "lzo", "cts", "zlib", NULL
68 };
69
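/*
 * Time-based cipher benchmark: run encrypt/decrypt in place on the same
 * scatterlist for roughly 'sec' seconds and report the number of
 * operations and bytes processed.
 */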
70 static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
71 struct scatterlist *sg, int blen, int sec)
72 {
73 unsigned long start, end;
74 int bcount;
75 int ret;
76
77 for (start = jiffies, end = start + sec * HZ, bcount = 0;
78 time_before(jiffies, end); bcount++) {
79 if (enc)
80 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
81 else
82 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
83
84 if (ret)
85 return ret;
86 }
87
88 printk("%d operations in %d seconds (%ld bytes)\n",
89 bcount, sec, (long)bcount * blen);
90 return 0;
91 }
92
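/*
 * Cycle-based cipher benchmark: with interrupts disabled, do four warm-up
 * operations, then time eight more with get_cycles() and report the
 * rounded average cost of a single operation.
 */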
93 static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
94 struct scatterlist *sg, int blen)
95 {
96 unsigned long cycles = 0;
97 int ret = 0;
98 int i;
99
100 local_irq_disable();
101
102 /* Warm-up run. */
103 for (i = 0; i < 4; i++) {
104 if (enc)
105 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
106 else
107 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
108
109 if (ret)
110 goto out;
111 }
112
113 /* The real thing. */
114 for (i = 0; i < 8; i++) {
115 cycles_t start, end;
116
117 start = get_cycles();
118 if (enc)
119 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
120 else
121 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
122 end = get_cycles();
123
124 if (ret)
125 goto out;
126
127 cycles += end - start;
128 }
129
130 out:
131 local_irq_enable();
132
133 if (ret == 0)
134 printk("1 operation in %lu cycles (%d bytes)\n",
135 (cycles + 4) / 8, blen);
136
137 return ret;
138 }
139
140 static int test_aead_jiffies(struct aead_request *req, int enc,
141 int blen, int sec)
142 {
143 unsigned long start, end;
144 int bcount;
145 int ret;
146
147 for (start = jiffies, end = start + sec * HZ, bcount = 0;
148 time_before(jiffies, end); bcount++) {
149 if (enc)
150 ret = crypto_aead_encrypt(req);
151 else
152 ret = crypto_aead_decrypt(req);
153
154 if (ret)
155 return ret;
156 }
157
158 printk("%d operations in %d seconds (%ld bytes)\n",
159 bcount, sec, (long)bcount * blen);
160 return 0;
161 }
162
163 static int test_aead_cycles(struct aead_request *req, int enc, int blen)
164 {
165 unsigned long cycles = 0;
166 int ret = 0;
167 int i;
168
169 local_irq_disable();
170
171 /* Warm-up run. */
172 for (i = 0; i < 4; i++) {
173 if (enc)
174 ret = crypto_aead_encrypt(req);
175 else
176 ret = crypto_aead_decrypt(req);
177
178 if (ret)
179 goto out;
180 }
181
182 /* The real thing. */
183 for (i = 0; i < 8; i++) {
184 cycles_t start, end;
185
186 start = get_cycles();
187 if (enc)
188 ret = crypto_aead_encrypt(req);
189 else
190 ret = crypto_aead_decrypt(req);
191 end = get_cycles();
192
193 if (ret)
194 goto out;
195
196 cycles += end - start;
197 }
198
199 out:
200 local_irq_enable();
201
202 if (ret == 0)
203 printk("1 operation in %lu cycles (%d bytes)\n",
204 (cycles + 4) / 8, blen);
205
206 return ret;
207 }
208
209 static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
210 static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };
211
212 #define XBUFSIZE 8
213 #define MAX_IVLEN 32
214
215 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
216 {
217 int i;
218
219 for (i = 0; i < XBUFSIZE; i++) {
220 buf[i] = (void *)__get_free_page(GFP_KERNEL);
221 if (!buf[i])
222 goto err_free_buf;
223 }
224
225 return 0;
226
227 err_free_buf:
228 while (i-- > 0)
229 free_page((unsigned long)buf[i]);
230
231 return -ENOMEM;
232 }
233
234 static void testmgr_free_buf(char *buf[XBUFSIZE])
235 {
236 int i;
237
238 for (i = 0; i < XBUFSIZE; i++)
239 free_page((unsigned long)buf[i]);
240 }
241
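/*
 * Map 'buflen' bytes onto a scatterlist built from the single-page
 * buffers in xbuf[], using full pages for all but the last entry.
 * At most XBUFSIZE pages are used.
 */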
242 static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
243 unsigned int buflen)
244 {
245 int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
246 int k, rem;
247
248 if (np > XBUFSIZE) {
249 rem = PAGE_SIZE;
250 np = XBUFSIZE;
251 } else {
252 rem = buflen % PAGE_SIZE;
253 }
254 sg_init_table(sg, np);
255 for (k = 0; k < np; ++k) {
256 if (k == (np-1))
257 sg_set_buf(&sg[k], xbuf[k], rem);
258 else
259 sg_set_buf(&sg[k], xbuf[k], PAGE_SIZE);
260 }
261 }
262
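/*
 * AEAD speed test: for every key size in the zero-terminated 'keysize'
 * list and every length in aead_sizes, set up the key, IV and associated
 * data, then measure either seconds-based throughput (sec != 0) or
 * per-operation cycles.  'template'/'tcount' optionally supply fixed keys
 * for matching key lengths; otherwise the 0xff-filled tvmem page is used.
 */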
263 static void test_aead_speed(const char *algo, int enc, unsigned int sec,
264 struct aead_speed_template *template,
265 unsigned int tcount, u8 authsize,
266 unsigned int aad_size, u8 *keysize)
267 {
268 unsigned int i, j;
269 struct crypto_aead *tfm;
270 int ret = -ENOMEM;
271 const char *key;
272 struct aead_request *req;
273 struct scatterlist *sg;
274 struct scatterlist *asg;
275 struct scatterlist *sgout;
276 const char *e;
277 void *assoc;
278 char iv[MAX_IVLEN];
279 char *xbuf[XBUFSIZE];
280 char *xoutbuf[XBUFSIZE];
281 char *axbuf[XBUFSIZE];
282 unsigned int *b_size;
283 unsigned int iv_len;
284
285 if (enc == ENCRYPT)
286 e = "encryption";
287 else
288 e = "decryption";
289
290 if (testmgr_alloc_buf(xbuf))
291 goto out_noxbuf;
292 if (testmgr_alloc_buf(axbuf))
293 goto out_noaxbuf;
294 if (testmgr_alloc_buf(xoutbuf))
295 goto out_nooutbuf;
296
297 sg = kmalloc(sizeof(*sg) * 8 * 3, GFP_KERNEL);
298 if (!sg)
299 goto out_nosg;
300 asg = &sg[8];
301 sgout = &asg[8];
302
303
304 printk(KERN_INFO "\ntesting speed of %s %s\n", algo, e);
305
306 tfm = crypto_alloc_aead(algo, 0, 0);
307
308 if (IS_ERR(tfm)) {
309 pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
310 PTR_ERR(tfm));
311 return;
312 }
313
314 req = aead_request_alloc(tfm, GFP_KERNEL);
315 if (!req) {
316 pr_err("alg: aead: Failed to allocate request for %s\n",
317 algo);
318 goto out;
319 }
320
321 i = 0;
322 do {
323 b_size = aead_sizes;
324 do {
325 assoc = axbuf[0];
326
327 if (aad_size < PAGE_SIZE)
328 memset(assoc, 0xff, aad_size);
329 else {
330 pr_err("associate data length (%u) too big\n",
331 aad_size);
332 goto out_nosg;
333 }
334 sg_init_one(&asg[0], assoc, aad_size);
335
336 if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
337 pr_err("template (%u) too big for tvmem (%lu)\n",
338 *keysize + *b_size,
339 TVMEMSIZE * PAGE_SIZE);
340 goto out;
341 }
342
343 key = tvmem[0];
344 for (j = 0; j < tcount; j++) {
345 if (template[j].klen == *keysize) {
346 key = template[j].key;
347 break;
348 }
349 }
350 ret = crypto_aead_setkey(tfm, key, *keysize) ?:
351 crypto_aead_setauthsize(tfm, authsize);
352
353 iv_len = crypto_aead_ivsize(tfm);
354 if (iv_len)
355 memset(&iv, 0xff, iv_len);
356
357 crypto_aead_clear_flags(tfm, ~0);
358 printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
359 i, *keysize * 8, *b_size);
360
361
362 memset(tvmem[0], 0xff, PAGE_SIZE);
363
364 if (ret) {
365 pr_err("setkey() failed flags=%x\n",
366 crypto_aead_get_flags(tfm));
367 goto out;
368 }
369
370 sg_init_aead(&sg[0], xbuf,
371 *b_size + (enc ? authsize : 0));
372
373 sg_init_aead(&sgout[0], xoutbuf,
374 *b_size + (enc ? authsize : 0));
375
376 aead_request_set_crypt(req, sg, sgout, *b_size, iv);
377 aead_request_set_assoc(req, asg, aad_size);
378
379 if (sec)
380 ret = test_aead_jiffies(req, enc, *b_size, sec);
381 else
382 ret = test_aead_cycles(req, enc, *b_size);
383
384 if (ret) {
385 pr_err("%s() failed return code=%d\n", e, ret);
386 break;
387 }
388 b_size++;
389 i++;
390 } while (*b_size);
391 keysize++;
392 } while (*keysize);
393
394 out:
395 crypto_free_aead(tfm);
396 kfree(sg);
397 out_nosg:
398 testmgr_free_buf(xoutbuf);
399 out_nooutbuf:
400 testmgr_free_buf(axbuf);
401 out_noaxbuf:
402 testmgr_free_buf(xbuf);
403 out_noxbuf:
404 return;
405 }
406
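/*
 * Synchronous block cipher speed test over the zero-terminated 'keysize'
 * and block_sizes lists.  The key is taken from 'template' when an entry
 * with a matching key length exists, otherwise from the 0xff-filled
 * tvmem pages.
 */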
407 static void test_cipher_speed(const char *algo, int enc, unsigned int sec,
408 struct cipher_speed_template *template,
409 unsigned int tcount, u8 *keysize)
410 {
411 unsigned int ret, i, j, iv_len;
412 const char *key;
413 char iv[128];
414 struct crypto_blkcipher *tfm;
415 struct blkcipher_desc desc;
416 const char *e;
417 u32 *b_size;
418
419 if (enc == ENCRYPT)
420 e = "encryption";
421 else
422 e = "decryption";
423
424 printk("\ntesting speed of %s %s\n", algo, e);
425
426 tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);
427
428 if (IS_ERR(tfm)) {
429 printk("failed to load transform for %s: %ld\n", algo,
430 PTR_ERR(tfm));
431 return;
432 }
433 desc.tfm = tfm;
434 desc.flags = 0;
435
436 i = 0;
437 do {
438
439 b_size = block_sizes;
440 do {
441 struct scatterlist sg[TVMEMSIZE];
442
443 if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
444 printk("template (%u) too big for "
445 "tvmem (%lu)\n", *keysize + *b_size,
446 TVMEMSIZE * PAGE_SIZE);
447 goto out;
448 }
449
450 printk("test %u (%d bit key, %d byte blocks): ", i,
451 *keysize * 8, *b_size);
452
453 memset(tvmem[0], 0xff, PAGE_SIZE);
454
455 /* set key, plain text and IV */
456 key = tvmem[0];
457 for (j = 0; j < tcount; j++) {
458 if (template[j].klen == *keysize) {
459 key = template[j].key;
460 break;
461 }
462 }
463
464 ret = crypto_blkcipher_setkey(tfm, key, *keysize);
465 if (ret) {
466 printk("setkey() failed flags=%x\n",
467 crypto_blkcipher_get_flags(tfm));
468 goto out;
469 }
470
471 sg_init_table(sg, TVMEMSIZE);
472 sg_set_buf(sg, tvmem[0] + *keysize,
473 PAGE_SIZE - *keysize);
474 for (j = 1; j < TVMEMSIZE; j++) {
475 sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
476 memset(tvmem[j], 0xff, PAGE_SIZE);
477 }
478
479 iv_len = crypto_blkcipher_ivsize(tfm);
480 if (iv_len) {
481 memset(&iv, 0xff, iv_len);
482 crypto_blkcipher_set_iv(tfm, iv, iv_len);
483 }
484
485 if (sec)
486 ret = test_cipher_jiffies(&desc, enc, sg,
487 *b_size, sec);
488 else
489 ret = test_cipher_cycles(&desc, enc, sg,
490 *b_size);
491
492 if (ret) {
493 printk("%s() failed flags=%x\n", e, desc.flags);
494 break;
495 }
496 b_size++;
497 i++;
498 } while (*b_size);
499 keysize++;
500 } while (*keysize);
501
502 out:
503 crypto_free_blkcipher(tfm);
504 }
505
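/*
 * Hash benchmarks come in two flavours: a one-shot digest when the update
 * size equals the block size (plen == blen), and an init/update/final
 * sequence otherwise.  The jiffies-based variants report operations and
 * bytes per second; the *_cycles variants below report cycles instead.
 */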
506 static int test_hash_jiffies_digest(struct hash_desc *desc,
507 struct scatterlist *sg, int blen,
508 char *out, int sec)
509 {
510 unsigned long start, end;
511 int bcount;
512 int ret;
513
514 for (start = jiffies, end = start + sec * HZ, bcount = 0;
515 time_before(jiffies, end); bcount++) {
516 ret = crypto_hash_digest(desc, sg, blen, out);
517 if (ret)
518 return ret;
519 }
520
521 printk("%6u opers/sec, %9lu bytes/sec\n",
522 bcount / sec, ((long)bcount * blen) / sec);
523
524 return 0;
525 }
526
527 static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
528 int blen, int plen, char *out, int sec)
529 {
530 unsigned long start, end;
531 int bcount, pcount;
532 int ret;
533
534 if (plen == blen)
535 return test_hash_jiffies_digest(desc, sg, blen, out, sec);
536
537 for (start = jiffies, end = start + sec * HZ, bcount = 0;
538 time_before(jiffies, end); bcount++) {
539 ret = crypto_hash_init(desc);
540 if (ret)
541 return ret;
542 for (pcount = 0; pcount < blen; pcount += plen) {
543 ret = crypto_hash_update(desc, sg, plen);
544 if (ret)
545 return ret;
546 }
547 /* we assume there is enough space in 'out' for the result */
548 ret = crypto_hash_final(desc, out);
549 if (ret)
550 return ret;
551 }
552
553 printk("%6u opers/sec, %9lu bytes/sec\n",
554 bcount / sec, ((long)bcount * blen) / sec);
555
556 return 0;
557 }
558
559 static int test_hash_cycles_digest(struct hash_desc *desc,
560 struct scatterlist *sg, int blen, char *out)
561 {
562 unsigned long cycles = 0;
563 int i;
564 int ret;
565
566 local_irq_disable();
567
568 /* Warm-up run. */
569 for (i = 0; i < 4; i++) {
570 ret = crypto_hash_digest(desc, sg, blen, out);
571 if (ret)
572 goto out;
573 }
574
575 /* The real thing. */
576 for (i = 0; i < 8; i++) {
577 cycles_t start, end;
578
579 start = get_cycles();
580
581 ret = crypto_hash_digest(desc, sg, blen, out);
582 if (ret)
583 goto out;
584
585 end = get_cycles();
586
587 cycles += end - start;
588 }
589
590 out:
591 local_irq_enable();
592
593 if (ret)
594 return ret;
595
596 printk("%6lu cycles/operation, %4lu cycles/byte\n",
597 cycles / 8, cycles / (8 * blen));
598
599 return 0;
600 }
601
602 static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
603 int blen, int plen, char *out)
604 {
605 unsigned long cycles = 0;
606 int i, pcount;
607 int ret;
608
609 if (plen == blen)
610 return test_hash_cycles_digest(desc, sg, blen, out);
611
612 local_irq_disable();
613
614 /* Warm-up run. */
615 for (i = 0; i < 4; i++) {
616 ret = crypto_hash_init(desc);
617 if (ret)
618 goto out;
619 for (pcount = 0; pcount < blen; pcount += plen) {
620 ret = crypto_hash_update(desc, sg, plen);
621 if (ret)
622 goto out;
623 }
624 ret = crypto_hash_final(desc, out);
625 if (ret)
626 goto out;
627 }
628
629 /* The real thing. */
630 for (i = 0; i < 8; i++) {
631 cycles_t start, end;
632
633 start = get_cycles();
634
635 ret = crypto_hash_init(desc);
636 if (ret)
637 goto out;
638 for (pcount = 0; pcount < blen; pcount += plen) {
639 ret = crypto_hash_update(desc, sg, plen);
640 if (ret)
641 goto out;
642 }
643 ret = crypto_hash_final(desc, out);
644 if (ret)
645 goto out;
646
647 end = get_cycles();
648
649 cycles += end - start;
650 }
651
652 out:
653 local_irq_enable();
654
655 if (ret)
656 return ret;
657
658 printk("%6lu cycles/operation, %4lu cycles/byte\n",
659 cycles / 8, cycles / (8 * blen));
660
661 return 0;
662 }
663
664 static void test_hash_sg_init(struct scatterlist *sg)
665 {
666 int i;
667
668 sg_init_table(sg, TVMEMSIZE);
669 for (i = 0; i < TVMEMSIZE; i++) {
670 sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
671 memset(tvmem[i], 0xff, PAGE_SIZE);
672 }
673 }
674
675 static void test_hash_speed(const char *algo, unsigned int sec,
676 struct hash_speed *speed)
677 {
678 struct scatterlist sg[TVMEMSIZE];
679 struct crypto_hash *tfm;
680 struct hash_desc desc;
681 static char output[1024];
682 int i;
683 int ret;
684
685 printk(KERN_INFO "\ntesting speed of %s\n", algo);
686
687 tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);
688
689 if (IS_ERR(tfm)) {
690 printk(KERN_ERR "failed to load transform for %s: %ld\n", algo,
691 PTR_ERR(tfm));
692 return;
693 }
694
695 desc.tfm = tfm;
696 desc.flags = 0;
697
698 if (crypto_hash_digestsize(tfm) > sizeof(output)) {
699 printk(KERN_ERR "digestsize(%u) > outputbuffer(%zu)\n",
700 crypto_hash_digestsize(tfm), sizeof(output));
701 goto out;
702 }
703
704 test_hash_sg_init(sg);
705 for (i = 0; speed[i].blen != 0; i++) {
706 if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
707 printk(KERN_ERR
708 "template (%u) too big for tvmem (%lu)\n",
709 speed[i].blen, TVMEMSIZE * PAGE_SIZE);
710 goto out;
711 }
712
713 if (speed[i].klen)
714 crypto_hash_setkey(tfm, tvmem[0], speed[i].klen);
715
716 printk(KERN_INFO "test%3u "
717 "(%5u byte blocks,%5u bytes per update,%4u updates): ",
718 i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
719
720 if (sec)
721 ret = test_hash_jiffies(&desc, sg, speed[i].blen,
722 speed[i].plen, output, sec);
723 else
724 ret = test_hash_cycles(&desc, sg, speed[i].blen,
725 speed[i].plen, output);
726
727 if (ret) {
728 printk(KERN_ERR "hashing failed ret=%d\n", ret);
729 break;
730 }
731 }
732
733 out:
734 crypto_free_hash(tfm);
735 }
736
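/*
 * Completion plumbing for the async (ahash/ablkcipher) tests: the request
 * callback records the final status and completes, and do_one_*_op()
 * waits for that completion whenever an operation returns -EINPROGRESS
 * or -EBUSY.
 */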
737 struct tcrypt_result {
738 struct completion completion;
739 int err;
740 };
741
742 static void tcrypt_complete(struct crypto_async_request *req, int err)
743 {
744 struct tcrypt_result *res = req->data;
745
746 if (err == -EINPROGRESS)
747 return;
748
749 res->err = err;
750 complete(&res->completion);
751 }
752
753 static inline int do_one_ahash_op(struct ahash_request *req, int ret)
754 {
755 if (ret == -EINPROGRESS || ret == -EBUSY) {
756 struct tcrypt_result *tr = req->base.data;
757
758 ret = wait_for_completion_interruptible(&tr->completion);
759 if (!ret)
760 ret = tr->err;
761 reinit_completion(&tr->completion);
762 }
763 return ret;
764 }
765
766 static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
767 char *out, int sec)
768 {
769 unsigned long start, end;
770 int bcount;
771 int ret;
772
773 for (start = jiffies, end = start + sec * HZ, bcount = 0;
774 time_before(jiffies, end); bcount++) {
775 ret = do_one_ahash_op(req, crypto_ahash_digest(req));
776 if (ret)
777 return ret;
778 }
779
780 printk("%6u opers/sec, %9lu bytes/sec\n",
781 bcount / sec, ((long)bcount * blen) / sec);
782
783 return 0;
784 }
785
786 static int test_ahash_jiffies(struct ahash_request *req, int blen,
787 int plen, char *out, int sec)
788 {
789 unsigned long start, end;
790 int bcount, pcount;
791 int ret;
792
793 if (plen == blen)
794 return test_ahash_jiffies_digest(req, blen, out, sec);
795
796 for (start = jiffies, end = start + sec * HZ, bcount = 0;
797 time_before(jiffies, end); bcount++) {
798 ret = crypto_ahash_init(req);
799 if (ret)
800 return ret;
801 for (pcount = 0; pcount < blen; pcount += plen) {
802 ret = do_one_ahash_op(req, crypto_ahash_update(req));
803 if (ret)
804 return ret;
805 }
806 /* we assume there is enough space in 'out' for the result */
807 ret = do_one_ahash_op(req, crypto_ahash_final(req));
808 if (ret)
809 return ret;
810 }
811
812 pr_cont("%6u opers/sec, %9lu bytes/sec\n",
813 bcount / sec, ((long)bcount * blen) / sec);
814
815 return 0;
816 }
817
818 static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
819 char *out)
820 {
821 unsigned long cycles = 0;
822 int ret, i;
823
824 /* Warm-up run. */
825 for (i = 0; i < 4; i++) {
826 ret = do_one_ahash_op(req, crypto_ahash_digest(req));
827 if (ret)
828 goto out;
829 }
830
831 /* The real thing. */
832 for (i = 0; i < 8; i++) {
833 cycles_t start, end;
834
835 start = get_cycles();
836
837 ret = do_one_ahash_op(req, crypto_ahash_digest(req));
838 if (ret)
839 goto out;
840
841 end = get_cycles();
842
843 cycles += end - start;
844 }
845
846 out:
847 if (ret)
848 return ret;
849
850 pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
851 cycles / 8, cycles / (8 * blen));
852
853 return 0;
854 }
855
856 static int test_ahash_cycles(struct ahash_request *req, int blen,
857 int plen, char *out)
858 {
859 unsigned long cycles = 0;
860 int i, pcount, ret;
861
862 if (plen == blen)
863 return test_ahash_cycles_digest(req, blen, out);
864
865 /* Warm-up run. */
866 for (i = 0; i < 4; i++) {
867 ret = crypto_ahash_init(req);
868 if (ret)
869 goto out;
870 for (pcount = 0; pcount < blen; pcount += plen) {
871 ret = do_one_ahash_op(req, crypto_ahash_update(req));
872 if (ret)
873 goto out;
874 }
875 ret = do_one_ahash_op(req, crypto_ahash_final(req));
876 if (ret)
877 goto out;
878 }
879
880 /* The real thing. */
881 for (i = 0; i < 8; i++) {
882 cycles_t start, end;
883
884 start = get_cycles();
885
886 ret = crypto_ahash_init(req);
887 if (ret)
888 goto out;
889 for (pcount = 0; pcount < blen; pcount += plen) {
890 ret = do_one_ahash_op(req, crypto_ahash_update(req));
891 if (ret)
892 goto out;
893 }
894 ret = do_one_ahash_op(req, crypto_ahash_final(req));
895 if (ret)
896 goto out;
897
898 end = get_cycles();
899
900 cycles += end - start;
901 }
902
903 out:
904 if (ret)
905 return ret;
906
907 pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
908 cycles / 8, cycles / (8 * blen));
909
910 return 0;
911 }
912
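/*
 * Async hash speed test: same measurements as test_hash_speed(), but
 * through the ahash API with completions handled by tcrypt_complete().
 */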
913 static void test_ahash_speed(const char *algo, unsigned int sec,
914 struct hash_speed *speed)
915 {
916 struct scatterlist sg[TVMEMSIZE];
917 struct tcrypt_result tresult;
918 struct ahash_request *req;
919 struct crypto_ahash *tfm;
920 static char output[1024];
921 int i, ret;
922
923 printk(KERN_INFO "\ntesting speed of async %s\n", algo);
924
925 tfm = crypto_alloc_ahash(algo, 0, 0);
926 if (IS_ERR(tfm)) {
927 pr_err("failed to load transform for %s: %ld\n",
928 algo, PTR_ERR(tfm));
929 return;
930 }
931
932 if (crypto_ahash_digestsize(tfm) > sizeof(output)) {
933 pr_err("digestsize(%u) > outputbuffer(%zu)\n",
934 crypto_ahash_digestsize(tfm), sizeof(output));
935 goto out;
936 }
937
938 test_hash_sg_init(sg);
939 req = ahash_request_alloc(tfm, GFP_KERNEL);
940 if (!req) {
941 pr_err("ahash request allocation failure\n");
942 goto out;
943 }
944
945 init_completion(&tresult.completion);
946 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
947 tcrypt_complete, &tresult);
948
949 for (i = 0; speed[i].blen != 0; i++) {
950 if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
951 pr_err("template (%u) too big for tvmem (%lu)\n",
952 speed[i].blen, TVMEMSIZE * PAGE_SIZE);
953 break;
954 }
955
956 pr_info("test%3u "
957 "(%5u byte blocks,%5u bytes per update,%4u updates): ",
958 i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
959
960 ahash_request_set_crypt(req, sg, output, speed[i].plen);
961
962 if (sec)
963 ret = test_ahash_jiffies(req, speed[i].blen,
964 speed[i].plen, output, sec);
965 else
966 ret = test_ahash_cycles(req, speed[i].blen,
967 speed[i].plen, output);
968
969 if (ret) {
970 pr_err("hashing failed ret=%d\n", ret);
971 break;
972 }
973 }
974
975 ahash_request_free(req);
976
977 out:
978 crypto_free_ahash(tfm);
979 }
980
981 static inline int do_one_acipher_op(struct ablkcipher_request *req, int ret)
982 {
983 if (ret == -EINPROGRESS || ret == -EBUSY) {
984 struct tcrypt_result *tr = req->base.data;
985
986 ret = wait_for_completion_interruptible(&tr->completion);
987 if (!ret)
988 ret = tr->err;
989 reinit_completion(&tr->completion);
990 }
991
992 return ret;
993 }
994
995 static int test_acipher_jiffies(struct ablkcipher_request *req, int enc,
996 int blen, int sec)
997 {
998 unsigned long start, end;
999 int bcount;
1000 int ret;
1001
1002 for (start = jiffies, end = start + sec * HZ, bcount = 0;
1003 time_before(jiffies, end); bcount++) {
1004 if (enc)
1005 ret = do_one_acipher_op(req,
1006 crypto_ablkcipher_encrypt(req));
1007 else
1008 ret = do_one_acipher_op(req,
1009 crypto_ablkcipher_decrypt(req));
1010
1011 if (ret)
1012 return ret;
1013 }
1014
1015 pr_cont("%d operations in %d seconds (%ld bytes)\n",
1016 bcount, sec, (long)bcount * blen);
1017 return 0;
1018 }
1019
1020 static int test_acipher_cycles(struct ablkcipher_request *req, int enc,
1021 int blen)
1022 {
1023 unsigned long cycles = 0;
1024 int ret = 0;
1025 int i;
1026
1027 /* Warm-up run. */
1028 for (i = 0; i < 4; i++) {
1029 if (enc)
1030 ret = do_one_acipher_op(req,
1031 crypto_ablkcipher_encrypt(req));
1032 else
1033 ret = do_one_acipher_op(req,
1034 crypto_ablkcipher_decrypt(req));
1035
1036 if (ret)
1037 goto out;
1038 }
1039
1040 /* The real thing. */
1041 for (i = 0; i < 8; i++) {
1042 cycles_t start, end;
1043
1044 start = get_cycles();
1045 if (enc)
1046 ret = do_one_acipher_op(req,
1047 crypto_ablkcipher_encrypt(req));
1048 else
1049 ret = do_one_acipher_op(req,
1050 crypto_ablkcipher_decrypt(req));
1051 end = get_cycles();
1052
1053 if (ret)
1054 goto out;
1055
1056 cycles += end - start;
1057 }
1058
1059 out:
1060 if (ret == 0)
1061 pr_cont("1 operation in %lu cycles (%d bytes)\n",
1062 (cycles + 4) / 8, blen);
1063
1064 return ret;
1065 }
1066
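/*
 * Async block cipher speed test (ablkcipher API).  The scatterlist is
 * built from the tvmem pages, skipping the first '*keysize' bytes of the
 * first page so that key and data do not overlap, and spans *b_size bytes.
 */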
1067 static void test_acipher_speed(const char *algo, int enc, unsigned int sec,
1068 struct cipher_speed_template *template,
1069 unsigned int tcount, u8 *keysize)
1070 {
1071 unsigned int ret, i, j, k, iv_len;
1072 struct tcrypt_result tresult;
1073 const char *key;
1074 char iv[128];
1075 struct ablkcipher_request *req;
1076 struct crypto_ablkcipher *tfm;
1077 const char *e;
1078 u32 *b_size;
1079
1080 if (enc == ENCRYPT)
1081 e = "encryption";
1082 else
1083 e = "decryption";
1084
1085 pr_info("\ntesting speed of async %s %s\n", algo, e);
1086
1087 init_completion(&tresult.completion);
1088
1089 tfm = crypto_alloc_ablkcipher(algo, 0, 0);
1090
1091 if (IS_ERR(tfm)) {
1092 pr_err("failed to load transform for %s: %ld\n", algo,
1093 PTR_ERR(tfm));
1094 return;
1095 }
1096
1097 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
1098 if (!req) {
1099 pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
1100 algo);
1101 goto out;
1102 }
1103
1104 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1105 tcrypt_complete, &tresult);
1106
1107 i = 0;
1108 do {
1109 b_size = block_sizes;
1110
1111 do {
1112 struct scatterlist sg[TVMEMSIZE];
1113
1114 if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
1115 pr_err("template (%u) too big for "
1116 "tvmem (%lu)\n", *keysize + *b_size,
1117 TVMEMSIZE * PAGE_SIZE);
1118 goto out_free_req;
1119 }
1120
1121 pr_info("test %u (%d bit key, %d byte blocks): ", i,
1122 *keysize * 8, *b_size);
1123
1124 memset(tvmem[0], 0xff, PAGE_SIZE);
1125
1126 /* set key, plain text and IV */
1127 key = tvmem[0];
1128 for (j = 0; j < tcount; j++) {
1129 if (template[j].klen == *keysize) {
1130 key = template[j].key;
1131 break;
1132 }
1133 }
1134
1135 crypto_ablkcipher_clear_flags(tfm, ~0);
1136
1137 ret = crypto_ablkcipher_setkey(tfm, key, *keysize);
1138 if (ret) {
1139 pr_err("setkey() failed flags=%x\n",
1140 crypto_ablkcipher_get_flags(tfm));
1141 goto out_free_req;
1142 }
1143
1144 sg_init_table(sg, TVMEMSIZE);
1145
1146 k = *keysize + *b_size;
1147 if (k > PAGE_SIZE) {
1148 sg_set_buf(sg, tvmem[0] + *keysize,
1149 PAGE_SIZE - *keysize);
1150 k -= PAGE_SIZE;
1151 j = 1;
1152 while (k > PAGE_SIZE) {
1153 sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
1154 memset(tvmem[j], 0xff, PAGE_SIZE);
1155 j++;
1156 k -= PAGE_SIZE;
1157 }
1158 sg_set_buf(sg + j, tvmem[j], k);
1159 memset(tvmem[j], 0xff, k);
1160 } else {
1161 sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
1162 }
1163
1164 iv_len = crypto_ablkcipher_ivsize(tfm);
1165 if (iv_len)
1166 memset(&iv, 0xff, iv_len);
1167
1168 ablkcipher_request_set_crypt(req, sg, sg, *b_size, iv);
1169
1170 if (sec)
1171 ret = test_acipher_jiffies(req, enc,
1172 *b_size, sec);
1173 else
1174 ret = test_acipher_cycles(req, enc,
1175 *b_size);
1176
1177 if (ret) {
1178 pr_err("%s() failed flags=%x\n", e,
1179 crypto_ablkcipher_get_flags(tfm));
1180 break;
1181 }
1182 b_size++;
1183 i++;
1184 } while (*b_size);
1185 keysize++;
1186 } while (*keysize);
1187
1188 out_free_req:
1189 ablkcipher_request_free(req);
1190 out:
1191 crypto_free_ablkcipher(tfm);
1192 }
1193
1194 static void test_available(void)
1195 {
1196 char **name = check;
1197
1198 while (*name) {
1199 printk("alg %s ", *name);
1200 printk(crypto_has_alg(*name, 0, 0) ?
1201 "found\n" : "not found\n");
1202 name++;
1203 }
1204 }
1205
1206 static inline int tcrypt_test(const char *alg)
1207 {
1208 int ret;
1209
1210 ret = alg_test(alg, alg, 0, 0);
1211 /* non-fips algs return -EINVAL in fips mode */
1212 if (fips_enabled && ret == -EINVAL)
1213 ret = 0;
1214 return ret;
1215 }
1216
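/*
 * Dispatch table for the 'mode' parameter: 0 runs every numbered
 * correctness test, 1-199 are individual correctness tests, 200-211
 * synchronous cipher and AEAD speed tests, 300-320 hash speed tests,
 * 400-417 async hash speed tests, 500-509 async cipher speed tests,
 * and 1000 prints which of the algorithms in check[] are available.
 */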
1217 static int do_test(int m)
1218 {
1219 int i;
1220 int ret = 0;
1221
1222 switch (m) {
1223 case 0:
1224 for (i = 1; i < 200; i++)
1225 ret += do_test(i);
1226 break;
1227
1228 case 1:
1229 ret += tcrypt_test("md5");
1230 break;
1231
1232 case 2:
1233 ret += tcrypt_test("sha1");
1234 break;
1235
1236 case 3:
1237 ret += tcrypt_test("ecb(des)");
1238 ret += tcrypt_test("cbc(des)");
1239 ret += tcrypt_test("ctr(des)");
1240 break;
1241
1242 case 4:
1243 ret += tcrypt_test("ecb(des3_ede)");
1244 ret += tcrypt_test("cbc(des3_ede)");
1245 ret += tcrypt_test("ctr(des3_ede)");
1246 break;
1247
1248 case 5:
1249 ret += tcrypt_test("md4");
1250 break;
1251
1252 case 6:
1253 ret += tcrypt_test("sha256");
1254 break;
1255
1256 case 7:
1257 ret += tcrypt_test("ecb(blowfish)");
1258 ret += tcrypt_test("cbc(blowfish)");
1259 ret += tcrypt_test("ctr(blowfish)");
1260 break;
1261
1262 case 8:
1263 ret += tcrypt_test("ecb(twofish)");
1264 ret += tcrypt_test("cbc(twofish)");
1265 ret += tcrypt_test("ctr(twofish)");
1266 ret += tcrypt_test("lrw(twofish)");
1267 ret += tcrypt_test("xts(twofish)");
1268 break;
1269
1270 case 9:
1271 ret += tcrypt_test("ecb(serpent)");
1272 ret += tcrypt_test("cbc(serpent)");
1273 ret += tcrypt_test("ctr(serpent)");
1274 ret += tcrypt_test("lrw(serpent)");
1275 ret += tcrypt_test("xts(serpent)");
1276 break;
1277
1278 case 10:
1279 ret += tcrypt_test("ecb(aes)");
1280 ret += tcrypt_test("cbc(aes)");
1281 ret += tcrypt_test("lrw(aes)");
1282 ret += tcrypt_test("xts(aes)");
1283 ret += tcrypt_test("ctr(aes)");
1284 ret += tcrypt_test("rfc3686(ctr(aes))");
1285 break;
1286
1287 case 11:
1288 ret += tcrypt_test("sha384");
1289 break;
1290
1291 case 12:
1292 ret += tcrypt_test("sha512");
1293 break;
1294
1295 case 13:
1296 ret += tcrypt_test("deflate");
1297 break;
1298
1299 case 14:
1300 ret += tcrypt_test("ecb(cast5)");
1301 ret += tcrypt_test("cbc(cast5)");
1302 ret += tcrypt_test("ctr(cast5)");
1303 break;
1304
1305 case 15:
1306 ret += tcrypt_test("ecb(cast6)");
1307 ret += tcrypt_test("cbc(cast6)");
1308 ret += tcrypt_test("ctr(cast6)");
1309 ret += tcrypt_test("lrw(cast6)");
1310 ret += tcrypt_test("xts(cast6)");
1311 break;
1312
1313 case 16:
1314 ret += tcrypt_test("ecb(arc4)");
1315 break;
1316
1317 case 17:
1318 ret += tcrypt_test("michael_mic");
1319 break;
1320
1321 case 18:
1322 ret += tcrypt_test("crc32c");
1323 break;
1324
1325 case 19:
1326 ret += tcrypt_test("ecb(tea)");
1327 break;
1328
1329 case 20:
1330 ret += tcrypt_test("ecb(xtea)");
1331 break;
1332
1333 case 21:
1334 ret += tcrypt_test("ecb(khazad)");
1335 break;
1336
1337 case 22:
1338 ret += tcrypt_test("wp512");
1339 break;
1340
1341 case 23:
1342 ret += tcrypt_test("wp384");
1343 break;
1344
1345 case 24:
1346 ret += tcrypt_test("wp256");
1347 break;
1348
1349 case 25:
1350 ret += tcrypt_test("ecb(tnepres)");
1351 break;
1352
1353 case 26:
1354 ret += tcrypt_test("ecb(anubis)");
1355 ret += tcrypt_test("cbc(anubis)");
1356 break;
1357
1358 case 27:
1359 ret += tcrypt_test("tgr192");
1360 break;
1361
1362 case 28:
1363 ret += tcrypt_test("tgr160");
1364 break;
1365
1366 case 29:
1367 ret += tcrypt_test("tgr128");
1368 break;
1369
1370 case 30:
1371 ret += tcrypt_test("ecb(xeta)");
1372 break;
1373
1374 case 31:
1375 ret += tcrypt_test("pcbc(fcrypt)");
1376 break;
1377
1378 case 32:
1379 ret += tcrypt_test("ecb(camellia)");
1380 ret += tcrypt_test("cbc(camellia)");
1381 ret += tcrypt_test("ctr(camellia)");
1382 ret += tcrypt_test("lrw(camellia)");
1383 ret += tcrypt_test("xts(camellia)");
1384 break;
1385
1386 case 33:
1387 ret += tcrypt_test("sha224");
1388 break;
1389
1390 case 34:
1391 ret += tcrypt_test("salsa20");
1392 break;
1393
1394 case 35:
1395 ret += tcrypt_test("gcm(aes)");
1396 break;
1397
1398 case 36:
1399 ret += tcrypt_test("lzo");
1400 break;
1401
1402 case 37:
1403 ret += tcrypt_test("ccm(aes)");
1404 break;
1405
1406 case 38:
1407 ret += tcrypt_test("cts(cbc(aes))");
1408 break;
1409
1410 case 39:
1411 ret += tcrypt_test("rmd128");
1412 break;
1413
1414 case 40:
1415 ret += tcrypt_test("rmd160");
1416 break;
1417
1418 case 41:
1419 ret += tcrypt_test("rmd256");
1420 break;
1421
1422 case 42:
1423 ret += tcrypt_test("rmd320");
1424 break;
1425
1426 case 43:
1427 ret += tcrypt_test("ecb(seed)");
1428 break;
1429
1430 case 44:
1431 ret += tcrypt_test("zlib");
1432 break;
1433
1434 case 45:
1435 ret += tcrypt_test("rfc4309(ccm(aes))");
1436 break;
1437
1438 case 46:
1439 ret += tcrypt_test("ghash");
1440 break;
1441
1442 case 47:
1443 ret += tcrypt_test("crct10dif");
1444 break;
1445
1446 case 100:
1447 ret += tcrypt_test("hmac(md5)");
1448 break;
1449
1450 case 101:
1451 ret += tcrypt_test("hmac(sha1)");
1452 break;
1453
1454 case 102:
1455 ret += tcrypt_test("hmac(sha256)");
1456 break;
1457
1458 case 103:
1459 ret += tcrypt_test("hmac(sha384)");
1460 break;
1461
1462 case 104:
1463 ret += tcrypt_test("hmac(sha512)");
1464 break;
1465
1466 case 105:
1467 ret += tcrypt_test("hmac(sha224)");
1468 break;
1469
1470 case 106:
1471 ret += tcrypt_test("xcbc(aes)");
1472 break;
1473
1474 case 107:
1475 ret += tcrypt_test("hmac(rmd128)");
1476 break;
1477
1478 case 108:
1479 ret += tcrypt_test("hmac(rmd160)");
1480 break;
1481
1482 case 109:
1483 ret += tcrypt_test("vmac(aes)");
1484 break;
1485
1486 case 110:
1487 ret += tcrypt_test("hmac(crc32)");
1488 break;
1489
1490 case 150:
1491 ret += tcrypt_test("ansi_cprng");
1492 break;
1493
1494 case 151:
1495 ret += tcrypt_test("rfc4106(gcm(aes))");
1496 break;
1497
1498 case 152:
1499 ret += tcrypt_test("rfc4543(gcm(aes))");
1500 break;
1501
1502 case 153:
1503 ret += tcrypt_test("cmac(aes)");
1504 break;
1505
1506 case 154:
1507 ret += tcrypt_test("cmac(des3_ede)");
1508 break;
1509
1510 case 155:
1511 ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
1512 break;
1513
1514 case 156:
1515 ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
1516 break;
1517
1518 case 157:
1519 ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
1520 break;
1521
1522 case 200:
1523 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
1524 speed_template_16_24_32);
1525 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
1526 speed_template_16_24_32);
1527 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
1528 speed_template_16_24_32);
1529 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
1530 speed_template_16_24_32);
1531 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
1532 speed_template_32_40_48);
1533 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
1534 speed_template_32_40_48);
1535 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
1536 speed_template_32_48_64);
1537 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
1538 speed_template_32_48_64);
1539 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
1540 speed_template_16_24_32);
1541 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
1542 speed_template_16_24_32);
1543 break;
1544
1545 case 201:
1546 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
1547 des3_speed_template, DES3_SPEED_VECTORS,
1548 speed_template_24);
1549 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
1550 des3_speed_template, DES3_SPEED_VECTORS,
1551 speed_template_24);
1552 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
1553 des3_speed_template, DES3_SPEED_VECTORS,
1554 speed_template_24);
1555 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
1556 des3_speed_template, DES3_SPEED_VECTORS,
1557 speed_template_24);
1558 break;
1559
1560 case 202:
1561 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
1562 speed_template_16_24_32);
1563 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
1564 speed_template_16_24_32);
1565 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
1566 speed_template_16_24_32);
1567 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
1568 speed_template_16_24_32);
1569 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
1570 speed_template_16_24_32);
1571 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
1572 speed_template_16_24_32);
1573 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
1574 speed_template_32_40_48);
1575 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
1576 speed_template_32_40_48);
1577 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
1578 speed_template_32_48_64);
1579 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
1580 speed_template_32_48_64);
1581 break;
1582
1583 case 203:
1584 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
1585 speed_template_8_32);
1586 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
1587 speed_template_8_32);
1588 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
1589 speed_template_8_32);
1590 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
1591 speed_template_8_32);
1592 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
1593 speed_template_8_32);
1594 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
1595 speed_template_8_32);
1596 break;
1597
1598 case 204:
1599 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
1600 speed_template_8);
1601 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
1602 speed_template_8);
1603 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
1604 speed_template_8);
1605 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
1606 speed_template_8);
1607 break;
1608
1609 case 205:
1610 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
1611 speed_template_16_24_32);
1612 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
1613 speed_template_16_24_32);
1614 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
1615 speed_template_16_24_32);
1616 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
1617 speed_template_16_24_32);
1618 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
1619 speed_template_16_24_32);
1620 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
1621 speed_template_16_24_32);
1622 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
1623 speed_template_32_40_48);
1624 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
1625 speed_template_32_40_48);
1626 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
1627 speed_template_32_48_64);
1628 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
1629 speed_template_32_48_64);
1630 break;
1631
1632 case 206:
1633 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
1634 speed_template_16_32);
1635 break;
1636
1637 case 207:
1638 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
1639 speed_template_16_32);
1640 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
1641 speed_template_16_32);
1642 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
1643 speed_template_16_32);
1644 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
1645 speed_template_16_32);
1646 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
1647 speed_template_16_32);
1648 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
1649 speed_template_16_32);
1650 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
1651 speed_template_32_48);
1652 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
1653 speed_template_32_48);
1654 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
1655 speed_template_32_64);
1656 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
1657 speed_template_32_64);
1658 break;
1659
1660 case 208:
1661 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
1662 speed_template_8);
1663 break;
1664
1665 case 209:
1666 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
1667 speed_template_8_16);
1668 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
1669 speed_template_8_16);
1670 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
1671 speed_template_8_16);
1672 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
1673 speed_template_8_16);
1674 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
1675 speed_template_8_16);
1676 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
1677 speed_template_8_16);
1678 break;
1679
1680 case 210:
1681 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
1682 speed_template_16_32);
1683 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
1684 speed_template_16_32);
1685 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
1686 speed_template_16_32);
1687 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
1688 speed_template_16_32);
1689 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
1690 speed_template_16_32);
1691 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
1692 speed_template_16_32);
1693 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
1694 speed_template_32_48);
1695 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
1696 speed_template_32_48);
1697 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
1698 speed_template_32_64);
1699 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
1700 speed_template_32_64);
1701 break;
1702
1703 case 211:
1704 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
1705 NULL, 0, 16, 8, aead_speed_template_20);
1706 break;
1707
1708 case 300:
1709 /* fall through */
1710
1711 case 301:
1712 test_hash_speed("md4", sec, generic_hash_speed_template);
1713 if (mode > 300 && mode < 400) break;
1714
1715 case 302:
1716 test_hash_speed("md5", sec, generic_hash_speed_template);
1717 if (mode > 300 && mode < 400) break;
1718
1719 case 303:
1720 test_hash_speed("sha1", sec, generic_hash_speed_template);
1721 if (mode > 300 && mode < 400) break;
1722
1723 case 304:
1724 test_hash_speed("sha256", sec, generic_hash_speed_template);
1725 if (mode > 300 && mode < 400) break;
1726
1727 case 305:
1728 test_hash_speed("sha384", sec, generic_hash_speed_template);
1729 if (mode > 300 && mode < 400) break;
1730
1731 case 306:
1732 test_hash_speed("sha512", sec, generic_hash_speed_template);
1733 if (mode > 300 && mode < 400) break;
1734
1735 case 307:
1736 test_hash_speed("wp256", sec, generic_hash_speed_template);
1737 if (mode > 300 && mode < 400) break;
1738
1739 case 308:
1740 test_hash_speed("wp384", sec, generic_hash_speed_template);
1741 if (mode > 300 && mode < 400) break;
1742
1743 case 309:
1744 test_hash_speed("wp512", sec, generic_hash_speed_template);
1745 if (mode > 300 && mode < 400) break;
1746
1747 case 310:
1748 test_hash_speed("tgr128", sec, generic_hash_speed_template);
1749 if (mode > 300 && mode < 400) break;
1750
1751 case 311:
1752 test_hash_speed("tgr160", sec, generic_hash_speed_template);
1753 if (mode > 300 && mode < 400) break;
1754
1755 case 312:
1756 test_hash_speed("tgr192", sec, generic_hash_speed_template);
1757 if (mode > 300 && mode < 400) break;
1758
1759 case 313:
1760 test_hash_speed("sha224", sec, generic_hash_speed_template);
1761 if (mode > 300 && mode < 400) break;
1762
1763 case 314:
1764 test_hash_speed("rmd128", sec, generic_hash_speed_template);
1765 if (mode > 300 && mode < 400) break;
1766
1767 case 315:
1768 test_hash_speed("rmd160", sec, generic_hash_speed_template);
1769 if (mode > 300 && mode < 400) break;
1770
1771 case 316:
1772 test_hash_speed("rmd256", sec, generic_hash_speed_template);
1773 if (mode > 300 && mode < 400) break;
1774
1775 case 317:
1776 test_hash_speed("rmd320", sec, generic_hash_speed_template);
1777 if (mode > 300 && mode < 400) break;
1778
1779 case 318:
1780 test_hash_speed("ghash-generic", sec, hash_speed_template_16);
1781 if (mode > 300 && mode < 400) break;
1782
1783 case 319:
1784 test_hash_speed("crc32c", sec, generic_hash_speed_template);
1785 if (mode > 300 && mode < 400) break;
1786
1787 case 320:
1788 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
1789 if (mode > 300 && mode < 400) break;
1790
1791 case 399:
1792 break;
1793
1794 case 400:
1795 /* fall through */
1796
1797 case 401:
1798 test_ahash_speed("md4", sec, generic_hash_speed_template);
1799 if (mode > 400 && mode < 500) break;
1800
1801 case 402:
1802 test_ahash_speed("md5", sec, generic_hash_speed_template);
1803 if (mode > 400 && mode < 500) break;
1804
1805 case 403:
1806 test_ahash_speed("sha1", sec, generic_hash_speed_template);
1807 if (mode > 400 && mode < 500) break;
1808
1809 case 404:
1810 test_ahash_speed("sha256", sec, generic_hash_speed_template);
1811 if (mode > 400 && mode < 500) break;
1812
1813 case 405:
1814 test_ahash_speed("sha384", sec, generic_hash_speed_template);
1815 if (mode > 400 && mode < 500) break;
1816
1817 case 406:
1818 test_ahash_speed("sha512", sec, generic_hash_speed_template);
1819 if (mode > 400 && mode < 500) break;
1820
1821 case 407:
1822 test_ahash_speed("wp256", sec, generic_hash_speed_template);
1823 if (mode > 400 && mode < 500) break;
1824
1825 case 408:
1826 test_ahash_speed("wp384", sec, generic_hash_speed_template);
1827 if (mode > 400 && mode < 500) break;
1828
1829 case 409:
1830 test_ahash_speed("wp512", sec, generic_hash_speed_template);
1831 if (mode > 400 && mode < 500) break;
1832
1833 case 410:
1834 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
1835 if (mode > 400 && mode < 500) break;
1836
1837 case 411:
1838 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
1839 if (mode > 400 && mode < 500) break;
1840
1841 case 412:
1842 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
1843 if (mode > 400 && mode < 500) break;
1844
1845 case 413:
1846 test_ahash_speed("sha224", sec, generic_hash_speed_template);
1847 if (mode > 400 && mode < 500) break;
1848
1849 case 414:
1850 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
1851 if (mode > 400 && mode < 500) break;
1852
1853 case 415:
1854 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
1855 if (mode > 400 && mode < 500) break;
1856
1857 case 416:
1858 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
1859 if (mode > 400 && mode < 500) break;
1860
1861 case 417:
1862 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
1863 if (mode > 400 && mode < 500) break;
1864
1865 case 499:
1866 break;
1867
1868 case 500:
1869 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
1870 speed_template_16_24_32);
1871 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
1872 speed_template_16_24_32);
1873 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
1874 speed_template_16_24_32);
1875 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
1876 speed_template_16_24_32);
1877 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
1878 speed_template_32_40_48);
1879 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
1880 speed_template_32_40_48);
1881 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
1882 speed_template_32_48_64);
1883 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
1884 speed_template_32_48_64);
1885 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
1886 speed_template_16_24_32);
1887 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
1888 speed_template_16_24_32);
1889 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
1890 speed_template_16_24_32);
1891 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
1892 speed_template_16_24_32);
1893 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
1894 speed_template_16_24_32);
1895 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
1896 speed_template_16_24_32);
1897 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
1898 speed_template_20_28_36);
1899 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
1900 speed_template_20_28_36);
1901 break;
1902
1903 case 501:
1904 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
1905 des3_speed_template, DES3_SPEED_VECTORS,
1906 speed_template_24);
1907 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
1908 des3_speed_template, DES3_SPEED_VECTORS,
1909 speed_template_24);
1910 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
1911 des3_speed_template, DES3_SPEED_VECTORS,
1912 speed_template_24);
1913 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
1914 des3_speed_template, DES3_SPEED_VECTORS,
1915 speed_template_24);
1916 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
1917 des3_speed_template, DES3_SPEED_VECTORS,
1918 speed_template_24);
1919 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
1920 des3_speed_template, DES3_SPEED_VECTORS,
1921 speed_template_24);
1922 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
1923 des3_speed_template, DES3_SPEED_VECTORS,
1924 speed_template_24);
1925 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
1926 des3_speed_template, DES3_SPEED_VECTORS,
1927 speed_template_24);
1928 break;
1929
1930 case 502:
1931 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
1932 speed_template_8);
1933 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
1934 speed_template_8);
1935 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
1936 speed_template_8);
1937 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
1938 speed_template_8);
1939 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
1940 speed_template_8);
1941 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
1942 speed_template_8);
1943 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
1944 speed_template_8);
1945 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
1946 speed_template_8);
1947 break;
1948
1949 case 503:
1950 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
1951 speed_template_16_32);
1952 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
1953 speed_template_16_32);
1954 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
1955 speed_template_16_32);
1956 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
1957 speed_template_16_32);
1958 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
1959 speed_template_16_32);
1960 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
1961 speed_template_16_32);
1962 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
1963 speed_template_32_48);
1964 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
1965 speed_template_32_48);
1966 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
1967 speed_template_32_64);
1968 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
1969 speed_template_32_64);
1970 break;
1971
1972 case 504:
1973 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
1974 speed_template_16_24_32);
1975 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
1976 speed_template_16_24_32);
1977 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
1978 speed_template_16_24_32);
1979 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
1980 speed_template_16_24_32);
1981 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
1982 speed_template_16_24_32);
1983 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
1984 speed_template_16_24_32);
1985 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
1986 speed_template_32_40_48);
1987 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
1988 speed_template_32_40_48);
1989 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
1990 speed_template_32_48_64);
1991 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
1992 speed_template_32_48_64);
1993 break;
1994
1995 case 505:
1996 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
1997 speed_template_8);
1998 break;
1999
2000 case 506:
2001 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2002 speed_template_8_16);
2003 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2004 speed_template_8_16);
2005 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2006 speed_template_8_16);
2007 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2008 speed_template_8_16);
2009 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2010 speed_template_8_16);
2011 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2012 speed_template_8_16);
2013 break;
2014
2015 case 507:
2016 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2017 speed_template_16_32);
2018 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2019 speed_template_16_32);
2020 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2021 speed_template_16_32);
2022 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2023 speed_template_16_32);
2024 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2025 speed_template_16_32);
2026 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2027 speed_template_16_32);
2028 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2029 speed_template_32_48);
2030 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2031 speed_template_32_48);
2032 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2033 speed_template_32_64);
2034 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2035 speed_template_32_64);
2036 break;
2037
2038 case 508:
2039 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2040 speed_template_16_32);
2041 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2042 speed_template_16_32);
2043 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2044 speed_template_16_32);
2045 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2046 speed_template_16_32);
2047 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2048 speed_template_16_32);
2049 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2050 speed_template_16_32);
2051 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2052 speed_template_32_48);
2053 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2054 speed_template_32_48);
2055 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2056 speed_template_32_64);
2057 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2058 speed_template_32_64);
2059 break;
2060
2061 case 509:
2062 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2063 speed_template_8_32);
2064 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2065 speed_template_8_32);
2066 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2067 speed_template_8_32);
2068 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2069 speed_template_8_32);
2070 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2071 speed_template_8_32);
2072 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2073 speed_template_8_32);
2074 break;
2075
2076 case 1000:
2077 test_available();
2078 break;
2079 }
2080
2081 return ret;
2082 }
2083
2084 static int do_alg_test(const char *alg, u32 type, u32 mask)
2085 {
2086 return crypto_has_alg(alg, type, mask ?: CRYPTO_ALG_TYPE_MASK) ?
2087 0 : -ENOENT;
2088 }
2089
2090 static int __init tcrypt_mod_init(void)
2091 {
2092 int err = -ENOMEM;
2093 int i;
2094
2095 for (i = 0; i < TVMEMSIZE; i++) {
2096 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
2097 if (!tvmem[i])
2098 goto err_free_tv;
2099 }
2100
2101 if (alg)
2102 err = do_alg_test(alg, type, mask);
2103 else
2104 err = do_test(mode);
2105
2106 if (err) {
2107 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
2108 goto err_free_tv;
2109 }
2110
2111 /* We intentionally return -EAGAIN to prevent keeping the module,
2112 * unless we're running in fips mode. It does all its work from
2113 * init() and doesn't offer any runtime functionality, but in
2114 * the fips case, checking for a successful load is helpful.
2115 * => we don't need it in the memory, do we?
2116 * -- mludvig
2117 */
2118 if (!fips_enabled)
2119 err = -EAGAIN;
2120
2121 err_free_tv:
2122 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
2123 free_page((unsigned long)tvmem[i]);
2124
2125 return err;
2126 }
2127
2128 /*
2129 * If an init function is provided, an exit function must also be provided
2130 * to allow module unload.
2131 */
2132 static void __exit tcrypt_mod_fini(void) { }
2133
2134 module_init(tcrypt_mod_init);
2135 module_exit(tcrypt_mod_fini);
2136
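/*
 * Module parameters (see do_test() and do_alg_test() above):
 *   alg        - if set, only check availability of this algorithm name
 *   type, mask - passed to crypto_has_alg() together with 'alg'
 *   mode       - which test case(s) from do_test() to run
 *                (default 0 runs all correctness tests)
 *   sec        - duration of each speed test in seconds; 0 counts cycles
 */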
2137 module_param(alg, charp, 0);
2138 module_param(type, uint, 0);
2139 module_param(mask, uint, 0);
2140 module_param(mode, int, 0);
2141 module_param(sec, uint, 0);
2142 MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
2143 "(defaults to zero which uses CPU cycles instead)");
2144
2145 MODULE_LICENSE("GPL");
2146 MODULE_DESCRIPTION("Quick & dirty crypto testing module");
2147 MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");