openwrt/openwrt.git: package/kernel/lantiq/ltq-deu/src/ltq_deu_testmgr.c
1 /*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * This program is free software; you can redistribute it and/or modify it
10 * under the terms of the GNU General Public License as published by the Free
11 * Software Foundation; either version 2 of the License, or (at your option)
12 * any later version.
13 *
14 */
15
16 #include <crypto/hash.h>
17 #include <linux/err.h>
18 #include <linux/module.h>
19 #include <linux/scatterlist.h>
20 #include <linux/slab.h>
21 #include <linux/string.h>
22 #include <crypto/rng.h>
23 #include <linux/jiffies.h>
24 #include <linux/init.h>
25 #include <linux/moduleparam.h>
26 #include <linux/delay.h>
27 #include <linux/types.h>
28 #include <linux/sched.h>
29
30 #include "internal.h"
31 #include "ifxmips_testmgr.h"
32 #include "ifxmips_tcrypt.h"
33 #include "ifxmips_deu.h"
34
35 /* changes for LQ ablkcipher speedtest */
36 #include <linux/timex.h>
37 #include <linux/interrupt.h>
38 #include <asm/mipsregs.h>
39
40 /*
41 * Need slab memory for testing (size in number of pages).
42 */
43 #define XBUFSIZE 8
44
45 /*
46 * Indexes into the xbuf to simulate cross-page access.
47 */
48 #define IDX1 32
49 #define IDX2 32400
50 #define IDX3 1
51 #define IDX4 8193
52 #define IDX5 22222
53 #define IDX6 17101
54 #define IDX7 27333
55 #define IDX8 3000
56
57 /*
58 * Used by test_cipher()
59 */
60 #define ENCRYPT 1
61 #define DECRYPT 0
62
63 /*
64 * Need slab memory for testing (size in number of pages).
65 */
66 #define TVMEMSIZE 4
67
68 /*
69  * ENCRYPT/DECRYPT above are shared with test_cipher_speed(), so they
70  * are not redefined here.
71  */
72
73 /*
74  * INIT_COMPLETION() was replaced by reinit_completion() in newer
75  * kernels; provide a fallback where it is missing.
76  */
77
78 #ifndef INIT_COMPLETION
79 #define INIT_COMPLETION(a) reinit_completion(&a)
80 #endif
81
82
83 static unsigned int sec;
84
85 static char *alg = NULL;
86 static u32 type;
87 static u32 mask;
88 static int mode;
89 static char *tvmem[TVMEMSIZE];
90
91 static char *check[] = {
92 "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
93 "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
94 "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
95 "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
96 "camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
97 "lzo", "cts", "zlib", NULL
98 };
99 struct tcrypt_result {
100 struct completion completion;
101 int err;
102 };
103
104 struct aead_test_suite {
105 struct {
106 struct aead_testvec *vecs;
107 unsigned int count;
108 } enc, dec;
109 };
110
111 struct cipher_test_suite {
112 struct {
113 struct cipher_testvec *vecs;
114 unsigned int count;
115 } enc, dec;
116 };
117
118 struct comp_test_suite {
119 struct {
120 struct comp_testvec *vecs;
121 unsigned int count;
122 } comp, decomp;
123 };
124
125 struct pcomp_test_suite {
126 struct {
127 struct pcomp_testvec *vecs;
128 unsigned int count;
129 } comp, decomp;
130 };
131
132 struct hash_test_suite {
133 struct hash_testvec *vecs;
134 unsigned int count;
135 };
136
137 struct cprng_test_suite {
138 struct cprng_testvec *vecs;
139 unsigned int count;
140 };
141
142 struct alg_test_desc {
143 const char *alg;
144 int (*test)(const struct alg_test_desc *desc, const char *driver,
145 u32 type, u32 mask);
146 int fips_allowed; /* set if alg is allowed in fips mode */
147
148 union {
149 struct aead_test_suite aead;
150 struct cipher_test_suite cipher;
151 struct comp_test_suite comp;
152 struct pcomp_test_suite pcomp;
153 struct hash_test_suite hash;
154 struct cprng_test_suite cprng;
155 } suite;
156 };
157
158 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
159
160 static void hexdump(unsigned char *buf, unsigned int len)
161 {
162 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
163 16, 1,
164 buf, len, false);
165 }
166
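/*
 * Completion callback for asynchronous crypto requests: record the final
 * status in the caller's tcrypt_result and wake up any waiters.  An
 * -EINPROGRESS notification is only reported, not treated as completion.
 */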
167 static void tcrypt_complete(struct crypto_async_request *req, int err)
168 {
169 struct tcrypt_result *res = req->data;
170
171 //printk("Signal done test\n");
172
173 if (err == -EINPROGRESS) {
174 printk("********************* Completion didnt go too well **************************** \n");
175 return;
176 }
177
178 res->err = err;
179 complete_all(&res->completion);
180 }
181
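/*
 * Allocate XBUFSIZE single pages used as scatter/gather test buffers;
 * on failure, release whatever was already allocated.
 */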
182 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
183 {
184 int i;
185
186 for (i = 0; i < XBUFSIZE; i++) {
187 buf[i] = (void *)__get_free_page(GFP_KERNEL);
188 if (!buf[i])
189 goto err_free_buf;
190 }
191
192 return 0;
193
194 err_free_buf:
195 while (i-- > 0)
196 free_page((unsigned long)buf[i]);
197
198 return -ENOMEM;
199 }
200
201 static void testmgr_free_buf(char *buf[XBUFSIZE])
202 {
203 int i;
204
205 for (i = 0; i < XBUFSIZE; i++)
206 free_page((unsigned long)buf[i]);
207 }
208
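/*
 * Run all hash test vectors against an ahash transform.  The first pass
 * feeds each vector as one linear buffer; the second pass replays the
 * vectors that define a chunk layout (np/tap[]) scattered across pages
 * at the IDX[] offsets.  The computed digest is compared against the
 * expected digest in both cases.
 */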
209 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
210 unsigned int tcount)
211 {
212 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
213 unsigned int i, j, k, temp;
214 struct scatterlist sg[8];
215 char result[64];
216 struct ahash_request *req;
217 struct tcrypt_result tresult;
218 void *hash_buff;
219 char *xbuf[XBUFSIZE];
220 int ret = -ENOMEM;
221
222 if (testmgr_alloc_buf(xbuf))
223 goto out_nobuf;
224
225 init_completion(&tresult.completion);
226
227 req = ahash_request_alloc(tfm, GFP_KERNEL);
228 if (!req) {
229 printk(KERN_ERR "alg: hash: Failed to allocate request for "
230 "%s\n", algo);
231 goto out_noreq;
232 }
233 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
234 tcrypt_complete, &tresult);
235
236 j = 0;
237 for (i = 0; i < tcount; i++) {
238 if (template[i].np)
239 continue;
240
241 j++;
242 memset(result, 0, 64);
243
244 hash_buff = xbuf[0];
245
246 memcpy(hash_buff, template[i].plaintext, template[i].psize);
247 sg_init_one(&sg[0], hash_buff, template[i].psize);
248
249 if (template[i].ksize) {
250 crypto_ahash_clear_flags(tfm, ~0);
251 ret = crypto_ahash_setkey(tfm, template[i].key,
252 template[i].ksize);
253 if (ret) {
254 printk(KERN_ERR "alg: hash: setkey failed on "
255 "test %d for %s: ret=%d\n", j, algo,
256 -ret);
257 goto out;
258 }
259 }
260
261 ahash_request_set_crypt(req, sg, result, template[i].psize);
262 ret = crypto_ahash_digest(req);
263 switch (ret) {
264 case 0:
265 break;
266 case -EINPROGRESS:
267 case -EBUSY:
268 ret = wait_for_completion_interruptible(
269 &tresult.completion);
270 if (!ret && !(ret = tresult.err)) {
271 INIT_COMPLETION(tresult.completion);
272 break;
273 }
274 /* fall through */
275 default:
276 printk(KERN_ERR "alg: hash: digest failed on test %d "
277 "for %s: ret=%d\n", j, algo, -ret);
278 goto out;
279 }
280
281 if (memcmp(result, template[i].digest,
282 crypto_ahash_digestsize(tfm))) {
283 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
284 j, algo);
285 hexdump(result, crypto_ahash_digestsize(tfm));
286 ret = -EINVAL;
287 goto out;
288 }
289 else {
290 printk(KERN_ERR "alg: hash: Test %d passed for %s\n",
291 j, algo);
292 hexdump(result, crypto_ahash_digestsize(tfm));
293 }
294 }
295
296 j = 0;
297 for (i = 0; i < tcount; i++) {
298 if (template[i].np) {
299 j++;
300 memset(result, 0, 64);
301
302 temp = 0;
303 sg_init_table(sg, template[i].np);
304 ret = -EINVAL;
305 for (k = 0; k < template[i].np; k++) {
306 if (WARN_ON(offset_in_page(IDX[k]) +
307 template[i].tap[k] > PAGE_SIZE))
308 goto out;
309 sg_set_buf(&sg[k],
310 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
311 offset_in_page(IDX[k]),
312 template[i].plaintext + temp,
313 template[i].tap[k]),
314 template[i].tap[k]);
315 temp += template[i].tap[k];
316 }
317
318 if (template[i].ksize) {
319 crypto_ahash_clear_flags(tfm, ~0);
320 ret = crypto_ahash_setkey(tfm, template[i].key,
321 template[i].ksize);
322
323 if (ret) {
324 printk(KERN_ERR "alg: hash: setkey "
325 "failed on chunking test %d "
326 "for %s: ret=%d\n", j, algo,
327 -ret);
328 goto out;
329 }
330 }
331
332 ahash_request_set_crypt(req, sg, result,
333 template[i].psize);
334 ret = crypto_ahash_digest(req);
335 switch (ret) {
336 case 0:
337 break;
338 case -EINPROGRESS:
339 case -EBUSY:
340 ret = wait_for_completion_interruptible(
341 &tresult.completion);
342 if (!ret && !(ret = tresult.err)) {
343 INIT_COMPLETION(tresult.completion);
344 break;
345 }
346 /* fall through */
347 default:
348 printk(KERN_ERR "alg: hash: digest failed "
349 "on chunking test %d for %s: "
350 "ret=%d\n", j, algo, -ret);
351 goto out;
352 }
353
354 if (memcmp(result, template[i].digest,
355 crypto_ahash_digestsize(tfm))) {
356 printk(KERN_ERR "alg: hash: Chunking test %d "
357 "failed for %s\n", j, algo);
358 hexdump(result, crypto_ahash_digestsize(tfm));
359 ret = -EINVAL;
360 goto out;
361 }
362 else {
363 printk(KERN_ERR "alg: hash: Chunking test %d "
364 "passed for %s\n", j, algo);
365 hexdump(result, crypto_ahash_digestsize(tfm));
366 }
367 }
368 }
369
370 ret = 0;
371
372 out:
373 ahash_request_free(req);
374 out_noreq:
375 testmgr_free_buf(xbuf);
376 out_nobuf:
377 return ret;
378 }
379
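/*
 * Run AEAD test vectors for one direction (ENCRYPT or DECRYPT).  Each
 * vector is processed once from a linear buffer and once scattered
 * across pages (np/tap[] layout).  The key, IV, associated data and
 * authentication tag size come from the template; "novrfy" vectors are
 * expected to fail verification with -EBADMSG.
 */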
380 static int test_aead(struct crypto_aead *tfm, int enc,
381 struct aead_testvec *template, unsigned int tcount)
382 {
383 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
384 unsigned int i, j, k, n, temp;
385 int ret = -ENOMEM;
386 char *q;
387 char *key;
388 struct aead_request *req;
389 struct scatterlist sg[8];
390 struct scatterlist asg[8];
391 const char *e;
392 struct tcrypt_result result;
393 unsigned int authsize;
394 void *input;
395 void *assoc;
396 char iv[MAX_IVLEN];
397 char *xbuf[XBUFSIZE];
398 char *axbuf[XBUFSIZE];
399
400 if (testmgr_alloc_buf(xbuf))
401 goto out_noxbuf;
402 if (testmgr_alloc_buf(axbuf))
403 goto out_noaxbuf;
404
405 if (enc == ENCRYPT)
406 e = "encryption";
407 else
408 e = "decryption";
409
410 init_completion(&result.completion);
411
412 req = aead_request_alloc(tfm, GFP_KERNEL);
413 if (!req) {
414 printk(KERN_ERR "alg: aead: Failed to allocate request for "
415 "%s\n", algo);
416 goto out;
417 }
418
419 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
420 tcrypt_complete, &result);
421
422 for (i = 0, j = 0; i < tcount; i++) {
423 if (!template[i].np) {
424 j++;
425
426 /* some templates have no input data but they will
427  * touch the input buffer anyway
428  */
429 input = xbuf[0];
430 assoc = axbuf[0];
431
432 ret = -EINVAL;
433 if (WARN_ON(template[i].ilen > PAGE_SIZE ||
434 template[i].alen > PAGE_SIZE))
435 goto out;
436
437 memcpy(input, template[i].input, template[i].ilen);
438 memcpy(assoc, template[i].assoc, template[i].alen);
439 if (template[i].iv)
440 memcpy(iv, template[i].iv, MAX_IVLEN);
441 else
442 memset(iv, 0, MAX_IVLEN);
443
444 crypto_aead_clear_flags(tfm, ~0);
445 if (template[i].wk)
446 crypto_aead_set_flags(
447 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
448
449 key = template[i].key;
450
451 ret = crypto_aead_setkey(tfm, key,
452 template[i].klen);
453 if (!ret == template[i].fail) {
454 printk(KERN_ERR "alg: aead: setkey failed on "
455 "test %d for %s: flags=%x\n", j, algo,
456 crypto_aead_get_flags(tfm));
457 goto out;
458 } else if (ret)
459 continue;
460
461 authsize = abs(template[i].rlen - template[i].ilen);
462 ret = crypto_aead_setauthsize(tfm, authsize);
463 if (ret) {
464 printk(KERN_ERR "alg: aead: Failed to set "
465 "authsize to %u on test %d for %s\n",
466 authsize, j, algo);
467 goto out;
468 }
469
470 sg_init_one(&sg[0], input,
471 template[i].ilen + (enc ? authsize : 0));
472
473 sg_init_one(&asg[0], assoc, template[i].alen);
474
475 aead_request_set_crypt(req, sg, sg,
476 template[i].ilen, iv);
477
478 aead_request_set_assoc(req, asg, template[i].alen);
479
480 ret = enc ?
481 crypto_aead_encrypt(req) :
482 crypto_aead_decrypt(req);
483
484 switch (ret) {
485 case 0:
486 if (template[i].novrfy) {
487 /* verification was supposed to fail */
488 printk(KERN_ERR "alg: aead: %s failed "
489 "on test %d for %s: ret was 0, "
490 "expected -EBADMSG\n",
491 e, j, algo);
492 /* so really, we got a bad message */
493 ret = -EBADMSG;
494 goto out;
495 }
496 break;
497 case -EINPROGRESS:
498 case -EBUSY:
499 ret = wait_for_completion_interruptible(
500 &result.completion);
501 if (!ret && !(ret = result.err)) {
502 INIT_COMPLETION(result.completion);
503 break;
504 }
505 case -EBADMSG:
506 if (template[i].novrfy)
507 /* verification failure was expected */
508 continue;
509 /* fall through */
510 default:
511 printk(KERN_ERR "alg: aead: %s failed on test "
512 "%d for %s: ret=%d\n", e, j, algo, -ret);
513 goto out;
514 }
515
516 q = input;
517 if (memcmp(q, template[i].result, template[i].rlen)) {
518 printk(KERN_ERR "alg: aead: Test %d failed on "
519 "%s for %s\n", j, e, algo);
520 hexdump(q, template[i].rlen);
521 ret = -EINVAL;
522 goto out;
523 }
524 else {
525 printk(KERN_ERR "alg: aead: Test %d passed on "
526 "%s for %s\n", j, e, algo);
527 hexdump(q, template[i].rlen);
528 }
529 }
530 }
531
532 for (i = 0, j = 0; i < tcount; i++) {
533 if (template[i].np) {
534 j++;
535
536 if (template[i].iv)
537 memcpy(iv, template[i].iv, MAX_IVLEN);
538 else
539 memset(iv, 0, MAX_IVLEN);
540
541 crypto_aead_clear_flags(tfm, ~0);
542 if (template[i].wk)
543 crypto_aead_set_flags(
544 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
545 key = template[i].key;
546
547 ret = crypto_aead_setkey(tfm, key, template[i].klen);
548 if (!ret == template[i].fail) {
549 printk(KERN_ERR "alg: aead: setkey failed on "
550 "chunk test %d for %s: flags=%x\n", j,
551 algo, crypto_aead_get_flags(tfm));
552 goto out;
553 } else if (ret)
554 continue;
555
556 authsize = abs(template[i].rlen - template[i].ilen);
557
558 ret = -EINVAL;
559 sg_init_table(sg, template[i].np);
560 for (k = 0, temp = 0; k < template[i].np; k++) {
561 if (WARN_ON(offset_in_page(IDX[k]) +
562 template[i].tap[k] > PAGE_SIZE))
563 goto out;
564
565 q = xbuf[IDX[k] >> PAGE_SHIFT] +
566 offset_in_page(IDX[k]);
567
568 memcpy(q, template[i].input + temp,
569 template[i].tap[k]);
570
571 n = template[i].tap[k];
572 if (k == template[i].np - 1 && enc)
573 n += authsize;
574 if (offset_in_page(q) + n < PAGE_SIZE)
575 q[n] = 0;
576
577 sg_set_buf(&sg[k], q, template[i].tap[k]);
578 temp += template[i].tap[k];
579 }
580
581 ret = crypto_aead_setauthsize(tfm, authsize);
582 if (ret) {
583 printk(KERN_ERR "alg: aead: Failed to set "
584 "authsize to %u on chunk test %d for "
585 "%s\n", authsize, j, algo);
586 goto out;
587 }
588
589 if (enc) {
590 if (WARN_ON(sg[k - 1].offset +
591 sg[k - 1].length + authsize >
592 PAGE_SIZE)) {
593 ret = -EINVAL;
594 goto out;
595 }
596
597 sg[k - 1].length += authsize;
598 }
599
600 sg_init_table(asg, template[i].anp);
601 ret = -EINVAL;
602 for (k = 0, temp = 0; k < template[i].anp; k++) {
603 if (WARN_ON(offset_in_page(IDX[k]) +
604 template[i].atap[k] > PAGE_SIZE))
605 goto out;
606 sg_set_buf(&asg[k],
607 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
608 offset_in_page(IDX[k]),
609 template[i].assoc + temp,
610 template[i].atap[k]),
611 template[i].atap[k]);
612 temp += template[i].atap[k];
613 }
614
615 aead_request_set_crypt(req, sg, sg,
616 template[i].ilen,
617 iv);
618
619 aead_request_set_assoc(req, asg, template[i].alen);
620
621 ret = enc ?
622 crypto_aead_encrypt(req) :
623 crypto_aead_decrypt(req);
624
625 switch (ret) {
626 case 0:
627 if (template[i].novrfy) {
628 /* verification was supposed to fail */
629 printk(KERN_ERR "alg: aead: %s failed "
630 "on chunk test %d for %s: ret "
631 "was 0, expected -EBADMSG\n",
632 e, j, algo);
633 /* so really, we got a bad message */
634 ret = -EBADMSG;
635 goto out;
636 }
637 break;
638 case -EINPROGRESS:
639 case -EBUSY:
640 ret = wait_for_completion_interruptible(
641 &result.completion);
642 if (!ret && !(ret = result.err)) {
643 INIT_COMPLETION(result.completion);
644 break;
645 }
646 case -EBADMSG:
647 if (template[i].novrfy)
648 /* verification failure was expected */
649 continue;
650 /* fall through */
651 default:
652 printk(KERN_ERR "alg: aead: %s failed on "
653 "chunk test %d for %s: ret=%d\n", e, j,
654 algo, -ret);
655 goto out;
656 }
657
658 ret = -EINVAL;
659 for (k = 0, temp = 0; k < template[i].np; k++) {
660 q = xbuf[IDX[k] >> PAGE_SHIFT] +
661 offset_in_page(IDX[k]);
662
663 n = template[i].tap[k];
664 if (k == template[i].np - 1)
665 n += enc ? authsize : -authsize;
666
667 if (memcmp(q, template[i].result + temp, n)) {
668 printk(KERN_ERR "alg: aead: Chunk "
669 "test %d failed on %s at page "
670 "%u for %s\n", j, e, k, algo);
671 hexdump(q, n);
672 goto out;
673 }
674 else {
675 printk(KERN_ERR "alg: aead: Chunk "
676 "test %d passed on %s at page "
677 "%u for %s\n", j, e, k, algo);
678 hexdump(q, n);
679 }
680
681 q += n;
682 if (k == template[i].np - 1 && !enc) {
683 if (memcmp(q, template[i].input +
684 temp + n, authsize))
685 n = authsize;
686 else
687 n = 0;
688 } else {
689 for (n = 0; offset_in_page(q + n) &&
690 q[n]; n++)
691 ;
692 }
693 if (n) {
694 printk(KERN_ERR "alg: aead: Result "
695 "buffer corruption in chunk "
696 "test %d on %s at page %u for "
697 "%s: %u bytes:\n", j, e, k,
698 algo, n);
699 hexdump(q, n);
700 goto out;
701 }
702 temp += template[i].tap[k];
703 }
704 }
705 }
706
707 ret = 0;
708
709 out:
710 aead_request_free(req);
711 testmgr_free_buf(axbuf);
712 out_noaxbuf:
713 testmgr_free_buf(xbuf);
714 out_noxbuf:
715 return ret;
716 }
717
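/*
 * Run single-block cipher test vectors synchronously: set the key, then
 * encrypt or decrypt the input in place one block at a time and compare
 * the result with the expected output.
 */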
718 static int test_cipher(struct crypto_cipher *tfm, int enc,
719 struct cipher_testvec *template, unsigned int tcount)
720 {
721 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
722 unsigned int i, j, k;
723 char *q;
724 const char *e;
725 void *data;
726 char *xbuf[XBUFSIZE];
727 int ret = -ENOMEM;
728
729 if (testmgr_alloc_buf(xbuf))
730 goto out_nobuf;
731
732 if (enc == ENCRYPT)
733 e = "encryption";
734 else
735 e = "decryption";
736
737 j = 0;
738 for (i = 0; i < tcount; i++) {
739 if (template[i].np)
740 continue;
741
742 j++;
743
744 ret = -EINVAL;
745 if (WARN_ON(template[i].ilen > PAGE_SIZE))
746 goto out;
747
748 data = xbuf[0];
749 memcpy(data, template[i].input, template[i].ilen);
750
751 crypto_cipher_clear_flags(tfm, ~0);
752 if (template[i].wk)
753 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
754
755 ret = crypto_cipher_setkey(tfm, template[i].key,
756 template[i].klen);
757 if (!ret == template[i].fail) {
758 printk(KERN_ERR "alg: cipher: setkey failed "
759 "on test %d for %s: flags=%x\n", j,
760 algo, crypto_cipher_get_flags(tfm));
761 goto out;
762 } else if (ret)
763 continue;
764
765 for (k = 0; k < template[i].ilen;
766 k += crypto_cipher_blocksize(tfm)) {
767 if (enc)
768 crypto_cipher_encrypt_one(tfm, data + k,
769 data + k);
770 else
771 crypto_cipher_decrypt_one(tfm, data + k,
772 data + k);
773 }
774
775 q = data;
776 if (memcmp(q, template[i].result, template[i].rlen)) {
777 printk(KERN_ERR "alg: cipher: Test %d failed "
778 "on %s for %s\n", j, e, algo);
779 hexdump(q, template[i].rlen);
780 ret = -EINVAL;
781 goto out;
782 }
783 else {
784 printk(KERN_ERR "alg: cipher: Test %d passed "
785 "on %s for %s\n", j, e, algo);
786 hexdump(q, template[i].rlen);
787 }
788 }
789
790 ret = 0;
791
792 out:
793 testmgr_free_buf(xbuf);
794 out_nobuf:
795 return ret;
796 }
797
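/*
 * Run ablkcipher test vectors for one direction.  Linear vectors are
 * processed in place from a single page; chunked vectors (np/tap[]) are
 * scattered across pages, and the bytes following each chunk are checked
 * afterwards for result buffer corruption.
 */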
798 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
799 struct cipher_testvec *template, unsigned int tcount)
800 {
801 const char *algo =
802 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
803 unsigned int i, j, k, n, temp;
804 char *q;
805 struct ablkcipher_request *req;
806 struct scatterlist sg[8];
807 const char *e;
808 struct tcrypt_result result;
809 void *data;
810 char iv[MAX_IVLEN];
811 char *xbuf[XBUFSIZE];
812 int ret = -ENOMEM;
813
814 if (testmgr_alloc_buf(xbuf))
815 goto out_nobuf;
816
817 if (enc == ENCRYPT)
818 e = "encryption";
819 else
820 e = "decryption";
821
822 init_completion(&result.completion);
823
824 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
825 if (!req) {
826 printk(KERN_ERR "alg: skcipher: Failed to allocate request "
827 "for %s\n", algo);
828 goto out;
829 }
830
831 //printk("tcount: %u\n", tcount);
832
833 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
834 tcrypt_complete, &result);
835
836 j = 0;
837 for (i = 0; i < tcount; i++) {
838 if (template[i].iv)
839 memcpy(iv, template[i].iv, MAX_IVLEN);
840 else
841 memset(iv, 0, MAX_IVLEN);
842
843 if (!(template[i].np)) {
844 //printk("np: %d, i: %d, j: %d\n", template[i].np, i, j);
845 j++;
846
847 ret = -EINVAL;
848 if (WARN_ON(template[i].ilen > PAGE_SIZE))
849 goto out;
850
851 data = xbuf[0];
852 memcpy(data, template[i].input, template[i].ilen);
853
854 crypto_ablkcipher_clear_flags(tfm, ~0);
855 if (template[i].wk)
856 crypto_ablkcipher_set_flags(
857 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
858
859 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
860 template[i].klen);
861 if (!ret == template[i].fail) {
862 printk(KERN_ERR "alg: skcipher: setkey failed "
863 "on test %d for %s: flags=%x\n", j,
864 algo, crypto_ablkcipher_get_flags(tfm));
865 printk("ERROR\n");
866 goto out;
867 } else if (ret)
868 continue;
869
870 sg_init_one(&sg[0], data, template[i].ilen);
871
872 ablkcipher_request_set_crypt(req, sg, sg,
873 template[i].ilen, iv);
874 ret = enc ?
875 crypto_ablkcipher_encrypt(req) :
876 crypto_ablkcipher_decrypt(req);
877
878 switch (ret) {
879 case 0:
880 break;
881 case -EINPROGRESS:
882 case -EBUSY:
883 ret = wait_for_completion_interruptible(
884 &result.completion);
885 if (!ret && !((ret = result.err))) {
886 INIT_COMPLETION(result.completion);
887 break;
888 }
889 /* fall through */
890 default:
891 printk(KERN_ERR "alg: skcipher: %s failed on "
892 "test %d for %s: ret=%d\n", e, j, algo,
893 -ret);
894 printk("ERROR\n");
895 goto out;
896 }
897 q = data;
898 if (memcmp(q, template[i].result, template[i].rlen)) {
899 printk(KERN_ERR "alg: skcipher: Test %d "
900 "failed on %s for %s\n", j, e, algo);
901 hexdump(q, template[i].rlen);
902 printk("ERROR\n");
903 ret = -EINVAL;
904 goto out;
905 }
906 else {
907 printk(KERN_ERR "alg: skcipher: Test %d "
908 "*PASSED* on %s for %s\n", j, e, algo);
909 hexdump(q, template[i].rlen);
910 printk("DONE\n");
911 }
912 }
913 }
914 printk("Testing %s chunking across pages.\n", algo);
915 j = 0;
916 for (i = 0; i < tcount; i++) {
917 if (template[i].iv)
918 memcpy(iv, template[i].iv, MAX_IVLEN);
919 else
920 memset(iv, 0, MAX_IVLEN);
921
922 if (template[i].np) {
923 j++;
924
925 crypto_ablkcipher_clear_flags(tfm, ~0);
926 if (template[i].wk)
927 crypto_ablkcipher_set_flags(
928 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
929
930 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
931 template[i].klen);
932 if (!ret == template[i].fail) {
933 printk(KERN_ERR "alg: skcipher: setkey failed "
934 "on chunk test %d for %s: flags=%x\n",
935 j, algo,
936 crypto_ablkcipher_get_flags(tfm));
937 printk("ERROR\n");
938 goto out;
939 } else if (ret)
940 continue;
941
942 temp = 0;
943 ret = -EINVAL;
944 sg_init_table(sg, template[i].np);
945 for (k = 0; k < template[i].np; k++) {
946 if (WARN_ON(offset_in_page(IDX[k]) +
947 template[i].tap[k] > PAGE_SIZE))
948 goto out;
949
950 q = xbuf[IDX[k] >> PAGE_SHIFT] +
951 offset_in_page(IDX[k]);
952
953 memcpy(q, template[i].input + temp,
954 template[i].tap[k]);
955
956 if (offset_in_page(q) + template[i].tap[k] <
957 PAGE_SIZE)
958 q[template[i].tap[k]] = 0;
959
960 sg_set_buf(&sg[k], q, template[i].tap[k]);
961
962 temp += template[i].tap[k];
963 }
964
965 ablkcipher_request_set_crypt(req, sg, sg,
966 template[i].ilen, iv);
967
968 ret = enc ?
969 crypto_ablkcipher_encrypt(req) :
970 crypto_ablkcipher_decrypt(req);
971
972 switch (ret) {
973 case 0:
974 break;
975 case -EINPROGRESS:
976 case -EBUSY:
977 ret = wait_for_completion_interruptible(
978 &result.completion);
979 if (!ret && !((ret = result.err))) {
980 INIT_COMPLETION(result.completion);
981 break;
982 }
983 /* fall through */
984 default:
985 printk(KERN_ERR "alg: skcipher: %s failed on "
986 "chunk test %d for %s: ret=%d\n", e, j,
987 algo, -ret);
988 printk("ERROR\n");
989 goto out;
990 }
991
992 temp = 0;
993 ret = -EINVAL;
994 for (k = 0; k < template[i].np; k++) {
995 q = xbuf[IDX[k] >> PAGE_SHIFT] +
996 offset_in_page(IDX[k]);
997
998 if (memcmp(q, template[i].result + temp,
999 template[i].tap[k])) {
1000 printk(KERN_ERR "alg: skcipher: Chunk "
1001 "test %d failed on %s at page "
1002 "%u for %s\n", j, e, k, algo);
1003 hexdump(q, template[i].tap[k]);
1004 printk("ERROR\n");
1005 goto out;
1006 }
1007 else {
1008 printk(KERN_ERR "alg: skcipher: Chunk "
1009 "test %d *PASSED* on %s at page "
1010 "%u for %s\n", j, e, k, algo);
1011 hexdump(q, template[i].tap[k]);
1012 printk("DONE\n");
1013 }
1014
1015 q += template[i].tap[k];
1016 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1017 ;
1018 #if 1
1019 if (n) {
1020 printk(KERN_ERR "alg: skcipher: "
1021 "Result buffer corruption in "
1022 "chunk test %d on %s at page "
1023 "%u for %s: %u bytes:\n", j, e,
1024 k, algo, n);
1025 hexdump(q, n);
1026 printk("ERROR\n");
1027 goto out;
1028 }
1029 else {
1030 printk(KERN_ERR "alg: skcipher: "
1031 "Result buffer clean in "
1032 "chunk test %d on %s at page "
1033 "%u for %s: %u bytes:\n", j, e,
1034 k, algo, n);
1035 hexdump(q, n);
1036 printk("Chunk Buffer clean\n");
1037 }
1038 #endif
1039 temp += template[i].tap[k];
1040 }
1041 }
1042 }
1043
1044 ret = 0;
1045 out:
1046 ablkcipher_request_free(req);
1047 testmgr_free_buf(xbuf);
1048 out_nobuf:
1049 return ret;
1050 }
1051
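/*
 * Run synchronous compression and decompression test vectors and compare
 * both the output length and the output bytes with the expected values.
 */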
1052 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1053 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1054 {
1055 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1056 unsigned int i;
1057 char result[COMP_BUF_SIZE];
1058 int ret;
1059
1060 for (i = 0; i < ctcount; i++) {
1061 int ilen;
1062 unsigned int dlen = COMP_BUF_SIZE;
1063
1064 memset(result, 0, sizeof (result));
1065
1066 ilen = ctemplate[i].inlen;
1067 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1068 ilen, result, &dlen);
1069 if (ret) {
1070 printk(KERN_ERR "alg: comp: compression failed "
1071 "on test %d for %s: ret=%d\n", i + 1, algo,
1072 -ret);
1073 goto out;
1074 }
1075
1076 if (dlen != ctemplate[i].outlen) {
1077 printk(KERN_ERR "alg: comp: Compression test %d "
1078 "failed for %s: output len = %d\n", i + 1, algo,
1079 dlen);
1080 ret = -EINVAL;
1081 goto out;
1082 }
1083
1084 if (memcmp(result, ctemplate[i].output, dlen)) {
1085 printk(KERN_ERR "alg: comp: Compression test %d "
1086 "failed for %s\n", i + 1, algo);
1087 hexdump(result, dlen);
1088 ret = -EINVAL;
1089 goto out;
1090 }
1091 else {
1092 printk(KERN_ERR "alg: comp: Compression test %d "
1093 "passed for %s\n", i + 1, algo);
1094 hexdump(result, dlen);
1095 }
1096 }
1097
1098 for (i = 0; i < dtcount; i++) {
1099 int ilen;
1100 unsigned int dlen = COMP_BUF_SIZE;
1101
1102 memset(result, 0, sizeof (result));
1103
1104 ilen = dtemplate[i].inlen;
1105 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1106 ilen, result, &dlen);
1107 if (ret) {
1108 printk(KERN_ERR "alg: comp: decompression failed "
1109 "on test %d for %s: ret=%d\n", i + 1, algo,
1110 -ret);
1111 goto out;
1112 }
1113
1114 if (dlen != dtemplate[i].outlen) {
1115 printk(KERN_ERR "alg: comp: Decompression test %d "
1116 "failed for %s: output len = %d\n", i + 1, algo,
1117 dlen);
1118 ret = -EINVAL;
1119 goto out;
1120 }
1121
1122 if (memcmp(result, dtemplate[i].output, dlen)) {
1123 printk(KERN_ERR "alg: comp: Decompression test %d "
1124 "failed for %s\n", i + 1, algo);
1125 hexdump(result, dlen);
1126 ret = -EINVAL;
1127 goto out;
1128 }
1129 else {
1130 printk(KERN_ERR "alg: comp: Decompression test %d "
1131 "passed for %s\n", i + 1, algo);
1132 hexdump(result, dlen);
1133 }
1134 }
1135
1136 ret = 0;
1137
1138 out:
1139 return ret;
1140 }
1141
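/*
 * Run partial (streaming) compression test vectors: after setup and init,
 * the input is fed in two halves through crypto_compress_update() /
 * crypto_decompress_update(), then finalised, and the total produced
 * output is checked against the expected length and contents.
 */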
1142 static int test_pcomp(struct crypto_pcomp *tfm,
1143 struct pcomp_testvec *ctemplate,
1144 struct pcomp_testvec *dtemplate, int ctcount,
1145 int dtcount)
1146 {
1147 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1148 unsigned int i;
1149 char result[COMP_BUF_SIZE];
1150 int res;
1151
1152 for (i = 0; i < ctcount; i++) {
1153 struct comp_request req;
1154 unsigned int produced = 0;
1155
1156 res = crypto_compress_setup(tfm, ctemplate[i].params,
1157 ctemplate[i].paramsize);
1158 if (res) {
1159 pr_err("alg: pcomp: compression setup failed on test "
1160 "%d for %s: error=%d\n", i + 1, algo, res);
1161 return res;
1162 }
1163
1164 res = crypto_compress_init(tfm);
1165 if (res) {
1166 pr_err("alg: pcomp: compression init failed on test "
1167 "%d for %s: error=%d\n", i + 1, algo, res);
1168 return res;
1169 }
1170
1171 memset(result, 0, sizeof(result));
1172
1173 req.next_in = ctemplate[i].input;
1174 req.avail_in = ctemplate[i].inlen / 2;
1175 req.next_out = result;
1176 req.avail_out = ctemplate[i].outlen / 2;
1177
1178 res = crypto_compress_update(tfm, &req);
1179 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1180 pr_err("alg: pcomp: compression update failed on test "
1181 "%d for %s: error=%d\n", i + 1, algo, res);
1182 return res;
1183 }
1184 if (res > 0)
1185 produced += res;
1186
1187 /* Add remaining input data */
1188 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1189
1190 res = crypto_compress_update(tfm, &req);
1191 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1192 pr_err("alg: pcomp: compression update failed on test "
1193 "%d for %s: error=%d\n", i + 1, algo, res);
1194 return res;
1195 }
1196 if (res > 0)
1197 produced += res;
1198
1199 /* Provide remaining output space */
1200 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1201
1202 res = crypto_compress_final(tfm, &req);
1203 if (res < 0) {
1204 pr_err("alg: pcomp: compression final failed on test "
1205 "%d for %s: error=%d\n", i + 1, algo, res);
1206 return res;
1207 }
1208 produced += res;
1209
1210 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1211 pr_err("alg: comp: Compression test %d failed for %s: "
1212 "output len = %d (expected %d)\n", i + 1, algo,
1213 COMP_BUF_SIZE - req.avail_out,
1214 ctemplate[i].outlen);
1215 return -EINVAL;
1216 }
1217
1218 if (produced != ctemplate[i].outlen) {
1219 pr_err("alg: comp: Compression test %d failed for %s: "
1220 "returned len = %u (expected %d)\n", i + 1,
1221 algo, produced, ctemplate[i].outlen);
1222 return -EINVAL;
1223 }
1224
1225 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1226 pr_err("alg: pcomp: Compression test %d failed for "
1227 "%s\n", i + 1, algo);
1228 hexdump(result, ctemplate[i].outlen);
1229 return -EINVAL;
1230 }
1231 }
1232
1233 for (i = 0; i < dtcount; i++) {
1234 struct comp_request req;
1235 unsigned int produced = 0;
1236
1237 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1238 dtemplate[i].paramsize);
1239 if (res) {
1240 pr_err("alg: pcomp: decompression setup failed on "
1241 "test %d for %s: error=%d\n", i + 1, algo, res);
1242 return res;
1243 }
1244
1245 res = crypto_decompress_init(tfm);
1246 if (res) {
1247 pr_err("alg: pcomp: decompression init failed on test "
1248 "%d for %s: error=%d\n", i + 1, algo, res);
1249 return res;
1250 }
1251
1252 memset(result, 0, sizeof(result));
1253
1254 req.next_in = dtemplate[i].input;
1255 req.avail_in = dtemplate[i].inlen / 2;
1256 req.next_out = result;
1257 req.avail_out = dtemplate[i].outlen / 2;
1258
1259 res = crypto_decompress_update(tfm, &req);
1260 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1261 pr_err("alg: pcomp: decompression update failed on "
1262 "test %d for %s: error=%d\n", i + 1, algo, res);
1263 return res;
1264 }
1265 if (res > 0)
1266 produced += res;
1267
1268 /* Add remaining input data */
1269 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1270
1271 res = crypto_decompress_update(tfm, &req);
1272 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1273 pr_err("alg: pcomp: decompression update failed on "
1274 "test %d for %s: error=%d\n", i + 1, algo, res);
1275 return res;
1276 }
1277 if (res > 0)
1278 produced += res;
1279
1280 /* Provide remaining output space */
1281 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1282
1283 res = crypto_decompress_final(tfm, &req);
1284 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1285 pr_err("alg: pcomp: decompression final failed on "
1286 "test %d for %s: error=%d\n", i + 1, algo, res);
1287 return res;
1288 }
1289 if (res > 0)
1290 produced += res;
1291
1292 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1293 pr_err("alg: comp: Decompression test %d failed for "
1294 "%s: output len = %d (expected %d)\n", i + 1,
1295 algo, COMP_BUF_SIZE - req.avail_out,
1296 dtemplate[i].outlen);
1297 return -EINVAL;
1298 }
1299
1300 if (produced != dtemplate[i].outlen) {
1301 pr_err("alg: comp: Decompression test %d failed for "
1302 "%s: returned len = %u (expected %d)\n", i + 1,
1303 algo, produced, dtemplate[i].outlen);
1304 return -EINVAL;
1305 }
1306
1307 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1308 pr_err("alg: pcomp: Decompression test %d failed for "
1309 "%s\n", i + 1, algo);
1310 hexdump(result, dtemplate[i].outlen);
1311 return -EINVAL;
1312 }
1313 }
1314
1315 return 0;
1316 }
1317
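/*
 * Speed test helper: issue encrypt/decrypt requests back to back for
 * "sec" seconds (measured in jiffies) and report the number of completed
 * operations and the total bytes processed.
 */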
1318 static int test_ablkcipher_jiffies(struct ablkcipher_request *req, int enc,
1319 int sec, struct tcrypt_result *result,
1320 int blen)
1321 {
1322 unsigned long start, end;
1323 int bcount;
1324 int ret;
1325
1326 for (start = jiffies, end = start + sec * HZ, bcount = 0;
1327 time_before(jiffies, end); bcount++) {
1328
1329 if (enc)
1330 ret = crypto_ablkcipher_encrypt(req);
1331 else
1332 ret = crypto_ablkcipher_decrypt(req);
1333
1334 switch (ret) {
1335 case 0:
1336 break;
1337 case -EINPROGRESS:
1338 case -EBUSY:
1339 ret = wait_for_completion_interruptible(
1340 &result->completion);
1341 if (!ret && !((ret = result->err))) {
1342 INIT_COMPLETION(result->completion);
1343 break;
1344 }
1345 default:
1346 printk("ERROR\n");
1347 return ret;
1348 }
1349 }
1350
1351 printk("%d operations in %d seconds (%ld bytes)\n",
1352 bcount, sec, (long)bcount * blen);
1353
1354 return 0;
1355 }
1356
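/*
 * Speed test helper: after four warm-up operations, time eight requests
 * using the MIPS CP0 count register (read_c0_count()) and report the
 * average number of cycles per operation for the given block size.
 */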
1357 static int test_ablkcipher_cycles(struct ablkcipher_request *req, int enc,
1358 int sec, struct tcrypt_result *result,
1359 int blen)
1360 {
1361 unsigned long cycles = 0;
1362 int ret = 0;
1363 int i;
1364 unsigned long start, end = 0;
1365 //local_bh_disable();
1366 //local_irq_disable();
1367 /* Warm-up run. */
1368 for (i = 0; i < 4; i++) {
1369 if (enc)
1370 ret = crypto_ablkcipher_encrypt(req);
1371 else
1372 ret = crypto_ablkcipher_decrypt(req);
1373
1374 switch (ret) {
1375 case 0:
1376 break;
1377 case -EINPROGRESS:
1378 case -EBUSY:
1379 #if 0
1380 ret = wait_for_completion_interruptible(
1381 &result->completion);
1382 if (!ret && !((ret = result->err))) {
1383 INIT_COMPLETION(result->completion);
1384 break;
1385 }
1386 #else
1387
1388 wait_for_completion(&result->completion);
1389 INIT_COMPLETION(result->completion);
1390 break;
1391 #endif
1392 default:
1393 printk("ERROR\n");
1394 return ret;
1395 }
1396
1397 if (signal_pending(current)) {
1398 printk("Signal caught\n");
1399 break;
1400 }
1401
1402 }
1403
1404 //printk("Debug ln: (%d), fn: %s\n", __LINE__, __func__);
1405 /* The real thing. */
1406 for (i = 0; i < 8; i++) {
1407 end = 0;
1408 start = 0;
1409 start = read_c0_count();
1410 if (enc)
1411 ret = crypto_ablkcipher_encrypt(req);
1412 else
1413 ret = crypto_ablkcipher_decrypt(req);
1414
1415 switch (ret) {
1416 case 0:
end = read_c0_count(); /* synchronous completion: record the end timestamp */
1417 break;
1418 case -EINPROGRESS:
1419 case -EBUSY:
1420 #if 0
1421 ret = wait_for_completion_interruptible(
1422 &result->completion);
1423 end = get_cycles();
1424 if (!ret && !((ret = result->err))) {
1425 INIT_COMPLETION(result->completion);
1426 break;
1427 }
1428 #else
1429 wait_for_completion(&result->completion);
1430 end = read_c0_count();
1431 INIT_COMPLETION(result->completion);
1432 break;
1433 #endif
1434 default:
1435 printk("ERROR\n");
1436 return ret;
1437 }
1438
1439 if (signal_pending(current)) {
1440 printk("Signal caught\n");
1441 break;
1442 }
1443
1444 cycles += end - start;
1445 }
1446
1447 // local_irq_enable();
1448 // local_bh_enable();
1449
1450 printk("1 operation in %lu cycles (%d bytes)\n",
1451 (cycles + 4) / 8, blen);
1452
1453 return 0;
1454
1455 }
1456
1457 static u32 b_size[] = {16, 64, 256, 1024, 8192, 0};
1458
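/*
 * Throughput test: for every key size in "keysize" and every block size
 * in b_size[], set a key (taken from the template when the length
 * matches), build a four-page scatterlist and run either the
 * jiffies-based or the cycle-based timing loop, depending on "sec".
 */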
1459 static int test_skcipher_speed(struct crypto_ablkcipher *tfm, int enc,
1460 struct cipher_speed_template *template,
1461 unsigned int tcount, unsigned int sec,
1462 u8* keysize)
1463 {
1464 const char *algo =
1465 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
1466
1467 unsigned int i = 0, j, iv_len;
1468 struct ablkcipher_request *req;
1469 //struct scatterlist sg[8];
1470 const char *e;
1471 struct tcrypt_result result;
1472 char iv[MAX_IVLEN];
1473 static char *xbuf[XBUFSIZE];
1474 int ret = -ENOMEM;
1475 u32 *block_size;
1476 static char *tvmem_buf[4];
1477 const char *key;
1478
1479 if (testmgr_alloc_buf(xbuf))
1480 goto out_nobuf;
1481
1482 if (enc == ENCRYPT)
1483 e = "encryption";
1484 else
1485 e = "decryption";
1486
1487 init_completion(&result.completion);
1488
1489 printk("Start ablkcipher speed test\n");
1490
1491 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
1492 if (!req) {
1493 printk(KERN_ERR "alg: skcipher: Failed to allocate request "
1494 "for %s\n", algo);
1495 goto out;
1496 }
1497
1498 // ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1499 ablkcipher_request_set_callback(req, 0,
1500 tcrypt_complete, &result);
1501
1502 do {
1503
1504 block_size = b_size;
1505
1506 do {
1507 struct scatterlist sg[4];
1508 if ((*keysize + *block_size) > 4 * PAGE_SIZE) {
1509 printk("template (%u) too big for "
1510 "tvmem_buf (%lu)\n", *keysize + *block_size,
1511 4 * PAGE_SIZE);
1512 goto out;
1513 }
1514 crypto_ablkcipher_clear_flags(tfm, ~0);
1515
1516 printk("test %u (%d bit key, %d byte blocks): ", i,
1517 *keysize * 8, *block_size);
1518
tvmem_buf[0] = xbuf[0]; /* tvmem_buf[] starts out NULL; point it at a real page first */
1519 memset(tvmem_buf[0], 0xff, PAGE_SIZE);
1520 key = tvmem_buf[0];
1521
1522 for (j = 0; j < tcount; j++) {
1523 if (template[j].klen == *keysize) {
1524 key = template[j].key;
1525 break;
1526 }
1527 }
1528 ret = crypto_ablkcipher_setkey(tfm, key, *keysize);
1529 if (ret) {
1530 printk("Error setting of keys\n");
1531 goto out;
1532 }
1533
1534 sg_init_table(sg, 4);
1535
1536 for (j = 0; j < 4; j++) {
1537 tvmem_buf[j] = xbuf[j];
1538 memset(tvmem_buf[j], 0xff, PAGE_SIZE);
1539 sg_set_buf(sg + j, tvmem_buf[j], PAGE_SIZE);
1540 }
1541
1542 iv_len = crypto_ablkcipher_ivsize(tfm);
1543 if (iv_len) {
1544 memset(&iv, 0xff, iv_len);
1545 }
1546
1547 ablkcipher_request_set_crypt(req, sg, sg,
1548 *block_size, iv);
1549
1550 //printk("Debug ln: %d, %s\n", __LINE__, __func__);
1551 if (sec)
1552 ret = test_ablkcipher_jiffies(req, enc, sec,
1553 &result, *block_size);
1554 else
1555 ret = test_ablkcipher_cycles(req, enc, sec,
1556 &result, *block_size);
1557
1558
1559 if (ret) {
1560 printk(KERN_ERR "alg: skcipher: %s failed on "
1561 "test %d for %s: ret=%d\n", e, j, algo,
1562 -ret);
1563 goto out;
1564 }
1565
1566 block_size++;
1567 i++;
1568 } while (*block_size);
1569 keysize++;
1570 } while (*keysize);
1571
1572 ret = 0;
1573 out:
1574 printk("End ablkcipher speed test\n");
1575 ablkcipher_request_free(req);
1576 testmgr_free_buf(xbuf);
1577 #if 0
1578 if (!completion_done(&result->completion)) {
1579 printk("There are threads waiting for completion, completing all\n");
1580 complete_all(&result->completion);
1581 }
1582 #endif
1583
1584 //testmgr_free_buf(tvbuf);
1585 out_nobuf:
1586 return ret;
1587
1588 }
1589
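/*
 * Run CPRNG test vectors: seed the RNG with V || key || DT from the
 * template, generate "rlen" bytes "loops" times and compare the final
 * block with the expected output.
 */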
1590 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1591 unsigned int tcount)
1592 {
1593 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1594 int err = 0, i, j, seedsize;
1595 u8 *seed;
1596 char result[32];
1597
1598 seedsize = crypto_rng_seedsize(tfm);
1599
1600 seed = kmalloc(seedsize, GFP_KERNEL);
1601 if (!seed) {
1602 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1603 "for %s\n", algo);
1604 return -ENOMEM;
1605 }
1606
1607 for (i = 0; i < tcount; i++) {
1608 memset(result, 0, 32);
1609
1610 memcpy(seed, template[i].v, template[i].vlen);
1611 memcpy(seed + template[i].vlen, template[i].key,
1612 template[i].klen);
1613 memcpy(seed + template[i].vlen + template[i].klen,
1614 template[i].dt, template[i].dtlen);
1615
1616 err = crypto_rng_reset(tfm, seed, seedsize);
1617 if (err) {
1618 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1619 "for %s\n", algo);
1620 goto out;
1621 }
1622
1623 for (j = 0; j < template[i].loops; j++) {
1624 err = crypto_rng_get_bytes(tfm, result,
1625 template[i].rlen);
1626 if (err != template[i].rlen) {
1627 printk(KERN_ERR "alg: cprng: Failed to obtain "
1628 "the correct amount of random data for "
1629 "%s (requested %d, got %d)\n", algo,
1630 template[i].rlen, err);
1631 goto out;
1632 }
1633 }
1634
1635 err = memcmp(result, template[i].result,
1636 template[i].rlen);
1637 if (err) {
1638 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1639 i, algo);
1640 hexdump(result, template[i].rlen);
1641 err = -EINVAL;
1642 goto out;
1643 }
1644 }
1645
1646 out:
1647 kfree(seed);
1648 return err;
1649 }
1650
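/*
 * The alg_test_*() wrappers below allocate the requested transform, run
 * the matching test routine over the vectors in the suite descriptor
 * (enc/dec or comp/decomp sets) and free the transform again.
 */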
1651 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1652 u32 type, u32 mask)
1653 {
1654 struct crypto_aead *tfm;
1655 int err = 0;
1656
1657 tfm = crypto_alloc_aead(driver, type, mask);
1658 if (IS_ERR(tfm)) {
1659 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1660 "%ld\n", driver, PTR_ERR(tfm));
1661 return PTR_ERR(tfm);
1662 }
1663
1664 if (desc->suite.aead.enc.vecs) {
1665 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1666 desc->suite.aead.enc.count);
1667 if (err)
1668 goto out;
1669 }
1670
1671 if (!err && desc->suite.aead.dec.vecs)
1672 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1673 desc->suite.aead.dec.count);
1674
1675 out:
1676 crypto_free_aead(tfm);
1677 return err;
1678 }
1679
1680 static int alg_test_cipher(const struct alg_test_desc *desc,
1681 const char *driver, u32 type, u32 mask)
1682 {
1683 struct crypto_cipher *tfm;
1684 int err = 0;
1685
1686 tfm = crypto_alloc_cipher(driver, type, mask);
1687 if (IS_ERR(tfm)) {
1688 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1689 "%s: %ld\n", driver, PTR_ERR(tfm));
1690 return PTR_ERR(tfm);
1691 }
1692
1693 if (desc->suite.cipher.enc.vecs) {
1694 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1695 desc->suite.cipher.enc.count);
1696 if (err)
1697 goto out;
1698 }
1699
1700 if (desc->suite.cipher.dec.vecs)
1701 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1702 desc->suite.cipher.dec.count);
1703
1704 out:
1705 crypto_free_cipher(tfm);
1706 return err;
1707 }
1708
1709 static int alg_test_skcipher(const struct alg_test_desc *desc,
1710 const char *driver, u32 type, u32 mask)
1711 {
1712 struct crypto_ablkcipher *tfm;
1713 int err = 0;
1714
1715 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1716 if (IS_ERR(tfm)) {
1717 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1718 "%s: %ld\n", driver, PTR_ERR(tfm));
1719 return PTR_ERR(tfm);
1720 }
1721
1722 if (desc->suite.cipher.enc.vecs) {
1723 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1724 desc->suite.cipher.enc.count);
1725 if (err)
1726 goto out;
1727 }
1728
1729 if (desc->suite.cipher.dec.vecs)
1730 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1731 desc->suite.cipher.dec.count);
1732
1733 out:
1734 crypto_free_ablkcipher(tfm);
1735 return err;
1736 }
1737
1738 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1739 u32 type, u32 mask)
1740 {
1741 struct crypto_comp *tfm;
1742 int err;
1743
1744 tfm = crypto_alloc_comp(driver, type, mask);
1745 if (IS_ERR(tfm)) {
1746 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1747 "%ld\n", driver, PTR_ERR(tfm));
1748 return PTR_ERR(tfm);
1749 }
1750
1751 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1752 desc->suite.comp.decomp.vecs,
1753 desc->suite.comp.comp.count,
1754 desc->suite.comp.decomp.count);
1755
1756 crypto_free_comp(tfm);
1757 return err;
1758 }
1759
1760 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1761 u32 type, u32 mask)
1762 {
1763 struct crypto_pcomp *tfm;
1764 int err;
1765
1766 tfm = crypto_alloc_pcomp(driver, type, mask);
1767 if (IS_ERR(tfm)) {
1768 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1769 driver, PTR_ERR(tfm));
1770 return PTR_ERR(tfm);
1771 }
1772
1773 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1774 desc->suite.pcomp.decomp.vecs,
1775 desc->suite.pcomp.comp.count,
1776 desc->suite.pcomp.decomp.count);
1777
1778 crypto_free_pcomp(tfm);
1779 return err;
1780 }
1781
1782 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1783 u32 type, u32 mask)
1784 {
1785 struct crypto_ahash *tfm;
1786 int err;
1787
1788 tfm = crypto_alloc_ahash(driver, type, mask);
1789 if (IS_ERR(tfm)) {
1790 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1791 "%ld\n", driver, PTR_ERR(tfm));
1792 return PTR_ERR(tfm);
1793 }
1794
1795 err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count);
1796
1797 crypto_free_ahash(tfm);
1798 return err;
1799 }
1800
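/*
 * crc32c gets the generic hash test plus an extra check of the shash
 * interface: the context is seeded with a known value and the final
 * result must be its bitwise complement.
 */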
1801 static int alg_test_crc32c(const struct alg_test_desc *desc,
1802 const char *driver, u32 type, u32 mask)
1803 {
1804 struct crypto_shash *tfm;
1805 u32 val;
1806 int err;
1807
1808 err = alg_test_hash(desc, driver, type, mask);
1809 if (err)
1810 goto out;
1811
1812 tfm = crypto_alloc_shash(driver, type, mask);
1813 if (IS_ERR(tfm)) {
1814 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1815 "%ld\n", driver, PTR_ERR(tfm));
1816 err = PTR_ERR(tfm);
1817 goto out;
1818 }
1819
1820 do {
1821 struct {
1822 struct shash_desc shash;
1823 char ctx[crypto_shash_descsize(tfm)];
1824 } sdesc;
1825
1826 sdesc.shash.tfm = tfm;
1827 sdesc.shash.flags = 0;
1828
1829 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1830 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1831 if (err) {
1832 printk(KERN_ERR "alg: crc32c: Operation failed for "
1833 "%s: %d\n", driver, err);
1834 break;
1835 }
1836
1837 if (val != ~420553207) {
1838 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1839 "%d\n", driver, val);
1840 err = -EINVAL;
1841 }
1842 } while (0);
1843
1844 crypto_free_shash(tfm);
1845
1846 out:
1847 return err;
1848 }
1849
1850 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1851 u32 type, u32 mask)
1852 {
1853 struct crypto_rng *rng;
1854 int err = 0;
1855
1856 rng = crypto_alloc_rng(driver, type, mask);
1857 if (IS_ERR(rng)) {
1858 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1859 "%ld\n", driver, PTR_ERR(rng));
1860 return PTR_ERR(rng);
1861 }
1862
1863 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1864
1865 crypto_free_rng(rng);
1866
1867 return err;
1868 }
1869
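/*
 * Table mapping algorithm names to the test routine and test vectors
 * used to verify them; fips_allowed marks algorithms permitted in FIPS
 * mode.
 */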
1870 /* Please keep this list sorted by algorithm name. */
1871 static const struct alg_test_desc alg_test_descs[] = {
1872 {
1873 .alg = "ansi_cprng",
1874 .test = alg_test_cprng,
1875 .fips_allowed = 1,
1876 .suite = {
1877 .cprng = {
1878 .vecs = ansi_cprng_aes_tv_template,
1879 .count = ANSI_CPRNG_AES_TEST_VECTORS
1880 }
1881 }
1882 }, {
1883 .alg = "cbc(aes)",
1884 .test = alg_test_skcipher,
1885 .fips_allowed = 1,
1886 .suite = {
1887 .cipher = {
1888 .enc = {
1889 .vecs = aes_cbc_enc_tv_template,
1890 .count = AES_CBC_ENC_TEST_VECTORS
1891 },
1892 .dec = {
1893 .vecs = aes_cbc_dec_tv_template,
1894 .count = AES_CBC_DEC_TEST_VECTORS
1895 }
1896 }
1897 }
1898 }, {
1899 .alg = "cbc(anubis)",
1900 .test = alg_test_skcipher,
1901 .suite = {
1902 .cipher = {
1903 .enc = {
1904 .vecs = anubis_cbc_enc_tv_template,
1905 .count = ANUBIS_CBC_ENC_TEST_VECTORS
1906 },
1907 .dec = {
1908 .vecs = anubis_cbc_dec_tv_template,
1909 .count = ANUBIS_CBC_DEC_TEST_VECTORS
1910 }
1911 }
1912 }
1913 }, {
1914 .alg = "cbc(blowfish)",
1915 .test = alg_test_skcipher,
1916 .suite = {
1917 .cipher = {
1918 .enc = {
1919 .vecs = bf_cbc_enc_tv_template,
1920 .count = BF_CBC_ENC_TEST_VECTORS
1921 },
1922 .dec = {
1923 .vecs = bf_cbc_dec_tv_template,
1924 .count = BF_CBC_DEC_TEST_VECTORS
1925 }
1926 }
1927 }
1928 }, {
1929 .alg = "cbc(camellia)",
1930 .test = alg_test_skcipher,
1931 .suite = {
1932 .cipher = {
1933 .enc = {
1934 .vecs = camellia_cbc_enc_tv_template,
1935 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
1936 },
1937 .dec = {
1938 .vecs = camellia_cbc_dec_tv_template,
1939 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
1940 }
1941 }
1942 }
1943 }, {
1944 .alg = "cbc(des)",
1945 .test = alg_test_skcipher,
1946 .suite = {
1947 .cipher = {
1948 .enc = {
1949 .vecs = des_cbc_enc_tv_template,
1950 .count = DES_CBC_ENC_TEST_VECTORS
1951 },
1952 .dec = {
1953 .vecs = des_cbc_dec_tv_template,
1954 .count = DES_CBC_DEC_TEST_VECTORS
1955 }
1956 }
1957 }
1958 }, {
1959 .alg = "cbc(des3_ede)",
1960 .test = alg_test_skcipher,
1961 .fips_allowed = 1,
1962 .suite = {
1963 .cipher = {
1964 .enc = {
1965 .vecs = des3_ede_cbc_enc_tv_template,
1966 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
1967 },
1968 .dec = {
1969 .vecs = des3_ede_cbc_dec_tv_template,
1970 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
1971 }
1972 }
1973 }
1974 }, {
1975 .alg = "cbc(twofish)",
1976 .test = alg_test_skcipher,
1977 .suite = {
1978 .cipher = {
1979 .enc = {
1980 .vecs = tf_cbc_enc_tv_template,
1981 .count = TF_CBC_ENC_TEST_VECTORS
1982 },
1983 .dec = {
1984 .vecs = tf_cbc_dec_tv_template,
1985 .count = TF_CBC_DEC_TEST_VECTORS
1986 }
1987 }
1988 }
1989 }, {
1990 .alg = "ccm(aes)",
1991 .test = alg_test_aead,
1992 .fips_allowed = 1,
1993 .suite = {
1994 .aead = {
1995 .enc = {
1996 .vecs = aes_ccm_enc_tv_template,
1997 .count = AES_CCM_ENC_TEST_VECTORS
1998 },
1999 .dec = {
2000 .vecs = aes_ccm_dec_tv_template,
2001 .count = AES_CCM_DEC_TEST_VECTORS
2002 }
2003 }
2004 }
2005 }, {
2006 .alg = "crc32c",
2007 .test = alg_test_crc32c,
2008 .fips_allowed = 1,
2009 .suite = {
2010 .hash = {
2011 .vecs = crc32c_tv_template,
2012 .count = CRC32C_TEST_VECTORS
2013 }
2014 }
2015 }, {
2016 .alg = "ctr(aes)",
2017 .test = alg_test_skcipher,
2018 .fips_allowed = 1,
2019 .suite = {
2020 .cipher = {
2021 .enc = {
2022 .vecs = aes_ctr_enc_tv_template,
2023 .count = AES_CTR_ENC_TEST_VECTORS
2024 },
2025 .dec = {
2026 .vecs = aes_ctr_dec_tv_template,
2027 .count = AES_CTR_DEC_TEST_VECTORS
2028 }
2029 }
2030 }
2031 }, {
2032 .alg = "cts(cbc(aes))",
2033 .test = alg_test_skcipher,
2034 .suite = {
2035 .cipher = {
2036 .enc = {
2037 .vecs = cts_mode_enc_tv_template,
2038 .count = CTS_MODE_ENC_TEST_VECTORS
2039 },
2040 .dec = {
2041 .vecs = cts_mode_dec_tv_template,
2042 .count = CTS_MODE_DEC_TEST_VECTORS
2043 }
2044 }
2045 }
2046 }, {
2047 .alg = "deflate",
2048 .test = alg_test_comp,
2049 .suite = {
2050 .comp = {
2051 .comp = {
2052 .vecs = deflate_comp_tv_template,
2053 .count = DEFLATE_COMP_TEST_VECTORS
2054 },
2055 .decomp = {
2056 .vecs = deflate_decomp_tv_template,
2057 .count = DEFLATE_DECOMP_TEST_VECTORS
2058 }
2059 }
2060 }
2061 }, {
2062 .alg = "ecb(aes)",
2063 .test = alg_test_skcipher,
2064 .fips_allowed = 1,
2065 .suite = {
2066 .cipher = {
2067 .enc = {
2068 .vecs = aes_enc_tv_template,
2069 .count = AES_ENC_TEST_VECTORS
2070 },
2071 .dec = {
2072 .vecs = aes_dec_tv_template,
2073 .count = AES_DEC_TEST_VECTORS
2074 }
2075 }
2076 }
2077 }, {
2078 .alg = "ecb(anubis)",
2079 .test = alg_test_skcipher,
2080 .suite = {
2081 .cipher = {
2082 .enc = {
2083 .vecs = anubis_enc_tv_template,
2084 .count = ANUBIS_ENC_TEST_VECTORS
2085 },
2086 .dec = {
2087 .vecs = anubis_dec_tv_template,
2088 .count = ANUBIS_DEC_TEST_VECTORS
2089 }
2090 }
2091 }
2092 }, {
2093 .alg = "ecb(arc4)",
2094 .test = alg_test_skcipher,
2095 .suite = {
2096 .cipher = {
2097 .enc = {
2098 .vecs = arc4_enc_tv_template,
2099 .count = ARC4_ENC_TEST_VECTORS
2100 },
2101 .dec = {
2102 .vecs = arc4_dec_tv_template,
2103 .count = ARC4_DEC_TEST_VECTORS
2104 }
2105 }
2106 }
2107 }, {
2108 .alg = "ecb(blowfish)",
2109 .test = alg_test_skcipher,
2110 .suite = {
2111 .cipher = {
2112 .enc = {
2113 .vecs = bf_enc_tv_template,
2114 .count = BF_ENC_TEST_VECTORS
2115 },
2116 .dec = {
2117 .vecs = bf_dec_tv_template,
2118 .count = BF_DEC_TEST_VECTORS
2119 }
2120 }
2121 }
2122 }, {
2123 .alg = "ecb(camellia)",
2124 .test = alg_test_skcipher,
2125 .suite = {
2126 .cipher = {
2127 .enc = {
2128 .vecs = camellia_enc_tv_template,
2129 .count = CAMELLIA_ENC_TEST_VECTORS
2130 },
2131 .dec = {
2132 .vecs = camellia_dec_tv_template,
2133 .count = CAMELLIA_DEC_TEST_VECTORS
2134 }
2135 }
2136 }
2137 }, {
2138 .alg = "ecb(cast5)",
2139 .test = alg_test_skcipher,
2140 .suite = {
2141 .cipher = {
2142 .enc = {
2143 .vecs = cast5_enc_tv_template,
2144 .count = CAST5_ENC_TEST_VECTORS
2145 },
2146 .dec = {
2147 .vecs = cast5_dec_tv_template,
2148 .count = CAST5_DEC_TEST_VECTORS
2149 }
2150 }
2151 }
2152 }, {
2153 .alg = "ecb(cast6)",
2154 .test = alg_test_skcipher,
2155 .suite = {
2156 .cipher = {
2157 .enc = {
2158 .vecs = cast6_enc_tv_template,
2159 .count = CAST6_ENC_TEST_VECTORS
2160 },
2161 .dec = {
2162 .vecs = cast6_dec_tv_template,
2163 .count = CAST6_DEC_TEST_VECTORS
2164 }
2165 }
2166 }
2167 }, {
2168 .alg = "ecb(des)",
2169 .test = alg_test_skcipher,
2170 .fips_allowed = 1,
2171 .suite = {
2172 .cipher = {
2173 .enc = {
2174 .vecs = des_enc_tv_template,
2175 .count = DES_ENC_TEST_VECTORS
2176 },
2177 .dec = {
2178 .vecs = des_dec_tv_template,
2179 .count = DES_DEC_TEST_VECTORS
2180 }
2181 }
2182 }
2183 }, {
2184 .alg = "ecb(des3_ede)",
2185 .test = alg_test_skcipher,
2186 .fips_allowed = 1,
2187 .suite = {
2188 .cipher = {
2189 .enc = {
2190 .vecs = des3_ede_enc_tv_template,
2191 .count = DES3_EDE_ENC_TEST_VECTORS
2192 },
2193 .dec = {
2194 .vecs = des3_ede_dec_tv_template,
2195 .count = DES3_EDE_DEC_TEST_VECTORS
2196 }
2197 }
2198 }
2199 }, {
2200 .alg = "ecb(khazad)",
2201 .test = alg_test_skcipher,
2202 .suite = {
2203 .cipher = {
2204 .enc = {
2205 .vecs = khazad_enc_tv_template,
2206 .count = KHAZAD_ENC_TEST_VECTORS
2207 },
2208 .dec = {
2209 .vecs = khazad_dec_tv_template,
2210 .count = KHAZAD_DEC_TEST_VECTORS
2211 }
2212 }
2213 }
2214 }, {
2215 .alg = "ecb(seed)",
2216 .test = alg_test_skcipher,
2217 .suite = {
2218 .cipher = {
2219 .enc = {
2220 .vecs = seed_enc_tv_template,
2221 .count = SEED_ENC_TEST_VECTORS
2222 },
2223 .dec = {
2224 .vecs = seed_dec_tv_template,
2225 .count = SEED_DEC_TEST_VECTORS
2226 }
2227 }
2228 }
2229 }, {
2230 .alg = "ecb(serpent)",
2231 .test = alg_test_skcipher,
2232 .suite = {
2233 .cipher = {
2234 .enc = {
2235 .vecs = serpent_enc_tv_template,
2236 .count = SERPENT_ENC_TEST_VECTORS
2237 },
2238 .dec = {
2239 .vecs = serpent_dec_tv_template,
2240 .count = SERPENT_DEC_TEST_VECTORS
2241 }
2242 }
2243 }
2244 }, {
2245 .alg = "ecb(tea)",
2246 .test = alg_test_skcipher,
2247 .suite = {
2248 .cipher = {
2249 .enc = {
2250 .vecs = tea_enc_tv_template,
2251 .count = TEA_ENC_TEST_VECTORS
2252 },
2253 .dec = {
2254 .vecs = tea_dec_tv_template,
2255 .count = TEA_DEC_TEST_VECTORS
2256 }
2257 }
2258 }
2259 }, {
2260 .alg = "ecb(tnepres)",
2261 .test = alg_test_skcipher,
2262 .suite = {
2263 .cipher = {
2264 .enc = {
2265 .vecs = tnepres_enc_tv_template,
2266 .count = TNEPRES_ENC_TEST_VECTORS
2267 },
2268 .dec = {
2269 .vecs = tnepres_dec_tv_template,
2270 .count = TNEPRES_DEC_TEST_VECTORS
2271 }
2272 }
2273 }
2274 }, {
2275 .alg = "ecb(twofish)",
2276 .test = alg_test_skcipher,
2277 .suite = {
2278 .cipher = {
2279 .enc = {
2280 .vecs = tf_enc_tv_template,
2281 .count = TF_ENC_TEST_VECTORS
2282 },
2283 .dec = {
2284 .vecs = tf_dec_tv_template,
2285 .count = TF_DEC_TEST_VECTORS
2286 }
2287 }
2288 }
2289 }, {
2290 .alg = "ecb(xeta)",
2291 .test = alg_test_skcipher,
2292 .suite = {
2293 .cipher = {
2294 .enc = {
2295 .vecs = xeta_enc_tv_template,
2296 .count = XETA_ENC_TEST_VECTORS
2297 },
2298 .dec = {
2299 .vecs = xeta_dec_tv_template,
2300 .count = XETA_DEC_TEST_VECTORS
2301 }
2302 }
2303 }
2304 }, {
2305 .alg = "ecb(xtea)",
2306 .test = alg_test_skcipher,
2307 .suite = {
2308 .cipher = {
2309 .enc = {
2310 .vecs = xtea_enc_tv_template,
2311 .count = XTEA_ENC_TEST_VECTORS
2312 },
2313 .dec = {
2314 .vecs = xtea_dec_tv_template,
2315 .count = XTEA_DEC_TEST_VECTORS
2316 }
2317 }
2318 }
2319 }, {
2320 .alg = "gcm(aes)",
2321 .test = alg_test_aead,
2322 .fips_allowed = 1,
2323 .suite = {
2324 .aead = {
2325 .enc = {
2326 .vecs = aes_gcm_enc_tv_template,
2327 .count = AES_GCM_ENC_TEST_VECTORS
2328 },
2329 .dec = {
2330 .vecs = aes_gcm_dec_tv_template,
2331 .count = AES_GCM_DEC_TEST_VECTORS
2332 }
2333 }
2334 }
2335 }, {
2336 .alg = "hmac(md5)",
2337 .test = alg_test_hash,
2338 .suite = {
2339 .hash = {
2340 .vecs = hmac_md5_tv_template,
2341 .count = HMAC_MD5_TEST_VECTORS
2342 }
2343 }
2344 }, {
2345 .alg = "hmac(rmd128)",
2346 .test = alg_test_hash,
2347 .suite = {
2348 .hash = {
2349 .vecs = hmac_rmd128_tv_template,
2350 .count = HMAC_RMD128_TEST_VECTORS
2351 }
2352 }
2353 }, {
2354 .alg = "hmac(rmd160)",
2355 .test = alg_test_hash,
2356 .suite = {
2357 .hash = {
2358 .vecs = hmac_rmd160_tv_template,
2359 .count = HMAC_RMD160_TEST_VECTORS
2360 }
2361 }
2362 }, {
2363 .alg = "hmac(sha1)",
2364 .test = alg_test_hash,
2365 .fips_allowed = 1,
2366 .suite = {
2367 .hash = {
2368 .vecs = hmac_sha1_tv_template,
2369 .count = HMAC_SHA1_TEST_VECTORS
2370 }
2371 }
2372 }, {
2373 .alg = "hmac(sha224)",
2374 .test = alg_test_hash,
2375 .fips_allowed = 1,
2376 .suite = {
2377 .hash = {
2378 .vecs = hmac_sha224_tv_template,
2379 .count = HMAC_SHA224_TEST_VECTORS
2380 }
2381 }
2382 }, {
2383 .alg = "hmac(sha256)",
2384 .test = alg_test_hash,
2385 .fips_allowed = 1,
2386 .suite = {
2387 .hash = {
2388 .vecs = hmac_sha256_tv_template,
2389 .count = HMAC_SHA256_TEST_VECTORS
2390 }
2391 }
2392 }, {
2393 .alg = "hmac(sha384)",
2394 .test = alg_test_hash,
2395 .fips_allowed = 1,
2396 .suite = {
2397 .hash = {
2398 .vecs = hmac_sha384_tv_template,
2399 .count = HMAC_SHA384_TEST_VECTORS
2400 }
2401 }
2402 }, {
2403 .alg = "hmac(sha512)",
2404 .test = alg_test_hash,
2405 .fips_allowed = 1,
2406 .suite = {
2407 .hash = {
2408 .vecs = hmac_sha512_tv_template,
2409 .count = HMAC_SHA512_TEST_VECTORS
2410 }
2411 }
2412 #if !defined(CONFIG_CRYPTO_DEV_AES) && !defined(CONFIG_CRYPTO_ASYNC_AES)
2413 }, {
2414 .alg = "lrw(aes)",
2415 .test = alg_test_skcipher,
2416 .suite = {
2417 .cipher = {
2418 .enc = {
2419 .vecs = aes_lrw_enc_tv_template,
2420 .count = AES_LRW_ENC_TEST_VECTORS
2421 },
2422 .dec = {
2423 .vecs = aes_lrw_dec_tv_template,
2424 .count = AES_LRW_DEC_TEST_VECTORS
2425 }
2426 }
2427 }
2428 #endif
2429 }, {
2430 .alg = "lzo",
2431 .test = alg_test_comp,
2432 .suite = {
2433 .comp = {
2434 .comp = {
2435 .vecs = lzo_comp_tv_template,
2436 .count = LZO_COMP_TEST_VECTORS
2437 },
2438 .decomp = {
2439 .vecs = lzo_decomp_tv_template,
2440 .count = LZO_DECOMP_TEST_VECTORS
2441 }
2442 }
2443 }
2444 }, {
2445 .alg = "md4",
2446 .test = alg_test_hash,
2447 .suite = {
2448 .hash = {
2449 .vecs = md4_tv_template,
2450 .count = MD4_TEST_VECTORS
2451 }
2452 }
2453 }, {
2454 .alg = "md5",
2455 .test = alg_test_hash,
2456 .suite = {
2457 .hash = {
2458 .vecs = md5_tv_template,
2459 .count = MD5_TEST_VECTORS
2460 }
2461 }
2462 }, {
2463 .alg = "michael_mic",
2464 .test = alg_test_hash,
2465 .suite = {
2466 .hash = {
2467 .vecs = michael_mic_tv_template,
2468 .count = MICHAEL_MIC_TEST_VECTORS
2469 }
2470 }
2471 }, {
2472 .alg = "pcbc(fcrypt)",
2473 .test = alg_test_skcipher,
2474 .suite = {
2475 .cipher = {
2476 .enc = {
2477 .vecs = fcrypt_pcbc_enc_tv_template,
2478 .count = FCRYPT_ENC_TEST_VECTORS
2479 },
2480 .dec = {
2481 .vecs = fcrypt_pcbc_dec_tv_template,
2482 .count = FCRYPT_DEC_TEST_VECTORS
2483 }
2484 }
2485 }
2487 }, {
2488 .alg = "rfc3686(ctr(aes))",
2489 .test = alg_test_skcipher,
2490 .fips_allowed = 1,
2491 .suite = {
2492 .cipher = {
2493 .enc = {
2494 .vecs = aes_ctr_rfc3686_enc_tv_template,
2495 .count = AES_CTR_3686_ENC_TEST_VECTORS
2496 },
2497 .dec = {
2498 .vecs = aes_ctr_rfc3686_dec_tv_template,
2499 .count = AES_CTR_3686_DEC_TEST_VECTORS
2500 }
2501 }
2502 }
2503 }, {
2504 .alg = "rfc4309(ccm(aes))",
2505 .test = alg_test_aead,
2506 .fips_allowed = 1,
2507 .suite = {
2508 .aead = {
2509 .enc = {
2510 .vecs = aes_ccm_rfc4309_enc_tv_template,
2511 .count = AES_CCM_4309_ENC_TEST_VECTORS
2512 },
2513 .dec = {
2514 .vecs = aes_ccm_rfc4309_dec_tv_template,
2515 .count = AES_CCM_4309_DEC_TEST_VECTORS
2516 }
2517 }
2518 }
2519 }, {
2520 .alg = "rmd128",
2521 .test = alg_test_hash,
2522 .suite = {
2523 .hash = {
2524 .vecs = rmd128_tv_template,
2525 .count = RMD128_TEST_VECTORS
2526 }
2527 }
2528 }, {
2529 .alg = "rmd160",
2530 .test = alg_test_hash,
2531 .suite = {
2532 .hash = {
2533 .vecs = rmd160_tv_template,
2534 .count = RMD160_TEST_VECTORS
2535 }
2536 }
2537 }, {
2538 .alg = "rmd256",
2539 .test = alg_test_hash,
2540 .suite = {
2541 .hash = {
2542 .vecs = rmd256_tv_template,
2543 .count = RMD256_TEST_VECTORS
2544 }
2545 }
2546 }, {
2547 .alg = "rmd320",
2548 .test = alg_test_hash,
2549 .suite = {
2550 .hash = {
2551 .vecs = rmd320_tv_template,
2552 .count = RMD320_TEST_VECTORS
2553 }
2554 }
2555 }, {
2556 .alg = "salsa20",
2557 .test = alg_test_skcipher,
2558 .suite = {
2559 .cipher = {
2560 .enc = {
2561 .vecs = salsa20_stream_enc_tv_template,
2562 .count = SALSA20_STREAM_ENC_TEST_VECTORS
2563 }
2564 }
2565 }
2566 }, {
2567 .alg = "sha1",
2568 .test = alg_test_hash,
2569 .fips_allowed = 1,
2570 .suite = {
2571 .hash = {
2572 .vecs = sha1_tv_template,
2573 .count = SHA1_TEST_VECTORS
2574 }
2575 }
2576 }, {
2577 .alg = "sha224",
2578 .test = alg_test_hash,
2579 .fips_allowed = 1,
2580 .suite = {
2581 .hash = {
2582 .vecs = sha224_tv_template,
2583 .count = SHA224_TEST_VECTORS
2584 }
2585 }
2586 }, {
2587 .alg = "sha256",
2588 .test = alg_test_hash,
2589 .fips_allowed = 1,
2590 .suite = {
2591 .hash = {
2592 .vecs = sha256_tv_template,
2593 .count = SHA256_TEST_VECTORS
2594 }
2595 }
2596 }, {
2597 .alg = "sha384",
2598 .test = alg_test_hash,
2599 .fips_allowed = 1,
2600 .suite = {
2601 .hash = {
2602 .vecs = sha384_tv_template,
2603 .count = SHA384_TEST_VECTORS
2604 }
2605 }
2606 }, {
2607 .alg = "sha512",
2608 .test = alg_test_hash,
2609 .fips_allowed = 1,
2610 .suite = {
2611 .hash = {
2612 .vecs = sha512_tv_template,
2613 .count = SHA512_TEST_VECTORS
2614 }
2615 }
2616 }, {
2617 .alg = "tgr128",
2618 .test = alg_test_hash,
2619 .suite = {
2620 .hash = {
2621 .vecs = tgr128_tv_template,
2622 .count = TGR128_TEST_VECTORS
2623 }
2624 }
2625 }, {
2626 .alg = "tgr160",
2627 .test = alg_test_hash,
2628 .suite = {
2629 .hash = {
2630 .vecs = tgr160_tv_template,
2631 .count = TGR160_TEST_VECTORS
2632 }
2633 }
2634 }, {
2635 .alg = "tgr192",
2636 .test = alg_test_hash,
2637 .suite = {
2638 .hash = {
2639 .vecs = tgr192_tv_template,
2640 .count = TGR192_TEST_VECTORS
2641 }
2642 }
2643 }, {
2644 .alg = "vmac(aes)",
2645 .test = alg_test_hash,
2646 .suite = {
2647 .hash = {
2648 .vecs = aes_vmac128_tv_template,
2649 .count = VMAC_AES_TEST_VECTORS
2650 }
2651 }
2652 }, {
2653 .alg = "wp256",
2654 .test = alg_test_hash,
2655 .suite = {
2656 .hash = {
2657 .vecs = wp256_tv_template,
2658 .count = WP256_TEST_VECTORS
2659 }
2660 }
2661 }, {
2662 .alg = "wp384",
2663 .test = alg_test_hash,
2664 .suite = {
2665 .hash = {
2666 .vecs = wp384_tv_template,
2667 .count = WP384_TEST_VECTORS
2668 }
2669 }
2670 }, {
2671 .alg = "wp512",
2672 .test = alg_test_hash,
2673 .suite = {
2674 .hash = {
2675 .vecs = wp512_tv_template,
2676 .count = WP512_TEST_VECTORS
2677 }
2678 }
2679 }, {
2680 .alg = "xcbc(aes)",
2681 .test = alg_test_hash,
2682 .suite = {
2683 .hash = {
2684 .vecs = aes_xcbc128_tv_template,
2685 .count = XCBC_AES_TEST_VECTORS
2686 }
2687 }
2688 #if 0
2689 }, {
2690 .alg = "xts(aes)",
2691 .test = alg_test_skcipher,
2692 .suite = {
2693 .cipher = {
2694 .enc = {
2695 .vecs = aes_xts_enc_tv_template,
2696 .count = AES_XTS_ENC_TEST_VECTORS
2697 },
2698 .dec = {
2699 .vecs = aes_xts_dec_tv_template,
2700 .count = AES_XTS_DEC_TEST_VECTORS
2701 }
2702 }
2703 }
2704 #endif
2705 }, {
2706 .alg = "zlib",
2707 .test = alg_test_pcomp,
2708 .suite = {
2709 .pcomp = {
2710 .comp = {
2711 .vecs = zlib_comp_tv_template,
2712 .count = ZLIB_COMP_TEST_VECTORS
2713 },
2714 .decomp = {
2715 .vecs = zlib_decomp_tv_template,
2716 .count = ZLIB_DECOMP_TEST_VECTORS
2717 }
2718 }
2719 }
2720 }
2721 };
2722
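/* Binary-search the alphabetically sorted alg_test_descs[] table for @alg;
 * returns the matching index, or -1 if no test entry exists. */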
2723 static int alg_find_test(const char *alg)
2724 {
2725 int start = 0;
2726 int end = ARRAY_SIZE(alg_test_descs);
2727
2728 while (start < end) {
2729 int i = (start + end) / 2;
2730 int diff = strcmp(alg_test_descs[i].alg, alg);
2731
2732 if (diff > 0) {
2733 end = i;
2734 continue;
2735 }
2736
2737 if (diff < 0) {
2738 start = i + 1;
2739 continue;
2740 }
2741
2742 return i;
2743 }
2744
2745 return -1;
2746 }
2747
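/* Correctness-test entry point for the DEU drivers.  Plain block ciphers are
 * tested through their "ecb(<alg>)" template; otherwise both the algorithm
 * name and the driver name are looked up and every matching test is run.
 * In FIPS mode a failing self-test panics, and algorithms whose tests are
 * not FIPS-approved are rejected with -EINVAL. */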
2748 static int ifx_alg_test(const char *driver, const char *alg, u32 type, u32 mask)
2749 {
2750 int i;
2751 int j;
2752 int rc;
2753
2754 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
2755 char nalg[CRYPTO_MAX_ALG_NAME];
2756
2757 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
2758 sizeof(nalg))
2759 return -ENAMETOOLONG;
2760
2761 i = alg_find_test(nalg);
2762 if (i < 0)
2763 goto notest;
2764
2765 if (fips_enabled && !alg_test_descs[i].fips_allowed)
2766 goto non_fips_alg;
2767
2768 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
2769 goto test_done;
2770 }
2771
2772 i = alg_find_test(alg);
2773 j = alg_find_test(driver);
2774 if (i < 0 && j < 0)
2775 goto notest;
2776
2777 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
2778 (j >= 0 && !alg_test_descs[j].fips_allowed)))
2779 goto non_fips_alg;
2780
2781 rc = 0;
2782 if (i >= 0)
2783 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
2784 type, mask);
2785 if (j >= 0)
2786 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
2787 type, mask);
2788
2789 test_done:
2790 if (fips_enabled && rc)
2791 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
2792
2793 if (fips_enabled && !rc)
2794 printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
2795 driver, alg);
2796
2797 return rc;
2798
2799 notest:
2800 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
2801 return 0;
2802 non_fips_alg:
2803 return -EINVAL;
2804 }
2805 EXPORT_SYMBOL_GPL(ifx_alg_test);
2806
2807 /* Modified speed test for the async block cipher (ablkcipher) interface */
2808
2809 static int ifx_alg_speed_test(const char *driver, const char *alg,
2810 unsigned int sec,
2811 struct cipher_speed_template *template,
2812 unsigned int tcount, u8 *keysize)
2813 {
2814 int i;
2815 int j;
2816 int err;
2817 int type = 0, mask = 0;
2818 struct crypto_ablkcipher *tfm;
2819
2820 i = alg_find_test(alg);
2821 j = alg_find_test(driver);
2822
2823 if (i < 0 && j < 0)
2824 goto notest;
2825
2826 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
2827 (j >= 0 && !alg_test_descs[j].fips_allowed)))
2828 goto non_fips_alg;
2829
2830 tfm = crypto_alloc_ablkcipher(driver, type, mask);
2831
2832 if (IS_ERR(tfm)) {
2833 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
2834 "%s: %ld\n", driver, PTR_ERR(tfm));
2835 return PTR_ERR(tfm);
2836 }
2837 err = test_skcipher_speed(tfm, ENCRYPT, template,
2838 tcount, sec, keysize);
2839 if (err)
2840 goto test_done;
2841
2842 err = test_skcipher_speed(tfm, DECRYPT, template,
2843 tcount, sec, keysize);
2844 		goto test_done;
2846
2847 notest:
2848 return 0;
2849 non_fips_alg:
2850 return -EINVAL;
2851
2852 test_done:
2853 if (fips_enabled && err)
2854 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
2855
2856 if (fips_enabled && !err)
2857 printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
2858 driver, alg);
2859
2860 crypto_free_ablkcipher(tfm);
2861 return err;
2862 }
2863 EXPORT_SYMBOL_GPL(ifx_alg_speed_test);
2864
2865
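/* Run back-to-back in-place encryptions/decryptions on @sg for @sec seconds
 * and print the achieved operation and byte counts. */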
2866 static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
2867 struct scatterlist *sg, int blen, int sec)
2868 {
2869 unsigned long start, end;
2870 int bcount;
2871 int ret;
2872
2873 for (start = jiffies, end = start + sec * HZ, bcount = 0;
2874 time_before(jiffies, end); bcount++) {
2875 if (enc)
2876 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
2877 else
2878 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
2879
2880 if (ret)
2881 return ret;
2882 }
2883
2884 printk("%d operations in %d seconds (%ld bytes)\n",
2885 bcount, sec, (long)bcount * blen);
2886 return 0;
2887 }
2888
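/* Cycle-accurate variant: with softirqs and interrupts disabled, do four
 * warm-up operations and then time eight more using the MIPS CP0 count
 * register (get_cycles() is not usable here), printing the rounded average
 * cycle count per operation. */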
2889 static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
2890 struct scatterlist *sg, int blen)
2891 {
2892 unsigned long cycles = 0;
2893 unsigned long start, end;
2894 int ret = 0;
2895 int i;
2896
2897 local_bh_disable();
2898 local_irq_disable();
2899
2900 /* Warm-up run. */
2901 for (i = 0; i < 4; i++) {
2902 if (enc)
2903 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
2904 else
2905 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
2906
2907 if (ret)
2908 goto out;
2909 }
2910
2911 /* The real thing. */
2912 for (i = 0; i < 8; i++) {
2913 /* Original code to get cycles, does not work with MIPS
2914 * cycles_t start, end;
2915 * start = get_cycles();
2916 */
2917
2918 start = read_c0_count(); // LQ modified tcrypt
2919
2920 if (enc)
2921 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
2922 else
2923 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
2924
2925 /* Original code to get cycles, does not work with MIPS
2926 * end = get_cycles();
2927 */
2928
2929 end = read_c0_count(); //LQ modified tcrypt
2930
2931 if (ret)
2932 goto out;
2933
2934 cycles += end - start;
2935 }
2936
2937 out:
2938 local_irq_enable();
2939 local_bh_enable();
2940
2941 if (ret == 0)
2942 printk("1 operation in %lu cycles (%d bytes)\n",
2943 (cycles + 4) / 8, blen);
2944
2945 return ret;
2946 }
2947
2948 static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
2949
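/* Synchronous blkcipher speed test: for every key size in @keysize and every
 * entry in block_sizes[], pick a matching key from @template (or fall back to
 * an all-0xff key), set an all-0xff IV if the cipher needs one, and measure
 * either operations per @sec seconds or cycles per operation over TVMEMSIZE
 * scatterlist pages. */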
2950 static void test_cipher_speed(const char *algo, int enc, unsigned int sec,
2951 struct cipher_speed_template *template,
2952 unsigned int tcount, u8 *keysize)
2953 {
2954 unsigned int ret, i, j, iv_len;
2955 	const char *key; char iv[128];
2956 struct crypto_blkcipher *tfm;
2957 struct blkcipher_desc desc;
2958 const char *e;
2959 u32 *b_size;
2960
2961 if (enc == ENCRYPT)
2962 e = "encryption";
2963 else
2964 e = "decryption";
2965
2966 printk("\n ******* testing speed of %s %s ******* \n", algo, e);
2967
2968 tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);
2969
2970 if (IS_ERR(tfm)) {
2971 printk("failed to load transform for %s: %ld\n", algo,
2972 PTR_ERR(tfm));
2973 return;
2974 }
2975 desc.tfm = tfm;
2976 desc.flags = 0;
2977
2978 i = 0;
2979 do {
2980
2981 b_size = block_sizes;
2982 do {
2983 struct scatterlist sg[TVMEMSIZE];
2984
2985 if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
2986 printk("template (%u) too big for "
2987 "tvmem (%lu)\n", *keysize + *b_size,
2988 TVMEMSIZE * PAGE_SIZE);
2989 goto out;
2990 }
2991
2992 printk("test %u (%d bit key, %d byte blocks): ", i,
2993 *keysize * 8, *b_size);
2994
2995 memset(tvmem[0], 0xff, PAGE_SIZE);
2996
2997 /* set key, plain text and IV */
2998 key = tvmem[0];
2999 for (j = 0; j < tcount; j++) {
3000 if (template[j].klen == *keysize) {
3001 key = template[j].key;
3002 break;
3003 }
3004 }
3005
3006 ret = crypto_blkcipher_setkey(tfm, key, *keysize);
3007 if (ret) {
3008 printk("setkey() failed flags=%x\n",
3009 crypto_blkcipher_get_flags(tfm));
3010 goto out;
3011 }
3012
3013 sg_init_table(sg, TVMEMSIZE);
3014 sg_set_buf(sg, tvmem[0] + *keysize,
3015 PAGE_SIZE - *keysize);
3016 for (j = 1; j < TVMEMSIZE; j++) {
3017 sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
3018 				memset(tvmem[j], 0xff, PAGE_SIZE);
3019 }
3020
3021 iv_len = crypto_blkcipher_ivsize(tfm);
3022 if (iv_len) {
3023 memset(&iv, 0xff, iv_len);
3024 crypto_blkcipher_set_iv(tfm, iv, iv_len);
3025 }
3026
3027 if (sec)
3028 ret = test_cipher_jiffies(&desc, enc, sg,
3029 *b_size, sec);
3030 else
3031 ret = test_cipher_cycles(&desc, enc, sg,
3032 *b_size);
3033
3034 if (ret) {
3035 printk("%s() failed flags=%x\n", e, desc.flags);
3036 break;
3037 }
3038 b_size++;
3039 i++;
3040 } while (*b_size);
3041 keysize++;
3042 } while (*keysize);
3043
3044 out:
3045 crypto_free_blkcipher(tfm);
3046 }
3047
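/* Hash timing helpers.  The *_digest variants hash @blen bytes with a single
 * crypto_hash_digest() call; the plain variants split the input into
 * @plen-byte init/update/final rounds.  The jiffies versions run for @sec
 * seconds, the cycles versions time eight runs on the CP0 count register. */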
3048 static int test_hash_jiffies_digest(struct hash_desc *desc,
3049 struct scatterlist *sg, int blen,
3050 char *out, int sec)
3051 {
3052 unsigned long start, end;
3053 int bcount;
3054 int ret;
3055
3056 for (start = jiffies, end = start + sec * HZ, bcount = 0;
3057 time_before(jiffies, end); bcount++) {
3058 ret = crypto_hash_digest(desc, sg, blen, out);
3059 if (ret)
3060 return ret;
3061 }
3062
3063 printk("%6u opers/sec, %9lu bytes/sec\n",
3064 bcount / sec, ((long)bcount * blen) / sec);
3065
3066 return 0;
3067 }
3068
3069 static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
3070 int blen, int plen, char *out, int sec)
3071 {
3072 unsigned long start, end;
3073 int bcount, pcount;
3074 int ret;
3075
3076 if (plen == blen)
3077 return test_hash_jiffies_digest(desc, sg, blen, out, sec);
3078
3079 for (start = jiffies, end = start + sec * HZ, bcount = 0;
3080 time_before(jiffies, end); bcount++) {
3081 ret = crypto_hash_init(desc);
3082 if (ret)
3083 return ret;
3084 for (pcount = 0; pcount < blen; pcount += plen) {
3085 ret = crypto_hash_update(desc, sg, plen);
3086 if (ret)
3087 return ret;
3088 }
3089 /* we assume there is enough space in 'out' for the result */
3090 ret = crypto_hash_final(desc, out);
3091 if (ret)
3092 return ret;
3093 }
3094
3095 printk("%6u opers/sec, %9lu bytes/sec\n",
3096 bcount / sec, ((long)bcount * blen) / sec);
3097
3098 return 0;
3099 }
3100
3101 static int test_hash_cycles_digest(struct hash_desc *desc,
3102 struct scatterlist *sg, int blen, char *out)
3103 {
3104 unsigned long cycles = 0;
3105 unsigned long start, end;
3106 int i;
3107 int ret;
3108
3109 local_bh_disable();
3110 local_irq_disable();
3111
3112 /* Warm-up run. */
3113 for (i = 0; i < 4; i++) {
3114 ret = crypto_hash_digest(desc, sg, blen, out);
3115 if (ret)
3116 goto out;
3117 }
3118
3119 /* The real thing. */
3120 for (i = 0; i < 8; i++) {
3121
3122 /* Original code to get cycles, does not work with MIPS
3123 * cycles_t start, end;
3124 * start = get_cycles();
3125 */
3126
3127 start = read_c0_count(); // LQ modified tcrypt
3128
3129 ret = crypto_hash_digest(desc, sg, blen, out);
3130 if (ret)
3131 goto out;
3132
3133 /* Original code to get cycles, does not work with MIPS
3134 * end = get_cycles();
3135 */
3136
3137 end = read_c0_count(); // LQ modified tcrypt
3138
3139 cycles += end - start;
3140 }
3141
3142 out:
3143 local_irq_enable();
3144 local_bh_enable();
3145
3146 if (ret)
3147 return ret;
3148
3149 printk("%6lu cycles/operation, %4lu cycles/byte\n",
3150 cycles / 8, cycles / (8 * blen));
3151
3152 return 0;
3153 }
3154
3155 static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
3156 int blen, int plen, char *out)
3157 {
3158 unsigned long cycles = 0;
3159 unsigned long start, end;
3160 int i, pcount;
3161 int ret;
3162
3163 if (plen == blen)
3164 return test_hash_cycles_digest(desc, sg, blen, out);
3165
3166 local_bh_disable();
3167 local_irq_disable();
3168
3169 /* Warm-up run. */
3170 for (i = 0; i < 4; i++) {
3171 ret = crypto_hash_init(desc);
3172 if (ret)
3173 goto out;
3174 for (pcount = 0; pcount < blen; pcount += plen) {
3175 ret = crypto_hash_update(desc, sg, plen);
3176 if (ret)
3177 goto out;
3178 }
3179 ret = crypto_hash_final(desc, out);
3180 if (ret)
3181 goto out;
3182 }
3183
3184 /* The real thing. */
3185 for (i = 0; i < 8; i++) {
3186
3187 /* Original code for getting cycles, not working for MIPS
3188 * cycle_t start, end;
3189 * end = get_cycles();
3190 */
3191
3192 start = read_c0_count(); // LQ modified tcrypt
3193
3194 ret = crypto_hash_init(desc);
3195 if (ret)
3196 goto out;
3197 for (pcount = 0; pcount < blen; pcount += plen) {
3198 ret = crypto_hash_update(desc, sg, plen);
3199 if (ret)
3200 goto out;
3201 }
3202 ret = crypto_hash_final(desc, out);
3203 if (ret)
3204 goto out;
3205
3206 /* Original code for getting cycles, not working for MIPS
3207 * end = get_cycles();
3208 */
3209
3210 end = read_c0_count(); // LQ modified tcrypt
3211
3212 cycles += end - start;
3213 }
3214
3215 out:
3216 local_irq_enable();
3217 local_bh_enable();
3218
3219 if (ret)
3220 return ret;
3221
3222 printk("%6lu cycles/operation, %4lu cycles/byte\n",
3223 cycles / 8, cycles / (8 * blen));
3224
3225 return 0;
3226 }
3227
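/* Hash speed test: hash TVMEMSIZE pages of 0xff for every (blen, plen) pair
 * in @speed, using the jiffies- or cycle-based helpers depending on @sec. */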
3228 static void test_hash_speed(const char *algo, unsigned int sec,
3229 struct hash_speed *speed)
3230 {
3231 struct scatterlist sg[TVMEMSIZE];
3232 struct crypto_hash *tfm;
3233 struct hash_desc desc;
3234 static char output[1024];
3235 int i;
3236 int ret;
3237
3238 printk(KERN_INFO "\ntesting speed of %s\n", algo);
3239
3240 tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);
3241
3242 if (IS_ERR(tfm)) {
3243 printk(KERN_ERR "failed to load transform for %s: %ld\n", algo,
3244 PTR_ERR(tfm));
3245 return;
3246 }
3247
3248 desc.tfm = tfm;
3249 desc.flags = 0;
3250
3251 if (crypto_hash_digestsize(tfm) > sizeof(output)) {
3252 printk(KERN_ERR "digestsize(%u) > outputbuffer(%zu)\n",
3253 crypto_hash_digestsize(tfm), sizeof(output));
3254 goto out;
3255 }
3256
3257 sg_init_table(sg, TVMEMSIZE);
3258 for (i = 0; i < TVMEMSIZE; i++) {
3259 sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
3260 memset(tvmem[i], 0xff, PAGE_SIZE);
3261 }
3262
3263 for (i = 0; speed[i].blen != 0; i++) {
3264 if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
3265 printk(KERN_ERR
3266 "template (%u) too big for tvmem (%lu)\n",
3267 speed[i].blen, TVMEMSIZE * PAGE_SIZE);
3268 goto out;
3269 }
3270
3271 printk(KERN_INFO "test%3u "
3272 "(%5u byte blocks,%5u bytes per update,%4u updates): ",
3273 i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
3274
3275 if (sec)
3276 ret = test_hash_jiffies(&desc, sg, speed[i].blen,
3277 speed[i].plen, output, sec);
3278 else
3279 ret = test_hash_cycles(&desc, sg, speed[i].blen,
3280 speed[i].plen, output);
3281
3282 if (ret) {
3283 printk(KERN_ERR "hashing failed ret=%d\n", ret);
3284 break;
3285 }
3286 }
3287
3288 out:
3289 crypto_free_hash(tfm);
3290 }
3291
3292
3293 static void test_available(void)
3294 {
3295 char **name = check;
3296
3297 while (*name) {
3298 printk("alg %s ", *name);
3299 printk(crypto_has_alg(*name, 0, 0) ?
3300 "found\n" : "not found\n");
3301 name++;
3302 }
3303 }
3304
3305 static inline int tcrypt_test(const char *alg)
3306 {
3307 int ret;
3308
3309 printk("Running test %s\n", alg);
3310 ret = ifx_alg_test(alg, alg, 0, 0);
3311 /* non-fips algs return -EINVAL in fips mode */
3312 if (fips_enabled && ret == -EINVAL)
3313 ret = 0;
3314 return ret;
3315 }
3316
3317 static inline int tcrypt_speedtest(const char *alg,
3318 struct cipher_speed_template *template,
3319 unsigned int tcount, u8 *keysize)
3320 {
3321 int ret;
3322
3323 printk("[****** Running speedtest %s *******]\n", alg);
3324 ret = ifx_alg_speed_test(alg, alg, sec, template, tcount, keysize);
3325 if (fips_enabled && ret == -EINVAL)
3326 ret = 0;
3327 return ret;
3328 }
3329
3330
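/* Test-mode dispatcher: mode 0 runs all correctness tests (1..199), 1-150 are
 * individual correctness tests, 200-206 synchronous cipher speed tests,
 * 300-317 hash speed tests, 400, 401 and 404 async (ablkcipher) speed tests,
 * and 1000 lists which algorithms are available. */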
3331 static int do_test(int m)
3332 {
3333 int i;
3334 int ret = 0;
3335
3336 switch (m) {
3337 case 0:
3338 for (i = 1; i < 200; i++)
3339 ret += do_test(i);
3340 break;
3341
3342 case 1:
3343 ret += tcrypt_test("md5");
3344 break;
3345
3346 case 2:
3347 ret += tcrypt_test("sha1");
3348 break;
3349
3350 case 3:
3351 ret += tcrypt_test("ecb(des)");
3352 ret += tcrypt_test("cbc(des)");
3353 break;
3354
3355 case 4:
3356 ret += tcrypt_test("ecb(des3_ede)");
3357 ret += tcrypt_test("cbc(des3_ede)");
3358 break;
3359
3360 case 5:
3361 ret += tcrypt_test("md4");
3362 break;
3363
3364 case 6:
3365 ret += tcrypt_test("sha256");
3366 break;
3367
3368 case 7:
3369 ret += tcrypt_test("ecb(blowfish)");
3370 ret += tcrypt_test("cbc(blowfish)");
3371 break;
3372
3373 case 8:
3374 ret += tcrypt_test("ecb(twofish)");
3375 ret += tcrypt_test("cbc(twofish)");
3376 break;
3377
3378 case 9:
3379 ret += tcrypt_test("ecb(serpent)");
3380 break;
3381
3382 case 10:
3383 ret += tcrypt_test("ecb(aes)");
3384 ret += tcrypt_test("cbc(aes)");
3385 // ret += tcrypt_test("lrw(aes)");
3386 // ret += tcrypt_test("xts(aes)");
3387 ret += tcrypt_test("ctr(aes)");
3388 ret += tcrypt_test("rfc3686(ctr(aes))");
3389 break;
3390
3391 case 11:
3392 ret += tcrypt_test("sha384");
3393 break;
3394
3395 case 12:
3396 ret += tcrypt_test("sha512");
3397 break;
3398
3399 case 13:
3400 ret += tcrypt_test("deflate");
3401 break;
3402
3403 case 14:
3404 ret += tcrypt_test("ecb(cast5)");
3405 break;
3406
3407 case 15:
3408 ret += tcrypt_test("ecb(cast6)");
3409 break;
3410
3411 case 16:
3412 ret += tcrypt_test("ecb(arc4)");
3413 break;
3414
3415 case 17:
3416 ret += tcrypt_test("michael_mic");
3417 break;
3418
3419 case 18:
3420 ret += tcrypt_test("crc32c");
3421 break;
3422
3423 case 19:
3424 ret += tcrypt_test("ecb(tea)");
3425 break;
3426
3427 case 20:
3428 ret += tcrypt_test("ecb(xtea)");
3429 break;
3430
3431 case 21:
3432 ret += tcrypt_test("ecb(khazad)");
3433 break;
3434
3435 case 22:
3436 ret += tcrypt_test("wp512");
3437 break;
3438
3439 case 23:
3440 ret += tcrypt_test("wp384");
3441 break;
3442
3443 case 24:
3444 ret += tcrypt_test("wp256");
3445 break;
3446
3447 case 25:
3448 ret += tcrypt_test("ecb(tnepres)");
3449 break;
3450
3451 case 26:
3452 ret += tcrypt_test("ecb(anubis)");
3453 ret += tcrypt_test("cbc(anubis)");
3454 break;
3455
3456 case 27:
3457 ret += tcrypt_test("tgr192");
3458 break;
3459
3460 case 28:
3461
3462 ret += tcrypt_test("tgr160");
3463 break;
3464
3465 case 29:
3466 ret += tcrypt_test("tgr128");
3467 break;
3468
3469 case 30:
3470 ret += tcrypt_test("ecb(xeta)");
3471 break;
3472
3473 case 31:
3474 ret += tcrypt_test("pcbc(fcrypt)");
3475 break;
3476
3477 case 32:
3478 ret += tcrypt_test("ecb(camellia)");
3479 ret += tcrypt_test("cbc(camellia)");
3480 break;
3481 case 33:
3482 ret += tcrypt_test("sha224");
3483 break;
3484
3485 case 34:
3486 ret += tcrypt_test("salsa20");
3487 break;
3488
3489 case 35:
3490 ret += tcrypt_test("gcm(aes)");
3491 break;
3492
3493 case 36:
3494 ret += tcrypt_test("lzo");
3495 break;
3496
3497 case 37:
3498 ret += tcrypt_test("ccm(aes)");
3499 break;
3500
3501 case 38:
3502 ret += tcrypt_test("cts(cbc(aes))");
3503 break;
3504
3505 case 39:
3506 ret += tcrypt_test("rmd128");
3507 break;
3508
3509 case 40:
3510 ret += tcrypt_test("rmd160");
3511 break;
3512
3513 case 41:
3514 ret += tcrypt_test("rmd256");
3515 break;
3516
3517 case 42:
3518 ret += tcrypt_test("rmd320");
3519 break;
3520
3521 case 43:
3522 ret += tcrypt_test("ecb(seed)");
3523 break;
3524
3525 case 44:
3526 ret += tcrypt_test("zlib");
3527 break;
3528
3529 case 45:
3530 ret += tcrypt_test("rfc4309(ccm(aes))");
3531 break;
3532
3533 case 100:
3534 ret += tcrypt_test("hmac(md5)");
3535 break;
3536
3537 case 101:
3538 ret += tcrypt_test("hmac(sha1)");
3539 break;
3540
3541 case 102:
3542 ret += tcrypt_test("hmac(sha256)");
3543 break;
3544
3545 case 103:
3546 ret += tcrypt_test("hmac(sha384)");
3547 break;
3548
3549 case 104:
3550 ret += tcrypt_test("hmac(sha512)");
3551 break;
3552
3553 case 105:
3554 ret += tcrypt_test("hmac(sha224)");
3555 break;
3556
3557 case 106:
3558 ret += tcrypt_test("xcbc(aes)");
3559 break;
3560
3561 case 107:
3562 ret += tcrypt_test("hmac(rmd128)");
3563 break;
3564
3565 case 108:
3566 ret += tcrypt_test("hmac(rmd160)");
3567 break;
3568
3569 case 109:
3570 ret += tcrypt_test("vmac(aes)");
3571 break;
3572
3573 case 150:
3574 ret += tcrypt_test("ansi_cprng");
3575 break;
3576
3577 case 200:
3578 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
3579 speed_template_16_24_32);
3580 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
3581 speed_template_16_24_32);
3582 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
3583 speed_template_16_24_32);
3584 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
3585 speed_template_16_24_32);
3586 #if !defined(CONFIG_CRYPTO_DEV_AES) && !defined(CONFIG_CRYPTO_ASYNC_AES)
3587 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
3588 speed_template_32_40_48);
3589 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
3590 speed_template_32_40_48);
3591 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
3592 speed_template_32_48_64);
3593 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
3594 speed_template_32_48_64);
3595 #endif
3596 break;
3597
3598 case 201:
3599 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
3600 des3_speed_template, DES3_SPEED_VECTORS,
3601 speed_template_24);
3602 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
3603 des3_speed_template, DES3_SPEED_VECTORS,
3604 speed_template_24);
3605 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
3606 des3_speed_template, DES3_SPEED_VECTORS,
3607 speed_template_24);
3608 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
3609 des3_speed_template, DES3_SPEED_VECTORS,
3610 speed_template_24);
3611 break;
3612
3613 case 202:
3614 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
3615 speed_template_16_24_32);
3616 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
3617 speed_template_16_24_32);
3618 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
3619 speed_template_16_24_32);
3620 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
3621 speed_template_16_24_32);
3622 break;
3623
3624 case 203:
3625 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
3626 speed_template_8_32);
3627 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
3628 speed_template_8_32);
3629 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
3630 speed_template_8_32);
3631 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
3632 speed_template_8_32);
3633 break;
3634
3635 case 204:
3636 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
3637 speed_template_8);
3638 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
3639 speed_template_8);
3640 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
3641 speed_template_8);
3642 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
3643 speed_template_8);
3644 break;
3645
3646 case 205:
3647 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
3648 speed_template_16_24_32);
3649 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
3650 speed_template_16_24_32);
3651 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
3652 speed_template_16_24_32);
3653 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
3654 speed_template_16_24_32);
3655 break;
3656
3657 case 206:
3658 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
3659 speed_template_16_32);
3660 break;
3661
3662 case 300:
3663 /* fall through */
3664
3665 case 301:
3666 test_hash_speed("md4", sec, generic_hash_speed_template);
3667 if (mode > 300 && mode < 400) break;
3668
3669 case 302:
3670 test_hash_speed("md5", sec, generic_hash_speed_template);
3671 if (mode > 300 && mode < 400) break;
3672
3673 case 303:
3674 test_hash_speed("sha1", sec, generic_hash_speed_template);
3675 if (mode > 300 && mode < 400) break;
3676
3677 case 304:
3678 test_hash_speed("sha256", sec, generic_hash_speed_template);
3679 if (mode > 300 && mode < 400) break;
3680
3681 case 305:
3682 test_hash_speed("sha384", sec, generic_hash_speed_template);
3683 if (mode > 300 && mode < 400) break;
3684
3685 case 306:
3686 test_hash_speed("sha512", sec, generic_hash_speed_template);
3687 if (mode > 300 && mode < 400) break;
3688
3689 case 307:
3690 test_hash_speed("wp256", sec, generic_hash_speed_template);
3691 if (mode > 300 && mode < 400) break;
3692
3693 case 308:
3694 test_hash_speed("wp384", sec, generic_hash_speed_template);
3695 if (mode > 300 && mode < 400) break;
3696
3697 case 309:
3698 test_hash_speed("wp512", sec, generic_hash_speed_template);
3699 if (mode > 300 && mode < 400) break;
3700
3701 case 310:
3702 test_hash_speed("tgr128", sec, generic_hash_speed_template);
3703 if (mode > 300 && mode < 400) break;
3704
3705 case 311:
3706 test_hash_speed("tgr160", sec, generic_hash_speed_template);
3707 if (mode > 300 && mode < 400) break;
3708
3709 case 312:
3710 test_hash_speed("tgr192", sec, generic_hash_speed_template);
3711 if (mode > 300 && mode < 400) break;
3712
3713 case 313:
3714 test_hash_speed("sha224", sec, generic_hash_speed_template);
3715 if (mode > 300 && mode < 400) break;
3716
3717 case 314:
3718 test_hash_speed("rmd128", sec, generic_hash_speed_template);
3719 if (mode > 300 && mode < 400) break;
3720
3721 case 315:
3722 test_hash_speed("rmd160", sec, generic_hash_speed_template);
3723 if (mode > 300 && mode < 400) break;
3724
3725 case 316:
3726 test_hash_speed("rmd256", sec, generic_hash_speed_template);
3727 if (mode > 300 && mode < 400) break;
3728
3729 case 317:
3730 test_hash_speed("rmd320", sec, generic_hash_speed_template);
3731 if (mode > 300 && mode < 400) break;
3732
3733 case 399:
3734 break;
3735
3736 /* Modified speed test for async block cipher mode */
3737 case 400:
3738 tcrypt_speedtest("ecb(aes)", NULL, 0,
3739 speed_template_16_24_32);
3740 tcrypt_speedtest("cbc(aes)", NULL, 0,
3741 speed_template_16_24_32);
3742 break;
3743
3744 case 401:
3745 tcrypt_speedtest("ecb(des3_ede)", des3_speed_template,
3746 				 DES3_SPEED_VECTORS, speed_template_24);
3747 		tcrypt_speedtest("cbc(des3_ede)", des3_speed_template,
3748 				 DES3_SPEED_VECTORS, speed_template_24);
3749 break;
3750
3751 case 404:
3752 tcrypt_speedtest("ecb(des)", NULL, 0,
3753 speed_template_8);
3754 tcrypt_speedtest("cbc(des)", NULL, 0,
3755 speed_template_8);
3756 break;
3757
3758 case 1000:
3759 test_available();
3760 break;
3761 }
3762
3763 return ret;
3764 }
3765 #if !defined(CONFIG_CRYPTO_DEV_DEU)
3766 static int do_alg_test(const char *alg, u32 type, u32 mask)
3767 {
3768 return crypto_has_alg(alg, type, mask ?: CRYPTO_ALG_TYPE_MASK) ?
3769 0 : -ENOENT;
3770 }
3771 #endif
3772
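/* Module init: allocate TVMEMSIZE test pages, run the correctness (and, with
 * CONFIG_CRYPTO_DEV_SPEED_TEST, speed) tests for whichever DEU algorithms are
 * configured, then free the pages again.  Outside of FIPS mode the function
 * deliberately returns -EAGAIN so the module is not kept loaded. */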
3773 static int __init tcrypt_mod_init(void)
3774 {
3775 int err = -ENOMEM;
3776 int i;
3777
3778 printk("Starting Lantiq DEU Crypto TESTS . . . . . . .\n");
3779
3780 for (i = 0; i < TVMEMSIZE; i++) {
3781 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
3782 if (!tvmem[i])
3783 goto err_free_tv;
3784 }
3785
3786 #if defined(CONFIG_CRYPTO_DEV_DEU)
3787 #if defined(CONFIG_CRYPTO_DEV_MD5)
3788 mode = 1; // test md5 only
3789 err = do_test(mode);
3790 if (err)
3791 goto md5_err;
3792
3793 md5_err:
3794 if (err) {
3795 printk(KERN_ERR "md5: one or more tests failed!\n");
3796 goto err_free_tv;
3797 }
3798 #endif
3799 #if defined(CONFIG_CRYPTO_DEV_SHA1)
3800 mode = 2; // test sha1 only
3801 err = do_test(mode);
3802 if (err)
3803 goto sha1_err;
3804
3805 sha1_err:
3806 if (err) {
3807 printk(KERN_ERR "sha1: one or more tests failed!\n");
3808 goto err_free_tv;
3809 }
3810 #endif
3811 #if defined (CONFIG_CRYPTO_DEV_DES) || defined (CONFIG_CRYPTO_ASYNC_DES)
3812 mode = 3; // test des only
3813 err = do_test(mode);
3814 if (err)
3815 goto des_err;
3816
3817 mode = 4; // test des3 only
3818 err = do_test(mode);
3819 if (err)
3820 goto des_err;
3821
3822 des_err:
3823 if (err) {
3824 printk(KERN_ERR "des3: one or more tests failed!\n");
3825 goto err_free_tv;
3826 }
3827 #endif
3828 #if defined (CONFIG_CRYPTO_ASYNC_AES) || defined (CONFIG_CRYPTO_DEV_AES)
3829 mode = 10; // test aes only
3830 err = do_test(mode);
3831 if (err)
3832 goto aes_err;
3833
3834 aes_err:
3835 if (err) {
3836 printk(KERN_ERR "aes: one or more tests failed!\n");
3837 goto err_free_tv;
3838 }
3839 #endif
3840 #if defined(CONFIG_CRYPTO_DEV_ARC4)
3841 mode = 16;
3842 err = do_test(mode);
3843
3844 if (err) {
3845 printk(KERN_ERR "arc4: one or more tests failed!\n");
3846 goto err_free_tv;
3847 }
3848 #endif
3849 #if defined (CONFIG_CRYPTO_DEV_MD5_HMAC)
3850 mode = 100;
3851 err = do_test(mode);
3852
3853 if (err) {
3854 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
3855 goto err_free_tv;
3856 }
3857 #endif
3858 #if defined (CONFIG_CRYPTO_DEV_SHA1_HMAC)
3859 mode = 101;
3860 err = do_test(mode);
3861
3862 if (err) {
3863 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
3864 goto err_free_tv;
3865 }
3866 #endif
3867
3868 /* Start Speed tests test modes */
3869 #if defined(CONFIG_CRYPTO_DEV_SPEED_TEST)
3870 #if defined(CONFIG_CRYPTO_DEV_AES)
3871 mode = 200;
3872 err = do_test(mode);
3873 if (err)
3874 goto speed_err;
3875 #endif
3876 #if defined (CONFIG_CRYPTO_DEV_DES)
3877 mode = 201;
3878 err = do_test(mode);
3879 if (err)
3880 goto speed_err;
3881
3882 mode = 204;
3883 err = do_test(mode);
3884 if (err)
3885 goto speed_err;
3886 #endif
3887 #if defined (CONFIG_CRYPTO_DEV_MD5)
3888 mode = 302;
3889 err = do_test(mode);
3890 if (err)
3891 goto speed_err;
3892 #endif
3893 #if defined (CONFIG_CRYPTO_DEV_SHA1)
3894 mode = 303;
3895 err = do_test(mode);
3896 if (err)
3897 goto speed_err;
3898 #endif
3899 printk("Speed tests finished successfully\n");
3900 goto fips_check;
3901
3902 speed_err:
3903 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
3904 goto err_free_tv;
3905 #endif /* CONFIG_CRYPTO_DEV_SPEED_TEST */
3906
3907 #else
3908 if (alg)
3909 err = do_alg_test(alg, type, mask);
3910 else
3911 err = do_test(mode);
3912
3913 if (err) {
3914 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
3915 goto err_free_tv;
3916 }
3917 #endif /* CONFIG_CRYPTO_DEV_DEU */
3918
3919 fips_check:
3920 	/* We intentionally return -EAGAIN so the module is not kept loaded,
3921 	 * unless we're running in fips mode. It does all its work from
3922 	 * init() and doesn't offer any runtime functionality, but in
3923 	 * the fips case, checking for a successful load is helpful.
3924 	 * => we don't need it in memory, do we?
3925 * -- mludvig
3926 */
3927 if (!fips_enabled)
3928 err = -EAGAIN;
3929
3930 err_free_tv:
3931 	for (i = 0; i < TVMEMSIZE && tvmem[i]; i++) {
3932 printk("Freeing page: %d\n", i);
3933 free_page((unsigned long)tvmem[i]);
3934 }
3935
3936 printk("Finished DEU testing . . . . . .\n");
3937 return err;
3938 }
3939
3940 /*
3941 * If an init function is provided, an exit function must also be provided
3942 * to allow module unload.
3943 */
3944 static void __exit tcrypt_mod_fini(void) {}
3945
3946
3947 module_init(tcrypt_mod_init);
3948 module_exit(tcrypt_mod_fini);
3949
3950 module_param(alg, charp, 0);
3951 module_param(type, uint, 0);
3952 module_param(mask, uint, 0);
3953 module_param(mode, int, 0);
3954 module_param(sec, uint, 0);
3955 MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
3956 "(defaults to zero which uses CPU cycles instead)");
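/*
 * Example invocation (illustrative only; the module name depends on how this
 * package builds it, and mode/alg/type/mask are only honoured when
 * CONFIG_CRYPTO_DEV_DEU is not set -- otherwise the DEU tests above run
 * automatically on load):
 *
 *   insmod <module>.ko mode=200 sec=1
 *
 * would run the synchronous AES speed tests, measuring for one second per
 * block size instead of counting CPU cycles.
 */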
3957
3958 MODULE_LICENSE("GPL");
3959 MODULE_DESCRIPTION("Quick & dirty crypto testing module");
3960 MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");
3961