package/platform/lantiq/ltq-deu/src/ltq_deu_testmgr.c
1 /*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * This program is free software; you can redistribute it and/or modify it
10 * under the terms of the GNU General Public License as published by the Free
11 * Software Foundation; either version 2 of the License, or (at your option)
12 * any later version.
13 *
14 */
15
16 #include <crypto/hash.h>
17 #include <linux/err.h>
18 #include <linux/module.h>
19 #include <linux/scatterlist.h>
20 #include <linux/slab.h>
21 #include <linux/string.h>
22 #include <crypto/rng.h>
23 #include <linux/jiffies.h>
24 #include <linux/init.h>
25 #include <linux/moduleparam.h>
26 #include <linux/delay.h>
27 #include <linux/types.h>
28 #include <linux/sched.h>
29
30 #include "internal.h"
31 #include "ifxmips_testmgr.h"
32 #include "ifxmips_tcrypt.h"
33 #include "ifxmips_deu.h"
34
35 /* changes for LQ ablkcipher speedtest */
36 #include <linux/timex.h>
37 #include <linux/interrupt.h>
38 #include <asm/mipsregs.h>
39
40 /*
41 * Need slab memory for testing (size in number of pages).
42 */
43 #define XBUFSIZE 8
44
45 /*
46 * Indexes into the xbuf to simulate cross-page access.
47 */
48 #define IDX1 32
49 #define IDX2 32400
50 #define IDX3 1
51 #define IDX4 8193
52 #define IDX5 22222
53 #define IDX6 17101
54 #define IDX7 27333
55 #define IDX8 3000
56
57 /*
58 * Used by test_cipher()
59 */
60 #define ENCRYPT 1
61 #define DECRYPT 0
62
63 /*
64 * Need slab memory for testing (size in number of pages).
65 */
66 #define TVMEMSIZE 4
67
68 /*
69 * Used by test_cipher_speed()
70 */
71 #define ENCRYPT 1
72 #define DECRYPT 0
73
79 static unsigned int sec;
80
81 static char *alg = NULL;
82 static u32 type;
83 static u32 mask;
84 static int mode;
85 static char *tvmem[TVMEMSIZE];
86
87 static char *check[] = {
88 "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
89 "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
90 "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
91 "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
92 "camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
93 "lzo", "cts", "zlib", NULL
94 };
95 struct tcrypt_result {
96 struct completion completion;
97 int err;
98 };
99
100 struct aead_test_suite {
101 struct {
102 struct aead_testvec *vecs;
103 unsigned int count;
104 } enc, dec;
105 };
106
107 struct cipher_test_suite {
108 struct {
109 struct cipher_testvec *vecs;
110 unsigned int count;
111 } enc, dec;
112 };
113
114 struct comp_test_suite {
115 struct {
116 struct comp_testvec *vecs;
117 unsigned int count;
118 } comp, decomp;
119 };
120
121 struct pcomp_test_suite {
122 struct {
123 struct pcomp_testvec *vecs;
124 unsigned int count;
125 } comp, decomp;
126 };
127
128 struct hash_test_suite {
129 struct hash_testvec *vecs;
130 unsigned int count;
131 };
132
133 struct cprng_test_suite {
134 struct cprng_testvec *vecs;
135 unsigned int count;
136 };
137
138 struct alg_test_desc {
139 const char *alg;
140 int (*test)(const struct alg_test_desc *desc, const char *driver,
141 u32 type, u32 mask);
142 int fips_allowed; /* set if alg is allowed in fips mode */
143
144 union {
145 struct aead_test_suite aead;
146 struct cipher_test_suite cipher;
147 struct comp_test_suite comp;
148 struct pcomp_test_suite pcomp;
149 struct hash_test_suite hash;
150 struct cprng_test_suite cprng;
151 } suite;
152 };
153
154 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
155
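/*
 * Explanatory note (added; assumes the usual 4 KiB pages): the IDX values
 * are flat byte offsets into the XBUFSIZE pages of xbuf[].  The tests below
 * turn them into a page plus an in-page offset, e.g. for IDX2 = 32400:
 *
 *	page   = IDX[k] >> PAGE_SHIFT;		32400 >> 12 = 7
 *	offset = offset_in_page(IDX[k]);	32400 & 4095 = 3728
 *	ptr    = xbuf[page] + offset;
 *
 * so a single test vector can be scattered over several non-contiguous
 * pages to exercise the driver's scatterlist handling.
 */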
156 static void hexdump(unsigned char *buf, unsigned int len)
157 {
158 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
159 16, 1,
160 buf, len, false);
161 }
162
163 static void tcrypt_complete(struct crypto_async_request *req, int err)
164 {
165 struct tcrypt_result *res = req->data;
166
167 //printk("Signal done test\n");
168
169 if (err == -EINPROGRESS) {
170 printk("********************* Completion didn't go too well **************************** \n");
171 return;
172 }
173
174 res->err = err;
175 complete_all(&res->completion);
176 }
177
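#if 0
/*
 * Illustrative sketch only (not part of the original driver tests): every
 * test_*() routine below consumes the completion signalled by
 * tcrypt_complete() with the same pattern, shown here as a hypothetical
 * helper.  A return of -EINPROGRESS/-EBUSY means the request was queued
 * and the callback will fire later.
 */
static int tcrypt_wait_example(int ret, struct tcrypt_result *tr)
{
	switch (ret) {
	case 0:				/* completed synchronously */
		return 0;
	case -EINPROGRESS:
	case -EBUSY:			/* queued; wait for tcrypt_complete() */
		ret = wait_for_completion_interruptible(&tr->completion);
		if (!ret && !(ret = tr->err))
			INIT_COMPLETION(tr->completion);	/* re-arm for reuse */
		return ret;
	default:			/* hard error from the driver */
		return ret;
	}
}
#endif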
178 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
179 {
180 int i;
181
182 for (i = 0; i < XBUFSIZE; i++) {
183 buf[i] = (void *)__get_free_page(GFP_KERNEL);
184 if (!buf[i])
185 goto err_free_buf;
186 }
187
188 return 0;
189
190 err_free_buf:
191 while (i-- > 0)
192 free_page((unsigned long)buf[i]);
193
194 return -ENOMEM;
195 }
196
197 static void testmgr_free_buf(char *buf[XBUFSIZE])
198 {
199 int i;
200
201 for (i = 0; i < XBUFSIZE; i++)
202 free_page((unsigned long)buf[i]);
203 }
204
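#if 0
/*
 * Illustrative usage of the two helpers above (hypothetical, mirrors what
 * each test_*() routine below does): grab XBUFSIZE pages, stage data in
 * page 0, and release all pages on every exit path.
 */
static int xbuf_usage_example(void)
{
	char *xbuf[XBUFSIZE];

	if (testmgr_alloc_buf(xbuf))
		return -ENOMEM;

	memset(xbuf[0], 0, PAGE_SIZE);	/* page 0 is the usual staging area */

	testmgr_free_buf(xbuf);
	return 0;
}
#endif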
205 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
206 unsigned int tcount)
207 {
208 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
209 unsigned int i, j, k, temp;
210 struct scatterlist sg[8];
211 char result[64];
212 struct ahash_request *req;
213 struct tcrypt_result tresult;
214 void *hash_buff;
215 char *xbuf[XBUFSIZE];
216 int ret = -ENOMEM;
217
218 if (testmgr_alloc_buf(xbuf))
219 goto out_nobuf;
220
221 init_completion(&tresult.completion);
222
223 req = ahash_request_alloc(tfm, GFP_KERNEL);
224 if (!req) {
225 printk(KERN_ERR "alg: hash: Failed to allocate request for "
226 "%s\n", algo);
227 goto out_noreq;
228 }
229 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
230 tcrypt_complete, &tresult);
231
232 j = 0;
233 for (i = 0; i < tcount; i++) {
234 if (template[i].np)
235 continue;
236
237 j++;
238 memset(result, 0, 64);
239
240 hash_buff = xbuf[0];
241
242 memcpy(hash_buff, template[i].plaintext, template[i].psize);
243 sg_init_one(&sg[0], hash_buff, template[i].psize);
244
245 if (template[i].ksize) {
246 crypto_ahash_clear_flags(tfm, ~0);
247 ret = crypto_ahash_setkey(tfm, template[i].key,
248 template[i].ksize);
249 if (ret) {
250 printk(KERN_ERR "alg: hash: setkey failed on "
251 "test %d for %s: ret=%d\n", j, algo,
252 -ret);
253 goto out;
254 }
255 }
256
257 ahash_request_set_crypt(req, sg, result, template[i].psize);
258 ret = crypto_ahash_digest(req);
259 switch (ret) {
260 case 0:
261 break;
262 case -EINPROGRESS:
263 case -EBUSY:
264 ret = wait_for_completion_interruptible(
265 &tresult.completion);
266 if (!ret && !(ret = tresult.err)) {
267 INIT_COMPLETION(tresult.completion);
268 break;
269 }
270 /* fall through */
271 default:
272 printk(KERN_ERR "alg: hash: digest failed on test %d "
273 "for %s: ret=%d\n", j, algo, -ret);
274 goto out;
275 }
276
277 if (memcmp(result, template[i].digest,
278 crypto_ahash_digestsize(tfm))) {
279 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
280 j, algo);
281 hexdump(result, crypto_ahash_digestsize(tfm));
282 ret = -EINVAL;
283 goto out;
284 }
285 else {
286 printk(KERN_ERR "alg: hash: Test %d passed for %s\n",
287 j, algo);
288 hexdump(result, crypto_ahash_digestsize(tfm));
289 }
290 }
291
292 j = 0;
293 for (i = 0; i < tcount; i++) {
294 if (template[i].np) {
295 j++;
296 memset(result, 0, 64);
297
298 temp = 0;
299 sg_init_table(sg, template[i].np);
300 ret = -EINVAL;
301 for (k = 0; k < template[i].np; k++) {
302 if (WARN_ON(offset_in_page(IDX[k]) +
303 template[i].tap[k] > PAGE_SIZE))
304 goto out;
305 sg_set_buf(&sg[k],
306 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
307 offset_in_page(IDX[k]),
308 template[i].plaintext + temp,
309 template[i].tap[k]),
310 template[i].tap[k]);
311 temp += template[i].tap[k];
312 }
313
314 if (template[i].ksize) {
315 crypto_ahash_clear_flags(tfm, ~0);
316 ret = crypto_ahash_setkey(tfm, template[i].key,
317 template[i].ksize);
318
319 if (ret) {
320 printk(KERN_ERR "alg: hash: setkey "
321 "failed on chunking test %d "
322 "for %s: ret=%d\n", j, algo,
323 -ret);
324 goto out;
325 }
326 }
327
328 ahash_request_set_crypt(req, sg, result,
329 template[i].psize);
330 ret = crypto_ahash_digest(req);
331 switch (ret) {
332 case 0:
333 break;
334 case -EINPROGRESS:
335 case -EBUSY:
336 ret = wait_for_completion_interruptible(
337 &tresult.completion);
338 if (!ret && !(ret = tresult.err)) {
339 INIT_COMPLETION(tresult.completion);
340 break;
341 }
342 /* fall through */
343 default:
344 printk(KERN_ERR "alg: hash: digest failed "
345 "on chunking test %d for %s: "
346 "ret=%d\n", j, algo, -ret);
347 goto out;
348 }
349
350 if (memcmp(result, template[i].digest,
351 crypto_ahash_digestsize(tfm))) {
352 printk(KERN_ERR "alg: hash: Chunking test %d "
353 "failed for %s\n", j, algo);
354 hexdump(result, crypto_ahash_digestsize(tfm));
355 ret = -EINVAL;
356 goto out;
357 }
358 else {
359 printk(KERN_ERR "alg: hash: Chunking test %d "
360 "passed for %s\n", j, algo);
361 hexdump(result, crypto_ahash_digestsize(tfm));
362 }
363 }
364 }
365
366 ret = 0;
367
368 out:
369 ahash_request_free(req);
370 out_noreq:
371 testmgr_free_buf(xbuf);
372 out_nobuf:
373 return ret;
374 }
375
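#if 0
/*
 * Illustrative sketch of how test_hash() is driven (this is what
 * alg_test_hash() further down does via the alg_test_descs[] table).
 * Assumes the md5_tv_template/MD5_TEST_VECTORS vectors referenced in that
 * table; any async MD5 implementation registered with the crypto API,
 * including the DEU one, would be picked up by the "md5" name.
 */
static int test_hash_example(void)
{
	struct crypto_ahash *tfm;
	int err;

	tfm = crypto_alloc_ahash("md5", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = test_hash(tfm, md5_tv_template, MD5_TEST_VECTORS);

	crypto_free_ahash(tfm);
	return err;
}
#endif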
376 static int test_aead(struct crypto_aead *tfm, int enc,
377 struct aead_testvec *template, unsigned int tcount)
378 {
379 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
380 unsigned int i, j, k, n, temp;
381 int ret = -ENOMEM;
382 char *q;
383 char *key;
384 struct aead_request *req;
385 struct scatterlist sg[8];
386 struct scatterlist asg[8];
387 const char *e;
388 struct tcrypt_result result;
389 unsigned int authsize;
390 void *input;
391 void *assoc;
392 char iv[MAX_IVLEN];
393 char *xbuf[XBUFSIZE];
394 char *axbuf[XBUFSIZE];
395
396 if (testmgr_alloc_buf(xbuf))
397 goto out_noxbuf;
398 if (testmgr_alloc_buf(axbuf))
399 goto out_noaxbuf;
400
401 if (enc == ENCRYPT)
402 e = "encryption";
403 else
404 e = "decryption";
405
406 init_completion(&result.completion);
407
408 req = aead_request_alloc(tfm, GFP_KERNEL);
409 if (!req) {
410 printk(KERN_ERR "alg: aead: Failed to allocate request for "
411 "%s\n", algo);
412 goto out;
413 }
414
415 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
416 tcrypt_complete, &result);
417
418 for (i = 0, j = 0; i < tcount; i++) {
419 if (!template[i].np) {
420 j++;
421
422 /* some templates have no input data but they will
423 * touch input
424 */
425 input = xbuf[0];
426 assoc = axbuf[0];
427
428 ret = -EINVAL;
429 if (WARN_ON(template[i].ilen > PAGE_SIZE ||
430 template[i].alen > PAGE_SIZE))
431 goto out;
432
433 memcpy(input, template[i].input, template[i].ilen);
434 memcpy(assoc, template[i].assoc, template[i].alen);
435 if (template[i].iv)
436 memcpy(iv, template[i].iv, MAX_IVLEN);
437 else
438 memset(iv, 0, MAX_IVLEN);
439
440 crypto_aead_clear_flags(tfm, ~0);
441 if (template[i].wk)
442 crypto_aead_set_flags(
443 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
444
445 key = template[i].key;
446
447 ret = crypto_aead_setkey(tfm, key,
448 template[i].klen);
449 if (!ret == template[i].fail) {
450 printk(KERN_ERR "alg: aead: setkey failed on "
451 "test %d for %s: flags=%x\n", j, algo,
452 crypto_aead_get_flags(tfm));
453 goto out;
454 } else if (ret)
455 continue;
456
457 authsize = abs(template[i].rlen - template[i].ilen);
458 ret = crypto_aead_setauthsize(tfm, authsize);
459 if (ret) {
460 printk(KERN_ERR "alg: aead: Failed to set "
461 "authsize to %u on test %d for %s\n",
462 authsize, j, algo);
463 goto out;
464 }
465
466 sg_init_one(&sg[0], input,
467 template[i].ilen + (enc ? authsize : 0));
468
469 sg_init_one(&asg[0], assoc, template[i].alen);
470
471 aead_request_set_crypt(req, sg, sg,
472 template[i].ilen, iv);
473
474 aead_request_set_assoc(req, asg, template[i].alen);
475
476 ret = enc ?
477 crypto_aead_encrypt(req) :
478 crypto_aead_decrypt(req);
479
480 switch (ret) {
481 case 0:
482 if (template[i].novrfy) {
483 /* verification was supposed to fail */
484 printk(KERN_ERR "alg: aead: %s failed "
485 "on test %d for %s: ret was 0, "
486 "expected -EBADMSG\n",
487 e, j, algo);
488 /* so really, we got a bad message */
489 ret = -EBADMSG;
490 goto out;
491 }
492 break;
493 case -EINPROGRESS:
494 case -EBUSY:
495 ret = wait_for_completion_interruptible(
496 &result.completion);
497 if (!ret && !(ret = result.err)) {
498 INIT_COMPLETION(result.completion);
499 break;
500 }
501 case -EBADMSG:
502 if (template[i].novrfy)
503 /* verification failure was expected */
504 continue;
505 /* fall through */
506 default:
507 printk(KERN_ERR "alg: aead: %s failed on test "
508 "%d for %s: ret=%d\n", e, j, algo, -ret);
509 goto out;
510 }
511
512 q = input;
513 if (memcmp(q, template[i].result, template[i].rlen)) {
514 printk(KERN_ERR "alg: aead: Test %d failed on "
515 "%s for %s\n", j, e, algo);
516 hexdump(q, template[i].rlen);
517 ret = -EINVAL;
518 goto out;
519 }
520 else {
521 printk(KERN_ERR "alg: aead: Test %d passed on "
522 "%s for %s\n", j, e, algo);
523 hexdump(q, template[i].rlen);
524 }
525 }
526 }
527
528 for (i = 0, j = 0; i < tcount; i++) {
529 if (template[i].np) {
530 j++;
531
532 if (template[i].iv)
533 memcpy(iv, template[i].iv, MAX_IVLEN);
534 else
535 memset(iv, 0, MAX_IVLEN);
536
537 crypto_aead_clear_flags(tfm, ~0);
538 if (template[i].wk)
539 crypto_aead_set_flags(
540 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
541 key = template[i].key;
542
543 ret = crypto_aead_setkey(tfm, key, template[i].klen);
544 if (!ret == template[i].fail) {
545 printk(KERN_ERR "alg: aead: setkey failed on "
546 "chunk test %d for %s: flags=%x\n", j,
547 algo, crypto_aead_get_flags(tfm));
548 goto out;
549 } else if (ret)
550 continue;
551
552 authsize = abs(template[i].rlen - template[i].ilen);
553
554 ret = -EINVAL;
555 sg_init_table(sg, template[i].np);
556 for (k = 0, temp = 0; k < template[i].np; k++) {
557 if (WARN_ON(offset_in_page(IDX[k]) +
558 template[i].tap[k] > PAGE_SIZE))
559 goto out;
560
561 q = xbuf[IDX[k] >> PAGE_SHIFT] +
562 offset_in_page(IDX[k]);
563
564 memcpy(q, template[i].input + temp,
565 template[i].tap[k]);
566
567 n = template[i].tap[k];
568 if (k == template[i].np - 1 && enc)
569 n += authsize;
570 if (offset_in_page(q) + n < PAGE_SIZE)
571 q[n] = 0;
572
573 sg_set_buf(&sg[k], q, template[i].tap[k]);
574 temp += template[i].tap[k];
575 }
576
577 ret = crypto_aead_setauthsize(tfm, authsize);
578 if (ret) {
579 printk(KERN_ERR "alg: aead: Failed to set "
580 "authsize to %u on chunk test %d for "
581 "%s\n", authsize, j, algo);
582 goto out;
583 }
584
585 if (enc) {
586 if (WARN_ON(sg[k - 1].offset +
587 sg[k - 1].length + authsize >
588 PAGE_SIZE)) {
589 ret = -EINVAL;
590 goto out;
591 }
592
593 sg[k - 1].length += authsize;
594 }
595
596 sg_init_table(asg, template[i].anp);
597 ret = -EINVAL;
598 for (k = 0, temp = 0; k < template[i].anp; k++) {
599 if (WARN_ON(offset_in_page(IDX[k]) +
600 template[i].atap[k] > PAGE_SIZE))
601 goto out;
602 sg_set_buf(&asg[k],
603 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
604 offset_in_page(IDX[k]),
605 template[i].assoc + temp,
606 template[i].atap[k]),
607 template[i].atap[k]);
608 temp += template[i].atap[k];
609 }
610
611 aead_request_set_crypt(req, sg, sg,
612 template[i].ilen,
613 iv);
614
615 aead_request_set_assoc(req, asg, template[i].alen);
616
617 ret = enc ?
618 crypto_aead_encrypt(req) :
619 crypto_aead_decrypt(req);
620
621 switch (ret) {
622 case 0:
623 if (template[i].novrfy) {
624 /* verification was supposed to fail */
625 printk(KERN_ERR "alg: aead: %s failed "
626 "on chunk test %d for %s: ret "
627 "was 0, expected -EBADMSG\n",
628 e, j, algo);
629 /* so really, we got a bad message */
630 ret = -EBADMSG;
631 goto out;
632 }
633 break;
634 case -EINPROGRESS:
635 case -EBUSY:
636 ret = wait_for_completion_interruptible(
637 &result.completion);
638 if (!ret && !(ret = result.err)) {
639 INIT_COMPLETION(result.completion);
640 break;
641 }
642 case -EBADMSG:
643 if (template[i].novrfy)
644 /* verification failure was expected */
645 continue;
646 /* fall through */
647 default:
648 printk(KERN_ERR "alg: aead: %s failed on "
649 "chunk test %d for %s: ret=%d\n", e, j,
650 algo, -ret);
651 goto out;
652 }
653
654 ret = -EINVAL;
655 for (k = 0, temp = 0; k < template[i].np; k++) {
656 q = xbuf[IDX[k] >> PAGE_SHIFT] +
657 offset_in_page(IDX[k]);
658
659 n = template[i].tap[k];
660 if (k == template[i].np - 1)
661 n += enc ? authsize : -authsize;
662
663 if (memcmp(q, template[i].result + temp, n)) {
664 printk(KERN_ERR "alg: aead: Chunk "
665 "test %d failed on %s at page "
666 "%u for %s\n", j, e, k, algo);
667 hexdump(q, n);
668 goto out;
669 }
670 else {
671 printk(KERN_ERR "alg: aead: Chunk "
672 "test %d passed on %s at page "
673 "%u for %s\n", j, e, k, algo);
674 hexdump(q, n);
675 }
676
677 q += n;
678 if (k == template[i].np - 1 && !enc) {
679 if (memcmp(q, template[i].input +
680 temp + n, authsize))
681 n = authsize;
682 else
683 n = 0;
684 } else {
685 for (n = 0; offset_in_page(q + n) &&
686 q[n]; n++)
687 ;
688 }
689 if (n) {
690 printk(KERN_ERR "alg: aead: Result "
691 "buffer corruption in chunk "
692 "test %d on %s at page %u for "
693 "%s: %u bytes:\n", j, e, k,
694 algo, n);
695 hexdump(q, n);
696 goto out;
697 }
698 temp += template[i].tap[k];
699 }
700 }
701 }
702
703 ret = 0;
704
705 out:
706 aead_request_free(req);
707 testmgr_free_buf(axbuf);
708 out_noaxbuf:
709 testmgr_free_buf(xbuf);
710 out_noxbuf:
711 return ret;
712 }
713
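/*
 * Note on the authsize handling above (explanatory addition, with an
 * assumed example): test_aead() recovers the tag length from the vectors
 * as abs(rlen - ilen).  For a GCM vector whose ciphertext is 16 bytes
 * longer than the plaintext this gives the usual 16-byte tag, which is
 * appended to the last scatterlist segment on encryption and stripped
 * again when the decryption results are compared.
 */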
714 static int test_cipher(struct crypto_cipher *tfm, int enc,
715 struct cipher_testvec *template, unsigned int tcount)
716 {
717 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
718 unsigned int i, j, k;
719 char *q;
720 const char *e;
721 void *data;
722 char *xbuf[XBUFSIZE];
723 int ret = -ENOMEM;
724
725 if (testmgr_alloc_buf(xbuf))
726 goto out_nobuf;
727
728 if (enc == ENCRYPT)
729 e = "encryption";
730 else
731 e = "decryption";
732
733 j = 0;
734 for (i = 0; i < tcount; i++) {
735 if (template[i].np)
736 continue;
737
738 j++;
739
740 ret = -EINVAL;
741 if (WARN_ON(template[i].ilen > PAGE_SIZE))
742 goto out;
743
744 data = xbuf[0];
745 memcpy(data, template[i].input, template[i].ilen);
746
747 crypto_cipher_clear_flags(tfm, ~0);
748 if (template[i].wk)
749 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
750
751 ret = crypto_cipher_setkey(tfm, template[i].key,
752 template[i].klen);
753 if (!ret == template[i].fail) {
754 printk(KERN_ERR "alg: cipher: setkey failed "
755 "on test %d for %s: flags=%x\n", j,
756 algo, crypto_cipher_get_flags(tfm));
757 goto out;
758 } else if (ret)
759 continue;
760
761 for (k = 0; k < template[i].ilen;
762 k += crypto_cipher_blocksize(tfm)) {
763 if (enc)
764 crypto_cipher_encrypt_one(tfm, data + k,
765 data + k);
766 else
767 crypto_cipher_decrypt_one(tfm, data + k,
768 data + k);
769 }
770
771 q = data;
772 if (memcmp(q, template[i].result, template[i].rlen)) {
773 printk(KERN_ERR "alg: cipher: Test %d failed "
774 "on %s for %s\n", j, e, algo);
775 hexdump(q, template[i].rlen);
776 ret = -EINVAL;
777 goto out;
778 }
779 else {
780 printk(KERN_ERR "alg: cipher: Test %d passed "
781 "on %s for %s\n", j, e, algo);
782 hexdump(q, template[i].rlen);
783 }
784 }
785
786 ret = 0;
787
788 out:
789 testmgr_free_buf(xbuf);
790 out_nobuf:
791 return ret;
792 }
793
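#if 0
/*
 * Illustrative sketch (the "aes" name is an assumption standing in for
 * whatever single-block cipher is registered): test_cipher() above uses
 * the synchronous API -- setkey once, then transform one blocksize-sized
 * chunk at a time, which is exactly how alg_test_cipher() below wires it up.
 */
static int cipher_block_example(const u8 *key, unsigned int klen, u8 *block)
{
	struct crypto_cipher *tfm;
	int err;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_cipher_setkey(tfm, key, klen);
	if (!err)
		crypto_cipher_encrypt_one(tfm, block, block);	/* in place */

	crypto_free_cipher(tfm);
	return err;
}
#endif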
794 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
795 struct cipher_testvec *template, unsigned int tcount)
796 {
797 const char *algo =
798 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
799 unsigned int i, j, k, n, temp;
800 char *q;
801 struct ablkcipher_request *req;
802 struct scatterlist sg[8];
803 const char *e;
804 struct tcrypt_result result;
805 void *data;
806 char iv[MAX_IVLEN];
807 char *xbuf[XBUFSIZE];
808 int ret = -ENOMEM;
809
810 if (testmgr_alloc_buf(xbuf))
811 goto out_nobuf;
812
813 if (enc == ENCRYPT)
814 e = "encryption";
815 else
816 e = "decryption";
817
818 init_completion(&result.completion);
819
820 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
821 if (!req) {
822 printk(KERN_ERR "alg: skcipher: Failed to allocate request "
823 "for %s\n", algo);
824 goto out;
825 }
826
827 //printk("tcount: %u\n", tcount);
828
829 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
830 tcrypt_complete, &result);
831
832 j = 0;
833 for (i = 0; i < tcount; i++) {
834 if (template[i].iv)
835 memcpy(iv, template[i].iv, MAX_IVLEN);
836 else
837 memset(iv, 0, MAX_IVLEN);
838
839 if (!(template[i].np)) {
840 //printk("np: %d, i: %d, j: %d\n", template[i].np, i, j);
841 j++;
842
843 ret = -EINVAL;
844 if (WARN_ON(template[i].ilen > PAGE_SIZE))
845 goto out;
846
847 data = xbuf[0];
848 memcpy(data, template[i].input, template[i].ilen);
849
850 crypto_ablkcipher_clear_flags(tfm, ~0);
851 if (template[i].wk)
852 crypto_ablkcipher_set_flags(
853 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
854
855 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
856 template[i].klen);
857 if (!ret == template[i].fail) {
858 printk(KERN_ERR "alg: skcipher: setkey failed "
859 "on test %d for %s: flags=%x\n", j,
860 algo, crypto_ablkcipher_get_flags(tfm));
861 printk("ERROR\n");
862 goto out;
863 } else if (ret)
864 continue;
865
866 sg_init_one(&sg[0], data, template[i].ilen);
867
868 ablkcipher_request_set_crypt(req, sg, sg,
869 template[i].ilen, iv);
870 ret = enc ?
871 crypto_ablkcipher_encrypt(req) :
872 crypto_ablkcipher_decrypt(req);
873
874 switch (ret) {
875 case 0:
876 break;
877 case -EINPROGRESS:
878 case -EBUSY:
879 ret = wait_for_completion_interruptible(
880 &result.completion);
881 if (!ret && !((ret = result.err))) {
882 INIT_COMPLETION(result.completion);
883 break;
884 }
885 /* fall through */
886 default:
887 printk(KERN_ERR "alg: skcipher: %s failed on "
888 "test %d for %s: ret=%d\n", e, j, algo,
889 -ret);
890 printk("ERROR\n");
891 goto out;
892 }
893 q = data;
894 if (memcmp(q, template[i].result, template[i].rlen)) {
895 printk(KERN_ERR "alg: skcipher: Test %d "
896 "failed on %s for %s\n", j, e, algo);
897 hexdump(q, template[i].rlen);
898 printk("ERROR\n");
899 ret = -EINVAL;
900 goto out;
901 }
902 else {
903 printk(KERN_ERR "alg: skcipher: Test %d "
904 "*PASSED* on %s for %s\n", j, e, algo);
905 hexdump(q, template[i].rlen);
906 printk("DONE\n");
907 }
908 }
909 }
910 printk("Testing %s chunking across pages.\n", algo);
911 j = 0;
912 for (i = 0; i < tcount; i++) {
913 if (template[i].iv)
914 memcpy(iv, template[i].iv, MAX_IVLEN);
915 else
916 memset(iv, 0, MAX_IVLEN);
917
918 if (template[i].np) {
919 j++;
920
921 crypto_ablkcipher_clear_flags(tfm, ~0);
922 if (template[i].wk)
923 crypto_ablkcipher_set_flags(
924 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
925
926 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
927 template[i].klen);
928 if (!ret == template[i].fail) {
929 printk(KERN_ERR "alg: skcipher: setkey failed "
930 "on chunk test %d for %s: flags=%x\n",
931 j, algo,
932 crypto_ablkcipher_get_flags(tfm));
933 printk("ERROR\n");
934 goto out;
935 } else if (ret)
936 continue;
937
938 temp = 0;
939 ret = -EINVAL;
940 sg_init_table(sg, template[i].np);
941 for (k = 0; k < template[i].np; k++) {
942 if (WARN_ON(offset_in_page(IDX[k]) +
943 template[i].tap[k] > PAGE_SIZE))
944 goto out;
945
946 q = xbuf[IDX[k] >> PAGE_SHIFT] +
947 offset_in_page(IDX[k]);
948
949 memcpy(q, template[i].input + temp,
950 template[i].tap[k]);
951
952 if (offset_in_page(q) + template[i].tap[k] <
953 PAGE_SIZE)
954 q[template[i].tap[k]] = 0;
955
956 sg_set_buf(&sg[k], q, template[i].tap[k]);
957
958 temp += template[i].tap[k];
959 }
960
961 ablkcipher_request_set_crypt(req, sg, sg,
962 template[i].ilen, iv);
963
964 ret = enc ?
965 crypto_ablkcipher_encrypt(req) :
966 crypto_ablkcipher_decrypt(req);
967
968 switch (ret) {
969 case 0:
970 break;
971 case -EINPROGRESS:
972 case -EBUSY:
973 ret = wait_for_completion_interruptible(
974 &result.completion);
975 if (!ret && !((ret = result.err))) {
976 INIT_COMPLETION(result.completion);
977 break;
978 }
979 /* fall through */
980 default:
981 printk(KERN_ERR "alg: skcipher: %s failed on "
982 "chunk test %d for %s: ret=%d\n", e, j,
983 algo, -ret);
984 printk("ERROR\n");
985 goto out;
986 }
987
988 temp = 0;
989 ret = -EINVAL;
990 for (k = 0; k < template[i].np; k++) {
991 q = xbuf[IDX[k] >> PAGE_SHIFT] +
992 offset_in_page(IDX[k]);
993
994 if (memcmp(q, template[i].result + temp,
995 template[i].tap[k])) {
996 printk(KERN_ERR "alg: skcipher: Chunk "
997 "test %d failed on %s at page "
998 "%u for %s\n", j, e, k, algo);
999 hexdump(q, template[i].tap[k]);
1000 printk("ERROR\n");
1001 goto out;
1002 }
1003 else {
1004 printk(KERN_ERR "alg: skcipher: Chunk "
1005 "test %d *PASSED* on %s at page "
1006 "%u for %s\n", j, e, k, algo);
1007 hexdump(q, template[i].tap[k]);
1008 printk("DONE\n");
1009 }
1010
1011 q += template[i].tap[k];
1012 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1013 ;
1014 #if 1
1015 if (n) {
1016 printk(KERN_ERR "alg: skcipher: "
1017 "Result buffer corruption in "
1018 "chunk test %d on %s at page "
1019 "%u for %s: %u bytes:\n", j, e,
1020 k, algo, n);
1021 hexdump(q, n);
1022 printk("ERROR\n");
1023 goto out;
1024 }
1025 else {
1026 printk(KERN_ERR "alg: skcipher: "
1027 "Result buffer clean in "
1028 "chunk test %d on %s at page "
1029 "%u for %s: %u bytes:\n", j, e,
1030 k, algo, n);
1031 hexdump(q, n);
1032 printk("Chunk Buffer clean\n");
1033 }
1034 #endif
1035 temp += template[i].tap[k];
1036 }
1037 }
1038 }
1039
1040 ret = 0;
1041 out:
1042 ablkcipher_request_free(req);
1043 testmgr_free_buf(xbuf);
1044 out_nobuf:
1045 return ret;
1046 }
1047
1048 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1049 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1050 {
1051 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1052 unsigned int i;
1053 char result[COMP_BUF_SIZE];
1054 int ret;
1055
1056 for (i = 0; i < ctcount; i++) {
1057 int ilen;
1058 unsigned int dlen = COMP_BUF_SIZE;
1059
1060 memset(result, 0, sizeof (result));
1061
1062 ilen = ctemplate[i].inlen;
1063 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1064 ilen, result, &dlen);
1065 if (ret) {
1066 printk(KERN_ERR "alg: comp: compression failed "
1067 "on test %d for %s: ret=%d\n", i + 1, algo,
1068 -ret);
1069 goto out;
1070 }
1071
1072 if (dlen != ctemplate[i].outlen) {
1073 printk(KERN_ERR "alg: comp: Compression test %d "
1074 "failed for %s: output len = %d\n", i + 1, algo,
1075 dlen);
1076 ret = -EINVAL;
1077 goto out;
1078 }
1079
1080 if (memcmp(result, ctemplate[i].output, dlen)) {
1081 printk(KERN_ERR "alg: comp: Compression test %d "
1082 "failed for %s\n", i + 1, algo);
1083 hexdump(result, dlen);
1084 ret = -EINVAL;
1085 goto out;
1086 }
1087 else {
1088 printk(KERN_ERR "alg: comp: Compression test %d "
1089 "passed for %s\n", i + 1, algo);
1090 hexdump(result, dlen);
1091 }
1092 }
1093
1094 for (i = 0; i < dtcount; i++) {
1095 int ilen;
1096 unsigned int dlen = COMP_BUF_SIZE;
1097
1098 memset(result, 0, sizeof (result));
1099
1100 ilen = dtemplate[i].inlen;
1101 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1102 ilen, result, &dlen);
1103 if (ret) {
1104 printk(KERN_ERR "alg: comp: decompression failed "
1105 "on test %d for %s: ret=%d\n", i + 1, algo,
1106 -ret);
1107 goto out;
1108 }
1109
1110 if (dlen != dtemplate[i].outlen) {
1111 printk(KERN_ERR "alg: comp: Decompression test %d "
1112 "failed for %s: output len = %d\n", i + 1, algo,
1113 dlen);
1114 ret = -EINVAL;
1115 goto out;
1116 }
1117
1118 if (memcmp(result, dtemplate[i].output, dlen)) {
1119 printk(KERN_ERR "alg: comp: Decompression test %d "
1120 "failed for %s\n", i + 1, algo);
1121 hexdump(result, dlen);
1122 ret = -EINVAL;
1123 goto out;
1124 }
1125 else {
1126 printk(KERN_ERR "alg: comp: Decompression test %d "
1127 "passed for %s\n", i + 1, algo);
1128 hexdump(result, dlen);
1129 }
1130 }
1131
1132 ret = 0;
1133
1134 out:
1135 return ret;
1136 }
1137
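#if 0
/*
 * Illustrative sketch of the one-shot compression API exercised by
 * test_comp() above (the "deflate" name matches the alg_test_descs[] entry
 * further down; the caller-supplied buffer sizes are assumptions).
 */
static int comp_example(const u8 *in, unsigned int inlen,
			u8 *out, unsigned int *outlen)
{
	struct crypto_comp *tfm;
	int err;

	tfm = crypto_alloc_comp("deflate", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* *outlen is in/out: available space in, produced bytes out */
	err = crypto_comp_compress(tfm, in, inlen, out, outlen);

	crypto_free_comp(tfm);
	return err;
}
#endif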
1138 static int test_pcomp(struct crypto_pcomp *tfm,
1139 struct pcomp_testvec *ctemplate,
1140 struct pcomp_testvec *dtemplate, int ctcount,
1141 int dtcount)
1142 {
1143 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1144 unsigned int i;
1145 char result[COMP_BUF_SIZE];
1146 int res;
1147
1148 for (i = 0; i < ctcount; i++) {
1149 struct comp_request req;
1150 unsigned int produced = 0;
1151
1152 res = crypto_compress_setup(tfm, ctemplate[i].params,
1153 ctemplate[i].paramsize);
1154 if (res) {
1155 pr_err("alg: pcomp: compression setup failed on test "
1156 "%d for %s: error=%d\n", i + 1, algo, res);
1157 return res;
1158 }
1159
1160 res = crypto_compress_init(tfm);
1161 if (res) {
1162 pr_err("alg: pcomp: compression init failed on test "
1163 "%d for %s: error=%d\n", i + 1, algo, res);
1164 return res;
1165 }
1166
1167 memset(result, 0, sizeof(result));
1168
1169 req.next_in = ctemplate[i].input;
1170 req.avail_in = ctemplate[i].inlen / 2;
1171 req.next_out = result;
1172 req.avail_out = ctemplate[i].outlen / 2;
1173
1174 res = crypto_compress_update(tfm, &req);
1175 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1176 pr_err("alg: pcomp: compression update failed on test "
1177 "%d for %s: error=%d\n", i + 1, algo, res);
1178 return res;
1179 }
1180 if (res > 0)
1181 produced += res;
1182
1183 /* Add remaining input data */
1184 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1185
1186 res = crypto_compress_update(tfm, &req);
1187 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1188 pr_err("alg: pcomp: compression update failed on test "
1189 "%d for %s: error=%d\n", i + 1, algo, res);
1190 return res;
1191 }
1192 if (res > 0)
1193 produced += res;
1194
1195 /* Provide remaining output space */
1196 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1197
1198 res = crypto_compress_final(tfm, &req);
1199 if (res < 0) {
1200 pr_err("alg: pcomp: compression final failed on test "
1201 "%d for %s: error=%d\n", i + 1, algo, res);
1202 return res;
1203 }
1204 produced += res;
1205
1206 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1207 pr_err("alg: comp: Compression test %d failed for %s: "
1208 "output len = %d (expected %d)\n", i + 1, algo,
1209 COMP_BUF_SIZE - req.avail_out,
1210 ctemplate[i].outlen);
1211 return -EINVAL;
1212 }
1213
1214 if (produced != ctemplate[i].outlen) {
1215 pr_err("alg: comp: Compression test %d failed for %s: "
1216 "returned len = %u (expected %d)\n", i + 1,
1217 algo, produced, ctemplate[i].outlen);
1218 return -EINVAL;
1219 }
1220
1221 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1222 pr_err("alg: pcomp: Compression test %d failed for "
1223 "%s\n", i + 1, algo);
1224 hexdump(result, ctemplate[i].outlen);
1225 return -EINVAL;
1226 }
1227 }
1228
1229 for (i = 0; i < dtcount; i++) {
1230 struct comp_request req;
1231 unsigned int produced = 0;
1232
1233 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1234 dtemplate[i].paramsize);
1235 if (res) {
1236 pr_err("alg: pcomp: decompression setup failed on "
1237 "test %d for %s: error=%d\n", i + 1, algo, res);
1238 return res;
1239 }
1240
1241 res = crypto_decompress_init(tfm);
1242 if (res) {
1243 pr_err("alg: pcomp: decompression init failed on test "
1244 "%d for %s: error=%d\n", i + 1, algo, res);
1245 return res;
1246 }
1247
1248 memset(result, 0, sizeof(result));
1249
1250 req.next_in = dtemplate[i].input;
1251 req.avail_in = dtemplate[i].inlen / 2;
1252 req.next_out = result;
1253 req.avail_out = dtemplate[i].outlen / 2;
1254
1255 res = crypto_decompress_update(tfm, &req);
1256 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1257 pr_err("alg: pcomp: decompression update failed on "
1258 "test %d for %s: error=%d\n", i + 1, algo, res);
1259 return res;
1260 }
1261 if (res > 0)
1262 produced += res;
1263
1264 /* Add remaining input data */
1265 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1266
1267 res = crypto_decompress_update(tfm, &req);
1268 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1269 pr_err("alg: pcomp: decompression update failed on "
1270 "test %d for %s: error=%d\n", i + 1, algo, res);
1271 return res;
1272 }
1273 if (res > 0)
1274 produced += res;
1275
1276 /* Provide remaining output space */
1277 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1278
1279 res = crypto_decompress_final(tfm, &req);
1280 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1281 pr_err("alg: pcomp: decompression final failed on "
1282 "test %d for %s: error=%d\n", i + 1, algo, res);
1283 return res;
1284 }
1285 if (res > 0)
1286 produced += res;
1287
1288 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1289 pr_err("alg: comp: Decompression test %d failed for "
1290 "%s: output len = %d (expected %d)\n", i + 1,
1291 algo, COMP_BUF_SIZE - req.avail_out,
1292 dtemplate[i].outlen);
1293 return -EINVAL;
1294 }
1295
1296 if (produced != dtemplate[i].outlen) {
1297 pr_err("alg: comp: Decompression test %d failed for "
1298 "%s: returned len = %u (expected %d)\n", i + 1,
1299 algo, produced, dtemplate[i].outlen);
1300 return -EINVAL;
1301 }
1302
1303 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1304 pr_err("alg: pcomp: Decompression test %d failed for "
1305 "%s\n", i + 1, algo);
1306 hexdump(result, dtemplate[i].outlen);
1307 return -EINVAL;
1308 }
1309 }
1310
1311 return 0;
1312 }
1313
1314 static int test_ablkcipher_jiffies(struct ablkcipher_request *req, int enc,
1315 int sec, struct tcrypt_result *result,
1316 int blen)
1317 {
1318 unsigned long start, end;
1319 int bcount;
1320 int ret;
1321
1322 for (start = jiffies, end = start + sec * HZ, bcount = 0;
1323 time_before(jiffies, end); bcount++) {
1324
1325 if (enc)
1326 ret = crypto_ablkcipher_encrypt(req);
1327 else
1328 ret = crypto_ablkcipher_decrypt(req);
1329
1330 switch (ret) {
1331 case 0:
1332 break;
1333 case -EINPROGRESS:
1334 case -EBUSY:
1335 ret = wait_for_completion_interruptible(
1336 &result->completion);
1337 if (!ret && !((ret = result->err))) {
1338 INIT_COMPLETION(result->completion);
1339 break;
1340 }
1341 default:
1342 printk("ERROR\n");
1343 return ret;
1344 }
1345 }
1346
1347 printk("%d operations in %d seconds (%ld bytes)\n",
1348 bcount, sec, (long)bcount * blen);
1349
1350 return 0;
1351 }
1352
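/*
 * Worked example (numbers assumed, not measured): with sec = 1 and
 * blen = 8192, a line like "1000 operations in 1 seconds (8192000 bytes)"
 * corresponds to roughly 8.2 MB/s for that block size, since the
 * throughput is simply bcount * blen / sec.
 */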
1353 static int test_ablkcipher_cycles(struct ablkcipher_request *req, int enc,
1354 int sec, struct tcrypt_result *result,
1355 int blen)
1356 {
1357 unsigned long cycles = 0;
1358 int ret = 0;
1359 int i;
1360 unsigned long start, end = 0;
1361 //local_bh_disable();
1362 //local_irq_disable();
1363 /* Warm-up run. */
1364 for (i = 0; i < 4; i++) {
1365 if (enc)
1366 ret = crypto_ablkcipher_encrypt(req);
1367 else
1368 ret = crypto_ablkcipher_decrypt(req);
1369
1370 switch (ret) {
1371 case 0:
1372 break;
1373 case -EINPROGRESS:
1374 case -EBUSY:
1375 #if 0
1376 ret = wait_for_completion_interruptible(
1377 &result->completion);
1378 if (!ret && !((ret = result->err))) {
1379 INIT_COMPLETION(result->completion);
1380 break;
1381 }
1382 #else
1383
1384 wait_for_completion(&result->completion);
1385 INIT_COMPLETION(result->completion);
1386 break;
1387 #endif
1388 default:
1389 printk("ERROR\n");
1390 return ret;
1391 }
1392
1393 if (signal_pending(current)) {
1394 printk("Signal caught\n");
1395 break;
1396 }
1397
1398 }
1399
1400 //printk("Debug ln: (%d), fn: %s\n", __LINE__, __func__);
1401 /* The real thing. */
1402 for (i = 0; i < 8; i++) {
1403 end = 0;
1404 start = 0;
1405 start = read_c0_count();
1406 if (enc)
1407 ret = crypto_ablkcipher_encrypt(req);
1408 else
1409 ret = crypto_ablkcipher_decrypt(req);
1410
1411 switch (ret) {
1412 case 0:
1413 break;
1414 case -EINPROGRESS:
1415 case -EBUSY:
1416 #if 0
1417 ret = wait_for_completion_interruptible(
1418 &result->completion);
1419 end = get_cycles();
1420 if (!ret && !((ret = result->err))) {
1421 INIT_COMPLETION(result->completion);
1422 break;
1423 }
1424 #else
1425 wait_for_completion(&result->completion);
1426 end = read_c0_count();
1427 INIT_COMPLETION(result->completion);
1428 break;
1429 #endif
1430 default:
1431 printk("ERROR\n");
1432 return ret;
1433 }
1434
1435 if (signal_pending(current)) {
1436 printk("Signal caught\n");
1437 break;
1438 }
1439
1440 cycles += end - start;
1441 }
1442
1443 // local_irq_enable();
1444 // local_bh_enable();
1445
1446 printk("1 operation in %lu cycles (%d bytes)\n",
1447 (cycles + 4) / 8, blen);
1448
1449 return 0;
1450
1451 }
1452
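/*
 * Note (explanatory, with assumed numbers): read_c0_count() reads the MIPS
 * CP0 Count register, so the figure printed above is in Count ticks
 * averaged over the 8 timed runs (the "+ 4" rounds the division).  On many
 * MIPS cores Count advances at half the core clock; e.g. 4096 ticks for a
 * 256-byte block works out to 16 ticks per byte.
 */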
1453 static u32 b_size[] = {16, 64, 256, 1024, 8192, 0};
1454
1455 static int test_skcipher_speed(struct crypto_ablkcipher *tfm, int enc,
1456 struct cipher_speed_template *template,
1457 unsigned int tcount, unsigned int sec,
1458 u8* keysize)
1459 {
1460 const char *algo =
1461 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
1462
1463 unsigned int i = 0, j, iv_len;
1464 struct ablkcipher_request *req;
1465 //struct scatterlist sg[8];
1466 const char *e;
1467 struct tcrypt_result result;
1468 char iv[MAX_IVLEN];
1469 static char *xbuf[XBUFSIZE];
1470 int ret = -ENOMEM;
1471 u32 *block_size;
1472 static char *tvmem_buf[4];
1473 const char *key;
1474
1475 if (testmgr_alloc_buf(xbuf))
1476 goto out_nobuf;
1477
1478 if (enc == ENCRYPT)
1479 e = "encryption";
1480 else
1481 e = "decryption";
1482
1483 init_completion(&result.completion);
1484
1485 printk("Start ablkcipher speed test\n");
1486
1487 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
1488 if (!req) {
1489 printk(KERN_ERR "alg: skcipher: Failed to allocate request "
1490 "for %s\n", algo);
1491 goto out;
1492 }
1493
1494 // ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1495 ablkcipher_request_set_callback(req, 0,
1496 tcrypt_complete, &result);
1497
1498 do {
1499
1500 block_size = b_size;
1501
1502 do {
1503 struct scatterlist sg[4];
1504 if ((*keysize + *block_size) > 4 * PAGE_SIZE) {
1505 printk("template (%u) too big for "
1506 "tvmem_buf (%lu)\n", *keysize + *block_size,
1507 4 * PAGE_SIZE);
1508 goto out;
1509 }
1510 crypto_ablkcipher_clear_flags(tfm, ~0);
1511
1512 printk("test %u (%d bit key, %d byte blocks): ", i,
1513 *keysize * 8, *block_size);
1514
1515 memset(xbuf[0], 0xff, PAGE_SIZE);
1516 key = tvmem_buf[0] = xbuf[0];	/* tvmem_buf[] is only filled from xbuf[] below */
1517
1518 for (j = 0; j < tcount; j++) {
1519 if (template[j].klen == *keysize) {
1520 key = template[j].key;
1521 break;
1522 }
1523 }
1524 ret = crypto_ablkcipher_setkey(tfm, key, *keysize);
1525 if (ret) {
1526 printk("Error setting key\n");
1527 goto out;
1528 }
1529
1530 sg_init_table(sg, 4);
1531
1532 for (j = 0; j < 4; j++) {
1533 tvmem_buf[j] = xbuf[j];
1534 memset(tvmem_buf[j], 0xff, PAGE_SIZE);
1535 sg_set_buf(sg + j, tvmem_buf[j], PAGE_SIZE);
1536 }
1537
1538 iv_len = crypto_ablkcipher_ivsize(tfm);
1539 if (iv_len) {
1540 memset(&iv, 0xff, iv_len);
1541 }
1542
1543 ablkcipher_request_set_crypt(req, sg, sg,
1544 *block_size, iv);
1545
1546 //printk("Debug ln: %d, %s\n", __LINE__, __func__);
1547 if (sec)
1548 ret = test_ablkcipher_jiffies(req, enc, sec,
1549 &result, *block_size);
1550 else
1551 ret = test_ablkcipher_cycles(req, enc, sec,
1552 &result, *block_size);
1553
1554
1555 if (ret) {
1556 printk(KERN_ERR "alg: skcipher: %s failed on "
1557 "test %u for %s: ret=%d\n", e, i, algo,
1558 -ret);
1559 goto out;
1560 }
1561
1562 block_size++;
1563 i++;
1564 } while (*block_size);
1565 keysize++;
1566 } while (*keysize);
1567
1568 ret = 0;
1569 out:
1570 printk("End ablkcipher speed test\n");
1571 ablkcipher_request_free(req);
1572 testmgr_free_buf(xbuf);
1573 #if 0
1574 if (!completion_done(&result.completion)) {
1575 printk("There are threads waiting for completion, completing all\n");
1576 complete_all(&result.completion);
1577 }
1578 #endif
1579
1580 //testmgr_free_buf(tvbuf);
1581 out_nobuf:
1582 return ret;
1583
1584 }
1585
1586 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1587 unsigned int tcount)
1588 {
1589 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1590 int err = 0, i, j, seedsize;
1591 u8 *seed;
1592 char result[32];
1593
1594 seedsize = crypto_rng_seedsize(tfm);
1595
1596 seed = kmalloc(seedsize, GFP_KERNEL);
1597 if (!seed) {
1598 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1599 "for %s\n", algo);
1600 return -ENOMEM;
1601 }
1602
1603 for (i = 0; i < tcount; i++) {
1604 memset(result, 0, 32);
1605
1606 memcpy(seed, template[i].v, template[i].vlen);
1607 memcpy(seed + template[i].vlen, template[i].key,
1608 template[i].klen);
1609 memcpy(seed + template[i].vlen + template[i].klen,
1610 template[i].dt, template[i].dtlen);
1611
1612 err = crypto_rng_reset(tfm, seed, seedsize);
1613 if (err) {
1614 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1615 "for %s\n", algo);
1616 goto out;
1617 }
1618
1619 for (j = 0; j < template[i].loops; j++) {
1620 err = crypto_rng_get_bytes(tfm, result,
1621 template[i].rlen);
1622 if (err != template[i].rlen) {
1623 printk(KERN_ERR "alg: cprng: Failed to obtain "
1624 "the correct amount of random data for "
1625 "%s (requested %d, got %d)\n", algo,
1626 template[i].rlen, err);
1627 goto out;
1628 }
1629 }
1630
1631 err = memcmp(result, template[i].result,
1632 template[i].rlen);
1633 if (err) {
1634 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1635 i, algo);
1636 hexdump(result, template[i].rlen);
1637 err = -EINVAL;
1638 goto out;
1639 }
1640 }
1641
1642 out:
1643 kfree(seed);
1644 return err;
1645 }
1646
1647 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1648 u32 type, u32 mask)
1649 {
1650 struct crypto_aead *tfm;
1651 int err = 0;
1652
1653 tfm = crypto_alloc_aead(driver, type, mask);
1654 if (IS_ERR(tfm)) {
1655 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1656 "%ld\n", driver, PTR_ERR(tfm));
1657 return PTR_ERR(tfm);
1658 }
1659
1660 if (desc->suite.aead.enc.vecs) {
1661 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1662 desc->suite.aead.enc.count);
1663 if (err)
1664 goto out;
1665 }
1666
1667 if (!err && desc->suite.aead.dec.vecs)
1668 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1669 desc->suite.aead.dec.count);
1670
1671 out:
1672 crypto_free_aead(tfm);
1673 return err;
1674 }
1675
1676 static int alg_test_cipher(const struct alg_test_desc *desc,
1677 const char *driver, u32 type, u32 mask)
1678 {
1679 struct crypto_cipher *tfm;
1680 int err = 0;
1681
1682 tfm = crypto_alloc_cipher(driver, type, mask);
1683 if (IS_ERR(tfm)) {
1684 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1685 "%s: %ld\n", driver, PTR_ERR(tfm));
1686 return PTR_ERR(tfm);
1687 }
1688
1689 if (desc->suite.cipher.enc.vecs) {
1690 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1691 desc->suite.cipher.enc.count);
1692 if (err)
1693 goto out;
1694 }
1695
1696 if (desc->suite.cipher.dec.vecs)
1697 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1698 desc->suite.cipher.dec.count);
1699
1700 out:
1701 crypto_free_cipher(tfm);
1702 return err;
1703 }
1704
1705 static int alg_test_skcipher(const struct alg_test_desc *desc,
1706 const char *driver, u32 type, u32 mask)
1707 {
1708 struct crypto_ablkcipher *tfm;
1709 int err = 0;
1710
1711 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1712 if (IS_ERR(tfm)) {
1713 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1714 "%s: %ld\n", driver, PTR_ERR(tfm));
1715 return PTR_ERR(tfm);
1716 }
1717
1718 if (desc->suite.cipher.enc.vecs) {
1719 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1720 desc->suite.cipher.enc.count);
1721 if (err)
1722 goto out;
1723 }
1724
1725 if (desc->suite.cipher.dec.vecs)
1726 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1727 desc->suite.cipher.dec.count);
1728
1729 out:
1730 crypto_free_ablkcipher(tfm);
1731 return err;
1732 }
1733
1734 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1735 u32 type, u32 mask)
1736 {
1737 struct crypto_comp *tfm;
1738 int err;
1739
1740 tfm = crypto_alloc_comp(driver, type, mask);
1741 if (IS_ERR(tfm)) {
1742 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1743 "%ld\n", driver, PTR_ERR(tfm));
1744 return PTR_ERR(tfm);
1745 }
1746
1747 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1748 desc->suite.comp.decomp.vecs,
1749 desc->suite.comp.comp.count,
1750 desc->suite.comp.decomp.count);
1751
1752 crypto_free_comp(tfm);
1753 return err;
1754 }
1755
1756 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1757 u32 type, u32 mask)
1758 {
1759 struct crypto_pcomp *tfm;
1760 int err;
1761
1762 tfm = crypto_alloc_pcomp(driver, type, mask);
1763 if (IS_ERR(tfm)) {
1764 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1765 driver, PTR_ERR(tfm));
1766 return PTR_ERR(tfm);
1767 }
1768
1769 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1770 desc->suite.pcomp.decomp.vecs,
1771 desc->suite.pcomp.comp.count,
1772 desc->suite.pcomp.decomp.count);
1773
1774 crypto_free_pcomp(tfm);
1775 return err;
1776 }
1777
1778 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1779 u32 type, u32 mask)
1780 {
1781 struct crypto_ahash *tfm;
1782 int err;
1783
1784 tfm = crypto_alloc_ahash(driver, type, mask);
1785 if (IS_ERR(tfm)) {
1786 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1787 "%ld\n", driver, PTR_ERR(tfm));
1788 return PTR_ERR(tfm);
1789 }
1790
1791 err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count);
1792
1793 crypto_free_ahash(tfm);
1794 return err;
1795 }
1796
1797 static int alg_test_crc32c(const struct alg_test_desc *desc,
1798 const char *driver, u32 type, u32 mask)
1799 {
1800 struct crypto_shash *tfm;
1801 u32 val;
1802 int err;
1803
1804 err = alg_test_hash(desc, driver, type, mask);
1805 if (err)
1806 goto out;
1807
1808 tfm = crypto_alloc_shash(driver, type, mask);
1809 if (IS_ERR(tfm)) {
1810 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1811 "%ld\n", driver, PTR_ERR(tfm));
1812 err = PTR_ERR(tfm);
1813 goto out;
1814 }
1815
1816 do {
1817 struct {
1818 struct shash_desc shash;
1819 char ctx[crypto_shash_descsize(tfm)];
1820 } sdesc;
1821
1822 sdesc.shash.tfm = tfm;
1823 sdesc.shash.flags = 0;
1824
1825 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1826 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1827 if (err) {
1828 printk(KERN_ERR "alg: crc32c: Operation failed for "
1829 "%s: %d\n", driver, err);
1830 break;
1831 }
1832
1833 if (val != ~420553207) {
1834 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1835 "%d\n", driver, val);
1836 err = -EINVAL;
1837 }
1838 } while (0);
1839
1840 crypto_free_shash(tfm);
1841
1842 out:
1843 return err;
1844 }
1845
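/*
 * Note on the check above (explanatory, not from the original sources):
 * the kernel's crc32c shash keeps the running CRC in the descriptor
 * context and returns its bitwise complement in the final step.  Seeding
 * the context with 420553207 and calling crypto_shash_final() without any
 * update must therefore yield ~420553207; any other value points at a
 * broken context layout or final step in the tested driver.
 */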
1846 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1847 u32 type, u32 mask)
1848 {
1849 struct crypto_rng *rng;
1850 int err = 0;
1851
1852 rng = crypto_alloc_rng(driver, type, mask);
1853 if (IS_ERR(rng)) {
1854 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1855 "%ld\n", driver, PTR_ERR(rng));
1856 return PTR_ERR(rng);
1857 }
1858
1859 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1860
1861 crypto_free_rng(rng);
1862
1863 return err;
1864 }
1865
1866 /* Please keep this list sorted by algorithm name. */
1867 static const struct alg_test_desc alg_test_descs[] = {
1868 {
1869 .alg = "ansi_cprng",
1870 .test = alg_test_cprng,
1871 .fips_allowed = 1,
1872 .suite = {
1873 .cprng = {
1874 .vecs = ansi_cprng_aes_tv_template,
1875 .count = ANSI_CPRNG_AES_TEST_VECTORS
1876 }
1877 }
1878 }, {
1879 .alg = "cbc(aes)",
1880 .test = alg_test_skcipher,
1881 .fips_allowed = 1,
1882 .suite = {
1883 .cipher = {
1884 .enc = {
1885 .vecs = aes_cbc_enc_tv_template,
1886 .count = AES_CBC_ENC_TEST_VECTORS
1887 },
1888 .dec = {
1889 .vecs = aes_cbc_dec_tv_template,
1890 .count = AES_CBC_DEC_TEST_VECTORS
1891 }
1892 }
1893 }
1894 }, {
1895 .alg = "cbc(anubis)",
1896 .test = alg_test_skcipher,
1897 .suite = {
1898 .cipher = {
1899 .enc = {
1900 .vecs = anubis_cbc_enc_tv_template,
1901 .count = ANUBIS_CBC_ENC_TEST_VECTORS
1902 },
1903 .dec = {
1904 .vecs = anubis_cbc_dec_tv_template,
1905 .count = ANUBIS_CBC_DEC_TEST_VECTORS
1906 }
1907 }
1908 }
1909 }, {
1910 .alg = "cbc(blowfish)",
1911 .test = alg_test_skcipher,
1912 .suite = {
1913 .cipher = {
1914 .enc = {
1915 .vecs = bf_cbc_enc_tv_template,
1916 .count = BF_CBC_ENC_TEST_VECTORS
1917 },
1918 .dec = {
1919 .vecs = bf_cbc_dec_tv_template,
1920 .count = BF_CBC_DEC_TEST_VECTORS
1921 }
1922 }
1923 }
1924 }, {
1925 .alg = "cbc(camellia)",
1926 .test = alg_test_skcipher,
1927 .suite = {
1928 .cipher = {
1929 .enc = {
1930 .vecs = camellia_cbc_enc_tv_template,
1931 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
1932 },
1933 .dec = {
1934 .vecs = camellia_cbc_dec_tv_template,
1935 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
1936 }
1937 }
1938 }
1939 }, {
1940 .alg = "cbc(des)",
1941 .test = alg_test_skcipher,
1942 .suite = {
1943 .cipher = {
1944 .enc = {
1945 .vecs = des_cbc_enc_tv_template,
1946 .count = DES_CBC_ENC_TEST_VECTORS
1947 },
1948 .dec = {
1949 .vecs = des_cbc_dec_tv_template,
1950 .count = DES_CBC_DEC_TEST_VECTORS
1951 }
1952 }
1953 }
1954 }, {
1955 .alg = "cbc(des3_ede)",
1956 .test = alg_test_skcipher,
1957 .fips_allowed = 1,
1958 .suite = {
1959 .cipher = {
1960 .enc = {
1961 .vecs = des3_ede_cbc_enc_tv_template,
1962 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
1963 },
1964 .dec = {
1965 .vecs = des3_ede_cbc_dec_tv_template,
1966 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
1967 }
1968 }
1969 }
1970 }, {
1971 .alg = "cbc(twofish)",
1972 .test = alg_test_skcipher,
1973 .suite = {
1974 .cipher = {
1975 .enc = {
1976 .vecs = tf_cbc_enc_tv_template,
1977 .count = TF_CBC_ENC_TEST_VECTORS
1978 },
1979 .dec = {
1980 .vecs = tf_cbc_dec_tv_template,
1981 .count = TF_CBC_DEC_TEST_VECTORS
1982 }
1983 }
1984 }
1985 }, {
1986 .alg = "ccm(aes)",
1987 .test = alg_test_aead,
1988 .fips_allowed = 1,
1989 .suite = {
1990 .aead = {
1991 .enc = {
1992 .vecs = aes_ccm_enc_tv_template,
1993 .count = AES_CCM_ENC_TEST_VECTORS
1994 },
1995 .dec = {
1996 .vecs = aes_ccm_dec_tv_template,
1997 .count = AES_CCM_DEC_TEST_VECTORS
1998 }
1999 }
2000 }
2001 }, {
2002 .alg = "crc32c",
2003 .test = alg_test_crc32c,
2004 .fips_allowed = 1,
2005 .suite = {
2006 .hash = {
2007 .vecs = crc32c_tv_template,
2008 .count = CRC32C_TEST_VECTORS
2009 }
2010 }
2011 }, {
2012 .alg = "ctr(aes)",
2013 .test = alg_test_skcipher,
2014 .fips_allowed = 1,
2015 .suite = {
2016 .cipher = {
2017 .enc = {
2018 .vecs = aes_ctr_enc_tv_template,
2019 .count = AES_CTR_ENC_TEST_VECTORS
2020 },
2021 .dec = {
2022 .vecs = aes_ctr_dec_tv_template,
2023 .count = AES_CTR_DEC_TEST_VECTORS
2024 }
2025 }
2026 }
2027 }, {
2028 .alg = "cts(cbc(aes))",
2029 .test = alg_test_skcipher,
2030 .suite = {
2031 .cipher = {
2032 .enc = {
2033 .vecs = cts_mode_enc_tv_template,
2034 .count = CTS_MODE_ENC_TEST_VECTORS
2035 },
2036 .dec = {
2037 .vecs = cts_mode_dec_tv_template,
2038 .count = CTS_MODE_DEC_TEST_VECTORS
2039 }
2040 }
2041 }
2042 }, {
2043 .alg = "deflate",
2044 .test = alg_test_comp,
2045 .suite = {
2046 .comp = {
2047 .comp = {
2048 .vecs = deflate_comp_tv_template,
2049 .count = DEFLATE_COMP_TEST_VECTORS
2050 },
2051 .decomp = {
2052 .vecs = deflate_decomp_tv_template,
2053 .count = DEFLATE_DECOMP_TEST_VECTORS
2054 }
2055 }
2056 }
2057 }, {
2058 .alg = "ecb(aes)",
2059 .test = alg_test_skcipher,
2060 .fips_allowed = 1,
2061 .suite = {
2062 .cipher = {
2063 .enc = {
2064 .vecs = aes_enc_tv_template,
2065 .count = AES_ENC_TEST_VECTORS
2066 },
2067 .dec = {
2068 .vecs = aes_dec_tv_template,
2069 .count = AES_DEC_TEST_VECTORS
2070 }
2071 }
2072 }
2073 }, {
2074 .alg = "ecb(anubis)",
2075 .test = alg_test_skcipher,
2076 .suite = {
2077 .cipher = {
2078 .enc = {
2079 .vecs = anubis_enc_tv_template,
2080 .count = ANUBIS_ENC_TEST_VECTORS
2081 },
2082 .dec = {
2083 .vecs = anubis_dec_tv_template,
2084 .count = ANUBIS_DEC_TEST_VECTORS
2085 }
2086 }
2087 }
2088 }, {
2089 .alg = "ecb(arc4)",
2090 .test = alg_test_skcipher,
2091 .suite = {
2092 .cipher = {
2093 .enc = {
2094 .vecs = arc4_enc_tv_template,
2095 .count = ARC4_ENC_TEST_VECTORS
2096 },
2097 .dec = {
2098 .vecs = arc4_dec_tv_template,
2099 .count = ARC4_DEC_TEST_VECTORS
2100 }
2101 }
2102 }
2103 }, {
2104 .alg = "ecb(blowfish)",
2105 .test = alg_test_skcipher,
2106 .suite = {
2107 .cipher = {
2108 .enc = {
2109 .vecs = bf_enc_tv_template,
2110 .count = BF_ENC_TEST_VECTORS
2111 },
2112 .dec = {
2113 .vecs = bf_dec_tv_template,
2114 .count = BF_DEC_TEST_VECTORS
2115 }
2116 }
2117 }
2118 }, {
2119 .alg = "ecb(camellia)",
2120 .test = alg_test_skcipher,
2121 .suite = {
2122 .cipher = {
2123 .enc = {
2124 .vecs = camellia_enc_tv_template,
2125 .count = CAMELLIA_ENC_TEST_VECTORS
2126 },
2127 .dec = {
2128 .vecs = camellia_dec_tv_template,
2129 .count = CAMELLIA_DEC_TEST_VECTORS
2130 }
2131 }
2132 }
2133 }, {
2134 .alg = "ecb(cast5)",
2135 .test = alg_test_skcipher,
2136 .suite = {
2137 .cipher = {
2138 .enc = {
2139 .vecs = cast5_enc_tv_template,
2140 .count = CAST5_ENC_TEST_VECTORS
2141 },
2142 .dec = {
2143 .vecs = cast5_dec_tv_template,
2144 .count = CAST5_DEC_TEST_VECTORS
2145 }
2146 }
2147 }
2148 }, {
2149 .alg = "ecb(cast6)",
2150 .test = alg_test_skcipher,
2151 .suite = {
2152 .cipher = {
2153 .enc = {
2154 .vecs = cast6_enc_tv_template,
2155 .count = CAST6_ENC_TEST_VECTORS
2156 },
2157 .dec = {
2158 .vecs = cast6_dec_tv_template,
2159 .count = CAST6_DEC_TEST_VECTORS
2160 }
2161 }
2162 }
2163 }, {
2164 .alg = "ecb(des)",
2165 .test = alg_test_skcipher,
2166 .fips_allowed = 1,
2167 .suite = {
2168 .cipher = {
2169 .enc = {
2170 .vecs = des_enc_tv_template,
2171 .count = DES_ENC_TEST_VECTORS
2172 },
2173 .dec = {
2174 .vecs = des_dec_tv_template,
2175 .count = DES_DEC_TEST_VECTORS
2176 }
2177 }
2178 }
2179 }, {
2180 .alg = "ecb(des3_ede)",
2181 .test = alg_test_skcipher,
2182 .fips_allowed = 1,
2183 .suite = {
2184 .cipher = {
2185 .enc = {
2186 .vecs = des3_ede_enc_tv_template,
2187 .count = DES3_EDE_ENC_TEST_VECTORS
2188 },
2189 .dec = {
2190 .vecs = des3_ede_dec_tv_template,
2191 .count = DES3_EDE_DEC_TEST_VECTORS
2192 }
2193 }
2194 }
2195 }, {
2196 .alg = "ecb(khazad)",
2197 .test = alg_test_skcipher,
2198 .suite = {
2199 .cipher = {
2200 .enc = {
2201 .vecs = khazad_enc_tv_template,
2202 .count = KHAZAD_ENC_TEST_VECTORS
2203 },
2204 .dec = {
2205 .vecs = khazad_dec_tv_template,
2206 .count = KHAZAD_DEC_TEST_VECTORS
2207 }
2208 }
2209 }
2210 }, {
2211 .alg = "ecb(seed)",
2212 .test = alg_test_skcipher,
2213 .suite = {
2214 .cipher = {
2215 .enc = {
2216 .vecs = seed_enc_tv_template,
2217 .count = SEED_ENC_TEST_VECTORS
2218 },
2219 .dec = {
2220 .vecs = seed_dec_tv_template,
2221 .count = SEED_DEC_TEST_VECTORS
2222 }
2223 }
2224 }
2225 }, {
2226 .alg = "ecb(serpent)",
2227 .test = alg_test_skcipher,
2228 .suite = {
2229 .cipher = {
2230 .enc = {
2231 .vecs = serpent_enc_tv_template,
2232 .count = SERPENT_ENC_TEST_VECTORS
2233 },
2234 .dec = {
2235 .vecs = serpent_dec_tv_template,
2236 .count = SERPENT_DEC_TEST_VECTORS
2237 }
2238 }
2239 }
2240 }, {
2241 .alg = "ecb(tea)",
2242 .test = alg_test_skcipher,
2243 .suite = {
2244 .cipher = {
2245 .enc = {
2246 .vecs = tea_enc_tv_template,
2247 .count = TEA_ENC_TEST_VECTORS
2248 },
2249 .dec = {
2250 .vecs = tea_dec_tv_template,
2251 .count = TEA_DEC_TEST_VECTORS
2252 }
2253 }
2254 }
2255 }, {
2256 .alg = "ecb(tnepres)",
2257 .test = alg_test_skcipher,
2258 .suite = {
2259 .cipher = {
2260 .enc = {
2261 .vecs = tnepres_enc_tv_template,
2262 .count = TNEPRES_ENC_TEST_VECTORS
2263 },
2264 .dec = {
2265 .vecs = tnepres_dec_tv_template,
2266 .count = TNEPRES_DEC_TEST_VECTORS
2267 }
2268 }
2269 }
2270 }, {
2271 .alg = "ecb(twofish)",
2272 .test = alg_test_skcipher,
2273 .suite = {
2274 .cipher = {
2275 .enc = {
2276 .vecs = tf_enc_tv_template,
2277 .count = TF_ENC_TEST_VECTORS
2278 },
2279 .dec = {
2280 .vecs = tf_dec_tv_template,
2281 .count = TF_DEC_TEST_VECTORS
2282 }
2283 }
2284 }
2285 }, {
2286 .alg = "ecb(xeta)",
2287 .test = alg_test_skcipher,
2288 .suite = {
2289 .cipher = {
2290 .enc = {
2291 .vecs = xeta_enc_tv_template,
2292 .count = XETA_ENC_TEST_VECTORS
2293 },
2294 .dec = {
2295 .vecs = xeta_dec_tv_template,
2296 .count = XETA_DEC_TEST_VECTORS
2297 }
2298 }
2299 }
2300 }, {
2301 .alg = "ecb(xtea)",
2302 .test = alg_test_skcipher,
2303 .suite = {
2304 .cipher = {
2305 .enc = {
2306 .vecs = xtea_enc_tv_template,
2307 .count = XTEA_ENC_TEST_VECTORS
2308 },
2309 .dec = {
2310 .vecs = xtea_dec_tv_template,
2311 .count = XTEA_DEC_TEST_VECTORS
2312 }
2313 }
2314 }
2315 }, {
2316 .alg = "gcm(aes)",
2317 .test = alg_test_aead,
2318 .fips_allowed = 1,
2319 .suite = {
2320 .aead = {
2321 .enc = {
2322 .vecs = aes_gcm_enc_tv_template,
2323 .count = AES_GCM_ENC_TEST_VECTORS
2324 },
2325 .dec = {
2326 .vecs = aes_gcm_dec_tv_template,
2327 .count = AES_GCM_DEC_TEST_VECTORS
2328 }
2329 }
2330 }
2331 }, {
2332 .alg = "hmac(md5)",
2333 .test = alg_test_hash,
2334 .suite = {
2335 .hash = {
2336 .vecs = hmac_md5_tv_template,
2337 .count = HMAC_MD5_TEST_VECTORS
2338 }
2339 }
2340 }, {
2341 .alg = "hmac(rmd128)",
2342 .test = alg_test_hash,
2343 .suite = {
2344 .hash = {
2345 .vecs = hmac_rmd128_tv_template,
2346 .count = HMAC_RMD128_TEST_VECTORS
2347 }
2348 }
2349 }, {
2350 .alg = "hmac(rmd160)",
2351 .test = alg_test_hash,
2352 .suite = {
2353 .hash = {
2354 .vecs = hmac_rmd160_tv_template,
2355 .count = HMAC_RMD160_TEST_VECTORS
2356 }
2357 }
2358 }, {
2359 .alg = "hmac(sha1)",
2360 .test = alg_test_hash,
2361 .fips_allowed = 1,
2362 .suite = {
2363 .hash = {
2364 .vecs = hmac_sha1_tv_template,
2365 .count = HMAC_SHA1_TEST_VECTORS
2366 }
2367 }
2368 }, {
2369 .alg = "hmac(sha224)",
2370 .test = alg_test_hash,
2371 .fips_allowed = 1,
2372 .suite = {
2373 .hash = {
2374 .vecs = hmac_sha224_tv_template,
2375 .count = HMAC_SHA224_TEST_VECTORS
2376 }
2377 }
2378 }, {
2379 .alg = "hmac(sha256)",
2380 .test = alg_test_hash,
2381 .fips_allowed = 1,
2382 .suite = {
2383 .hash = {
2384 .vecs = hmac_sha256_tv_template,
2385 .count = HMAC_SHA256_TEST_VECTORS
2386 }
2387 }
2388 }, {
2389 .alg = "hmac(sha384)",
2390 .test = alg_test_hash,
2391 .fips_allowed = 1,
2392 .suite = {
2393 .hash = {
2394 .vecs = hmac_sha384_tv_template,
2395 .count = HMAC_SHA384_TEST_VECTORS
2396 }
2397 }
2398 }, {
2399 .alg = "hmac(sha512)",
2400 .test = alg_test_hash,
2401 .fips_allowed = 1,
2402 .suite = {
2403 .hash = {
2404 .vecs = hmac_sha512_tv_template,
2405 .count = HMAC_SHA512_TEST_VECTORS
2406 }
2407 }
2408 #if !defined(CONFIG_CRYPTO_DEV_AES) && !defined(CONFIG_CRYPTO_ASYNC_AES)
2409 }, {
2410 .alg = "lrw(aes)",
2411 .test = alg_test_skcipher,
2412 .suite = {
2413 .cipher = {
2414 .enc = {
2415 .vecs = aes_lrw_enc_tv_template,
2416 .count = AES_LRW_ENC_TEST_VECTORS
2417 },
2418 .dec = {
2419 .vecs = aes_lrw_dec_tv_template,
2420 .count = AES_LRW_DEC_TEST_VECTORS
2421 }
2422 }
2423 }
2424 #endif
2425 }, {
2426 .alg = "lzo",
2427 .test = alg_test_comp,
2428 .suite = {
2429 .comp = {
2430 .comp = {
2431 .vecs = lzo_comp_tv_template,
2432 .count = LZO_COMP_TEST_VECTORS
2433 },
2434 .decomp = {
2435 .vecs = lzo_decomp_tv_template,
2436 .count = LZO_DECOMP_TEST_VECTORS
2437 }
2438 }
2439 }
2440 }, {
2441 .alg = "md4",
2442 .test = alg_test_hash,
2443 .suite = {
2444 .hash = {
2445 .vecs = md4_tv_template,
2446 .count = MD4_TEST_VECTORS
2447 }
2448 }
2449 }, {
2450 .alg = "md5",
2451 .test = alg_test_hash,
2452 .suite = {
2453 .hash = {
2454 .vecs = md5_tv_template,
2455 .count = MD5_TEST_VECTORS
2456 }
2457 }
2458 }, {
2459 .alg = "michael_mic",
2460 .test = alg_test_hash,
2461 .suite = {
2462 .hash = {
2463 .vecs = michael_mic_tv_template,
2464 .count = MICHAEL_MIC_TEST_VECTORS
2465 }
2466 }
2467 }, {
2468 .alg = "pcbc(fcrypt)",
2469 .test = alg_test_skcipher,
2470 .suite = {
2471 .cipher = {
2472 .enc = {
2473 .vecs = fcrypt_pcbc_enc_tv_template,
2474 .count = FCRYPT_ENC_TEST_VECTORS
2475 },
2476 .dec = {
2477 .vecs = fcrypt_pcbc_dec_tv_template,
2478 .count = FCRYPT_DEC_TEST_VECTORS
2479 }
2480 }
2481 }
2482
2483 }, {
2484 .alg = "rfc3686(ctr(aes))",
2485 .test = alg_test_skcipher,
2486 .fips_allowed = 1,
2487 .suite = {
2488 .cipher = {
2489 .enc = {
2490 .vecs = aes_ctr_rfc3686_enc_tv_template,
2491 .count = AES_CTR_3686_ENC_TEST_VECTORS
2492 },
2493 .dec = {
2494 .vecs = aes_ctr_rfc3686_dec_tv_template,
2495 .count = AES_CTR_3686_DEC_TEST_VECTORS
2496 }
2497 }
2498 }
2499 }, {
2500 .alg = "rfc4309(ccm(aes))",
2501 .test = alg_test_aead,
2502 .fips_allowed = 1,
2503 .suite = {
2504 .aead = {
2505 .enc = {
2506 .vecs = aes_ccm_rfc4309_enc_tv_template,
2507 .count = AES_CCM_4309_ENC_TEST_VECTORS
2508 },
2509 .dec = {
2510 .vecs = aes_ccm_rfc4309_dec_tv_template,
2511 .count = AES_CCM_4309_DEC_TEST_VECTORS
2512 }
2513 }
2514 }
2515 }, {
2516 .alg = "rmd128",
2517 .test = alg_test_hash,
2518 .suite = {
2519 .hash = {
2520 .vecs = rmd128_tv_template,
2521 .count = RMD128_TEST_VECTORS
2522 }
2523 }
2524 }, {
2525 .alg = "rmd160",
2526 .test = alg_test_hash,
2527 .suite = {
2528 .hash = {
2529 .vecs = rmd160_tv_template,
2530 .count = RMD160_TEST_VECTORS
2531 }
2532 }
2533 }, {
2534 .alg = "rmd256",
2535 .test = alg_test_hash,
2536 .suite = {
2537 .hash = {
2538 .vecs = rmd256_tv_template,
2539 .count = RMD256_TEST_VECTORS
2540 }
2541 }
2542 }, {
2543 .alg = "rmd320",
2544 .test = alg_test_hash,
2545 .suite = {
2546 .hash = {
2547 .vecs = rmd320_tv_template,
2548 .count = RMD320_TEST_VECTORS
2549 }
2550 }
2551 }, {
2552 .alg = "salsa20",
2553 .test = alg_test_skcipher,
2554 .suite = {
2555 .cipher = {
2556 .enc = {
2557 .vecs = salsa20_stream_enc_tv_template,
2558 .count = SALSA20_STREAM_ENC_TEST_VECTORS
2559 }
2560 }
2561 }
2562 }, {
2563 .alg = "sha1",
2564 .test = alg_test_hash,
2565 .fips_allowed = 1,
2566 .suite = {
2567 .hash = {
2568 .vecs = sha1_tv_template,
2569 .count = SHA1_TEST_VECTORS
2570 }
2571 }
2572 }, {
2573 .alg = "sha224",
2574 .test = alg_test_hash,
2575 .fips_allowed = 1,
2576 .suite = {
2577 .hash = {
2578 .vecs = sha224_tv_template,
2579 .count = SHA224_TEST_VECTORS
2580 }
2581 }
2582 }, {
2583 .alg = "sha256",
2584 .test = alg_test_hash,
2585 .fips_allowed = 1,
2586 .suite = {
2587 .hash = {
2588 .vecs = sha256_tv_template,
2589 .count = SHA256_TEST_VECTORS
2590 }
2591 }
2592 }, {
2593 .alg = "sha384",
2594 .test = alg_test_hash,
2595 .fips_allowed = 1,
2596 .suite = {
2597 .hash = {
2598 .vecs = sha384_tv_template,
2599 .count = SHA384_TEST_VECTORS
2600 }
2601 }
2602 }, {
2603 .alg = "sha512",
2604 .test = alg_test_hash,
2605 .fips_allowed = 1,
2606 .suite = {
2607 .hash = {
2608 .vecs = sha512_tv_template,
2609 .count = SHA512_TEST_VECTORS
2610 }
2611 }
2612 }, {
2613 .alg = "tgr128",
2614 .test = alg_test_hash,
2615 .suite = {
2616 .hash = {
2617 .vecs = tgr128_tv_template,
2618 .count = TGR128_TEST_VECTORS
2619 }
2620 }
2621 }, {
2622 .alg = "tgr160",
2623 .test = alg_test_hash,
2624 .suite = {
2625 .hash = {
2626 .vecs = tgr160_tv_template,
2627 .count = TGR160_TEST_VECTORS
2628 }
2629 }
2630 }, {
2631 .alg = "tgr192",
2632 .test = alg_test_hash,
2633 .suite = {
2634 .hash = {
2635 .vecs = tgr192_tv_template,
2636 .count = TGR192_TEST_VECTORS
2637 }
2638 }
2639 }, {
2640 .alg = "vmac(aes)",
2641 .test = alg_test_hash,
2642 .suite = {
2643 .hash = {
2644 .vecs = aes_vmac128_tv_template,
2645 .count = VMAC_AES_TEST_VECTORS
2646 }
2647 }
2648 }, {
2649 .alg = "wp256",
2650 .test = alg_test_hash,
2651 .suite = {
2652 .hash = {
2653 .vecs = wp256_tv_template,
2654 .count = WP256_TEST_VECTORS
2655 }
2656 }
2657 }, {
2658 .alg = "wp384",
2659 .test = alg_test_hash,
2660 .suite = {
2661 .hash = {
2662 .vecs = wp384_tv_template,
2663 .count = WP384_TEST_VECTORS
2664 }
2665 }
2666 }, {
2667 .alg = "wp512",
2668 .test = alg_test_hash,
2669 .suite = {
2670 .hash = {
2671 .vecs = wp512_tv_template,
2672 .count = WP512_TEST_VECTORS
2673 }
2674 }
2675 }, {
2676 .alg = "xcbc(aes)",
2677 .test = alg_test_hash,
2678 .suite = {
2679 .hash = {
2680 .vecs = aes_xcbc128_tv_template,
2681 .count = XCBC_AES_TEST_VECTORS
2682 }
2683 }
2684 #if 0
2685 }, {
2686 .alg = "xts(aes)",
2687 .test = alg_test_skcipher,
2688 .suite = {
2689 .cipher = {
2690 .enc = {
2691 .vecs = aes_xts_enc_tv_template,
2692 .count = AES_XTS_ENC_TEST_VECTORS
2693 },
2694 .dec = {
2695 .vecs = aes_xts_dec_tv_template,
2696 .count = AES_XTS_DEC_TEST_VECTORS
2697 }
2698 }
2699 }
2700 #endif
2701 }, {
2702 .alg = "zlib",
2703 .test = alg_test_pcomp,
2704 .suite = {
2705 .pcomp = {
2706 .comp = {
2707 .vecs = zlib_comp_tv_template,
2708 .count = ZLIB_COMP_TEST_VECTORS
2709 },
2710 .decomp = {
2711 .vecs = zlib_decomp_tv_template,
2712 .count = ZLIB_DECOMP_TEST_VECTORS
2713 }
2714 }
2715 }
2716 }
2717 };
2718
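/*
 * alg_find_test() below is a plain binary search keyed on strcmp(), so the
 * alg_test_descs[] table above must stay sorted alphabetically by .alg.
 * It returns the table index, or -1 when no test entry exists.
 */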
2719 static int alg_find_test(const char *alg)
2720 {
2721 int start = 0;
2722 int end = ARRAY_SIZE(alg_test_descs);
2723
2724 while (start < end) {
2725 int i = (start + end) / 2;
2726 int diff = strcmp(alg_test_descs[i].alg, alg);
2727
2728 if (diff > 0) {
2729 end = i;
2730 continue;
2731 }
2732
2733 if (diff < 0) {
2734 start = i + 1;
2735 continue;
2736 }
2737
2738 return i;
2739 }
2740
2741 return -1;
2742 }
2743
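/*
 * ifx_alg_test(): plain cipher algorithms (CRYPTO_ALG_TYPE_CIPHER) are
 * looked up as "ecb(<alg>)"; everything else is looked up both under the
 * generic algorithm name and under the driver name, and every matching
 * ->test() callback is run.  In fips mode, algorithms without
 * .fips_allowed are rejected and a failed self-test panics.
 */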
2744 static int ifx_alg_test(const char *driver, const char *alg, u32 type, u32 mask)
2745 {
2746 int i;
2747 int j;
2748 int rc;
2749
2750 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
2751 char nalg[CRYPTO_MAX_ALG_NAME];
2752
2753 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
2754 sizeof(nalg))
2755 return -ENAMETOOLONG;
2756
2757 i = alg_find_test(nalg);
2758 if (i < 0)
2759 goto notest;
2760
2761 if (fips_enabled && !alg_test_descs[i].fips_allowed)
2762 goto non_fips_alg;
2763
2764 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
2765 goto test_done;
2766 }
2767
2768 i = alg_find_test(alg);
2769 j = alg_find_test(driver);
2770 if (i < 0 && j < 0)
2771 goto notest;
2772
2773 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
2774 (j >= 0 && !alg_test_descs[j].fips_allowed)))
2775 goto non_fips_alg;
2776
2777 rc = 0;
2778 if (i >= 0)
2779 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
2780 type, mask);
2781 if (j >= 0)
2782 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
2783 type, mask);
2784
2785 test_done:
2786 if (fips_enabled && rc)
2787 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
2788
2789 if (fips_enabled && !rc)
2790 printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
2791 driver, alg);
2792
2793 return rc;
2794
2795 notest:
2796 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
2797 return 0;
2798 non_fips_alg:
2799 return -EINVAL;
2800 }
2801 EXPORT_SYMBOL_GPL(ifx_alg_test);
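
/*
 * Illustrative usage sketch only (not part of the original driver): a
 * module that has just registered an algorithm could self-test it through
 * the helper above.  The driver name "lq_deu-cbc(des)" is a hypothetical
 * placeholder.
 */
#if 0
static int __init example_selftest(void)
{
	/* type/mask of 0: test the default implementation of the algorithm */
	return ifx_alg_test("lq_deu-cbc(des)", "cbc(des)", 0, 0);
}
#endif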
2802
2803 /* Modified speed test for async block cipher mode */
2804
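/*
 * The routine below allocates an ablkcipher transform for 'driver' and
 * runs test_skcipher_speed() over the supplied key template and keysize
 * list, first for ENCRYPT and then for DECRYPT.
 */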
2805 static int ifx_alg_speed_test(const char *driver, const char *alg,
2806 unsigned int sec,
2807 struct cipher_speed_template *template,
2808 unsigned int tcount, u8 *keysize)
2809 {
2810 int i;
2811 int j;
2812 int err;
2813 int type = 0, mask = 0;
2814 struct crypto_ablkcipher *tfm;
2815
2816 i = alg_find_test(alg);
2817 j = alg_find_test(driver);
2818
2819 if (i < 0 && j < 0)
2820 goto notest;
2821
2822 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
2823 (j >= 0 && !alg_test_descs[j].fips_allowed)))
2824 goto non_fips_alg;
2825
2826 tfm = crypto_alloc_ablkcipher(driver, type, mask);
2827
2828 if (IS_ERR(tfm)) {
2829 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
2830 "%s: %ld\n", driver, PTR_ERR(tfm));
2831 return PTR_ERR(tfm);
2832 }
2833 err = test_skcipher_speed(tfm, ENCRYPT, template,
2834 tcount, sec, keysize);
2835 if (err)
2836 goto test_done;
2837
2838 err = test_skcipher_speed(tfm, DECRYPT, template,
2839 tcount, sec, keysize);
2840 /* reach the common exit path even on error so the tfm is freed */
2841 goto test_done;
2842
2843 notest:
2844 return 0;
2845 non_fips_alg:
2846 return -EINVAL;
2847
2848 test_done:
2849 if (fips_enabled && err)
2850 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
2851
2852 if (fips_enabled && !err)
2853 printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
2854 driver, alg);
2855
2856 crypto_free_ablkcipher(tfm);
2857 return err;
2858 }
2859 EXPORT_SYMBOL_GPL(ifx_alg_speed_test);
2860
2861
2862 static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
2863 struct scatterlist *sg, int blen, int sec)
2864 {
2865 unsigned long start, end;
2866 int bcount;
2867 int ret;
2868
2869 for (start = jiffies, end = start + sec * HZ, bcount = 0;
2870 time_before(jiffies, end); bcount++) {
2871 if (enc)
2872 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
2873 else
2874 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
2875
2876 if (ret)
2877 return ret;
2878 }
2879
2880 printk("%d operations in %d seconds (%ld bytes)\n",
2881 bcount, sec, (long)bcount * blen);
2882 return 0;
2883 }
2884
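/*
 * Cycle-accurate measurement: get_cycles() is not usable on this MIPS
 * target (see the commented-out original code below), so the CP0 Count
 * register is read directly via read_c0_count().  Interrupts and softirqs
 * are disabled, 4 warm-up operations are followed by 8 timed ones, and the
 * result is reported as a rounded per-operation average ((cycles + 4) / 8).
 */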
2885 static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
2886 struct scatterlist *sg, int blen)
2887 {
2888 unsigned long cycles = 0;
2889 unsigned long start, end;
2890 int ret = 0;
2891 int i;
2892
2893 local_bh_disable();
2894 local_irq_disable();
2895
2896 /* Warm-up run. */
2897 for (i = 0; i < 4; i++) {
2898 if (enc)
2899 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
2900 else
2901 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
2902
2903 if (ret)
2904 goto out;
2905 }
2906
2907 /* The real thing. */
2908 for (i = 0; i < 8; i++) {
2909 /* Original code to get cycles, does not work with MIPS
2910 * cycles_t start, end;
2911 * start = get_cycles();
2912 */
2913
2914 start = read_c0_count(); // LQ modified tcrypt
2915
2916 if (enc)
2917 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
2918 else
2919 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
2920
2921 /* Original code to get cycles, does not work with MIPS
2922 * end = get_cycles();
2923 */
2924
2925 end = read_c0_count(); // LQ modified tcrypt
2926
2927 if (ret)
2928 goto out;
2929
2930 cycles += end - start;
2931 }
2932
2933 out:
2934 local_irq_enable();
2935 local_bh_enable();
2936
2937 if (ret == 0)
2938 printk("1 operation in %lu cycles (%d bytes)\n",
2939 (cycles + 4) / 8, blen);
2940
2941 return ret;
2942 }
2943
2944 static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
2945
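/*
 * Synchronous blkcipher speed test: for every keysize in 'keysize' and
 * every entry in block_sizes[], the key is taken from 'template' when an
 * entry with a matching key length exists (otherwise an all-0xff buffer),
 * the data is spread over a TVMEMSIZE-entry scatterlist, and the run is
 * timed either for 'sec' seconds (jiffies) or in CPU cycles when sec == 0.
 */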
2946 static void test_cipher_speed(const char *algo, int enc, unsigned int sec,
2947 struct cipher_speed_template *template,
2948 unsigned int tcount, u8 *keysize)
2949 {
2950 unsigned int ret, i, j, iv_len;
2951 const char *key; char iv[128];
2952 struct crypto_blkcipher *tfm;
2953 struct blkcipher_desc desc;
2954 const char *e;
2955 u32 *b_size;
2956
2957 if (enc == ENCRYPT)
2958 e = "encryption";
2959 else
2960 e = "decryption";
2961
2962 printk("\n ******* testing speed of %s %s ******* \n", algo, e);
2963
2964 tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);
2965
2966 if (IS_ERR(tfm)) {
2967 printk("failed to load transform for %s: %ld\n", algo,
2968 PTR_ERR(tfm));
2969 return;
2970 }
2971 desc.tfm = tfm;
2972 desc.flags = 0;
2973
2974 i = 0;
2975 do {
2976
2977 b_size = block_sizes;
2978 do {
2979 struct scatterlist sg[TVMEMSIZE];
2980
2981 if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
2982 printk("template (%u) too big for "
2983 "tvmem (%lu)\n", *keysize + *b_size,
2984 TVMEMSIZE * PAGE_SIZE);
2985 goto out;
2986 }
2987
2988 printk("test %u (%d bit key, %d byte blocks): ", i,
2989 *keysize * 8, *b_size);
2990
2991 memset(tvmem[0], 0xff, PAGE_SIZE);
2992
2993 /* set key, plain text and IV */
2994 key = tvmem[0];
2995 for (j = 0; j < tcount; j++) {
2996 if (template[j].klen == *keysize) {
2997 key = template[j].key;
2998 break;
2999 }
3000 }
3001
3002 ret = crypto_blkcipher_setkey(tfm, key, *keysize);
3003 if (ret) {
3004 printk("setkey() failed flags=%x\n",
3005 crypto_blkcipher_get_flags(tfm));
3006 goto out;
3007 }
3008
3009 sg_init_table(sg, TVMEMSIZE);
3010 sg_set_buf(sg, tvmem[0] + *keysize,
3011 PAGE_SIZE - *keysize);
3012 for (j = 1; j < TVMEMSIZE; j++) {
3013 sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
3014 memset(tvmem[j], 0xff, PAGE_SIZE);
3015 }
3016
3017 iv_len = crypto_blkcipher_ivsize(tfm);
3018 if (iv_len) {
3019 memset(&iv, 0xff, iv_len);
3020 crypto_blkcipher_set_iv(tfm, iv, iv_len);
3021 }
3022
3023 if (sec)
3024 ret = test_cipher_jiffies(&desc, enc, sg,
3025 *b_size, sec);
3026 else
3027 ret = test_cipher_cycles(&desc, enc, sg,
3028 *b_size);
3029
3030 if (ret) {
3031 printk("%s() failed flags=%x\n", e, desc.flags);
3032 break;
3033 }
3034 b_size++;
3035 i++;
3036 } while (*b_size);
3037 keysize++;
3038 } while (*keysize);
3039
3040 out:
3041 crypto_free_blkcipher(tfm);
3042 }
3043
3044 static int test_hash_jiffies_digest(struct hash_desc *desc,
3045 struct scatterlist *sg, int blen,
3046 char *out, int sec)
3047 {
3048 unsigned long start, end;
3049 int bcount;
3050 int ret;
3051
3052 for (start = jiffies, end = start + sec * HZ, bcount = 0;
3053 time_before(jiffies, end); bcount++) {
3054 ret = crypto_hash_digest(desc, sg, blen, out);
3055 if (ret)
3056 return ret;
3057 }
3058
3059 printk("%6u opers/sec, %9lu bytes/sec\n",
3060 bcount / sec, ((long)bcount * blen) / sec);
3061
3062 return 0;
3063 }
3064
3065 static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
3066 int blen, int plen, char *out, int sec)
3067 {
3068 unsigned long start, end;
3069 int bcount, pcount;
3070 int ret;
3071
3072 if (plen == blen)
3073 return test_hash_jiffies_digest(desc, sg, blen, out, sec);
3074
3075 for (start = jiffies, end = start + sec * HZ, bcount = 0;
3076 time_before(jiffies, end); bcount++) {
3077 ret = crypto_hash_init(desc);
3078 if (ret)
3079 return ret;
3080 for (pcount = 0; pcount < blen; pcount += plen) {
3081 ret = crypto_hash_update(desc, sg, plen);
3082 if (ret)
3083 return ret;
3084 }
3085 /* we assume there is enough space in 'out' for the result */
3086 ret = crypto_hash_final(desc, out);
3087 if (ret)
3088 return ret;
3089 }
3090
3091 printk("%6u opers/sec, %9lu bytes/sec\n",
3092 bcount / sec, ((long)bcount * blen) / sec);
3093
3094 return 0;
3095 }
3096
3097 static int test_hash_cycles_digest(struct hash_desc *desc,
3098 struct scatterlist *sg, int blen, char *out)
3099 {
3100 unsigned long cycles = 0;
3101 unsigned long start, end;
3102 int i;
3103 int ret;
3104
3105 local_bh_disable();
3106 local_irq_disable();
3107
3108 /* Warm-up run. */
3109 for (i = 0; i < 4; i++) {
3110 ret = crypto_hash_digest(desc, sg, blen, out);
3111 if (ret)
3112 goto out;
3113 }
3114
3115 /* The real thing. */
3116 for (i = 0; i < 8; i++) {
3117
3118 /* Original code to get cycles, does not work with MIPS
3119 * cycles_t start, end;
3120 * start = get_cycles();
3121 */
3122
3123 start = read_c0_count(); // LQ modified tcrypt
3124
3125 ret = crypto_hash_digest(desc, sg, blen, out);
3126 if (ret)
3127 goto out;
3128
3129 /* Original code to get cycles, does not work with MIPS
3130 * end = get_cycles();
3131 */
3132
3133 end = read_c0_count(); // LQ modified tcrypt
3134
3135 cycles += end - start;
3136 }
3137
3138 out:
3139 local_irq_enable();
3140 local_bh_enable();
3141
3142 if (ret)
3143 return ret;
3144
3145 printk("%6lu cycles/operation, %4lu cycles/byte\n",
3146 cycles / 8, cycles / (8 * blen));
3147
3148 return 0;
3149 }
3150
3151 static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
3152 int blen, int plen, char *out)
3153 {
3154 unsigned long cycles = 0;
3155 unsigned long start, end;
3156 int i, pcount;
3157 int ret;
3158
3159 if (plen == blen)
3160 return test_hash_cycles_digest(desc, sg, blen, out);
3161
3162 local_bh_disable();
3163 local_irq_disable();
3164
3165 /* Warm-up run. */
3166 for (i = 0; i < 4; i++) {
3167 ret = crypto_hash_init(desc);
3168 if (ret)
3169 goto out;
3170 for (pcount = 0; pcount < blen; pcount += plen) {
3171 ret = crypto_hash_update(desc, sg, plen);
3172 if (ret)
3173 goto out;
3174 }
3175 ret = crypto_hash_final(desc, out);
3176 if (ret)
3177 goto out;
3178 }
3179
3180 /* The real thing. */
3181 for (i = 0; i < 8; i++) {
3182
3183 /* Original code to get cycles, does not work with MIPS
3184 * cycles_t start, end;
3185 * start = get_cycles();
3186 */
3187
3188 start = read_c0_count(); // LQ modified tcrypt
3189
3190 ret = crypto_hash_init(desc);
3191 if (ret)
3192 goto out;
3193 for (pcount = 0; pcount < blen; pcount += plen) {
3194 ret = crypto_hash_update(desc, sg, plen);
3195 if (ret)
3196 goto out;
3197 }
3198 ret = crypto_hash_final(desc, out);
3199 if (ret)
3200 goto out;
3201
3202 /* Original code for getting cycles, not working for MIPS
3203 * end = get_cycles();
3204 */
3205
3206 end = read_c0_count(); // LQ modified tcrypt
3207
3208 cycles += end - start;
3209 }
3210
3211 out:
3212 local_irq_enable();
3213 local_bh_enable();
3214
3215 if (ret)
3216 return ret;
3217
3218 printk("%6lu cycles/operation, %4lu cycles/byte\n",
3219 cycles / 8, cycles / (8 * blen));
3220
3221 return 0;
3222 }
3223
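/*
 * Hash speed test: for each entry in 'speed', blen bytes spread over the
 * tvmem pages are hashed in plen-sized updates (or via a single digest()
 * call when plen == blen), timed either for 'sec' seconds or in CPU
 * cycles when sec == 0.
 */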
3224 static void test_hash_speed(const char *algo, unsigned int sec,
3225 struct hash_speed *speed)
3226 {
3227 struct scatterlist sg[TVMEMSIZE];
3228 struct crypto_hash *tfm;
3229 struct hash_desc desc;
3230 static char output[1024];
3231 int i;
3232 int ret;
3233
3234 printk(KERN_INFO "\ntesting speed of %s\n", algo);
3235
3236 tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);
3237
3238 if (IS_ERR(tfm)) {
3239 printk(KERN_ERR "failed to load transform for %s: %ld\n", algo,
3240 PTR_ERR(tfm));
3241 return;
3242 }
3243
3244 desc.tfm = tfm;
3245 desc.flags = 0;
3246
3247 if (crypto_hash_digestsize(tfm) > sizeof(output)) {
3248 printk(KERN_ERR "digestsize(%u) > outputbuffer(%zu)\n",
3249 crypto_hash_digestsize(tfm), sizeof(output));
3250 goto out;
3251 }
3252
3253 sg_init_table(sg, TVMEMSIZE);
3254 for (i = 0; i < TVMEMSIZE; i++) {
3255 sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
3256 memset(tvmem[i], 0xff, PAGE_SIZE);
3257 }
3258
3259 for (i = 0; speed[i].blen != 0; i++) {
3260 if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
3261 printk(KERN_ERR
3262 "template (%u) too big for tvmem (%lu)\n",
3263 speed[i].blen, TVMEMSIZE * PAGE_SIZE);
3264 goto out;
3265 }
3266
3267 printk(KERN_INFO "test%3u "
3268 "(%5u byte blocks,%5u bytes per update,%4u updates): ",
3269 i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
3270
3271 if (sec)
3272 ret = test_hash_jiffies(&desc, sg, speed[i].blen,
3273 speed[i].plen, output, sec);
3274 else
3275 ret = test_hash_cycles(&desc, sg, speed[i].blen,
3276 speed[i].plen, output);
3277
3278 if (ret) {
3279 printk(KERN_ERR "hashing failed ret=%d\n", ret);
3280 break;
3281 }
3282 }
3283
3284 out:
3285 crypto_free_hash(tfm);
3286 }
3287
3288
3289 static void test_available(void)
3290 {
3291 char **name = check;
3292
3293 while (*name) {
3294 printk("alg %s ", *name);
3295 printk(crypto_has_alg(*name, 0, 0) ?
3296 "found\n" : "not found\n");
3297 name++;
3298 }
3299 }
3300
3301 static inline int tcrypt_test(const char *alg)
3302 {
3303 int ret;
3304
3305 printk("Running test %s\n", alg);
3306 ret = ifx_alg_test(alg, alg, 0, 0);
3307 /* non-fips algs return -EINVAL in fips mode */
3308 if (fips_enabled && ret == -EINVAL)
3309 ret = 0;
3310 return ret;
3311 }
3312
3313 static inline int tcrypt_speedtest(const char *alg,
3314 struct cipher_speed_template *template,
3315 unsigned int tcount, u8 *keysize)
3316 {
3317 int ret;
3318
3319 printk("[****** Running speedtest %s *******]\n", alg);
3320 ret = ifx_alg_speed_test(alg, alg, sec, template, tcount, keysize);
3321 if (fips_enabled && ret == -EINVAL)
3322 ret = 0;
3323 return ret;
3324 }
3325
3326
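/*
 * Test mode map (see the case labels below): 0 runs all correctness tests
 * (modes 1-199), 1-199 are correctness tests via tcrypt_test(), 200-299
 * are synchronous block cipher speed tests, 300-399 are hash speed tests,
 * 400 and up are the modified async (ablkcipher) speed tests, and 1000
 * just reports which algorithms from check[] are available.
 */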
3327 static int do_test(int m)
3328 {
3329 int i;
3330 int ret = 0;
3331
3332 switch (m) {
3333 case 0:
3334 for (i = 1; i < 200; i++)
3335 ret += do_test(i);
3336 break;
3337
3338 case 1:
3339 ret += tcrypt_test("md5");
3340 break;
3341
3342 case 2:
3343 ret += tcrypt_test("sha1");
3344 break;
3345
3346 case 3:
3347 ret += tcrypt_test("ecb(des)");
3348 ret += tcrypt_test("cbc(des)");
3349 break;
3350
3351 case 4:
3352 ret += tcrypt_test("ecb(des3_ede)");
3353 ret += tcrypt_test("cbc(des3_ede)");
3354 break;
3355
3356 case 5:
3357 ret += tcrypt_test("md4");
3358 break;
3359
3360 case 6:
3361 ret += tcrypt_test("sha256");
3362 break;
3363
3364 case 7:
3365 ret += tcrypt_test("ecb(blowfish)");
3366 ret += tcrypt_test("cbc(blowfish)");
3367 break;
3368
3369 case 8:
3370 ret += tcrypt_test("ecb(twofish)");
3371 ret += tcrypt_test("cbc(twofish)");
3372 break;
3373
3374 case 9:
3375 ret += tcrypt_test("ecb(serpent)");
3376 break;
3377
3378 case 10:
3379 ret += tcrypt_test("ecb(aes)");
3380 ret += tcrypt_test("cbc(aes)");
3381 // ret += tcrypt_test("lrw(aes)");
3382 // ret += tcrypt_test("xts(aes)");
3383 ret += tcrypt_test("ctr(aes)");
3384 ret += tcrypt_test("rfc3686(ctr(aes))");
3385 break;
3386
3387 case 11:
3388 ret += tcrypt_test("sha384");
3389 break;
3390
3391 case 12:
3392 ret += tcrypt_test("sha512");
3393 break;
3394
3395 case 13:
3396 ret += tcrypt_test("deflate");
3397 break;
3398
3399 case 14:
3400 ret += tcrypt_test("ecb(cast5)");
3401 break;
3402
3403 case 15:
3404 ret += tcrypt_test("ecb(cast6)");
3405 break;
3406
3407 case 16:
3408 ret += tcrypt_test("ecb(arc4)");
3409 break;
3410
3411 case 17:
3412 ret += tcrypt_test("michael_mic");
3413 break;
3414
3415 case 18:
3416 ret += tcrypt_test("crc32c");
3417 break;
3418
3419 case 19:
3420 ret += tcrypt_test("ecb(tea)");
3421 break;
3422
3423 case 20:
3424 ret += tcrypt_test("ecb(xtea)");
3425 break;
3426
3427 case 21:
3428 ret += tcrypt_test("ecb(khazad)");
3429 break;
3430
3431 case 22:
3432 ret += tcrypt_test("wp512");
3433 break;
3434
3435 case 23:
3436 ret += tcrypt_test("wp384");
3437 break;
3438
3439 case 24:
3440 ret += tcrypt_test("wp256");
3441 break;
3442
3443 case 25:
3444 ret += tcrypt_test("ecb(tnepres)");
3445 break;
3446
3447 case 26:
3448 ret += tcrypt_test("ecb(anubis)");
3449 ret += tcrypt_test("cbc(anubis)");
3450 break;
3451
3452 case 27:
3453 ret += tcrypt_test("tgr192");
3454 break;
3455
3456 case 28:
3457
3458 ret += tcrypt_test("tgr160");
3459 break;
3460
3461 case 29:
3462 ret += tcrypt_test("tgr128");
3463 break;
3464
3465 case 30:
3466 ret += tcrypt_test("ecb(xeta)");
3467 break;
3468
3469 case 31:
3470 ret += tcrypt_test("pcbc(fcrypt)");
3471 break;
3472
3473 case 32:
3474 ret += tcrypt_test("ecb(camellia)");
3475 ret += tcrypt_test("cbc(camellia)");
3476 break;
3477 case 33:
3478 ret += tcrypt_test("sha224");
3479 break;
3480
3481 case 34:
3482 ret += tcrypt_test("salsa20");
3483 break;
3484
3485 case 35:
3486 ret += tcrypt_test("gcm(aes)");
3487 break;
3488
3489 case 36:
3490 ret += tcrypt_test("lzo");
3491 break;
3492
3493 case 37:
3494 ret += tcrypt_test("ccm(aes)");
3495 break;
3496
3497 case 38:
3498 ret += tcrypt_test("cts(cbc(aes))");
3499 break;
3500
3501 case 39:
3502 ret += tcrypt_test("rmd128");
3503 break;
3504
3505 case 40:
3506 ret += tcrypt_test("rmd160");
3507 break;
3508
3509 case 41:
3510 ret += tcrypt_test("rmd256");
3511 break;
3512
3513 case 42:
3514 ret += tcrypt_test("rmd320");
3515 break;
3516
3517 case 43:
3518 ret += tcrypt_test("ecb(seed)");
3519 break;
3520
3521 case 44:
3522 ret += tcrypt_test("zlib");
3523 break;
3524
3525 case 45:
3526 ret += tcrypt_test("rfc4309(ccm(aes))");
3527 break;
3528
3529 case 100:
3530 ret += tcrypt_test("hmac(md5)");
3531 break;
3532
3533 case 101:
3534 ret += tcrypt_test("hmac(sha1)");
3535 break;
3536
3537 case 102:
3538 ret += tcrypt_test("hmac(sha256)");
3539 break;
3540
3541 case 103:
3542 ret += tcrypt_test("hmac(sha384)");
3543 break;
3544
3545 case 104:
3546 ret += tcrypt_test("hmac(sha512)");
3547 break;
3548
3549 case 105:
3550 ret += tcrypt_test("hmac(sha224)");
3551 break;
3552
3553 case 106:
3554 ret += tcrypt_test("xcbc(aes)");
3555 break;
3556
3557 case 107:
3558 ret += tcrypt_test("hmac(rmd128)");
3559 break;
3560
3561 case 108:
3562 ret += tcrypt_test("hmac(rmd160)");
3563 break;
3564
3565 case 109:
3566 ret += tcrypt_test("vmac(aes)");
3567 break;
3568
3569 case 150:
3570 ret += tcrypt_test("ansi_cprng");
3571 break;
3572
3573 case 200:
3574 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
3575 speed_template_16_24_32);
3576 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
3577 speed_template_16_24_32);
3578 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
3579 speed_template_16_24_32);
3580 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
3581 speed_template_16_24_32);
3582 #if !defined(CONFIG_CRYPTO_DEV_AES) && !defined(CONFIG_CRYPTO_ASYNC_AES)
3583 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
3584 speed_template_32_40_48);
3585 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
3586 speed_template_32_40_48);
3587 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
3588 speed_template_32_48_64);
3589 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
3590 speed_template_32_48_64);
3591 #endif
3592 break;
3593
3594 case 201:
3595 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
3596 des3_speed_template, DES3_SPEED_VECTORS,
3597 speed_template_24);
3598 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
3599 des3_speed_template, DES3_SPEED_VECTORS,
3600 speed_template_24);
3601 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
3602 des3_speed_template, DES3_SPEED_VECTORS,
3603 speed_template_24);
3604 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
3605 des3_speed_template, DES3_SPEED_VECTORS,
3606 speed_template_24);
3607 break;
3608
3609 case 202:
3610 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
3611 speed_template_16_24_32);
3612 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
3613 speed_template_16_24_32);
3614 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
3615 speed_template_16_24_32);
3616 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
3617 speed_template_16_24_32);
3618 break;
3619
3620 case 203:
3621 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
3622 speed_template_8_32);
3623 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
3624 speed_template_8_32);
3625 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
3626 speed_template_8_32);
3627 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
3628 speed_template_8_32);
3629 break;
3630
3631 case 204:
3632 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
3633 speed_template_8);
3634 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
3635 speed_template_8);
3636 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
3637 speed_template_8);
3638 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
3639 speed_template_8);
3640 break;
3641
3642 case 205:
3643 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
3644 speed_template_16_24_32);
3645 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
3646 speed_template_16_24_32);
3647 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
3648 speed_template_16_24_32);
3649 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
3650 speed_template_16_24_32);
3651 break;
3652
3653 case 206:
3654 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
3655 speed_template_16_32);
3656 break;
3657
3658 case 300:
3659 /* fall through */
3660
3661 case 301:
3662 test_hash_speed("md4", sec, generic_hash_speed_template);
3663 if (mode > 300 && mode < 400) break;
3664
3665 case 302:
3666 test_hash_speed("md5", sec, generic_hash_speed_template);
3667 if (mode > 300 && mode < 400) break;
3668
3669 case 303:
3670 test_hash_speed("sha1", sec, generic_hash_speed_template);
3671 if (mode > 300 && mode < 400) break;
3672
3673 case 304:
3674 test_hash_speed("sha256", sec, generic_hash_speed_template);
3675 if (mode > 300 && mode < 400) break;
3676
3677 case 305:
3678 test_hash_speed("sha384", sec, generic_hash_speed_template);
3679 if (mode > 300 && mode < 400) break;
3680
3681 case 306:
3682 test_hash_speed("sha512", sec, generic_hash_speed_template);
3683 if (mode > 300 && mode < 400) break;
3684
3685 case 307:
3686 test_hash_speed("wp256", sec, generic_hash_speed_template);
3687 if (mode > 300 && mode < 400) break;
3688
3689 case 308:
3690 test_hash_speed("wp384", sec, generic_hash_speed_template);
3691 if (mode > 300 && mode < 400) break;
3692
3693 case 309:
3694 test_hash_speed("wp512", sec, generic_hash_speed_template);
3695 if (mode > 300 && mode < 400) break;
3696
3697 case 310:
3698 test_hash_speed("tgr128", sec, generic_hash_speed_template);
3699 if (mode > 300 && mode < 400) break;
3700
3701 case 311:
3702 test_hash_speed("tgr160", sec, generic_hash_speed_template);
3703 if (mode > 300 && mode < 400) break;
3704
3705 case 312:
3706 test_hash_speed("tgr192", sec, generic_hash_speed_template);
3707 if (mode > 300 && mode < 400) break;
3708
3709 case 313:
3710 test_hash_speed("sha224", sec, generic_hash_speed_template);
3711 if (mode > 300 && mode < 400) break;
3712
3713 case 314:
3714 test_hash_speed("rmd128", sec, generic_hash_speed_template);
3715 if (mode > 300 && mode < 400) break;
3716
3717 case 315:
3718 test_hash_speed("rmd160", sec, generic_hash_speed_template);
3719 if (mode > 300 && mode < 400) break;
3720
3721 case 316:
3722 test_hash_speed("rmd256", sec, generic_hash_speed_template);
3723 if (mode > 300 && mode < 400) break;
3724
3725 case 317:
3726 test_hash_speed("rmd320", sec, generic_hash_speed_template);
3727 if (mode > 300 && mode < 400) break;
3728
3729 case 399:
3730 break;
3731
3732 /* Modified speed test for async block cipher mode */
3733 case 400:
3734 tcrypt_speedtest("ecb(aes)", NULL, 0,
3735 speed_template_16_24_32);
3736 tcrypt_speedtest("cbc(aes)", NULL, 0,
3737 speed_template_16_24_32);
3738 break;
3739
3740 case 401:
3741 tcrypt_speedtest("ecb(des3_ede)", des3_speed_template,
3742 DES3_SPEED_VECTORS,speed_template_24);
3743 tcrypt_speedtest("cbc(des3_ede)", des3_speed_template,
3744 DES3_SPEED_VECTORS,speed_template_24);
3745 break;
3746
3747 case 404:
3748 tcrypt_speedtest("ecb(des)", NULL, 0,
3749 speed_template_8);
3750 tcrypt_speedtest("cbc(des)", NULL, 0,
3751 speed_template_8);
3752 break;
3753
3754 case 1000:
3755 test_available();
3756 break;
3757 }
3758
3759 return ret;
3760 }
3761 #if !defined(CONFIG_CRYPTO_DEV_DEU)
3762 static int do_alg_test(const char *alg, u32 type, u32 mask)
3763 {
3764 return crypto_has_alg(alg, type, mask ?: CRYPTO_ALG_TYPE_MASK) ?
3765 0 : -ENOENT;
3766 }
3767 #endif
3768
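/*
 * Module init: allocates TVMEMSIZE pages of test buffers, then either runs
 * the tests selected by the enabled CONFIG_CRYPTO_DEV_* options (DEU build)
 * or honours the alg/type/mask/mode module parameters, and finally returns
 * -EAGAIN outside fips mode so the module is not kept loaded.
 */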
3769 static int __init tcrypt_mod_init(void)
3770 {
3771 int err = -ENOMEM;
3772 int i;
3773
3774 printk("Starting Lantiq DEU Crypto TESTS . . . . . . .\n");
3775
3776 for (i = 0; i < TVMEMSIZE; i++) {
3777 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
3778 if (!tvmem[i])
3779 goto err_free_tv;
3780 }
3781
3782 #if defined(CONFIG_CRYPTO_DEV_DEU)
3783 #if defined(CONFIG_CRYPTO_DEV_MD5)
3784 mode = 1; // test md5 only
3785 err = do_test(mode);
3786 if (err)
3787 goto md5_err;
3788
3789 md5_err:
3790 if (err) {
3791 printk(KERN_ERR "md5: one or more tests failed!\n");
3792 goto err_free_tv;
3793 }
3794 #endif
3795 #if defined(CONFIG_CRYPTO_DEV_SHA1)
3796 mode = 2; // test sha1 only
3797 err = do_test(mode);
3798 if (err)
3799 goto sha1_err;
3800
3801 sha1_err:
3802 if (err) {
3803 printk(KERN_ERR "sha1: one or more tests failed!\n");
3804 goto err_free_tv;
3805 }
3806 #endif
3807 #if defined (CONFIG_CRYPTO_DEV_DES) || defined (CONFIG_CRYPTO_ASYNC_DES)
3808 mode = 3; // test des only
3809 err = do_test(mode);
3810 if (err)
3811 goto des_err;
3812
3813 mode = 4; // test des3 only
3814 err = do_test(mode);
3815 if (err)
3816 goto des_err;
3817
3818 des_err:
3819 if (err) {
3820 printk(KERN_ERR "des/des3: one or more tests failed!\n");
3821 goto err_free_tv;
3822 }
3823 #endif
3824 #if defined (CONFIG_CRYPTO_ASYNC_AES) || defined (CONFIG_CRYPTO_DEV_AES)
3825 mode = 10; // test aes only
3826 err = do_test(mode);
3827 if (err)
3828 goto aes_err;
3829
3830 aes_err:
3831 if (err) {
3832 printk(KERN_ERR "aes: one or more tests failed!\n");
3833 goto err_free_tv;
3834 }
3835 #endif
3836 #if defined(CONFIG_CRYPTO_DEV_ARC4)
3837 mode = 16;
3838 err = do_test(mode);
3839
3840 if (err) {
3841 printk(KERN_ERR "arc4: one or more tests failed!\n");
3842 goto err_free_tv;
3843 }
3844 #endif
3845 #if defined (CONFIG_CRYPTO_DEV_MD5_HMAC)
3846 mode = 100;
3847 err = do_test(mode);
3848
3849 if (err) {
3850 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
3851 goto err_free_tv;
3852 }
3853 #endif
3854 #if defined (CONFIG_CRYPTO_DEV_SHA1_HMAC)
3855 mode = 101;
3856 err = do_test(mode);
3857
3858 if (err) {
3859 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
3860 goto err_free_tv;
3861 }
3862 #endif
3863
3864 /* Start Speed tests test modes */
3865 #if defined(CONFIG_CRYPTO_DEV_SPEED_TEST)
3866 #if defined(CONFIG_CRYPTO_DEV_AES)
3867 mode = 200;
3868 err = do_test(mode);
3869 if (err)
3870 goto speed_err;
3871 #endif
3872 #if defined (CONFIG_CRYPTO_DEV_DES)
3873 mode = 201;
3874 err = do_test(mode);
3875 if (err)
3876 goto speed_err;
3877
3878 mode = 204;
3879 err = do_test(mode);
3880 if (err)
3881 goto speed_err;
3882 #endif
3883 #if defined (CONFIG_CRYPTO_DEV_MD5)
3884 mode = 302;
3885 err = do_test(mode);
3886 if (err)
3887 goto speed_err;
3888 #endif
3889 #if defined (CONFIG_CRYPTO_DEV_SHA1)
3890 mode = 303;
3891 err = do_test(mode);
3892 if (err)
3893 goto speed_err;
3894 #endif
3895 printk("Speed tests finished successfully\n");
3896 goto fips_check;
3897
3898 speed_err:
3899 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
3900 goto err_free_tv;
3901 #endif /* CONFIG_CRYPTO_DEV_SPEED_TEST */
3902
3903 #else
3904 if (alg)
3905 err = do_alg_test(alg, type, mask);
3906 else
3907 err = do_test(mode);
3908
3909 if (err) {
3910 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
3911 goto err_free_tv;
3912 }
3913 #endif /* CONFIG_CRYPTO_DEV_DEU */
3914
3915 fips_check:
3916 /* We intentionally return -EAGAIN to prevent keeping the module
3917 * loaded, unless we're running in fips mode. It does all its work
3918 * from init() and doesn't offer any runtime functionality, but in
3919 * the fips case, checking for a successful load is helpful.
3920 * => we don't need it in memory, do we?
3921 * -- mludvig
3922 */
3923 if (!fips_enabled)
3924 err = -EAGAIN;
3925
3926 err_free_tv:
3927 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++) {
3928 printk("Freeing page: %d\n", i);
3929 free_page((unsigned long)tvmem[i]);
3930 }
3931
3932 printk("Finished DEU testing . . . . . .\n");
3933 return err;
3934 }
3935
3936 /*
3937 * If an init function is provided, an exit function must also be provided
3938 * to allow module unload.
3939 */
3940 static void __exit tcrypt_mod_fini(void) {}
3941
3942
3943 module_init(tcrypt_mod_init);
3944 module_exit(tcrypt_mod_fini);
3945
3946 module_param(alg, charp, 0);
3947 module_param(type, uint, 0);
3948 module_param(mask, uint, 0);
3949 module_param(mode, int, 0);
3950 module_param(sec, uint, 0);
3951 MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
3952 "(defaults to zero which uses CPU cycles instead)");
3953
3954 MODULE_LICENSE("GPL");
3955 MODULE_DESCRIPTION("Quick & dirty crypto testing module");
3956 MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");
3957