2 * Algorithm testing framework and tests.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * This program is free software; you can redistribute it and/or modify it
10 * under the terms of the GNU General Public License as published by the Free
11 * Software Foundation; either version 2 of the License, or (at your option)
16 #include <crypto/hash.h>
17 #include <linux/err.h>
18 #include <linux/module.h>
19 #include <linux/scatterlist.h>
20 #include <linux/slab.h>
21 #include <linux/string.h>
22 #include <crypto/rng.h>
23 #include <linux/jiffies.h>
24 #include <linux/init.h>
25 #include <linux/moduleparam.h>
26 #include <linux/delay.h>
27 #include <linux/types.h>
28 #include <linux/sched.h>
31 #include "ifxmips_testmgr.h"
32 #include "ifxmips_tcrypt.h"
33 #include "ifxmips_deu.h"
35 /* changes for LQ ablkcipher speedtest */
36 #include <linux/timex.h>
37 #include <linux/interrupt.h>
38 #include <asm/mipsregs.h>
41 * Need slab memory for testing (size in number of pages).
46 * Indexes into the xbuf to simulate cross-page access.
58 * Used by test_cipher()
64 * Need slab memory for testing (size in number of pages).
69 * Used by test_cipher_speed()
75 * Used by test_cipher_speed()
78 #ifndef INIT_COMPLETION
79 #define INIT_COMPLETION(a) reinit_completion(&a)
83 static unsigned int sec
;
85 static char *alg
= NULL
;
89 static char *tvmem
[TVMEMSIZE
];
91 static char *check
[] = {
92 "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
93 "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
94 "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
95 "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
96 "camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
97 "lzo", "cts", "zlib", NULL
99 struct tcrypt_result
{
100 struct completion completion
;
104 struct aead_test_suite
{
106 struct aead_testvec
*vecs
;
111 struct cipher_test_suite
{
113 struct cipher_testvec
*vecs
;
118 struct comp_test_suite
{
120 struct comp_testvec
*vecs
;
125 struct pcomp_test_suite
{
127 struct pcomp_testvec
*vecs
;
132 struct hash_test_suite
{
133 struct hash_testvec
*vecs
;
137 struct cprng_test_suite
{
138 struct cprng_testvec
*vecs
;
142 struct alg_test_desc
{
144 int (*test
)(const struct alg_test_desc
*desc
, const char *driver
,
146 int fips_allowed
; /* set if alg is allowed in fips mode */
149 struct aead_test_suite aead
;
150 struct cipher_test_suite cipher
;
151 struct comp_test_suite comp
;
152 struct pcomp_test_suite pcomp
;
153 struct hash_test_suite hash
;
154 struct cprng_test_suite cprng
;
158 static unsigned int IDX
[8] = { IDX1
, IDX2
, IDX3
, IDX4
, IDX5
, IDX6
, IDX7
, IDX8
};
160 static void hexdump(unsigned char *buf
, unsigned int len
)
162 print_hex_dump(KERN_CONT
, "", DUMP_PREFIX_OFFSET
,
167 static void tcrypt_complete(struct crypto_async_request
*req
, int err
)
169 struct tcrypt_result
*res
= req
->data
;
171 //printk("Signal done test\n");
173 if (err
== -EINPROGRESS
) {
174 printk("********************* Completion didnt go too well **************************** \n");
179 complete_all(&res
->completion
);
182 static int testmgr_alloc_buf(char *buf
[XBUFSIZE
])
186 for (i
= 0; i
< XBUFSIZE
; i
++) {
187 buf
[i
] = (void *)__get_free_page(GFP_KERNEL
);
196 free_page((unsigned long)buf
[i
]);
201 static void testmgr_free_buf(char *buf
[XBUFSIZE
])
205 for (i
= 0; i
< XBUFSIZE
; i
++)
206 free_page((unsigned long)buf
[i
]);
209 static int test_hash(struct crypto_ahash
*tfm
, struct hash_testvec
*template,
212 const char *algo
= crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm
));
213 unsigned int i
, j
, k
, temp
;
214 struct scatterlist sg
[8];
216 struct ahash_request
*req
;
217 struct tcrypt_result tresult
;
219 char *xbuf
[XBUFSIZE
];
222 if (testmgr_alloc_buf(xbuf
))
225 init_completion(&tresult
.completion
);
227 req
= ahash_request_alloc(tfm
, GFP_KERNEL
);
229 printk(KERN_ERR
"alg: hash: Failed to allocate request for "
233 ahash_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
234 tcrypt_complete
, &tresult
);
237 for (i
= 0; i
< tcount
; i
++) {
242 memset(result
, 0, 64);
246 memcpy(hash_buff
, template[i
].plaintext
, template[i
].psize
);
247 sg_init_one(&sg
[0], hash_buff
, template[i
].psize
);
249 if (template[i
].ksize
) {
250 crypto_ahash_clear_flags(tfm
, ~0);
251 ret
= crypto_ahash_setkey(tfm
, template[i
].key
,
254 printk(KERN_ERR
"alg: hash: setkey failed on "
255 "test %d for %s: ret=%d\n", j
, algo
,
261 ahash_request_set_crypt(req
, sg
, result
, template[i
].psize
);
262 ret
= crypto_ahash_digest(req
);
268 ret
= wait_for_completion_interruptible(
269 &tresult
.completion
);
270 if (!ret
&& !(ret
= tresult
.err
)) {
271 INIT_COMPLETION(tresult
.completion
);
276 printk(KERN_ERR
"alg: hash: digest failed on test %d "
277 "for %s: ret=%d\n", j
, algo
, -ret
);
281 if (memcmp(result
, template[i
].digest
,
282 crypto_ahash_digestsize(tfm
))) {
283 printk(KERN_ERR
"alg: hash: Test %d failed for %s\n",
285 hexdump(result
, crypto_ahash_digestsize(tfm
));
290 printk(KERN_ERR
"alg: hash: Test %d passed for %s\n",
292 hexdump(result
, crypto_ahash_digestsize(tfm
));
297 for (i
= 0; i
< tcount
; i
++) {
298 if (template[i
].np
) {
300 memset(result
, 0, 64);
303 sg_init_table(sg
, template[i
].np
);
305 for (k
= 0; k
< template[i
].np
; k
++) {
306 if (WARN_ON(offset_in_page(IDX
[k
]) +
307 template[i
].tap
[k
] > PAGE_SIZE
))
310 memcpy(xbuf
[IDX
[k
] >> PAGE_SHIFT
] +
311 offset_in_page(IDX
[k
]),
312 template[i
].plaintext
+ temp
,
315 temp
+= template[i
].tap
[k
];
318 if (template[i
].ksize
) {
319 crypto_ahash_clear_flags(tfm
, ~0);
320 ret
= crypto_ahash_setkey(tfm
, template[i
].key
,
324 printk(KERN_ERR
"alg: hash: setkey "
325 "failed on chunking test %d "
326 "for %s: ret=%d\n", j
, algo
,
332 ahash_request_set_crypt(req
, sg
, result
,
334 ret
= crypto_ahash_digest(req
);
340 ret
= wait_for_completion_interruptible(
341 &tresult
.completion
);
342 if (!ret
&& !(ret
= tresult
.err
)) {
343 INIT_COMPLETION(tresult
.completion
);
348 printk(KERN_ERR
"alg: hash: digest failed "
349 "on chunking test %d for %s: "
350 "ret=%d\n", j
, algo
, -ret
);
354 if (memcmp(result
, template[i
].digest
,
355 crypto_ahash_digestsize(tfm
))) {
356 printk(KERN_ERR
"alg: hash: Chunking test %d "
357 "failed for %s\n", j
, algo
);
358 hexdump(result
, crypto_ahash_digestsize(tfm
));
363 printk(KERN_ERR
"alg: hash: Chunking test %d "
364 "passed for %s\n", j
, algo
);
365 hexdump(result
, crypto_ahash_digestsize(tfm
));
373 ahash_request_free(req
);
375 testmgr_free_buf(xbuf
);
380 static int test_aead(struct crypto_aead
*tfm
, int enc
,
381 struct aead_testvec
*template, unsigned int tcount
)
383 const char *algo
= crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm
));
384 unsigned int i
, j
, k
, n
, temp
;
388 struct aead_request
*req
;
389 struct scatterlist sg
[8];
390 struct scatterlist asg
[8];
392 struct tcrypt_result result
;
393 unsigned int authsize
;
397 char *xbuf
[XBUFSIZE
];
398 char *axbuf
[XBUFSIZE
];
400 if (testmgr_alloc_buf(xbuf
))
402 if (testmgr_alloc_buf(axbuf
))
410 init_completion(&result
.completion
);
412 req
= aead_request_alloc(tfm
, GFP_KERNEL
);
414 printk(KERN_ERR
"alg: aead: Failed to allocate request for "
419 aead_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
420 tcrypt_complete
, &result
);
422 for (i
= 0, j
= 0; i
< tcount
; i
++) {
423 if (!template[i
].np
) {
426 /* some templates have no input data but they will
433 if (WARN_ON(template[i
].ilen
> PAGE_SIZE
||
434 template[i
].alen
> PAGE_SIZE
))
437 memcpy(input
, template[i
].input
, template[i
].ilen
);
438 memcpy(assoc
, template[i
].assoc
, template[i
].alen
);
440 memcpy(iv
, template[i
].iv
, MAX_IVLEN
);
442 memset(iv
, 0, MAX_IVLEN
);
444 crypto_aead_clear_flags(tfm
, ~0);
446 crypto_aead_set_flags(
447 tfm
, CRYPTO_TFM_REQ_WEAK_KEY
);
449 key
= template[i
].key
;
451 ret
= crypto_aead_setkey(tfm
, key
,
453 if (!ret
== template[i
].fail
) {
454 printk(KERN_ERR
"alg: aead: setkey failed on "
455 "test %d for %s: flags=%x\n", j
, algo
,
456 crypto_aead_get_flags(tfm
));
461 authsize
= abs(template[i
].rlen
- template[i
].ilen
);
462 ret
= crypto_aead_setauthsize(tfm
, authsize
);
464 printk(KERN_ERR
"alg: aead: Failed to set "
465 "authsize to %u on test %d for %s\n",
470 sg_init_one(&sg
[0], input
,
471 template[i
].ilen
+ (enc
? authsize
: 0));
473 sg_init_one(&asg
[0], assoc
, template[i
].alen
);
475 aead_request_set_crypt(req
, sg
, sg
,
476 template[i
].ilen
, iv
);
478 aead_request_set_assoc(req
, asg
, template[i
].alen
);
481 crypto_aead_encrypt(req
) :
482 crypto_aead_decrypt(req
);
486 if (template[i
].novrfy
) {
487 /* verification was supposed to fail */
488 printk(KERN_ERR
"alg: aead: %s failed "
489 "on test %d for %s: ret was 0, "
490 "expected -EBADMSG\n",
492 /* so really, we got a bad message */
499 ret
= wait_for_completion_interruptible(
501 if (!ret
&& !(ret
= result
.err
)) {
502 INIT_COMPLETION(result
.completion
);
506 if (template[i
].novrfy
)
507 /* verification failure was expected */
511 printk(KERN_ERR
"alg: aead: %s failed on test "
512 "%d for %s: ret=%d\n", e
, j
, algo
, -ret
);
517 if (memcmp(q
, template[i
].result
, template[i
].rlen
)) {
518 printk(KERN_ERR
"alg: aead: Test %d failed on "
519 "%s for %s\n", j
, e
, algo
);
520 hexdump(q
, template[i
].rlen
);
525 printk(KERN_ERR
"alg: aead: Test %d passed on "
526 "%s for %s\n", j
, e
, algo
);
527 hexdump(q
, template[i
].rlen
);
532 for (i
= 0, j
= 0; i
< tcount
; i
++) {
533 if (template[i
].np
) {
537 memcpy(iv
, template[i
].iv
, MAX_IVLEN
);
539 memset(iv
, 0, MAX_IVLEN
);
541 crypto_aead_clear_flags(tfm
, ~0);
543 crypto_aead_set_flags(
544 tfm
, CRYPTO_TFM_REQ_WEAK_KEY
);
545 key
= template[i
].key
;
547 ret
= crypto_aead_setkey(tfm
, key
, template[i
].klen
);
548 if (!ret
== template[i
].fail
) {
549 printk(KERN_ERR
"alg: aead: setkey failed on "
550 "chunk test %d for %s: flags=%x\n", j
,
551 algo
, crypto_aead_get_flags(tfm
));
556 authsize
= abs(template[i
].rlen
- template[i
].ilen
);
559 sg_init_table(sg
, template[i
].np
);
560 for (k
= 0, temp
= 0; k
< template[i
].np
; k
++) {
561 if (WARN_ON(offset_in_page(IDX
[k
]) +
562 template[i
].tap
[k
] > PAGE_SIZE
))
565 q
= xbuf
[IDX
[k
] >> PAGE_SHIFT
] +
566 offset_in_page(IDX
[k
]);
568 memcpy(q
, template[i
].input
+ temp
,
571 n
= template[i
].tap
[k
];
572 if (k
== template[i
].np
- 1 && enc
)
574 if (offset_in_page(q
) + n
< PAGE_SIZE
)
577 sg_set_buf(&sg
[k
], q
, template[i
].tap
[k
]);
578 temp
+= template[i
].tap
[k
];
581 ret
= crypto_aead_setauthsize(tfm
, authsize
);
583 printk(KERN_ERR
"alg: aead: Failed to set "
584 "authsize to %u on chunk test %d for "
585 "%s\n", authsize
, j
, algo
);
590 if (WARN_ON(sg
[k
- 1].offset
+
591 sg
[k
- 1].length
+ authsize
>
597 sg
[k
- 1].length
+= authsize
;
600 sg_init_table(asg
, template[i
].anp
);
602 for (k
= 0, temp
= 0; k
< template[i
].anp
; k
++) {
603 if (WARN_ON(offset_in_page(IDX
[k
]) +
604 template[i
].atap
[k
] > PAGE_SIZE
))
607 memcpy(axbuf
[IDX
[k
] >> PAGE_SHIFT
] +
608 offset_in_page(IDX
[k
]),
609 template[i
].assoc
+ temp
,
610 template[i
].atap
[k
]),
611 template[i
].atap
[k
]);
612 temp
+= template[i
].atap
[k
];
615 aead_request_set_crypt(req
, sg
, sg
,
619 aead_request_set_assoc(req
, asg
, template[i
].alen
);
622 crypto_aead_encrypt(req
) :
623 crypto_aead_decrypt(req
);
627 if (template[i
].novrfy
) {
628 /* verification was supposed to fail */
629 printk(KERN_ERR
"alg: aead: %s failed "
630 "on chunk test %d for %s: ret "
631 "was 0, expected -EBADMSG\n",
633 /* so really, we got a bad message */
640 ret
= wait_for_completion_interruptible(
642 if (!ret
&& !(ret
= result
.err
)) {
643 INIT_COMPLETION(result
.completion
);
647 if (template[i
].novrfy
)
648 /* verification failure was expected */
652 printk(KERN_ERR
"alg: aead: %s failed on "
653 "chunk test %d for %s: ret=%d\n", e
, j
,
659 for (k
= 0, temp
= 0; k
< template[i
].np
; k
++) {
660 q
= xbuf
[IDX
[k
] >> PAGE_SHIFT
] +
661 offset_in_page(IDX
[k
]);
663 n
= template[i
].tap
[k
];
664 if (k
== template[i
].np
- 1)
665 n
+= enc
? authsize
: -authsize
;
667 if (memcmp(q
, template[i
].result
+ temp
, n
)) {
668 printk(KERN_ERR
"alg: aead: Chunk "
669 "test %d failed on %s at page "
670 "%u for %s\n", j
, e
, k
, algo
);
675 printk(KERN_ERR
"alg: aead: Chunk "
676 "test %d passed on %s at page "
677 "%u for %s\n", j
, e
, k
, algo
);
682 if (k
== template[i
].np
- 1 && !enc
) {
683 if (memcmp(q
, template[i
].input
+
689 for (n
= 0; offset_in_page(q
+ n
) &&
694 printk(KERN_ERR
"alg: aead: Result "
695 "buffer corruption in chunk "
696 "test %d on %s at page %u for "
697 "%s: %u bytes:\n", j
, e
, k
,
702 temp
+= template[i
].tap
[k
];
710 aead_request_free(req
);
711 testmgr_free_buf(axbuf
);
713 testmgr_free_buf(xbuf
);
718 static int test_cipher(struct crypto_cipher
*tfm
, int enc
,
719 struct cipher_testvec
*template, unsigned int tcount
)
721 const char *algo
= crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm
));
722 unsigned int i
, j
, k
;
726 char *xbuf
[XBUFSIZE
];
729 if (testmgr_alloc_buf(xbuf
))
738 for (i
= 0; i
< tcount
; i
++) {
745 if (WARN_ON(template[i
].ilen
> PAGE_SIZE
))
749 memcpy(data
, template[i
].input
, template[i
].ilen
);
751 crypto_cipher_clear_flags(tfm
, ~0);
753 crypto_cipher_set_flags(tfm
, CRYPTO_TFM_REQ_WEAK_KEY
);
755 ret
= crypto_cipher_setkey(tfm
, template[i
].key
,
757 if (!ret
== template[i
].fail
) {
758 printk(KERN_ERR
"alg: cipher: setkey failed "
759 "on test %d for %s: flags=%x\n", j
,
760 algo
, crypto_cipher_get_flags(tfm
));
765 for (k
= 0; k
< template[i
].ilen
;
766 k
+= crypto_cipher_blocksize(tfm
)) {
768 crypto_cipher_encrypt_one(tfm
, data
+ k
,
771 crypto_cipher_decrypt_one(tfm
, data
+ k
,
776 if (memcmp(q
, template[i
].result
, template[i
].rlen
)) {
777 printk(KERN_ERR
"alg: cipher: Test %d failed "
778 "on %s for %s\n", j
, e
, algo
);
779 hexdump(q
, template[i
].rlen
);
784 printk(KERN_ERR
"alg: cipher: Test %d passed "
785 "on %s for %s\n", j
, e
, algo
);
786 hexdump(q
, template[i
].rlen
);
793 testmgr_free_buf(xbuf
);
798 static int test_skcipher(struct crypto_ablkcipher
*tfm
, int enc
,
799 struct cipher_testvec
*template, unsigned int tcount
)
802 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm
));
803 unsigned int i
, j
, k
, n
, temp
;
805 struct ablkcipher_request
*req
;
806 struct scatterlist sg
[8];
808 struct tcrypt_result result
;
811 char *xbuf
[XBUFSIZE
];
814 if (testmgr_alloc_buf(xbuf
))
822 init_completion(&result
.completion
);
824 req
= ablkcipher_request_alloc(tfm
, GFP_KERNEL
);
826 printk(KERN_ERR
"alg: skcipher: Failed to allocate request "
831 //printk("tcount: %u\n", tcount);
833 ablkcipher_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
834 tcrypt_complete
, &result
);
837 for (i
= 0; i
< tcount
; i
++) {
839 memcpy(iv
, template[i
].iv
, MAX_IVLEN
);
841 memset(iv
, 0, MAX_IVLEN
);
843 if (!(template[i
].np
)) {
844 //printk("np: %d, i: %d, j: %d\n", template[i].np, i, j);
848 if (WARN_ON(template[i
].ilen
> PAGE_SIZE
))
852 memcpy(data
, template[i
].input
, template[i
].ilen
);
854 crypto_ablkcipher_clear_flags(tfm
, ~0);
856 crypto_ablkcipher_set_flags(
857 tfm
, CRYPTO_TFM_REQ_WEAK_KEY
);
859 ret
= crypto_ablkcipher_setkey(tfm
, template[i
].key
,
861 if (!ret
== template[i
].fail
) {
862 printk(KERN_ERR
"alg: skcipher: setkey failed "
863 "on test %d for %s: flags=%x\n", j
,
864 algo
, crypto_ablkcipher_get_flags(tfm
));
870 sg_init_one(&sg
[0], data
, template[i
].ilen
);
872 ablkcipher_request_set_crypt(req
, sg
, sg
,
873 template[i
].ilen
, iv
);
875 crypto_ablkcipher_encrypt(req
) :
876 crypto_ablkcipher_decrypt(req
);
883 ret
= wait_for_completion_interruptible(
885 if (!ret
&& !((ret
= result
.err
))) {
886 INIT_COMPLETION(result
.completion
);
891 printk(KERN_ERR
"alg: skcipher: %s failed on "
892 "test %d for %s: ret=%d\n", e
, j
, algo
,
898 if (memcmp(q
, template[i
].result
, template[i
].rlen
)) {
899 printk(KERN_ERR
"alg: skcipher: Test %d "
900 "failed on %s for %s\n", j
, e
, algo
);
901 hexdump(q
, template[i
].rlen
);
907 printk(KERN_ERR
"alg: skcipher: Test %d "
908 "*PASSED* on %s for %s\n", j
, e
, algo
);
909 hexdump(q
, template[i
].rlen
);
914 printk("Testing %s chunking across pages.\n", algo
);
916 for (i
= 0; i
< tcount
; i
++) {
918 memcpy(iv
, template[i
].iv
, MAX_IVLEN
);
920 memset(iv
, 0, MAX_IVLEN
);
922 if (template[i
].np
) {
925 crypto_ablkcipher_clear_flags(tfm
, ~0);
927 crypto_ablkcipher_set_flags(
928 tfm
, CRYPTO_TFM_REQ_WEAK_KEY
);
930 ret
= crypto_ablkcipher_setkey(tfm
, template[i
].key
,
932 if (!ret
== template[i
].fail
) {
933 printk(KERN_ERR
"alg: skcipher: setkey failed "
934 "on chunk test %d for %s: flags=%x\n",
936 crypto_ablkcipher_get_flags(tfm
));
944 sg_init_table(sg
, template[i
].np
);
945 for (k
= 0; k
< template[i
].np
; k
++) {
946 if (WARN_ON(offset_in_page(IDX
[k
]) +
947 template[i
].tap
[k
] > PAGE_SIZE
))
950 q
= xbuf
[IDX
[k
] >> PAGE_SHIFT
] +
951 offset_in_page(IDX
[k
]);
953 memcpy(q
, template[i
].input
+ temp
,
956 if (offset_in_page(q
) + template[i
].tap
[k
] <
958 q
[template[i
].tap
[k
]] = 0;
960 sg_set_buf(&sg
[k
], q
, template[i
].tap
[k
]);
962 temp
+= template[i
].tap
[k
];
965 ablkcipher_request_set_crypt(req
, sg
, sg
,
966 template[i
].ilen
, iv
);
969 crypto_ablkcipher_encrypt(req
) :
970 crypto_ablkcipher_decrypt(req
);
977 ret
= wait_for_completion_interruptible(
979 if (!ret
&& !((ret
= result
.err
))) {
980 INIT_COMPLETION(result
.completion
);
985 printk(KERN_ERR
"alg: skcipher: %s failed on "
986 "chunk test %d for %s: ret=%d\n", e
, j
,
994 for (k
= 0; k
< template[i
].np
; k
++) {
995 q
= xbuf
[IDX
[k
] >> PAGE_SHIFT
] +
996 offset_in_page(IDX
[k
]);
998 if (memcmp(q
, template[i
].result
+ temp
,
999 template[i
].tap
[k
])) {
1000 printk(KERN_ERR
"alg: skcipher: Chunk "
1001 "test %d failed on %s at page "
1002 "%u for %s\n", j
, e
, k
, algo
);
1003 hexdump(q
, template[i
].tap
[k
]);
1008 printk(KERN_ERR
"alg: skcipher: Chunk "
1009 "test %d *PASSED* on %s at page "
1010 "%u for %s\n", j
, e
, k
, algo
);
1011 hexdump(q
, template[i
].tap
[k
]);
1015 q
+= template[i
].tap
[k
];
1016 for (n
= 0; offset_in_page(q
+ n
) && q
[n
]; n
++)
1020 printk(KERN_ERR
"alg: skcipher: "
1021 "Result buffer corruption in "
1022 "chunk test %d on %s at page "
1023 "%u for %s: %u bytes:\n", j
, e
,
1030 printk(KERN_ERR
"alg: skcipher: "
1031 "Result buffer clean in "
1032 "chunk test %d on %s at page "
1033 "%u for %s: %u bytes:\n", j
, e
,
1036 printk("Chunk Buffer clean\n");
1039 temp
+= template[i
].tap
[k
];
1046 ablkcipher_request_free(req
);
1047 testmgr_free_buf(xbuf
);
1052 static int test_comp(struct crypto_comp
*tfm
, struct comp_testvec
*ctemplate
,
1053 struct comp_testvec
*dtemplate
, int ctcount
, int dtcount
)
1055 const char *algo
= crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm
));
1057 char result
[COMP_BUF_SIZE
];
1060 for (i
= 0; i
< ctcount
; i
++) {
1062 unsigned int dlen
= COMP_BUF_SIZE
;
1064 memset(result
, 0, sizeof (result
));
1066 ilen
= ctemplate
[i
].inlen
;
1067 ret
= crypto_comp_compress(tfm
, ctemplate
[i
].input
,
1068 ilen
, result
, &dlen
);
1070 printk(KERN_ERR
"alg: comp: compression failed "
1071 "on test %d for %s: ret=%d\n", i
+ 1, algo
,
1076 if (dlen
!= ctemplate
[i
].outlen
) {
1077 printk(KERN_ERR
"alg: comp: Compression test %d "
1078 "failed for %s: output len = %d\n", i
+ 1, algo
,
1084 if (memcmp(result
, ctemplate
[i
].output
, dlen
)) {
1085 printk(KERN_ERR
"alg: comp: Compression test %d "
1086 "failed for %s\n", i
+ 1, algo
);
1087 hexdump(result
, dlen
);
1092 printk(KERN_ERR
"alg: comp: Compression test %d "
1093 "passed for %s\n", i
+ 1, algo
);
1094 hexdump(result
, dlen
);
1098 for (i
= 0; i
< dtcount
; i
++) {
1100 unsigned int dlen
= COMP_BUF_SIZE
;
1102 memset(result
, 0, sizeof (result
));
1104 ilen
= dtemplate
[i
].inlen
;
1105 ret
= crypto_comp_decompress(tfm
, dtemplate
[i
].input
,
1106 ilen
, result
, &dlen
);
1108 printk(KERN_ERR
"alg: comp: decompression failed "
1109 "on test %d for %s: ret=%d\n", i
+ 1, algo
,
1114 if (dlen
!= dtemplate
[i
].outlen
) {
1115 printk(KERN_ERR
"alg: comp: Decompression test %d "
1116 "failed for %s: output len = %d\n", i
+ 1, algo
,
1122 if (memcmp(result
, dtemplate
[i
].output
, dlen
)) {
1123 printk(KERN_ERR
"alg: comp: Decompression test %d "
1124 "failed for %s\n", i
+ 1, algo
);
1125 hexdump(result
, dlen
);
1130 printk(KERN_ERR
"alg: comp: Decompression test %d "
1131 "passed for %s\n", i
+ 1, algo
);
1132 hexdump(result
, dlen
);
1142 static int test_pcomp(struct crypto_pcomp
*tfm
,
1143 struct pcomp_testvec
*ctemplate
,
1144 struct pcomp_testvec
*dtemplate
, int ctcount
,
1147 const char *algo
= crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm
));
1149 char result
[COMP_BUF_SIZE
];
1152 for (i
= 0; i
< ctcount
; i
++) {
1153 struct comp_request req
;
1154 unsigned int produced
= 0;
1156 res
= crypto_compress_setup(tfm
, ctemplate
[i
].params
,
1157 ctemplate
[i
].paramsize
);
1159 pr_err("alg: pcomp: compression setup failed on test "
1160 "%d for %s: error=%d\n", i
+ 1, algo
, res
);
1164 res
= crypto_compress_init(tfm
);
1166 pr_err("alg: pcomp: compression init failed on test "
1167 "%d for %s: error=%d\n", i
+ 1, algo
, res
);
1171 memset(result
, 0, sizeof(result
));
1173 req
.next_in
= ctemplate
[i
].input
;
1174 req
.avail_in
= ctemplate
[i
].inlen
/ 2;
1175 req
.next_out
= result
;
1176 req
.avail_out
= ctemplate
[i
].outlen
/ 2;
1178 res
= crypto_compress_update(tfm
, &req
);
1179 if (res
< 0 && (res
!= -EAGAIN
|| req
.avail_in
)) {
1180 pr_err("alg: pcomp: compression update failed on test "
1181 "%d for %s: error=%d\n", i
+ 1, algo
, res
);
1187 /* Add remaining input data */
1188 req
.avail_in
+= (ctemplate
[i
].inlen
+ 1) / 2;
1190 res
= crypto_compress_update(tfm
, &req
);
1191 if (res
< 0 && (res
!= -EAGAIN
|| req
.avail_in
)) {
1192 pr_err("alg: pcomp: compression update failed on test "
1193 "%d for %s: error=%d\n", i
+ 1, algo
, res
);
1199 /* Provide remaining output space */
1200 req
.avail_out
+= COMP_BUF_SIZE
- ctemplate
[i
].outlen
/ 2;
1202 res
= crypto_compress_final(tfm
, &req
);
1204 pr_err("alg: pcomp: compression final failed on test "
1205 "%d for %s: error=%d\n", i
+ 1, algo
, res
);
1210 if (COMP_BUF_SIZE
- req
.avail_out
!= ctemplate
[i
].outlen
) {
1211 pr_err("alg: comp: Compression test %d failed for %s: "
1212 "output len = %d (expected %d)\n", i
+ 1, algo
,
1213 COMP_BUF_SIZE
- req
.avail_out
,
1214 ctemplate
[i
].outlen
);
1218 if (produced
!= ctemplate
[i
].outlen
) {
1219 pr_err("alg: comp: Compression test %d failed for %s: "
1220 "returned len = %u (expected %d)\n", i
+ 1,
1221 algo
, produced
, ctemplate
[i
].outlen
);
1225 if (memcmp(result
, ctemplate
[i
].output
, ctemplate
[i
].outlen
)) {
1226 pr_err("alg: pcomp: Compression test %d failed for "
1227 "%s\n", i
+ 1, algo
);
1228 hexdump(result
, ctemplate
[i
].outlen
);
1233 for (i
= 0; i
< dtcount
; i
++) {
1234 struct comp_request req
;
1235 unsigned int produced
= 0;
1237 res
= crypto_decompress_setup(tfm
, dtemplate
[i
].params
,
1238 dtemplate
[i
].paramsize
);
1240 pr_err("alg: pcomp: decompression setup failed on "
1241 "test %d for %s: error=%d\n", i
+ 1, algo
, res
);
1245 res
= crypto_decompress_init(tfm
);
1247 pr_err("alg: pcomp: decompression init failed on test "
1248 "%d for %s: error=%d\n", i
+ 1, algo
, res
);
1252 memset(result
, 0, sizeof(result
));
1254 req
.next_in
= dtemplate
[i
].input
;
1255 req
.avail_in
= dtemplate
[i
].inlen
/ 2;
1256 req
.next_out
= result
;
1257 req
.avail_out
= dtemplate
[i
].outlen
/ 2;
1259 res
= crypto_decompress_update(tfm
, &req
);
1260 if (res
< 0 && (res
!= -EAGAIN
|| req
.avail_in
)) {
1261 pr_err("alg: pcomp: decompression update failed on "
1262 "test %d for %s: error=%d\n", i
+ 1, algo
, res
);
1268 /* Add remaining input data */
1269 req
.avail_in
+= (dtemplate
[i
].inlen
+ 1) / 2;
1271 res
= crypto_decompress_update(tfm
, &req
);
1272 if (res
< 0 && (res
!= -EAGAIN
|| req
.avail_in
)) {
1273 pr_err("alg: pcomp: decompression update failed on "
1274 "test %d for %s: error=%d\n", i
+ 1, algo
, res
);
1280 /* Provide remaining output space */
1281 req
.avail_out
+= COMP_BUF_SIZE
- dtemplate
[i
].outlen
/ 2;
1283 res
= crypto_decompress_final(tfm
, &req
);
1284 if (res
< 0 && (res
!= -EAGAIN
|| req
.avail_in
)) {
1285 pr_err("alg: pcomp: decompression final failed on "
1286 "test %d for %s: error=%d\n", i
+ 1, algo
, res
);
1292 if (COMP_BUF_SIZE
- req
.avail_out
!= dtemplate
[i
].outlen
) {
1293 pr_err("alg: comp: Decompression test %d failed for "
1294 "%s: output len = %d (expected %d)\n", i
+ 1,
1295 algo
, COMP_BUF_SIZE
- req
.avail_out
,
1296 dtemplate
[i
].outlen
);
1300 if (produced
!= dtemplate
[i
].outlen
) {
1301 pr_err("alg: comp: Decompression test %d failed for "
1302 "%s: returned len = %u (expected %d)\n", i
+ 1,
1303 algo
, produced
, dtemplate
[i
].outlen
);
1307 if (memcmp(result
, dtemplate
[i
].output
, dtemplate
[i
].outlen
)) {
1308 pr_err("alg: pcomp: Decompression test %d failed for "
1309 "%s\n", i
+ 1, algo
);
1310 hexdump(result
, dtemplate
[i
].outlen
);
1318 static int test_ablkcipher_jiffies(struct ablkcipher_request
*req
, int enc
,
1319 int sec
, struct tcrypt_result
*result
,
1322 unsigned long start
, end
;
1326 for (start
= jiffies
, end
= start
+ sec
* HZ
, bcount
= 0;
1327 time_before(jiffies
, end
); bcount
++) {
1330 ret
= crypto_ablkcipher_encrypt(req
);
1332 ret
= crypto_ablkcipher_decrypt(req
);
1339 ret
= wait_for_completion_interruptible(
1340 &result
->completion
);
1341 if (!ret
&& !((ret
= result
->err
))) {
1342 INIT_COMPLETION(result
->completion
);
1351 printk("%d operations in %d seconds (%ld bytes)\n",
1352 bcount
, sec
, (long)bcount
* blen
);
1357 static int test_ablkcipher_cycles(struct ablkcipher_request
*req
, int enc
,
1358 int sec
, struct tcrypt_result
*result
,
1361 unsigned long cycles
= 0;
1364 unsigned long start
, end
= 0;
1365 //local_bh_disable();
1366 //local_irq_disable();
1368 for (i
= 0; i
< 4; i
++) {
1370 ret
= crypto_ablkcipher_encrypt(req
);
1372 ret
= crypto_ablkcipher_decrypt(req
);
1380 ret
= wait_for_completion_interruptible(
1381 &result
->completion
);
1382 if (!ret
&& !((ret
= result
->err
))) {
1383 INIT_COMPLETION(result
->completion
);
1388 wait_for_completion(&result
->completion
);
1389 INIT_COMPLETION(result
->completion
);
1397 if (signal_pending(current
)) {
1398 printk("Signal caught\n");
1404 //printk("Debug ln: (%d), fn: %s\n", __LINE__, __func__);
1405 /* The real thing. */
1406 for (i
= 0; i
< 8; i
++) {
1409 start
= read_c0_count();
1411 ret
= crypto_ablkcipher_encrypt(req
);
1413 ret
= crypto_ablkcipher_decrypt(req
);
1421 ret
= wait_for_completion_interruptible(
1422 &result
->completion
);
1424 if (!ret
&& !((ret
= result
->err
))) {
1425 INIT_COMPLETION(result
->completion
);
1429 wait_for_completion(&result
->completion
);
1430 end
= read_c0_count();
1431 INIT_COMPLETION(result
->completion
);
1439 if (signal_pending(current
)) {
1440 printk("Signal caught\n");
1444 cycles
+= end
- start
;
1447 // local_irq_enable();
1448 // local_bh_enable();
1450 printk("1 operation in %lu cycles (%d bytes)\n",
1451 (cycles
+ 4) / 8, blen
);
1457 static u32 b_size
[] = {16, 64, 256, 1024, 8192, 0};
1459 static int test_skcipher_speed(struct crypto_ablkcipher
*tfm
, int enc
,
1460 struct cipher_speed_template
*template,
1461 unsigned int tcount
, unsigned int sec
,
1465 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm
));
1467 unsigned int i
= 0, j
, iv_len
;
1468 struct ablkcipher_request
*req
;
1469 //struct scatterlist sg[8];
1471 struct tcrypt_result result
;
1473 static char *xbuf
[XBUFSIZE
];
1476 static char *tvmem_buf
[4];
1479 if (testmgr_alloc_buf(xbuf
))
1487 init_completion(&result
.completion
);
1489 printk("Start ablkcipher speed test\n");
1491 req
= ablkcipher_request_alloc(tfm
, GFP_KERNEL
);
1493 printk(KERN_ERR
"alg: skcipher: Failed to allocate request "
1498 // ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1499 ablkcipher_request_set_callback(req
, 0,
1500 tcrypt_complete
, &result
);
1504 block_size
= b_size
;
1507 struct scatterlist sg
[4];
1508 if ((*keysize
+ *block_size
) > 4 * PAGE_SIZE
) {
1509 printk("template (%u) too big for "
1510 "tvmem_buf (%lu)\n", *keysize
+ *block_size
,
1514 crypto_ablkcipher_clear_flags(tfm
, ~0);
1516 printk("test %u (%d bit key, %d byte blocks): ", i
,
1517 *keysize
* 8, *block_size
);
1519 memset(tvmem_buf
[0], 0xff, PAGE_SIZE
);
1522 for (j
= 0; j
< tcount
; j
++) {
1523 if (template[j
].klen
== *keysize
) {
1524 key
= template[j
].key
;
1528 ret
= crypto_ablkcipher_setkey(tfm
, key
, *keysize
);
1530 printk("Error setting of keys\n");
1534 sg_init_table(sg
, 4);
1536 for (j
= 0; j
< 4; j
++) {
1537 tvmem_buf
[j
] = xbuf
[j
];
1538 memset(tvmem_buf
[j
], 0xff, PAGE_SIZE
);
1539 sg_set_buf(sg
+ j
, tvmem_buf
[j
], PAGE_SIZE
);
1542 iv_len
= crypto_ablkcipher_ivsize(tfm
);
1544 memset(&iv
, 0xff, iv_len
);
1547 ablkcipher_request_set_crypt(req
, sg
, sg
,
1550 //printk("Debug ln: %d, %s\n", __LINE__, __func__);
1552 ret
= test_ablkcipher_jiffies(req
, enc
, sec
,
1553 &result
, *block_size
);
1555 ret
= test_ablkcipher_cycles(req
, enc
, sec
,
1556 &result
, *block_size
);
1560 printk(KERN_ERR
"alg: skcipher: %s failed on "
1561 "test %d for %s: ret=%d\n", e
, j
, algo
,
1568 } while (*block_size
);
1574 printk("End ablkcipher speed test\n");
1575 ablkcipher_request_free(req
);
1576 testmgr_free_buf(xbuf
);
1578 if (!completion_done(&result
->completion
)) {
1579 printk("There are threads waiting for completion, completing all\n");
1580 complete_all(&result
->completion
);
1584 //testmgr_free_buf(tvbuf);
1590 static int test_cprng(struct crypto_rng
*tfm
, struct cprng_testvec
*template,
1591 unsigned int tcount
)
1593 const char *algo
= crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm
));
1594 int err
= 0, i
, j
, seedsize
;
1598 seedsize
= crypto_rng_seedsize(tfm
);
1600 seed
= kmalloc(seedsize
, GFP_KERNEL
);
1602 printk(KERN_ERR
"alg: cprng: Failed to allocate seed space "
1607 for (i
= 0; i
< tcount
; i
++) {
1608 memset(result
, 0, 32);
1610 memcpy(seed
, template[i
].v
, template[i
].vlen
);
1611 memcpy(seed
+ template[i
].vlen
, template[i
].key
,
1613 memcpy(seed
+ template[i
].vlen
+ template[i
].klen
,
1614 template[i
].dt
, template[i
].dtlen
);
1616 err
= crypto_rng_reset(tfm
, seed
, seedsize
);
1618 printk(KERN_ERR
"alg: cprng: Failed to reset rng "
1623 for (j
= 0; j
< template[i
].loops
; j
++) {
1624 err
= crypto_rng_get_bytes(tfm
, result
,
1626 if (err
!= template[i
].rlen
) {
1627 printk(KERN_ERR
"alg: cprng: Failed to obtain "
1628 "the correct amount of random data for "
1629 "%s (requested %d, got %d)\n", algo
,
1630 template[i
].rlen
, err
);
1635 err
= memcmp(result
, template[i
].result
,
1638 printk(KERN_ERR
"alg: cprng: Test %d failed for %s\n",
1640 hexdump(result
, template[i
].rlen
);
1651 static int alg_test_aead(const struct alg_test_desc
*desc
, const char *driver
,
1654 struct crypto_aead
*tfm
;
1657 tfm
= crypto_alloc_aead(driver
, type
, mask
);
1659 printk(KERN_ERR
"alg: aead: Failed to load transform for %s: "
1660 "%ld\n", driver
, PTR_ERR(tfm
));
1661 return PTR_ERR(tfm
);
1664 if (desc
->suite
.aead
.enc
.vecs
) {
1665 err
= test_aead(tfm
, ENCRYPT
, desc
->suite
.aead
.enc
.vecs
,
1666 desc
->suite
.aead
.enc
.count
);
1671 if (!err
&& desc
->suite
.aead
.dec
.vecs
)
1672 err
= test_aead(tfm
, DECRYPT
, desc
->suite
.aead
.dec
.vecs
,
1673 desc
->suite
.aead
.dec
.count
);
1676 crypto_free_aead(tfm
);
1680 static int alg_test_cipher(const struct alg_test_desc
*desc
,
1681 const char *driver
, u32 type
, u32 mask
)
1683 struct crypto_cipher
*tfm
;
1686 tfm
= crypto_alloc_cipher(driver
, type
, mask
);
1688 printk(KERN_ERR
"alg: cipher: Failed to load transform for "
1689 "%s: %ld\n", driver
, PTR_ERR(tfm
));
1690 return PTR_ERR(tfm
);
1693 if (desc
->suite
.cipher
.enc
.vecs
) {
1694 err
= test_cipher(tfm
, ENCRYPT
, desc
->suite
.cipher
.enc
.vecs
,
1695 desc
->suite
.cipher
.enc
.count
);
1700 if (desc
->suite
.cipher
.dec
.vecs
)
1701 err
= test_cipher(tfm
, DECRYPT
, desc
->suite
.cipher
.dec
.vecs
,
1702 desc
->suite
.cipher
.dec
.count
);
1705 crypto_free_cipher(tfm
);
1709 static int alg_test_skcipher(const struct alg_test_desc
*desc
,
1710 const char *driver
, u32 type
, u32 mask
)
1712 struct crypto_ablkcipher
*tfm
;
1715 tfm
= crypto_alloc_ablkcipher(driver
, type
, mask
);
1717 printk(KERN_ERR
"alg: skcipher: Failed to load transform for "
1718 "%s: %ld\n", driver
, PTR_ERR(tfm
));
1719 return PTR_ERR(tfm
);
1722 if (desc
->suite
.cipher
.enc
.vecs
) {
1723 err
= test_skcipher(tfm
, ENCRYPT
, desc
->suite
.cipher
.enc
.vecs
,
1724 desc
->suite
.cipher
.enc
.count
);
1729 if (desc
->suite
.cipher
.dec
.vecs
)
1730 err
= test_skcipher(tfm
, DECRYPT
, desc
->suite
.cipher
.dec
.vecs
,
1731 desc
->suite
.cipher
.dec
.count
);
1734 crypto_free_ablkcipher(tfm
);
1738 static int alg_test_comp(const struct alg_test_desc
*desc
, const char *driver
,
1741 struct crypto_comp
*tfm
;
1744 tfm
= crypto_alloc_comp(driver
, type
, mask
);
1746 printk(KERN_ERR
"alg: comp: Failed to load transform for %s: "
1747 "%ld\n", driver
, PTR_ERR(tfm
));
1748 return PTR_ERR(tfm
);
1751 err
= test_comp(tfm
, desc
->suite
.comp
.comp
.vecs
,
1752 desc
->suite
.comp
.decomp
.vecs
,
1753 desc
->suite
.comp
.comp
.count
,
1754 desc
->suite
.comp
.decomp
.count
);
1756 crypto_free_comp(tfm
);
1760 static int alg_test_pcomp(const struct alg_test_desc
*desc
, const char *driver
,
1763 struct crypto_pcomp
*tfm
;
1766 tfm
= crypto_alloc_pcomp(driver
, type
, mask
);
1768 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1769 driver
, PTR_ERR(tfm
));
1770 return PTR_ERR(tfm
);
1773 err
= test_pcomp(tfm
, desc
->suite
.pcomp
.comp
.vecs
,
1774 desc
->suite
.pcomp
.decomp
.vecs
,
1775 desc
->suite
.pcomp
.comp
.count
,
1776 desc
->suite
.pcomp
.decomp
.count
);
1778 crypto_free_pcomp(tfm
);
1782 static int alg_test_hash(const struct alg_test_desc
*desc
, const char *driver
,
1785 struct crypto_ahash
*tfm
;
1788 tfm
= crypto_alloc_ahash(driver
, type
, mask
);
1790 printk(KERN_ERR
"alg: hash: Failed to load transform for %s: "
1791 "%ld\n", driver
, PTR_ERR(tfm
));
1792 return PTR_ERR(tfm
);
1795 err
= test_hash(tfm
, desc
->suite
.hash
.vecs
, desc
->suite
.hash
.count
);
1797 crypto_free_ahash(tfm
);
1801 static int alg_test_crc32c(const struct alg_test_desc
*desc
,
1802 const char *driver
, u32 type
, u32 mask
)
1804 struct crypto_shash
*tfm
;
1808 err
= alg_test_hash(desc
, driver
, type
, mask
);
1812 tfm
= crypto_alloc_shash(driver
, type
, mask
);
1814 printk(KERN_ERR
"alg: crc32c: Failed to load transform for %s: "
1815 "%ld\n", driver
, PTR_ERR(tfm
));
1822 struct shash_desc shash
;
1823 char ctx
[crypto_shash_descsize(tfm
)];
1826 sdesc
.shash
.tfm
= tfm
;
1827 sdesc
.shash
.flags
= 0;
1829 *(u32
*)sdesc
.ctx
= le32_to_cpu(420553207);
1830 err
= crypto_shash_final(&sdesc
.shash
, (u8
*)&val
);
1832 printk(KERN_ERR
"alg: crc32c: Operation failed for "
1833 "%s: %d\n", driver
, err
);
1837 if (val
!= ~420553207) {
1838 printk(KERN_ERR
"alg: crc32c: Test failed for %s: "
1839 "%d\n", driver
, val
);
1844 crypto_free_shash(tfm
);
1850 static int alg_test_cprng(const struct alg_test_desc
*desc
, const char *driver
,
1853 struct crypto_rng
*rng
;
1856 rng
= crypto_alloc_rng(driver
, type
, mask
);
1858 printk(KERN_ERR
"alg: cprng: Failed to load transform for %s: "
1859 "%ld\n", driver
, PTR_ERR(rng
));
1860 return PTR_ERR(rng
);
1863 err
= test_cprng(rng
, desc
->suite
.cprng
.vecs
, desc
->suite
.cprng
.count
);
1865 crypto_free_rng(rng
);
1870 /* Please keep this list sorted by algorithm name. */
1871 static const struct alg_test_desc alg_test_descs
[] = {
1873 .alg
= "ansi_cprng",
1874 .test
= alg_test_cprng
,
1878 .vecs
= ansi_cprng_aes_tv_template
,
1879 .count
= ANSI_CPRNG_AES_TEST_VECTORS
1884 .test
= alg_test_skcipher
,
1889 .vecs
= aes_cbc_enc_tv_template
,
1890 .count
= AES_CBC_ENC_TEST_VECTORS
1893 .vecs
= aes_cbc_dec_tv_template
,
1894 .count
= AES_CBC_DEC_TEST_VECTORS
1899 .alg
= "cbc(anubis)",
1900 .test
= alg_test_skcipher
,
1904 .vecs
= anubis_cbc_enc_tv_template
,
1905 .count
= ANUBIS_CBC_ENC_TEST_VECTORS
1908 .vecs
= anubis_cbc_dec_tv_template
,
1909 .count
= ANUBIS_CBC_DEC_TEST_VECTORS
1914 .alg
= "cbc(blowfish)",
1915 .test
= alg_test_skcipher
,
1919 .vecs
= bf_cbc_enc_tv_template
,
1920 .count
= BF_CBC_ENC_TEST_VECTORS
1923 .vecs
= bf_cbc_dec_tv_template
,
1924 .count
= BF_CBC_DEC_TEST_VECTORS
1929 .alg
= "cbc(camellia)",
1930 .test
= alg_test_skcipher
,
1934 .vecs
= camellia_cbc_enc_tv_template
,
1935 .count
= CAMELLIA_CBC_ENC_TEST_VECTORS
1938 .vecs
= camellia_cbc_dec_tv_template
,
1939 .count
= CAMELLIA_CBC_DEC_TEST_VECTORS
1945 .test
= alg_test_skcipher
,
1949 .vecs
= des_cbc_enc_tv_template
,
1950 .count
= DES_CBC_ENC_TEST_VECTORS
1953 .vecs
= des_cbc_dec_tv_template
,
1954 .count
= DES_CBC_DEC_TEST_VECTORS
1959 .alg
= "cbc(des3_ede)",
1960 .test
= alg_test_skcipher
,
1965 .vecs
= des3_ede_cbc_enc_tv_template
,
1966 .count
= DES3_EDE_CBC_ENC_TEST_VECTORS
1969 .vecs
= des3_ede_cbc_dec_tv_template
,
1970 .count
= DES3_EDE_CBC_DEC_TEST_VECTORS
1975 .alg
= "cbc(twofish)",
1976 .test
= alg_test_skcipher
,
1980 .vecs
= tf_cbc_enc_tv_template
,
1981 .count
= TF_CBC_ENC_TEST_VECTORS
1984 .vecs
= tf_cbc_dec_tv_template
,
1985 .count
= TF_CBC_DEC_TEST_VECTORS
1991 .test
= alg_test_aead
,
1996 .vecs
= aes_ccm_enc_tv_template
,
1997 .count
= AES_CCM_ENC_TEST_VECTORS
2000 .vecs
= aes_ccm_dec_tv_template
,
2001 .count
= AES_CCM_DEC_TEST_VECTORS
2007 .test
= alg_test_crc32c
,
2011 .vecs
= crc32c_tv_template
,
2012 .count
= CRC32C_TEST_VECTORS
2017 .test
= alg_test_skcipher
,
2022 .vecs
= aes_ctr_enc_tv_template
,
2023 .count
= AES_CTR_ENC_TEST_VECTORS
2026 .vecs
= aes_ctr_dec_tv_template
,
2027 .count
= AES_CTR_DEC_TEST_VECTORS
2032 .alg
= "cts(cbc(aes))",
2033 .test
= alg_test_skcipher
,
2037 .vecs
= cts_mode_enc_tv_template
,
2038 .count
= CTS_MODE_ENC_TEST_VECTORS
2041 .vecs
= cts_mode_dec_tv_template
,
2042 .count
= CTS_MODE_DEC_TEST_VECTORS
2048 .test
= alg_test_comp
,
2052 .vecs
= deflate_comp_tv_template
,
2053 .count
= DEFLATE_COMP_TEST_VECTORS
2056 .vecs
= deflate_decomp_tv_template
,
2057 .count
= DEFLATE_DECOMP_TEST_VECTORS
2063 .test
= alg_test_skcipher
,
2068 .vecs
= aes_enc_tv_template
,
2069 .count
= AES_ENC_TEST_VECTORS
2072 .vecs
= aes_dec_tv_template
,
2073 .count
= AES_DEC_TEST_VECTORS
2078 .alg
= "ecb(anubis)",
2079 .test
= alg_test_skcipher
,
2083 .vecs
= anubis_enc_tv_template
,
2084 .count
= ANUBIS_ENC_TEST_VECTORS
2087 .vecs
= anubis_dec_tv_template
,
2088 .count
= ANUBIS_DEC_TEST_VECTORS
2094 .test
= alg_test_skcipher
,
2098 .vecs
= arc4_enc_tv_template
,
2099 .count
= ARC4_ENC_TEST_VECTORS
2102 .vecs
= arc4_dec_tv_template
,
2103 .count
= ARC4_DEC_TEST_VECTORS
2108 .alg
= "ecb(blowfish)",
2109 .test
= alg_test_skcipher
,
2113 .vecs
= bf_enc_tv_template
,
2114 .count
= BF_ENC_TEST_VECTORS
2117 .vecs
= bf_dec_tv_template
,
2118 .count
= BF_DEC_TEST_VECTORS
2123 .alg
= "ecb(camellia)",
2124 .test
= alg_test_skcipher
,
2128 .vecs
= camellia_enc_tv_template
,
2129 .count
= CAMELLIA_ENC_TEST_VECTORS
2132 .vecs
= camellia_dec_tv_template
,
2133 .count
= CAMELLIA_DEC_TEST_VECTORS
2138 .alg
= "ecb(cast5)",
2139 .test
= alg_test_skcipher
,
2143 .vecs
= cast5_enc_tv_template
,
2144 .count
= CAST5_ENC_TEST_VECTORS
2147 .vecs
= cast5_dec_tv_template
,
2148 .count
= CAST5_DEC_TEST_VECTORS
2153 .alg
= "ecb(cast6)",
2154 .test
= alg_test_skcipher
,
2158 .vecs
= cast6_enc_tv_template
,
2159 .count
= CAST6_ENC_TEST_VECTORS
2162 .vecs
= cast6_dec_tv_template
,
2163 .count
= CAST6_DEC_TEST_VECTORS
2169 .test
= alg_test_skcipher
,
2174 .vecs
= des_enc_tv_template
,
2175 .count
= DES_ENC_TEST_VECTORS
2178 .vecs
= des_dec_tv_template
,
2179 .count
= DES_DEC_TEST_VECTORS
2184 .alg
= "ecb(des3_ede)",
2185 .test
= alg_test_skcipher
,
2190 .vecs
= des3_ede_enc_tv_template
,
2191 .count
= DES3_EDE_ENC_TEST_VECTORS
2194 .vecs
= des3_ede_dec_tv_template
,
2195 .count
= DES3_EDE_DEC_TEST_VECTORS
2200 .alg
= "ecb(khazad)",
2201 .test
= alg_test_skcipher
,
2205 .vecs
= khazad_enc_tv_template
,
2206 .count
= KHAZAD_ENC_TEST_VECTORS
2209 .vecs
= khazad_dec_tv_template
,
2210 .count
= KHAZAD_DEC_TEST_VECTORS
2216 .test
= alg_test_skcipher
,
2220 .vecs
= seed_enc_tv_template
,
2221 .count
= SEED_ENC_TEST_VECTORS
2224 .vecs
= seed_dec_tv_template
,
2225 .count
= SEED_DEC_TEST_VECTORS
2230 .alg
= "ecb(serpent)",
2231 .test
= alg_test_skcipher
,
2235 .vecs
= serpent_enc_tv_template
,
2236 .count
= SERPENT_ENC_TEST_VECTORS
2239 .vecs
= serpent_dec_tv_template
,
2240 .count
= SERPENT_DEC_TEST_VECTORS
2246 .test
= alg_test_skcipher
,
2250 .vecs
= tea_enc_tv_template
,
2251 .count
= TEA_ENC_TEST_VECTORS
2254 .vecs
= tea_dec_tv_template
,
2255 .count
= TEA_DEC_TEST_VECTORS
2260 .alg
= "ecb(tnepres)",
2261 .test
= alg_test_skcipher
,
2265 .vecs
= tnepres_enc_tv_template
,
2266 .count
= TNEPRES_ENC_TEST_VECTORS
2269 .vecs
= tnepres_dec_tv_template
,
2270 .count
= TNEPRES_DEC_TEST_VECTORS
2275 .alg
= "ecb(twofish)",
2276 .test
= alg_test_skcipher
,
2280 .vecs
= tf_enc_tv_template
,
2281 .count
= TF_ENC_TEST_VECTORS
2284 .vecs
= tf_dec_tv_template
,
2285 .count
= TF_DEC_TEST_VECTORS
2291 .test
= alg_test_skcipher
,
2295 .vecs
= xeta_enc_tv_template
,
2296 .count
= XETA_ENC_TEST_VECTORS
2299 .vecs
= xeta_dec_tv_template
,
2300 .count
= XETA_DEC_TEST_VECTORS
2306 .test
= alg_test_skcipher
,
2310 .vecs
= xtea_enc_tv_template
,
2311 .count
= XTEA_ENC_TEST_VECTORS
2314 .vecs
= xtea_dec_tv_template
,
2315 .count
= XTEA_DEC_TEST_VECTORS
2321 .test
= alg_test_aead
,
2326 .vecs
= aes_gcm_enc_tv_template
,
2327 .count
= AES_GCM_ENC_TEST_VECTORS
2330 .vecs
= aes_gcm_dec_tv_template
,
2331 .count
= AES_GCM_DEC_TEST_VECTORS
2337 .test
= alg_test_hash
,
2340 .vecs
= hmac_md5_tv_template
,
2341 .count
= HMAC_MD5_TEST_VECTORS
2345 .alg
= "hmac(rmd128)",
2346 .test
= alg_test_hash
,
2349 .vecs
= hmac_rmd128_tv_template
,
2350 .count
= HMAC_RMD128_TEST_VECTORS
2354 .alg
= "hmac(rmd160)",
2355 .test
= alg_test_hash
,
2358 .vecs
= hmac_rmd160_tv_template
,
2359 .count
= HMAC_RMD160_TEST_VECTORS
2363 .alg
= "hmac(sha1)",
2364 .test
= alg_test_hash
,
2368 .vecs
= hmac_sha1_tv_template
,
2369 .count
= HMAC_SHA1_TEST_VECTORS
2373 .alg
= "hmac(sha224)",
2374 .test
= alg_test_hash
,
2378 .vecs
= hmac_sha224_tv_template
,
2379 .count
= HMAC_SHA224_TEST_VECTORS
2383 .alg
= "hmac(sha256)",
2384 .test
= alg_test_hash
,
2388 .vecs
= hmac_sha256_tv_template
,
2389 .count
= HMAC_SHA256_TEST_VECTORS
2393 .alg
= "hmac(sha384)",
2394 .test
= alg_test_hash
,
2398 .vecs
= hmac_sha384_tv_template
,
2399 .count
= HMAC_SHA384_TEST_VECTORS
2403 .alg
= "hmac(sha512)",
2404 .test
= alg_test_hash
,
2408 .vecs
= hmac_sha512_tv_template
,
2409 .count
= HMAC_SHA512_TEST_VECTORS
2412 #if !defined(CONFIG_CRYPTO_DEV_AES) && !defined(CONFIG_CRYPTO_ASYNC_AES)
2415 .test
= alg_test_skcipher
,
2419 .vecs
= aes_lrw_enc_tv_template
,
2420 .count
= AES_LRW_ENC_TEST_VECTORS
2423 .vecs
= aes_lrw_dec_tv_template
,
2424 .count
= AES_LRW_DEC_TEST_VECTORS
2431 .test
= alg_test_comp
,
2435 .vecs
= lzo_comp_tv_template
,
2436 .count
= LZO_COMP_TEST_VECTORS
2439 .vecs
= lzo_decomp_tv_template
,
2440 .count
= LZO_DECOMP_TEST_VECTORS
2446 .test
= alg_test_hash
,
2449 .vecs
= md4_tv_template
,
2450 .count
= MD4_TEST_VECTORS
2455 .test
= alg_test_hash
,
2458 .vecs
= md5_tv_template
,
2459 .count
= MD5_TEST_VECTORS
2463 .alg
= "michael_mic",
2464 .test
= alg_test_hash
,
2467 .vecs
= michael_mic_tv_template
,
2468 .count
= MICHAEL_MIC_TEST_VECTORS
2472 .alg
= "pcbc(fcrypt)",
2473 .test
= alg_test_skcipher
,
2477 .vecs
= fcrypt_pcbc_enc_tv_template
,
2478 .count
= FCRYPT_ENC_TEST_VECTORS
2481 .vecs
= fcrypt_pcbc_dec_tv_template
,
2482 .count
= FCRYPT_DEC_TEST_VECTORS
2488 .alg
= "rfc3686(ctr(aes))",
2489 .test
= alg_test_skcipher
,
2494 .vecs
= aes_ctr_rfc3686_enc_tv_template
,
2495 .count
= AES_CTR_3686_ENC_TEST_VECTORS
2498 .vecs
= aes_ctr_rfc3686_dec_tv_template
,
2499 .count
= AES_CTR_3686_DEC_TEST_VECTORS
2504 .alg
= "rfc4309(ccm(aes))",
2505 .test
= alg_test_aead
,
2510 .vecs
= aes_ccm_rfc4309_enc_tv_template
,
2511 .count
= AES_CCM_4309_ENC_TEST_VECTORS
2514 .vecs
= aes_ccm_rfc4309_dec_tv_template
,
2515 .count
= AES_CCM_4309_DEC_TEST_VECTORS
2521 .test
= alg_test_hash
,
2524 .vecs
= rmd128_tv_template
,
2525 .count
= RMD128_TEST_VECTORS
2530 .test
= alg_test_hash
,
2533 .vecs
= rmd160_tv_template
,
2534 .count
= RMD160_TEST_VECTORS
2539 .test
= alg_test_hash
,
2542 .vecs
= rmd256_tv_template
,
2543 .count
= RMD256_TEST_VECTORS
2548 .test
= alg_test_hash
,
2551 .vecs
= rmd320_tv_template
,
2552 .count
= RMD320_TEST_VECTORS
2557 .test
= alg_test_skcipher
,
2561 .vecs
= salsa20_stream_enc_tv_template
,
2562 .count
= SALSA20_STREAM_ENC_TEST_VECTORS
2568 .test
= alg_test_hash
,
2572 .vecs
= sha1_tv_template
,
2573 .count
= SHA1_TEST_VECTORS
2578 .test
= alg_test_hash
,
2582 .vecs
= sha224_tv_template
,
2583 .count
= SHA224_TEST_VECTORS
2588 .test
= alg_test_hash
,
2592 .vecs
= sha256_tv_template
,
2593 .count
= SHA256_TEST_VECTORS
2598 .test
= alg_test_hash
,
2602 .vecs
= sha384_tv_template
,
2603 .count
= SHA384_TEST_VECTORS
2608 .test
= alg_test_hash
,
2612 .vecs
= sha512_tv_template
,
2613 .count
= SHA512_TEST_VECTORS
2618 .test
= alg_test_hash
,
2621 .vecs
= tgr128_tv_template
,
2622 .count
= TGR128_TEST_VECTORS
2627 .test
= alg_test_hash
,
2630 .vecs
= tgr160_tv_template
,
2631 .count
= TGR160_TEST_VECTORS
2636 .test
= alg_test_hash
,
2639 .vecs
= tgr192_tv_template
,
2640 .count
= TGR192_TEST_VECTORS
2645 .test
= alg_test_hash
,
2648 .vecs
= aes_vmac128_tv_template
,
2649 .count
= VMAC_AES_TEST_VECTORS
2654 .test
= alg_test_hash
,
2657 .vecs
= wp256_tv_template
,
2658 .count
= WP256_TEST_VECTORS
2663 .test
= alg_test_hash
,
2666 .vecs
= wp384_tv_template
,
2667 .count
= WP384_TEST_VECTORS
2672 .test
= alg_test_hash
,
2675 .vecs
= wp512_tv_template
,
2676 .count
= WP512_TEST_VECTORS
2681 .test
= alg_test_hash
,
2684 .vecs
= aes_xcbc128_tv_template
,
2685 .count
= XCBC_AES_TEST_VECTORS
2691 .test
= alg_test_skcipher
,
2695 .vecs
= aes_xts_enc_tv_template
,
2696 .count
= AES_XTS_ENC_TEST_VECTORS
2699 .vecs
= aes_xts_dec_tv_template
,
2700 .count
= AES_XTS_DEC_TEST_VECTORS
2707 .test
= alg_test_pcomp
,
2711 .vecs
= zlib_comp_tv_template
,
2712 .count
= ZLIB_COMP_TEST_VECTORS
2715 .vecs
= zlib_decomp_tv_template
,
2716 .count
= ZLIB_DECOMP_TEST_VECTORS
2723 static int alg_find_test(const char *alg
)
2726 int end
= ARRAY_SIZE(alg_test_descs
);
2728 while (start
< end
) {
2729 int i
= (start
+ end
) / 2;
2730 int diff
= strcmp(alg_test_descs
[i
].alg
, alg
);
2748 static int ifx_alg_test(const char *driver
, const char *alg
, u32 type
, u32 mask
)
2754 if ((type
& CRYPTO_ALG_TYPE_MASK
) == CRYPTO_ALG_TYPE_CIPHER
) {
2755 char nalg
[CRYPTO_MAX_ALG_NAME
];
2757 if (snprintf(nalg
, sizeof(nalg
), "ecb(%s)", alg
) >=
2759 return -ENAMETOOLONG
;
2761 i
= alg_find_test(nalg
);
2765 if (fips_enabled
&& !alg_test_descs
[i
].fips_allowed
)
2768 rc
= alg_test_cipher(alg_test_descs
+ i
, driver
, type
, mask
);
2772 i
= alg_find_test(alg
);
2773 j
= alg_find_test(driver
);
2777 if (fips_enabled
&& ((i
>= 0 && !alg_test_descs
[i
].fips_allowed
) ||
2778 (j
>= 0 && !alg_test_descs
[j
].fips_allowed
)))
2783 rc
|= alg_test_descs
[i
].test(alg_test_descs
+ i
, driver
,
2786 rc
|= alg_test_descs
[j
].test(alg_test_descs
+ j
, driver
,
2790 if (fips_enabled
&& rc
)
2791 panic("%s: %s alg self test failed in fips mode!\n", driver
, alg
);
2793 if (fips_enabled
&& !rc
)
2794 printk(KERN_INFO
"alg: self-tests for %s (%s) passed\n",
2800 printk(KERN_INFO
"alg: No test for %s (%s)\n", alg
, driver
);
2805 EXPORT_SYMBOL_GPL(ifx_alg_test
);
2807 /* Modified speed test for async block cipher mode*/
2809 static int ifx_alg_speed_test(const char *driver
, const char *alg
,
2811 struct cipher_speed_template
*template,
2812 unsigned int tcount
, u8
*keysize
)
2817 int type
= 0, mask
= 0;
2818 struct crypto_ablkcipher
*tfm
;
2820 i
= alg_find_test(alg
);
2821 j
= alg_find_test(driver
);
2826 if (fips_enabled
&& ((i
>= 0 && !alg_test_descs
[i
].fips_allowed
) ||
2827 (j
>= 0 && !alg_test_descs
[j
].fips_allowed
)))
2830 tfm
= crypto_alloc_ablkcipher(driver
, type
, mask
);
2833 printk(KERN_ERR
"alg: skcipher: Failed to load transform for "
2834 "%s: %ld\n", driver
, PTR_ERR(tfm
));
2835 return PTR_ERR(tfm
);
2837 err
= test_skcipher_speed(tfm
, ENCRYPT
, template,
2838 tcount
, sec
, keysize
);
2842 err
= test_skcipher_speed(tfm
, DECRYPT
, template,
2843 tcount
, sec
, keysize
);
2853 if (fips_enabled
&& err
)
2854 panic("%s: %s alg self test failed in fips mode!\n", driver
, alg
);
2856 if (fips_enabled
&& !err
)
2857 printk(KERN_INFO
"alg: self-tests for %s (%s) passed\n",
2860 crypto_free_ablkcipher(tfm
);
2863 EXPORT_SYMBOL_GPL(ifx_alg_speed_test
);
2866 static int test_cipher_jiffies(struct blkcipher_desc
*desc
, int enc
,
2867 struct scatterlist
*sg
, int blen
, int sec
)
2869 unsigned long start
, end
;
2873 for (start
= jiffies
, end
= start
+ sec
* HZ
, bcount
= 0;
2874 time_before(jiffies
, end
); bcount
++) {
2876 ret
= crypto_blkcipher_encrypt(desc
, sg
, sg
, blen
);
2878 ret
= crypto_blkcipher_decrypt(desc
, sg
, sg
, blen
);
2884 printk("%d operations in %d seconds (%ld bytes)\n",
2885 bcount
, sec
, (long)bcount
* blen
);
/*
 * Latency benchmark: time one blkcipher operation in CPU cycles.
 *
 * Runs with interrupts disabled; 4 warm-up iterations are discarded,
 * then 8 timed iterations are averaged (rounded).  Cycle counts come
 * from the MIPS CP0 count register because get_cycles() is unusable on
 * this platform (LQ modification).  Returns 0 or the first error.
 */
static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
			      struct scatterlist *sg, int blen)
{
	unsigned long cycles = 0;
	unsigned long start, end;
	int ret = 0;
	int i;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		start = read_c0_count();	/* LQ: get_cycles() broken on MIPS */
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
		end = read_c0_count();		/* LQ: get_cycles() broken on MIPS */

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}
2948 static u32 block_sizes
[] = { 16, 64, 256, 1024, 8192, 0 };
2950 static void test_cipher_speed(const char *algo
, int enc
, unsigned int sec
,
2951 struct cipher_speed_template
*template,
2952 unsigned int tcount
, u8
*keysize
)
2954 unsigned int ret
, i
, j
, iv_len
;
2955 const char *key
, iv
[128];
2956 struct crypto_blkcipher
*tfm
;
2957 struct blkcipher_desc desc
;
2966 printk("\n ******* testing speed of %s %s ******* \n", algo
, e
);
2968 tfm
= crypto_alloc_blkcipher(algo
, 0, CRYPTO_ALG_ASYNC
);
2971 printk("failed to load transform for %s: %ld\n", algo
,
2981 b_size
= block_sizes
;
2983 struct scatterlist sg
[TVMEMSIZE
];
2985 if ((*keysize
+ *b_size
) > TVMEMSIZE
* PAGE_SIZE
) {
2986 printk("template (%u) too big for "
2987 "tvmem (%lu)\n", *keysize
+ *b_size
,
2988 TVMEMSIZE
* PAGE_SIZE
);
2992 printk("test %u (%d bit key, %d byte blocks): ", i
,
2993 *keysize
* 8, *b_size
);
2995 memset(tvmem
[0], 0xff, PAGE_SIZE
);
2997 /* set key, plain text and IV */
2999 for (j
= 0; j
< tcount
; j
++) {
3000 if (template[j
].klen
== *keysize
) {
3001 key
= template[j
].key
;
3006 ret
= crypto_blkcipher_setkey(tfm
, key
, *keysize
);
3008 printk("setkey() failed flags=%x\n",
3009 crypto_blkcipher_get_flags(tfm
));
3013 sg_init_table(sg
, TVMEMSIZE
);
3014 sg_set_buf(sg
, tvmem
[0] + *keysize
,
3015 PAGE_SIZE
- *keysize
);
3016 for (j
= 1; j
< TVMEMSIZE
; j
++) {
3017 sg_set_buf(sg
+ j
, tvmem
[j
], PAGE_SIZE
);
3018 memset (tvmem
[j
], 0xff, PAGE_SIZE
);
3021 iv_len
= crypto_blkcipher_ivsize(tfm
);
3023 memset(&iv
, 0xff, iv_len
);
3024 crypto_blkcipher_set_iv(tfm
, iv
, iv_len
);
3028 ret
= test_cipher_jiffies(&desc
, enc
, sg
,
3031 ret
= test_cipher_cycles(&desc
, enc
, sg
,
3035 printk("%s() failed flags=%x\n", e
, desc
.flags
);
3045 crypto_free_blkcipher(tfm
);
3048 static int test_hash_jiffies_digest(struct hash_desc
*desc
,
3049 struct scatterlist
*sg
, int blen
,
3052 unsigned long start
, end
;
3056 for (start
= jiffies
, end
= start
+ sec
* HZ
, bcount
= 0;
3057 time_before(jiffies
, end
); bcount
++) {
3058 ret
= crypto_hash_digest(desc
, sg
, blen
, out
);
3063 printk("%6u opers/sec, %9lu bytes/sec\n",
3064 bcount
/ sec
, ((long)bcount
* blen
) / sec
);
3069 static int test_hash_jiffies(struct hash_desc
*desc
, struct scatterlist
*sg
,
3070 int blen
, int plen
, char *out
, int sec
)
3072 unsigned long start
, end
;
3077 return test_hash_jiffies_digest(desc
, sg
, blen
, out
, sec
);
3079 for (start
= jiffies
, end
= start
+ sec
* HZ
, bcount
= 0;
3080 time_before(jiffies
, end
); bcount
++) {
3081 ret
= crypto_hash_init(desc
);
3084 for (pcount
= 0; pcount
< blen
; pcount
+= plen
) {
3085 ret
= crypto_hash_update(desc
, sg
, plen
);
3089 /* we assume there is enough space in 'out' for the result */
3090 ret
= crypto_hash_final(desc
, out
);
3095 printk("%6u opers/sec, %9lu bytes/sec\n",
3096 bcount
/ sec
, ((long)bcount
* blen
) / sec
);
/*
 * One-shot digest latency in CPU cycles: 4 discarded warm-up runs, then
 * 8 timed runs averaged.  Interrupts stay disabled for the measurement;
 * timing uses the MIPS CP0 count register (LQ modification).
 */
static int test_hash_cycles_digest(struct hash_desc *desc,
				   struct scatterlist *sg, int blen, char *out)
{
	unsigned long cycles = 0;
	unsigned long start, end;
	int ret = 0;
	int i;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = crypto_hash_digest(desc, sg, blen, out);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		start = read_c0_count();	/* LQ: get_cycles() broken on MIPS */
		ret = crypto_hash_digest(desc, sg, blen, out);
		end = read_c0_count();		/* LQ: get_cycles() broken on MIPS */

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret)
		return ret;

	printk("%6lu cycles/operation, %4lu cycles/byte\n",
	       cycles / 8, cycles / (8 * blen));

	return 0;
}
/*
 * Chunked hash latency in CPU cycles: times a full init/update/final
 * sequence over @blen bytes in @plen chunks.  Delegates to the one-shot
 * digest variant when @plen == @blen.  4 warm-up runs are discarded,
 * 8 timed runs averaged; MIPS CP0 count supplies the cycle counter.
 */
static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
			    int blen, int plen, char *out)
{
	unsigned long cycles = 0;
	unsigned long start, end;
	int i, pcount;
	int ret = 0;

	if (plen == blen)
		return test_hash_cycles_digest(desc, sg, blen, out);

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = crypto_hash_init(desc);
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = crypto_hash_update(desc, sg, plen);
			if (ret)
				goto out;
		}
		ret = crypto_hash_final(desc, out);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		start = read_c0_count();	/* LQ: get_cycles() broken on MIPS */
		ret = crypto_hash_init(desc);
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = crypto_hash_update(desc, sg, plen);
			if (ret)
				goto out;
		}
		ret = crypto_hash_final(desc, out);
		if (ret)
			goto out;
		end = read_c0_count();		/* LQ: get_cycles() broken on MIPS */

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret)
		return ret;

	printk("%6lu cycles/operation, %4lu cycles/byte\n",
	       cycles / 8, cycles / (8 * blen));

	return 0;
}
3228 static void test_hash_speed(const char *algo
, unsigned int sec
,
3229 struct hash_speed
*speed
)
3231 struct scatterlist sg
[TVMEMSIZE
];
3232 struct crypto_hash
*tfm
;
3233 struct hash_desc desc
;
3234 static char output
[1024];
3238 printk(KERN_INFO
"\ntesting speed of %s\n", algo
);
3240 tfm
= crypto_alloc_hash(algo
, 0, CRYPTO_ALG_ASYNC
);
3243 printk(KERN_ERR
"failed to load transform for %s: %ld\n", algo
,
3251 if (crypto_hash_digestsize(tfm
) > sizeof(output
)) {
3252 printk(KERN_ERR
"digestsize(%u) > outputbuffer(%zu)\n",
3253 crypto_hash_digestsize(tfm
), sizeof(output
));
3257 sg_init_table(sg
, TVMEMSIZE
);
3258 for (i
= 0; i
< TVMEMSIZE
; i
++) {
3259 sg_set_buf(sg
+ i
, tvmem
[i
], PAGE_SIZE
);
3260 memset(tvmem
[i
], 0xff, PAGE_SIZE
);
3263 for (i
= 0; speed
[i
].blen
!= 0; i
++) {
3264 if (speed
[i
].blen
> TVMEMSIZE
* PAGE_SIZE
) {
3266 "template (%u) too big for tvmem (%lu)\n",
3267 speed
[i
].blen
, TVMEMSIZE
* PAGE_SIZE
);
3271 printk(KERN_INFO
"test%3u "
3272 "(%5u byte blocks,%5u bytes per update,%4u updates): ",
3273 i
, speed
[i
].blen
, speed
[i
].plen
, speed
[i
].blen
/ speed
[i
].plen
);
3276 ret
= test_hash_jiffies(&desc
, sg
, speed
[i
].blen
,
3277 speed
[i
].plen
, output
, sec
);
3279 ret
= test_hash_cycles(&desc
, sg
, speed
[i
].blen
,
3280 speed
[i
].plen
, output
);
3283 printk(KERN_ERR
"hashing failed ret=%d\n", ret
);
3289 crypto_free_hash(tfm
);
3293 static void test_available(void)
3295 char **name
= check
;
3298 printk("alg %s ", *name
);
3299 printk(crypto_has_alg(*name
, 0, 0) ?
3300 "found\n" : "not found\n");
3305 static inline int tcrypt_test(const char *alg
)
3309 printk("Running test %s\n", alg
);
3310 ret
= ifx_alg_test(alg
, alg
, 0, 0);
3311 /* non-fips algs return -EINVAL in fips mode */
3312 if (fips_enabled
&& ret
== -EINVAL
)
3317 static inline int tcrypt_speedtest(const char *alg
,
3318 struct cipher_speed_template
*template,
3319 unsigned int tcount
, u8
*keysize
)
3323 printk("[****** Running speedtest %s *******]\n", alg
);
3324 ret
= ifx_alg_speed_test(alg
, alg
, sec
, template, tcount
, keysize
);
3325 if (fips_enabled
&& ret
== -EINVAL
)
3331 static int do_test(int m
)
3338 for (i
= 1; i
< 200; i
++)
3343 ret
+= tcrypt_test("md5");
3347 ret
+= tcrypt_test("sha1");
3351 ret
+= tcrypt_test("ecb(des)");
3352 ret
+= tcrypt_test("cbc(des)");
3356 ret
+= tcrypt_test("ecb(des3_ede)");
3357 ret
+= tcrypt_test("cbc(des3_ede)");
3361 ret
+= tcrypt_test("md4");
3365 ret
+= tcrypt_test("sha256");
3369 ret
+= tcrypt_test("ecb(blowfish)");
3370 ret
+= tcrypt_test("cbc(blowfish)");
3374 ret
+= tcrypt_test("ecb(twofish)");
3375 ret
+= tcrypt_test("cbc(twofish)");
3379 ret
+= tcrypt_test("ecb(serpent)");
3383 ret
+= tcrypt_test("ecb(aes)");
3384 ret
+= tcrypt_test("cbc(aes)");
3385 // ret += tcrypt_test("lrw(aes)");
3386 // ret += tcrypt_test("xts(aes)");
3387 ret
+= tcrypt_test("ctr(aes)");
3388 ret
+= tcrypt_test("rfc3686(ctr(aes))");
3392 ret
+= tcrypt_test("sha384");
3396 ret
+= tcrypt_test("sha512");
3400 ret
+= tcrypt_test("deflate");
3404 ret
+= tcrypt_test("ecb(cast5)");
3408 ret
+= tcrypt_test("ecb(cast6)");
3412 ret
+= tcrypt_test("ecb(arc4)");
3416 ret
+= tcrypt_test("michael_mic");
3420 ret
+= tcrypt_test("crc32c");
3424 ret
+= tcrypt_test("ecb(tea)");
3428 ret
+= tcrypt_test("ecb(xtea)");
3432 ret
+= tcrypt_test("ecb(khazad)");
3436 ret
+= tcrypt_test("wp512");
3440 ret
+= tcrypt_test("wp384");
3444 ret
+= tcrypt_test("wp256");
3448 ret
+= tcrypt_test("ecb(tnepres)");
3452 ret
+= tcrypt_test("ecb(anubis)");
3453 ret
+= tcrypt_test("cbc(anubis)");
3457 ret
+= tcrypt_test("tgr192");
3462 ret
+= tcrypt_test("tgr160");
3466 ret
+= tcrypt_test("tgr128");
3470 ret
+= tcrypt_test("ecb(xeta)");
3474 ret
+= tcrypt_test("pcbc(fcrypt)");
3478 ret
+= tcrypt_test("ecb(camellia)");
3479 ret
+= tcrypt_test("cbc(camellia)");
3482 ret
+= tcrypt_test("sha224");
3486 ret
+= tcrypt_test("salsa20");
3490 ret
+= tcrypt_test("gcm(aes)");
3494 ret
+= tcrypt_test("lzo");
3498 ret
+= tcrypt_test("ccm(aes)");
3502 ret
+= tcrypt_test("cts(cbc(aes))");
3506 ret
+= tcrypt_test("rmd128");
3510 ret
+= tcrypt_test("rmd160");
3514 ret
+= tcrypt_test("rmd256");
3518 ret
+= tcrypt_test("rmd320");
3522 ret
+= tcrypt_test("ecb(seed)");
3526 ret
+= tcrypt_test("zlib");
3530 ret
+= tcrypt_test("rfc4309(ccm(aes))");
3534 ret
+= tcrypt_test("hmac(md5)");
3538 ret
+= tcrypt_test("hmac(sha1)");
3542 ret
+= tcrypt_test("hmac(sha256)");
3546 ret
+= tcrypt_test("hmac(sha384)");
3550 ret
+= tcrypt_test("hmac(sha512)");
3554 ret
+= tcrypt_test("hmac(sha224)");
3558 ret
+= tcrypt_test("xcbc(aes)");
3562 ret
+= tcrypt_test("hmac(rmd128)");
3566 ret
+= tcrypt_test("hmac(rmd160)");
3570 ret
+= tcrypt_test("vmac(aes)");
3574 ret
+= tcrypt_test("ansi_cprng");
3578 test_cipher_speed("ecb(aes)", ENCRYPT
, sec
, NULL
, 0,
3579 speed_template_16_24_32
);
3580 test_cipher_speed("ecb(aes)", DECRYPT
, sec
, NULL
, 0,
3581 speed_template_16_24_32
);
3582 test_cipher_speed("cbc(aes)", ENCRYPT
, sec
, NULL
, 0,
3583 speed_template_16_24_32
);
3584 test_cipher_speed("cbc(aes)", DECRYPT
, sec
, NULL
, 0,
3585 speed_template_16_24_32
);
3586 #if !defined(CONFIG_CRYPTO_DEV_AES) && !defined(CONFIG_CRYPTO_ASYNC_AES)
3587 test_cipher_speed("lrw(aes)", ENCRYPT
, sec
, NULL
, 0,
3588 speed_template_32_40_48
);
3589 test_cipher_speed("lrw(aes)", DECRYPT
, sec
, NULL
, 0,
3590 speed_template_32_40_48
);
3591 test_cipher_speed("xts(aes)", ENCRYPT
, sec
, NULL
, 0,
3592 speed_template_32_48_64
);
3593 test_cipher_speed("xts(aes)", DECRYPT
, sec
, NULL
, 0,
3594 speed_template_32_48_64
);
3599 test_cipher_speed("ecb(des3_ede)", ENCRYPT
, sec
,
3600 des3_speed_template
, DES3_SPEED_VECTORS
,
3602 test_cipher_speed("ecb(des3_ede)", DECRYPT
, sec
,
3603 des3_speed_template
, DES3_SPEED_VECTORS
,
3605 test_cipher_speed("cbc(des3_ede)", ENCRYPT
, sec
,
3606 des3_speed_template
, DES3_SPEED_VECTORS
,
3608 test_cipher_speed("cbc(des3_ede)", DECRYPT
, sec
,
3609 des3_speed_template
, DES3_SPEED_VECTORS
,
3614 test_cipher_speed("ecb(twofish)", ENCRYPT
, sec
, NULL
, 0,
3615 speed_template_16_24_32
);
3616 test_cipher_speed("ecb(twofish)", DECRYPT
, sec
, NULL
, 0,
3617 speed_template_16_24_32
);
3618 test_cipher_speed("cbc(twofish)", ENCRYPT
, sec
, NULL
, 0,
3619 speed_template_16_24_32
);
3620 test_cipher_speed("cbc(twofish)", DECRYPT
, sec
, NULL
, 0,
3621 speed_template_16_24_32
);
3625 test_cipher_speed("ecb(blowfish)", ENCRYPT
, sec
, NULL
, 0,
3626 speed_template_8_32
);
3627 test_cipher_speed("ecb(blowfish)", DECRYPT
, sec
, NULL
, 0,
3628 speed_template_8_32
);
3629 test_cipher_speed("cbc(blowfish)", ENCRYPT
, sec
, NULL
, 0,
3630 speed_template_8_32
);
3631 test_cipher_speed("cbc(blowfish)", DECRYPT
, sec
, NULL
, 0,
3632 speed_template_8_32
);
3636 test_cipher_speed("ecb(des)", ENCRYPT
, sec
, NULL
, 0,
3638 test_cipher_speed("ecb(des)", DECRYPT
, sec
, NULL
, 0,
3640 test_cipher_speed("cbc(des)", ENCRYPT
, sec
, NULL
, 0,
3642 test_cipher_speed("cbc(des)", DECRYPT
, sec
, NULL
, 0,
3647 test_cipher_speed("ecb(camellia)", ENCRYPT
, sec
, NULL
, 0,
3648 speed_template_16_24_32
);
3649 test_cipher_speed("ecb(camellia)", DECRYPT
, sec
, NULL
, 0,
3650 speed_template_16_24_32
);
3651 test_cipher_speed("cbc(camellia)", ENCRYPT
, sec
, NULL
, 0,
3652 speed_template_16_24_32
);
3653 test_cipher_speed("cbc(camellia)", DECRYPT
, sec
, NULL
, 0,
3654 speed_template_16_24_32
);
3658 test_cipher_speed("salsa20", ENCRYPT
, sec
, NULL
, 0,
3659 speed_template_16_32
);
3666 test_hash_speed("md4", sec
, generic_hash_speed_template
);
3667 if (mode
> 300 && mode
< 400) break;
3670 test_hash_speed("md5", sec
, generic_hash_speed_template
);
3671 if (mode
> 300 && mode
< 400) break;
3674 test_hash_speed("sha1", sec
, generic_hash_speed_template
);
3675 if (mode
> 300 && mode
< 400) break;
3678 test_hash_speed("sha256", sec
, generic_hash_speed_template
);
3679 if (mode
> 300 && mode
< 400) break;
3682 test_hash_speed("sha384", sec
, generic_hash_speed_template
);
3683 if (mode
> 300 && mode
< 400) break;
3686 test_hash_speed("sha512", sec
, generic_hash_speed_template
);
3687 if (mode
> 300 && mode
< 400) break;
3690 test_hash_speed("wp256", sec
, generic_hash_speed_template
);
3691 if (mode
> 300 && mode
< 400) break;
3694 test_hash_speed("wp384", sec
, generic_hash_speed_template
);
3695 if (mode
> 300 && mode
< 400) break;
3698 test_hash_speed("wp512", sec
, generic_hash_speed_template
);
3699 if (mode
> 300 && mode
< 400) break;
3702 test_hash_speed("tgr128", sec
, generic_hash_speed_template
);
3703 if (mode
> 300 && mode
< 400) break;
3706 test_hash_speed("tgr160", sec
, generic_hash_speed_template
);
3707 if (mode
> 300 && mode
< 400) break;
3710 test_hash_speed("tgr192", sec
, generic_hash_speed_template
);
3711 if (mode
> 300 && mode
< 400) break;
3714 test_hash_speed("sha224", sec
, generic_hash_speed_template
);
3715 if (mode
> 300 && mode
< 400) break;
3718 test_hash_speed("rmd128", sec
, generic_hash_speed_template
);
3719 if (mode
> 300 && mode
< 400) break;
3722 test_hash_speed("rmd160", sec
, generic_hash_speed_template
);
3723 if (mode
> 300 && mode
< 400) break;
3726 test_hash_speed("rmd256", sec
, generic_hash_speed_template
);
3727 if (mode
> 300 && mode
< 400) break;
3730 test_hash_speed("rmd320", sec
, generic_hash_speed_template
);
3731 if (mode
> 300 && mode
< 400) break;
3736 /* Modified speed test for async block cipher mode */
3738 tcrypt_speedtest("ecb(aes)", NULL
, 0,
3739 speed_template_16_24_32
);
3740 tcrypt_speedtest("cbc(aes)", NULL
, 0,
3741 speed_template_16_24_32
);
3745 tcrypt_speedtest("ecb(des3_ede)", des3_speed_template
,
3746 DES3_SPEED_VECTORS
,speed_template_24
);
3747 tcrypt_speedtest("cbc(des3_ede)", des3_speed_template
,
3748 DES3_SPEED_VECTORS
,speed_template_24
);
3752 tcrypt_speedtest("ecb(des)", NULL
, 0,
3754 tcrypt_speedtest("cbc(des)", NULL
, 0,
3765 #if !defined(CONFIG_CRYPTO_DEV_DEU)
3766 static int do_alg_test(const char *alg
, u32 type
, u32 mask
)
3768 return crypto_has_alg(alg
, type
, mask
?: CRYPTO_ALG_TYPE_MASK
) ?
/*
 * tcrypt_mod_init() - module init: run the Lantiq DEU crypto self-tests.
 *
 * Allocates TVMEMSIZE scratch pages, then invokes do_test() once per
 * algorithm enabled in Kconfig (md5, sha1, des/des3, aes, arc4, hmac
 * variants, plus the speed-test modes), logging a KERN_ERR line when a
 * pass fails.
 *
 * NOTE(review): this region was damaged in extraction.  The numeric
 * tokens at the start of many lines (e.g. "3773") are fused original
 * file line numbers, and several real source lines (allocation-failure
 * checks, some `mode =` assignments, `if (err)` guards, braces, #else
 * and #endif lines) were dropped.  Code is preserved byte-for-byte;
 * recover the missing lines from the original ifxmips_tcrypt.c before
 * building.
 */
3773 static int __init
tcrypt_mod_init(void)
3778 printk("Starting Lantiq DEU Crypto TESTS . . . . . . .\n");
/* Allocate TVMEMSIZE scratch pages used as test-vector buffers. */
3780 for (i
= 0; i
< TVMEMSIZE
; i
++) {
3781 tvmem
[i
] = (void *)__get_free_page(GFP_KERNEL
);
/* NOTE(review): the NULL check / error exit for a failed page
 * allocation was lost here -- confirm against the original file. */
/* DEU hardware build: run a known-answer test pass per enabled alg. */
3786 #if defined(CONFIG_CRYPTO_DEV_DEU)
3787 #if defined(CONFIG_CRYPTO_DEV_MD5)
3788 mode
= 1; // test md5 only
3789 err
= do_test(mode
);
/* NOTE(review): the `if (err)` guard before this printk was dropped. */
3795 printk(KERN_ERR
"md5: one or more tests failed!\n");
3799 #if defined(CONFIG_CRYPTO_DEV_SHA1)
3800 mode
= 2; // test sha1 only
3801 err
= do_test(mode
);
3807 printk(KERN_ERR
"sha1: one or more tests failed!\n");
3811 #if defined (CONFIG_CRYPTO_DEV_DES) || defined (CONFIG_CRYPTO_ASYNC_DES)
3812 mode
= 3; // test des only
3813 err
= do_test(mode
);
3817 mode
= 4; // test des3 only
3818 err
= do_test(mode
);
3824 printk(KERN_ERR
"des3: one or more tests failed!\n");
3828 #if defined (CONFIG_CRYPTO_ASYNC_AES) || defined (CONFIG_CRYPTO_DEV_AES)
3829 mode
= 10; // test aes only
3830 err
= do_test(mode
);
3836 printk(KERN_ERR
"aes: one or more tests failed!\n");
3840 #if defined(CONFIG_CRYPTO_DEV_ARC4)
/* NOTE(review): the `mode =` assignment selecting the arc4 test was
 * dropped in extraction -- value unknown from this view. */
3842 err
= do_test(mode
);
3845 printk(KERN_ERR
"arc4: one or more tests failed!\n");
3849 #if defined (CONFIG_CRYPTO_DEV_MD5_HMAC)
/* NOTE(review): the md5-hmac `mode =` assignment was lost as well. */
3851 err
= do_test(mode
);
3854 printk(KERN_ERR
"tcrypt: one or more tests failed!\n");
3858 #if defined (CONFIG_CRYPTO_DEV_SHA1_HMAC)
/* NOTE(review): the sha1-hmac `mode =` assignment was lost as well. */
3860 err
= do_test(mode
);
3863 printk(KERN_ERR
"tcrypt: one or more tests failed!\n");
3868 /* Start Speed tests test modes */
3869 #if defined(CONFIG_CRYPTO_DEV_SPEED_TEST)
3870 #if defined(CONFIG_CRYPTO_DEV_AES)
3872 err
= do_test(mode
);
3876 #if defined (CONFIG_CRYPTO_DEV_DES)
3878 err
= do_test(mode
);
3883 err
= do_test(mode
);
3887 #if defined (CONFIG_CRYPTO_DEV_MD5)
3889 err
= do_test(mode
);
3893 #if defined (CONFIG_CRYPTO_DEV_SHA1)
3895 err
= do_test(mode
);
3899 printk("Speed tests finished successfully\n");
3903 printk(KERN_ERR
"tcrypt: one or more tests failed!\n");
3905 #endif /* CONFIG_CRYPTO_DEV_SPEED_TEST */
/* NOTE(review): do_alg_test() is only defined when CONFIG_CRYPTO_DEV_DEU
 * is NOT set (see the #if guard above its definition), so an `#else`
 * line for the software-only path has evidently been dropped before
 * this call. */
3909 err
= do_alg_test(alg
, type
, mask
);
3911 err
= do_test(mode
);
3914 printk(KERN_ERR
"tcrypt: one or more tests failed!\n");
3917 #endif /* CONFIG_CRYPTO_DEV_DEU */
3920 /* We intentionally return -EAGAIN to prevent keeping the module,
3921 * unless we're running in fips mode. It does all its work from
3922 * init() and doesn't offer any runtime functionality, but in
3923 * the fips case, checking for a successful load is helpful.
3924 * => we don't need it in the memory, do we?
 */
/* Cleanup: free every scratch page that was successfully allocated
 * (presumably the upstream `err_free_tv:` label line was dropped). */
3931 for (i
= 0; i
< TVMEMSIZE
&& tvmem
[i
]; i
++ ){
3932 printk("Freeing page: %d\n", i
);
3933 free_page((unsigned long)tvmem
[i
]);
3936 printk("Finished DEU testing . . . . . .\n");
/*
 * If an init function is provided, an exit function must also be
 * provided to allow module unload.
 */
3944 static void __exit
tcrypt_mod_fini(void) {}
3947 module_init(tcrypt_mod_init
);
3948 module_exit(tcrypt_mod_fini
);
3950 module_param(alg
, charp
, 0);
3951 module_param(type
, uint
, 0);
3952 module_param(mask
, uint
, 0);
3953 module_param(mode
, int, 0);
3954 module_param(sec
, uint
, 0);
3955 MODULE_PARM_DESC(sec
, "Length in seconds of speed tests "
3956 "(defaults to zero which uses CPU cycles instead)");
3958 MODULE_LICENSE("GPL");
3959 MODULE_DESCRIPTION("Quick & dirty crypto testing module");
3960 MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");