2 * Algorithm testing framework and tests.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * This program is free software; you can redistribute it and/or modify it
10 * under the terms of the GNU General Public License as published by the Free
11 * Software Foundation; either version 2 of the License, or (at your option)
16 #include <crypto/hash.h>
17 #include <linux/err.h>
18 #include <linux/module.h>
19 #include <linux/scatterlist.h>
20 #include <linux/slab.h>
21 #include <linux/string.h>
22 #include <crypto/rng.h>
23 #include <linux/jiffies.h>
24 #include <linux/init.h>
25 #include <linux/moduleparam.h>
26 #include <linux/delay.h>
27 #include <linux/types.h>
28 #include <linux/sched.h>
31 #include "ifxmips_testmgr.h"
32 #include "ifxmips_tcrypt.h"
33 #include "ifxmips_deu.h"
35 /* changes for LQ ablkcipher speedtest */
36 #include <linux/timex.h>
37 #include <linux/interrupt.h>
38 #include <asm/mipsregs.h>
41 * Need slab memory for testing (size in number of pages).
46 * Indexes into the xbuf to simulate cross-page access.
58 * Used by test_cipher()
64 * Need slab memory for testing (size in number of pages).
69 * Used by test_cipher_speed()
75 * Used by test_cipher_speed()
/* Duration (seconds) for the timed speed tests; presumably a module
 * parameter — TODO confirm (the moduleparam lines are not visible here). */
79 static unsigned int sec;
/* Name of a single algorithm to test; NULL means run the full suite —
 * presumably a module parameter, verify against the missing param lines. */
81 static char *alg = NULL;
/* Scratch buffers used to hold test vectors during the tests. */
85 static char *tvmem[TVMEMSIZE];
/* NULL-terminated list of algorithm names the self-test knows about. */
87 static char *check[] = {
88 "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
89 "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
90 "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
91 "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
92 "camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
93 "lzo", "cts", "zlib", NULL
/* Shared between the test driver and the async completion callback
 * (tcrypt_complete): the callback stores the status and completes this. */
95 struct tcrypt_result {
96 struct completion completion;
/* Per-algorithm-class test vector containers: each wraps a vector array
 * (count fields are in the elided lines). */
100 struct aead_test_suite {
102 struct aead_testvec *vecs;
107 struct cipher_test_suite {
109 struct cipher_testvec *vecs;
114 struct comp_test_suite {
116 struct comp_testvec *vecs;
121 struct pcomp_test_suite {
123 struct pcomp_testvec *vecs;
128 struct hash_test_suite {
129 struct hash_testvec *vecs;
133 struct cprng_test_suite {
134 struct cprng_testvec *vecs;
/* Descriptor tying an algorithm name to its test function and vectors. */
138 struct alg_test_desc {
140 int (*test)(const struct alg_test_desc *desc, const char *driver,
142 int fips_allowed; /* set if alg is allowed in fips mode */
/* Union-style suite storage; only the member matching ->test is used. */
145 struct aead_test_suite aead;
146 struct cipher_test_suite cipher;
147 struct comp_test_suite comp;
148 struct pcomp_test_suite pcomp;
149 struct hash_test_suite hash;
150 struct cprng_test_suite cprng;
/* Byte offsets into xbuf used to scatter data across page boundaries. */
154 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
/* Dump len bytes of buf to the kernel log for test diagnostics. */
156 static void hexdump(unsigned char *buf, unsigned int len)
158 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
/* Async-request completion callback: stash the status in the waiting
 * tcrypt_result and wake every waiter. */
163 static void tcrypt_complete(struct crypto_async_request *req, int err)
165 struct tcrypt_result *res = req->data;
167 //printk("Signal done test\n");
/* -EINPROGRESS is only a backlog notification, not the final completion;
 * presumably the elided lines return early here — TODO confirm. */
169 if (err == -EINPROGRESS) {
170 printk("********************* Completion didnt go too well **************************** \n");
175 complete_all(&res->completion);
/* Allocate one page per xbuf slot; on failure the elided error path frees
 * the pages obtained so far (see the free_page() below). */
178 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
182 for (i = 0; i < XBUFSIZE; i++) {
183 buf[i] = (void *)__get_free_page(GFP_KERNEL);
192 free_page((unsigned long)buf[i]);
/* Release every page allocated by testmgr_alloc_buf(). */
197 static void testmgr_free_buf(char *buf[XBUFSIZE])
201 for (i = 0; i < XBUFSIZE; i++)
202 free_page((unsigned long)buf[i]);
/* Run the hash test vectors in template[0..tcount) against tfm, first as a
 * single contiguous buffer, then scattered across pages ("chunking").
 * Returns 0 on success, negative errno on the first failure (error paths
 * are in the elided lines). */
205 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
208 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
209 unsigned int i, j, k, temp;
210 struct scatterlist sg[8];
212 struct ahash_request *req;
213 struct tcrypt_result tresult;
215 char *xbuf[XBUFSIZE];
218 if (testmgr_alloc_buf(xbuf))
221 init_completion(&tresult.completion);
223 req = ahash_request_alloc(tfm, GFP_KERNEL);
225 printk(KERN_ERR "alg: hash: Failed to allocate request for "
229 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
230 tcrypt_complete, &tresult);
/* Pass 1: each vector digested from one linear buffer. */
233 for (i = 0; i < tcount; i++) {
238 memset(result, 0, 64);
242 memcpy(hash_buff, template[i].plaintext, template[i].psize);
243 sg_init_one(&sg[0], hash_buff, template[i].psize);
245 if (template[i].ksize) {
246 crypto_ahash_clear_flags(tfm, ~0);
247 ret = crypto_ahash_setkey(tfm, template[i].key,
250 printk(KERN_ERR "alg: hash: setkey failed on "
251 "test %d for %s: ret=%d\n", j, algo,
257 ahash_request_set_crypt(req, sg, result, template[i].psize);
258 ret = crypto_ahash_digest(req);
/* Async path: wait for tcrypt_complete(), then re-arm the completion. */
264 ret = wait_for_completion_interruptible(
265 &tresult.completion);
266 if (!ret && !(ret = tresult.err)) {
267 INIT_COMPLETION(tresult.completion);
272 printk(KERN_ERR "alg: hash: digest failed on test %d "
273 "for %s: ret=%d\n", j, algo, -ret);
277 if (memcmp(result, template[i].digest,
278 crypto_ahash_digestsize(tfm))) {
279 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
281 hexdump(result, crypto_ahash_digestsize(tfm));
286 printk(KERN_ERR "alg: hash: Test %d passed for %s\n",
288 hexdump(result, crypto_ahash_digestsize(tfm));
/* Pass 2: vectors with np > 0 are split into np chunks (tap[] sizes)
 * placed at cross-page offsets from IDX[]. */
293 for (i = 0; i < tcount; i++) {
294 if (template[i].np) {
296 memset(result, 0, 64);
299 sg_init_table(sg, template[i].np);
301 for (k = 0; k < template[i].np; k++) {
302 if (WARN_ON(offset_in_page(IDX[k]) +
303 template[i].tap[k] > PAGE_SIZE))
306 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
307 offset_in_page(IDX[k]),
308 template[i].plaintext + temp,
311 temp += template[i].tap[k];
314 if (template[i].ksize) {
315 crypto_ahash_clear_flags(tfm, ~0);
316 ret = crypto_ahash_setkey(tfm, template[i].key,
320 printk(KERN_ERR "alg: hash: setkey "
321 "failed on chunking test %d "
322 "for %s: ret=%d\n", j, algo,
328 ahash_request_set_crypt(req, sg, result,
330 ret = crypto_ahash_digest(req);
336 ret = wait_for_completion_interruptible(
337 &tresult.completion);
338 if (!ret && !(ret = tresult.err)) {
339 INIT_COMPLETION(tresult.completion);
344 printk(KERN_ERR "alg: hash: digest failed "
345 "on chunking test %d for %s: "
346 "ret=%d\n", j, algo, -ret);
350 if (memcmp(result, template[i].digest,
351 crypto_ahash_digestsize(tfm))) {
352 printk(KERN_ERR "alg: hash: Chunking test %d "
353 "failed for %s\n", j, algo);
354 hexdump(result, crypto_ahash_digestsize(tfm));
359 printk(KERN_ERR "alg: hash: Chunking test %d "
360 "passed for %s\n", j, algo);
361 hexdump(result, crypto_ahash_digestsize(tfm));
369 ahash_request_free(req);
371 testmgr_free_buf(xbuf);
/* Run the AEAD vectors in template[0..tcount) against tfm, in the
 * direction given by enc (ENCRYPT/DECRYPT): first linear buffers, then
 * cross-page chunked scatterlists.  Returns 0 on success, negative errno
 * on the first failure (error paths are in the elided lines). */
376 static int test_aead(struct crypto_aead *tfm, int enc,
377 struct aead_testvec *template, unsigned int tcount)
379 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
380 unsigned int i, j, k, n, temp;
384 struct aead_request *req;
385 struct scatterlist sg[8];
386 struct scatterlist asg[8];
388 struct tcrypt_result result;
389 unsigned int authsize;
393 char *xbuf[XBUFSIZE];
394 char *axbuf[XBUFSIZE];
396 if (testmgr_alloc_buf(xbuf))
398 if (testmgr_alloc_buf(axbuf))
406 init_completion(&result.completion);
408 req = aead_request_alloc(tfm, GFP_KERNEL);
410 printk(KERN_ERR "alg: aead: Failed to allocate request for "
415 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
416 tcrypt_complete, &result);
/* Pass 1: linear (non-chunked) vectors only. */
418 for (i = 0, j = 0; i < tcount; i++) {
419 if (!template[i].np) {
422 /* some templates have no input data but they will
429 if (WARN_ON(template[i].ilen > PAGE_SIZE ||
430 template[i].alen > PAGE_SIZE))
433 memcpy(input, template[i].input, template[i].ilen);
434 memcpy(assoc, template[i].assoc, template[i].alen);
436 memcpy(iv, template[i].iv, MAX_IVLEN);
438 memset(iv, 0, MAX_IVLEN);
440 crypto_aead_clear_flags(tfm, ~0);
442 crypto_aead_set_flags(
443 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
445 key = template[i].key;
447 ret = crypto_aead_setkey(tfm, key,
/* !ret == fail: a vector marked "fail" must make setkey fail, and
 * vice versa; any mismatch is an error. */
449 if (!ret == template[i].fail) {
450 printk(KERN_ERR "alg: aead: setkey failed on "
451 "test %d for %s: flags=%x\n", j, algo,
452 crypto_aead_get_flags(tfm));
/* Tag length is the difference between result and input lengths. */
457 authsize = abs(template[i].rlen - template[i].ilen);
458 ret = crypto_aead_setauthsize(tfm, authsize);
460 printk(KERN_ERR "alg: aead: Failed to set "
461 "authsize to %u on test %d for %s\n",
/* Encryption needs room for the appended auth tag. */
466 sg_init_one(&sg[0], input,
467 template[i].ilen + (enc ? authsize : 0));
469 sg_init_one(&asg[0], assoc, template[i].alen);
471 aead_request_set_crypt(req, sg, sg,
472 template[i].ilen, iv);
474 aead_request_set_assoc(req, asg, template[i].alen);
477 crypto_aead_encrypt(req) :
478 crypto_aead_decrypt(req);
482 if (template[i].novrfy) {
483 /* verification was supposed to fail */
484 printk(KERN_ERR "alg: aead: %s failed "
485 "on test %d for %s: ret was 0, "
486 "expected -EBADMSG\n",
488 /* so really, we got a bad message */
495 ret = wait_for_completion_interruptible(
497 if (!ret && !(ret = result.err)) {
498 INIT_COMPLETION(result.completion);
502 if (template[i].novrfy)
503 /* verification failure was expected */
507 printk(KERN_ERR "alg: aead: %s failed on test "
508 "%d for %s: ret=%d\n", e, j, algo, -ret);
513 if (memcmp(q, template[i].result, template[i].rlen)) {
514 printk(KERN_ERR "alg: aead: Test %d failed on "
515 "%s for %s\n", j, e, algo);
516 hexdump(q, template[i].rlen);
521 printk(KERN_ERR "alg: aead: Test %d passed on "
522 "%s for %s\n", j, e, algo);
523 hexdump(q, template[i].rlen);
/* Pass 2: chunked vectors, payload and assoc data scattered across
 * pages via IDX[] offsets. */
528 for (i = 0, j = 0; i < tcount; i++) {
529 if (template[i].np) {
533 memcpy(iv, template[i].iv, MAX_IVLEN);
535 memset(iv, 0, MAX_IVLEN);
537 crypto_aead_clear_flags(tfm, ~0);
539 crypto_aead_set_flags(
540 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
541 key = template[i].key;
543 ret = crypto_aead_setkey(tfm, key, template[i].klen);
544 if (!ret == template[i].fail) {
545 printk(KERN_ERR "alg: aead: setkey failed on "
546 "chunk test %d for %s: flags=%x\n", j,
547 algo, crypto_aead_get_flags(tfm));
552 authsize = abs(template[i].rlen - template[i].ilen);
555 sg_init_table(sg, template[i].np);
556 for (k = 0, temp = 0; k < template[i].np; k++) {
557 if (WARN_ON(offset_in_page(IDX[k]) +
558 template[i].tap[k] > PAGE_SIZE))
561 q = xbuf[IDX[k] >> PAGE_SHIFT] +
562 offset_in_page(IDX[k]);
564 memcpy(q, template[i].input + temp,
/* Last chunk on encrypt also carries the auth tag. */
567 n = template[i].tap[k];
568 if (k == template[i].np - 1 && enc)
570 if (offset_in_page(q) + n < PAGE_SIZE)
573 sg_set_buf(&sg[k], q, template[i].tap[k]);
574 temp += template[i].tap[k];
577 ret = crypto_aead_setauthsize(tfm, authsize);
579 printk(KERN_ERR "alg: aead: Failed to set "
580 "authsize to %u on chunk test %d for "
581 "%s\n", authsize, j, algo);
/* Grow the final sg entry so the tag fits in-page. */
586 if (WARN_ON(sg[k - 1].offset +
587 sg[k - 1].length + authsize >
593 sg[k - 1].length += authsize;
596 sg_init_table(asg, template[i].anp);
598 for (k = 0, temp = 0; k < template[i].anp; k++) {
599 if (WARN_ON(offset_in_page(IDX[k]) +
600 template[i].atap[k] > PAGE_SIZE))
/* NOTE(review): the "template[i].atap[k])," line below looks like a
 * garbled/duplicated argument from the listing extraction — confirm
 * against the original memcpy() call. */
603 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
604 offset_in_page(IDX[k]),
605 template[i].assoc + temp,
606 template[i].atap[k]),
607 template[i].atap[k]);
608 temp += template[i].atap[k];
611 aead_request_set_crypt(req, sg, sg,
615 aead_request_set_assoc(req, asg, template[i].alen);
618 crypto_aead_encrypt(req) :
619 crypto_aead_decrypt(req);
623 if (template[i].novrfy) {
624 /* verification was supposed to fail */
625 printk(KERN_ERR "alg: aead: %s failed "
626 "on chunk test %d for %s: ret "
627 "was 0, expected -EBADMSG\n",
629 /* so really, we got a bad message */
636 ret = wait_for_completion_interruptible(
638 if (!ret && !(ret = result.err)) {
639 INIT_COMPLETION(result.completion);
643 if (template[i].novrfy)
644 /* verification failure was expected */
648 printk(KERN_ERR "alg: aead: %s failed on "
649 "chunk test %d for %s: ret=%d\n", e, j,
/* Verify each output chunk in place against the expected result. */
655 for (k = 0, temp = 0; k < template[i].np; k++) {
656 q = xbuf[IDX[k] >> PAGE_SHIFT] +
657 offset_in_page(IDX[k]);
659 n = template[i].tap[k];
660 if (k == template[i].np - 1)
661 n += enc ? authsize : -authsize;
663 if (memcmp(q, template[i].result + temp, n)) {
664 printk(KERN_ERR "alg: aead: Chunk "
665 "test %d failed on %s at page "
666 "%u for %s\n", j, e, k, algo);
671 printk(KERN_ERR "alg: aead: Chunk "
672 "test %d passed on %s at page "
673 "%u for %s\n", j, e, k, algo);
678 if (k == template[i].np - 1 && !enc) {
679 if (memcmp(q, template[i].input +
/* Scan the tail of the page for writes past the chunk. */
685 for (n = 0; offset_in_page(q + n) &&
690 printk(KERN_ERR "alg: aead: Result "
691 "buffer corruption in chunk "
692 "test %d on %s at page %u for "
693 "%s: %u bytes:\n", j, e, k,
698 temp += template[i].tap[k];
706 aead_request_free(req);
707 testmgr_free_buf(axbuf);
709 testmgr_free_buf(xbuf);
/* Run the synchronous single-block cipher vectors in template[0..tcount)
 * against tfm, one cipher block at a time, in the direction given by enc.
 * Returns 0 on success, negative errno on the first failure. */
714 static int test_cipher(struct crypto_cipher *tfm, int enc,
715 struct cipher_testvec *template, unsigned int tcount)
717 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
718 unsigned int i, j, k;
722 char *xbuf[XBUFSIZE];
725 if (testmgr_alloc_buf(xbuf))
734 for (i = 0; i < tcount; i++) {
741 if (WARN_ON(template[i].ilen > PAGE_SIZE))
745 memcpy(data, template[i].input, template[i].ilen);
747 crypto_cipher_clear_flags(tfm, ~0);
749 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
751 ret = crypto_cipher_setkey(tfm, template[i].key,
/* !ret == fail: setkey outcome must match the vector's "fail" flag. */
753 if (!ret == template[i].fail) {
754 printk(KERN_ERR "alg: cipher: setkey failed "
755 "on test %d for %s: flags=%x\n", j,
756 algo, crypto_cipher_get_flags(tfm));
/* Process the input in-place one blocksize at a time. */
761 for (k = 0; k < template[i].ilen;
762 k += crypto_cipher_blocksize(tfm)) {
764 crypto_cipher_encrypt_one(tfm, data + k,
767 crypto_cipher_decrypt_one(tfm, data + k,
772 if (memcmp(q, template[i].result, template[i].rlen)) {
773 printk(KERN_ERR "alg: cipher: Test %d failed "
774 "on %s for %s\n", j, e, algo);
775 hexdump(q, template[i].rlen);
780 printk(KERN_ERR "alg: cipher: Test %d passed "
781 "on %s for %s\n", j, e, algo);
782 hexdump(q, template[i].rlen);
789 testmgr_free_buf(xbuf);
/* Run the ablkcipher vectors in template[0..tcount) against tfm in the
 * direction given by enc: first as single linear buffers, then chunked
 * across pages.  Returns 0 on success, negative errno on first failure
 * (error paths are in the elided lines). */
794 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
795 struct cipher_testvec *template, unsigned int tcount)
798 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
799 unsigned int i, j, k, n, temp;
801 struct ablkcipher_request *req;
802 struct scatterlist sg[8];
804 struct tcrypt_result result;
807 char *xbuf[XBUFSIZE];
810 if (testmgr_alloc_buf(xbuf))
818 init_completion(&result.completion);
820 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
822 printk(KERN_ERR "alg: skcipher: Failed to allocate request "
827 //printk("tcount: %u\n", tcount);
829 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
830 tcrypt_complete, &result);
/* Pass 1: linear (non-chunked) vectors. */
833 for (i = 0; i < tcount; i++) {
835 memcpy(iv, template[i].iv, MAX_IVLEN);
837 memset(iv, 0, MAX_IVLEN);
839 if (!(template[i].np)) {
840 //printk("np: %d, i: %d, j: %d\n", template[i].np, i, j);
844 if (WARN_ON(template[i].ilen > PAGE_SIZE))
848 memcpy(data, template[i].input, template[i].ilen);
850 crypto_ablkcipher_clear_flags(tfm, ~0);
852 crypto_ablkcipher_set_flags(
853 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
855 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
/* !ret == fail: setkey outcome must match the vector's flag. */
857 if (!ret == template[i].fail) {
858 printk(KERN_ERR "alg: skcipher: setkey failed "
859 "on test %d for %s: flags=%x\n", j,
860 algo, crypto_ablkcipher_get_flags(tfm));
866 sg_init_one(&sg[0], data, template[i].ilen);
868 ablkcipher_request_set_crypt(req, sg, sg,
869 template[i].ilen, iv);
871 crypto_ablkcipher_encrypt(req) :
872 crypto_ablkcipher_decrypt(req);
/* Async path: wait for tcrypt_complete(), then re-arm. */
879 ret = wait_for_completion_interruptible(
881 if (!ret && !((ret = result.err))) {
882 INIT_COMPLETION(result.completion);
887 printk(KERN_ERR "alg: skcipher: %s failed on "
888 "test %d for %s: ret=%d\n", e, j, algo,
894 if (memcmp(q, template[i].result, template[i].rlen)) {
895 printk(KERN_ERR "alg: skcipher: Test %d "
896 "failed on %s for %s\n", j, e, algo);
897 hexdump(q, template[i].rlen);
903 printk(KERN_ERR "alg: skcipher: Test %d "
904 "*PASSED* on %s for %s\n", j, e, algo);
905 hexdump(q, template[i].rlen);
910 printk("Testing %s chunking across pages.\n", algo);
/* Pass 2: chunked vectors scattered at IDX[] cross-page offsets. */
912 for (i = 0; i < tcount; i++) {
914 memcpy(iv, template[i].iv, MAX_IVLEN);
916 memset(iv, 0, MAX_IVLEN);
918 if (template[i].np) {
921 crypto_ablkcipher_clear_flags(tfm, ~0);
923 crypto_ablkcipher_set_flags(
924 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
926 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
928 if (!ret == template[i].fail) {
929 printk(KERN_ERR "alg: skcipher: setkey failed "
930 "on chunk test %d for %s: flags=%x\n",
932 crypto_ablkcipher_get_flags(tfm));
940 sg_init_table(sg, template[i].np);
941 for (k = 0; k < template[i].np; k++) {
942 if (WARN_ON(offset_in_page(IDX[k]) +
943 template[i].tap[k] > PAGE_SIZE))
946 q = xbuf[IDX[k] >> PAGE_SHIFT] +
947 offset_in_page(IDX[k]);
949 memcpy(q, template[i].input + temp,
/* Zero sentinel byte after the chunk to detect overruns later. */
952 if (offset_in_page(q) + template[i].tap[k] <
954 q[template[i].tap[k]] = 0;
956 sg_set_buf(&sg[k], q, template[i].tap[k]);
958 temp += template[i].tap[k];
961 ablkcipher_request_set_crypt(req, sg, sg,
962 template[i].ilen, iv);
965 crypto_ablkcipher_encrypt(req) :
966 crypto_ablkcipher_decrypt(req);
973 ret = wait_for_completion_interruptible(
975 if (!ret && !((ret = result.err))) {
976 INIT_COMPLETION(result.completion);
981 printk(KERN_ERR "alg: skcipher: %s failed on "
982 "chunk test %d for %s: ret=%d\n", e, j,
/* Verify each chunk in place, then check the sentinel region. */
990 for (k = 0; k < template[i].np; k++) {
991 q = xbuf[IDX[k] >> PAGE_SHIFT] +
992 offset_in_page(IDX[k]);
994 if (memcmp(q, template[i].result + temp,
995 template[i].tap[k])) {
996 printk(KERN_ERR "alg: skcipher: Chunk "
997 "test %d failed on %s at page "
998 "%u for %s\n", j, e, k, algo);
999 hexdump(q, template[i].tap[k]);
1004 printk(KERN_ERR "alg: skcipher: Chunk "
1005 "test %d *PASSED* on %s at page "
1006 "%u for %s\n", j, e, k, algo);
1007 hexdump(q, template[i].tap[k]);
1011 q += template[i].tap[k];
1012 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1016 printk(KERN_ERR "alg: skcipher: "
1017 "Result buffer corruption in "
1018 "chunk test %d on %s at page "
1019 "%u for %s: %u bytes:\n", j, e,
1026 printk(KERN_ERR "alg: skcipher: "
1027 "Result buffer clean in "
1028 "chunk test %d on %s at page "
1029 "%u for %s: %u bytes:\n", j, e,
1032 printk("Chunk Buffer clean\n");
1035 temp += template[i].tap[k];
1042 ablkcipher_request_free(req);
1043 testmgr_free_buf(xbuf);
/* Run the synchronous compression vectors (ctemplate, ctcount) and then
 * the decompression vectors (dtemplate, dtcount) against tfm, checking
 * both the output length and content.  Returns 0 on success. */
1048 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1049 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1051 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1053 char result[COMP_BUF_SIZE];
1056 for (i = 0; i < ctcount; i++) {
1058 unsigned int dlen = COMP_BUF_SIZE;
1060 memset(result, 0, sizeof (result));
1062 ilen = ctemplate[i].inlen;
1063 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1064 ilen, result, &dlen);
1066 printk(KERN_ERR "alg: comp: compression failed "
1067 "on test %d for %s: ret=%d\n", i + 1, algo,
/* dlen was updated by crypto_comp_compress() to the produced size. */
1072 if (dlen != ctemplate[i].outlen) {
1073 printk(KERN_ERR "alg: comp: Compression test %d "
1074 "failed for %s: output len = %d\n", i + 1, algo,
1080 if (memcmp(result, ctemplate[i].output, dlen)) {
1081 printk(KERN_ERR "alg: comp: Compression test %d "
1082 "failed for %s\n", i + 1, algo);
1083 hexdump(result, dlen);
1088 printk(KERN_ERR "alg: comp: Compression test %d "
1089 "passed for %s\n", i + 1, algo);
1090 hexdump(result, dlen);
1094 for (i = 0; i < dtcount; i++) {
1096 unsigned int dlen = COMP_BUF_SIZE;
1098 memset(result, 0, sizeof (result));
1100 ilen = dtemplate[i].inlen;
1101 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1102 ilen, result, &dlen);
1104 printk(KERN_ERR "alg: comp: decompression failed "
1105 "on test %d for %s: ret=%d\n", i + 1, algo,
1110 if (dlen != dtemplate[i].outlen) {
1111 printk(KERN_ERR "alg: comp: Decompression test %d "
1112 "failed for %s: output len = %d\n", i + 1, algo,
1118 if (memcmp(result, dtemplate[i].output, dlen)) {
1119 printk(KERN_ERR "alg: comp: Decompression test %d "
1120 "failed for %s\n", i + 1, algo);
1121 hexdump(result, dlen);
1126 printk(KERN_ERR "alg: comp: Decompression test %d "
1127 "passed for %s\n", i + 1, algo);
1128 hexdump(result, dlen);
/* Run the partial-(de)compression vectors against tfm using the streaming
 * setup/init/update/final API, feeding input and output space in two
 * halves to exercise the incremental paths.  Returns 0 on success. */
1138 static int test_pcomp(struct crypto_pcomp *tfm,
1139 struct pcomp_testvec *ctemplate,
1140 struct pcomp_testvec *dtemplate, int ctcount,
1143 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1145 char result[COMP_BUF_SIZE];
1148 for (i = 0; i < ctcount; i++) {
1149 struct comp_request req;
/* NOTE(review): produced is initialised to 0 and, in the visible lines,
 * never accumulated before being compared below — the accumulation is
 * presumably in the elided lines; confirm against the original. */
1150 unsigned int produced = 0;
1152 res = crypto_compress_setup(tfm, ctemplate[i].params,
1153 ctemplate[i].paramsize);
1155 pr_err("alg: pcomp: compression setup failed on test "
1156 "%d for %s: error=%d\n", i + 1, algo, res);
1160 res = crypto_compress_init(tfm);
1162 pr_err("alg: pcomp: compression init failed on test "
1163 "%d for %s: error=%d\n", i + 1, algo, res);
1167 memset(result, 0, sizeof(result));
/* Feed only half the input and half the output space first. */
1169 req.next_in = ctemplate[i].input;
1170 req.avail_in = ctemplate[i].inlen / 2;
1171 req.next_out = result;
1172 req.avail_out = ctemplate[i].outlen / 2;
1174 res = crypto_compress_update(tfm, &req);
1175 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1176 pr_err("alg: pcomp: compression update failed on test "
1177 "%d for %s: error=%d\n", i + 1, algo, res);
1183 /* Add remaining input data */
1184 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1186 res = crypto_compress_update(tfm, &req);
1187 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1188 pr_err("alg: pcomp: compression update failed on test "
1189 "%d for %s: error=%d\n", i + 1, algo, res);
1195 /* Provide remaining output space */
1196 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1198 res = crypto_compress_final(tfm, &req);
1200 pr_err("alg: pcomp: compression final failed on test "
1201 "%d for %s: error=%d\n", i + 1, algo, res);
1206 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1207 pr_err("alg: comp: Compression test %d failed for %s: "
1208 "output len = %d (expected %d)\n", i + 1, algo,
1209 COMP_BUF_SIZE - req.avail_out,
1210 ctemplate[i].outlen);
1214 if (produced != ctemplate[i].outlen) {
1215 pr_err("alg: comp: Compression test %d failed for %s: "
1216 "returned len = %u (expected %d)\n", i + 1,
1217 algo, produced, ctemplate[i].outlen);
1221 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1222 pr_err("alg: pcomp: Compression test %d failed for "
1223 "%s\n", i + 1, algo);
1224 hexdump(result, ctemplate[i].outlen);
/* Decompression: same two-phase feeding pattern. */
1229 for (i = 0; i < dtcount; i++) {
1230 struct comp_request req;
1231 unsigned int produced = 0;
1233 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1234 dtemplate[i].paramsize);
1236 pr_err("alg: pcomp: decompression setup failed on "
1237 "test %d for %s: error=%d\n", i + 1, algo, res);
1241 res = crypto_decompress_init(tfm);
1243 pr_err("alg: pcomp: decompression init failed on test "
1244 "%d for %s: error=%d\n", i + 1, algo, res);
1248 memset(result, 0, sizeof(result));
1250 req.next_in = dtemplate[i].input;
1251 req.avail_in = dtemplate[i].inlen / 2;
1252 req.next_out = result;
1253 req.avail_out = dtemplate[i].outlen / 2;
1255 res = crypto_decompress_update(tfm, &req);
1256 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1257 pr_err("alg: pcomp: decompression update failed on "
1258 "test %d for %s: error=%d\n", i + 1, algo, res);
1264 /* Add remaining input data */
1265 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1267 res = crypto_decompress_update(tfm, &req);
1268 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1269 pr_err("alg: pcomp: decompression update failed on "
1270 "test %d for %s: error=%d\n", i + 1, algo, res);
1276 /* Provide remaining output space */
1277 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1279 res = crypto_decompress_final(tfm, &req);
1280 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1281 pr_err("alg: pcomp: decompression final failed on "
1282 "test %d for %s: error=%d\n", i + 1, algo, res);
1288 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1289 pr_err("alg: comp: Decompression test %d failed for "
1290 "%s: output len = %d (expected %d)\n", i + 1,
1291 algo, COMP_BUF_SIZE - req.avail_out,
1292 dtemplate[i].outlen);
1296 if (produced != dtemplate[i].outlen) {
1297 pr_err("alg: comp: Decompression test %d failed for "
1298 "%s: returned len = %u (expected %d)\n", i + 1,
1299 algo, produced, dtemplate[i].outlen);
1303 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1304 pr_err("alg: pcomp: Decompression test %d failed for "
1305 "%s\n", i + 1, algo);
1306 hexdump(result, dtemplate[i].outlen);
/* Throughput measurement: run enc/dec requests of blen bytes back-to-back
 * for `sec` seconds of wall time (jiffies based) and report the count. */
1314 static int test_ablkcipher_jiffies(struct ablkcipher_request *req, int enc,
1315 int sec, struct tcrypt_result *result,
1318 unsigned long start, end;
1322 for (start = jiffies, end = start + sec * HZ, bcount = 0;
1323 time_before(jiffies, end); bcount++) {
1326 ret = crypto_ablkcipher_encrypt(req);
1328 ret = crypto_ablkcipher_decrypt(req);
/* Async path: wait for completion and re-arm before the next round. */
1335 ret = wait_for_completion_interruptible(
1336 &result->completion);
1337 if (!ret && !((ret = result->err))) {
1338 INIT_COMPLETION(result->completion);
1347 printk("%d operations in %d seconds (%ld bytes)\n",
1348 bcount, sec, (long)bcount * blen);
/* Latency measurement: 4 untimed warm-up operations, then 8 timed ones
 * using the MIPS CP0 count register; prints the average cycles per
 * operation for blen-byte requests. */
1353 static int test_ablkcipher_cycles(struct ablkcipher_request *req, int enc,
1354 int sec, struct tcrypt_result *result,
1357 unsigned long cycles = 0;
1360 unsigned long start, end = 0;
1361 //local_bh_disable();
1362 //local_irq_disable();
/* Warm-up rounds (not counted). */
1364 for (i = 0; i < 4; i++) {
1366 ret = crypto_ablkcipher_encrypt(req);
1368 ret = crypto_ablkcipher_decrypt(req);
1376 ret = wait_for_completion_interruptible(
1377 &result->completion);
1378 if (!ret && !((ret = result->err))) {
1379 INIT_COMPLETION(result->completion);
1384 wait_for_completion(&result->completion);
1385 INIT_COMPLETION(result->completion);
1393 if (signal_pending(current)) {
1394 printk("Signal caught\n");
1400 //printk("Debug ln: (%d), fn: %s\n", __LINE__, __func__);
1401 /* The real thing. */
1402 for (i = 0; i < 8; i++) {
/* Timestamp with the CP0 cycle counter around each operation. */
1405 start = read_c0_count();
1407 ret = crypto_ablkcipher_encrypt(req);
1409 ret = crypto_ablkcipher_decrypt(req);
1417 ret = wait_for_completion_interruptible(
1418 &result->completion);
1420 if (!ret && !((ret = result->err))) {
1421 INIT_COMPLETION(result->completion);
1425 wait_for_completion(&result->completion);
1426 end = read_c0_count();
1427 INIT_COMPLETION(result->completion);
1435 if (signal_pending(current)) {
1436 printk("Signal caught\n");
1440 cycles += end - start;
1443 // local_irq_enable();
1444 // local_bh_enable();
/* (cycles + 4) / 8: rounded average over the 8 timed rounds. */
1446 printk("1 operation in %lu cycles (%d bytes)\n",
1447 (cycles + 4) / 8, blen);
/* Request sizes (bytes) exercised by the speed test; zero-terminated. */
1453 static u32 b_size[] = {16, 64, 256, 1024, 8192, 0};
/* Speed-test driver: for each (keysize, block_size) combination, key the
 * tfm from the matching speed template, build a 4-page scatterlist of
 * 0xff bytes, and time enc/dec either by jiffies (sec != 0) or by cycle
 * count.  Error/cleanup paths are in the elided lines. */
1455 static int test_skcipher_speed(struct crypto_ablkcipher *tfm, int enc,
1456 struct cipher_speed_template *template,
1457 unsigned int tcount, unsigned int sec,
1461 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
1463 unsigned int i = 0, j, iv_len;
1464 struct ablkcipher_request *req;
1465 //struct scatterlist sg[8];
1467 struct tcrypt_result result;
1469 static char *xbuf[XBUFSIZE];
1472 static char *tvmem_buf[4];
1475 if (testmgr_alloc_buf(xbuf))
1483 init_completion(&result.completion);
1485 printk("Start ablkcipher speed test\n");
1487 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
1489 printk(KERN_ERR "alg: skcipher: Failed to allocate request "
1494 // ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1495 ablkcipher_request_set_callback(req, 0,
1496 tcrypt_complete, &result);
1500 block_size = b_size;
1503 struct scatterlist sg[4];
1504 if ((*keysize + *block_size) > 4 * PAGE_SIZE) {
1505 printk("template (%u) too big for "
1506 "tvmem_buf (%lu)\n", *keysize + *block_size,
1510 crypto_ablkcipher_clear_flags(tfm, ~0);
1512 printk("test %u (%d bit key, %d byte blocks): ", i,
1513 *keysize * 8, *block_size);
1515 memset(tvmem_buf[0], 0xff, PAGE_SIZE);
/* Pick the template key that matches the current keysize. */
1518 for (j = 0; j < tcount; j++) {
1519 if (template[j].klen == *keysize) {
1520 key = template[j].key;
1524 ret = crypto_ablkcipher_setkey(tfm, key, *keysize);
1526 printk("Error setting of keys\n");
/* Four pages of 0xff as both source and destination. */
1530 sg_init_table(sg, 4);
1532 for (j = 0; j < 4; j++) {
1533 tvmem_buf[j] = xbuf[j];
1534 memset(tvmem_buf[j], 0xff, PAGE_SIZE);
1535 sg_set_buf(sg + j, tvmem_buf[j], PAGE_SIZE);
1538 iv_len = crypto_ablkcipher_ivsize(tfm);
1540 memset(&iv, 0xff, iv_len);
1543 ablkcipher_request_set_crypt(req, sg, sg,
1546 //printk("Debug ln: %d, %s\n", __LINE__, __func__);
/* sec != 0 selects wall-clock timing, otherwise cycle counting. */
1548 ret = test_ablkcipher_jiffies(req, enc, sec,
1549 &result, *block_size);
1551 ret = test_ablkcipher_cycles(req, enc, sec,
1552 &result, *block_size);
1556 printk(KERN_ERR "alg: skcipher: %s failed on "
1557 "test %d for %s: ret=%d\n", e, j, algo,
1564 } while (*block_size);
1570 printk("End ablkcipher speed test\n");
1571 ablkcipher_request_free(req);
1572 testmgr_free_buf(xbuf);
/* NOTE(review): `result` is declared above as a plain struct, yet it is
 * dereferenced here with ->; this looks like a bug (should presumably be
 * result.completion) — confirm against the elided lines. */
1574 if (!completion_done(&result->completion)) {
1575 printk("There are threads waiting for completion, completing all\n");
1576 complete_all(&result->completion);
1580 //testmgr_free_buf(tvbuf);
/* Run the deterministic-RNG vectors in template[0..tcount): seed the rng
 * with V || key || DT, then pull rlen bytes `loops` times and compare to
 * the expected output.  Returns 0 on success, negative errno otherwise. */
1586 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1587 unsigned int tcount)
1589 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1590 int err = 0, i, j, seedsize;
1594 seedsize = crypto_rng_seedsize(tfm);
1596 seed = kmalloc(seedsize, GFP_KERNEL);
1598 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1603 for (i = 0; i < tcount; i++) {
1604 memset(result, 0, 32);
/* Seed layout: V, then key, then DT, back to back. */
1606 memcpy(seed, template[i].v, template[i].vlen);
1607 memcpy(seed + template[i].vlen, template[i].key,
1609 memcpy(seed + template[i].vlen + template[i].klen,
1610 template[i].dt, template[i].dtlen);
1612 err = crypto_rng_reset(tfm, seed, seedsize);
1614 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1619 for (j = 0; j < template[i].loops; j++) {
1620 err = crypto_rng_get_bytes(tfm, result,
/* crypto_rng_get_bytes() returns the byte count on success. */
1622 if (err != template[i].rlen) {
1623 printk(KERN_ERR "alg: cprng: Failed to obtain "
1624 "the correct amount of random data for "
1625 "%s (requested %d, got %d)\n", algo,
1626 template[i].rlen, err);
1631 err = memcmp(result, template[i].result,
1634 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1636 hexdump(result, template[i].rlen);
/* alg_test_desc hook: allocate the AEAD transform and run the encrypt
 * vectors, then (if encryption passed) the decrypt vectors. */
1647 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1650 struct crypto_aead *tfm;
1653 tfm = crypto_alloc_aead(driver, type, mask);
1655 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1656 "%ld\n", driver, PTR_ERR(tfm));
1657 return PTR_ERR(tfm);
1660 if (desc->suite.aead.enc.vecs) {
1661 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1662 desc->suite.aead.enc.count);
1667 if (!err && desc->suite.aead.dec.vecs)
1668 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1669 desc->suite.aead.dec.count);
1672 crypto_free_aead(tfm);
/* alg_test_desc hook: allocate a synchronous cipher transform and run the
 * encrypt then decrypt vector suites against it. */
1676 static int alg_test_cipher(const struct alg_test_desc *desc,
1677 const char *driver, u32 type, u32 mask)
1679 struct crypto_cipher *tfm;
1682 tfm = crypto_alloc_cipher(driver, type, mask);
1684 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1685 "%s: %ld\n", driver, PTR_ERR(tfm));
1686 return PTR_ERR(tfm);
1689 if (desc->suite.cipher.enc.vecs) {
1690 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1691 desc->suite.cipher.enc.count);
1696 if (desc->suite.cipher.dec.vecs)
1697 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1698 desc->suite.cipher.dec.count);
1701 crypto_free_cipher(tfm);
/* alg_test_desc hook: allocate an ablkcipher transform and run the
 * encrypt then decrypt vector suites against it. */
1705 static int alg_test_skcipher(const struct alg_test_desc *desc,
1706 const char *driver, u32 type, u32 mask)
1708 struct crypto_ablkcipher *tfm;
1711 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1713 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1714 "%s: %ld\n", driver, PTR_ERR(tfm));
1715 return PTR_ERR(tfm);
1718 if (desc->suite.cipher.enc.vecs) {
1719 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1720 desc->suite.cipher.enc.count);
1725 if (desc->suite.cipher.dec.vecs)
1726 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1727 desc->suite.cipher.dec.count);
1730 crypto_free_ablkcipher(tfm);
/* alg_test_desc hook: allocate a compression transform and run both the
 * compress and decompress vector suites. */
1734 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1737 struct crypto_comp *tfm;
1740 tfm = crypto_alloc_comp(driver, type, mask);
1742 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1743 "%ld\n", driver, PTR_ERR(tfm));
1744 return PTR_ERR(tfm);
1747 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1748 desc->suite.comp.decomp.vecs,
1749 desc->suite.comp.comp.count,
1750 desc->suite.comp.decomp.count);
1752 crypto_free_comp(tfm);
/*
 * alg_test_pcomp() - run partial-(streaming-)compression vectors for
 * @driver through the crypto_pcomp interface (used by "zlib").
 */
1756 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1759 struct crypto_pcomp *tfm;
1762 tfm = crypto_alloc_pcomp(driver, type, mask);
1764 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1765 driver, PTR_ERR(tfm));
1766 return PTR_ERR(tfm);
1769 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1770 desc->suite.pcomp.decomp.vecs,
1771 desc->suite.pcomp.comp.count,
1772 desc->suite.pcomp.decomp.count);
1774 crypto_free_pcomp(tfm);
/*
 * alg_test_hash() - run digest test vectors for @driver through the
 * asynchronous ahash interface.
 */
1778 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1781 struct crypto_ahash *tfm;
1784 tfm = crypto_alloc_ahash(driver, type, mask);
1786 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1787 "%ld\n", driver, PTR_ERR(tfm));
1788 return PTR_ERR(tfm);
1791 err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count);
1793 crypto_free_ahash(tfm);
/*
 * alg_test_crc32c() - crc32c gets the generic hash vectors plus an
 * extra sanity check: seed the shash context with a magic constant and
 * verify that final() produces its bitwise complement (crc32c outputs
 * the inverted internal state).
 * NOTE(review): 420553207 is the upstream magic seed; ctx[] is a VLA
 * sized from crypto_shash_descsize(tfm) — presumably members of an
 * elided anonymous "sdesc" struct declaration. Confirm against the
 * missing lines.
 */
1797 static int alg_test_crc32c(const struct alg_test_desc *desc,
1798 const char *driver, u32 type, u32 mask)
1800 struct crypto_shash *tfm;
/* First run the ordinary hash vectors. */
1804 err = alg_test_hash(desc, driver, type, mask);
1808 tfm = crypto_alloc_shash(driver, type, mask);
1810 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1811 "%ld\n", driver, PTR_ERR(tfm));
1818 struct shash_desc shash;
1819 char ctx[crypto_shash_descsize(tfm)];
1822 sdesc.shash.tfm = tfm;
1823 sdesc.shash.flags = 0;
/* Seed the crc state directly, then finalize and compare. */
1825 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1826 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1828 printk(KERN_ERR "alg: crc32c: Operation failed for "
1829 "%s: %d\n", driver, err);
1833 if (val != ~420553207) {
1834 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1835 "%d\n", driver, val);
1840 crypto_free_shash(tfm);
/*
 * alg_test_cprng() - run deterministic-RNG test vectors for @driver
 * (used by "ansi_cprng").
 */
1846 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1849 struct crypto_rng *rng;
1852 rng = crypto_alloc_rng(driver, type, mask);
1854 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1855 "%ld\n", driver, PTR_ERR(rng));
1856 return PTR_ERR(rng);
1859 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1861 crypto_free_rng(rng);
/*
 * alg_test_descs[] - table mapping algorithm names to their test
 * routine and test-vector suites. alg_find_test() binary-searches this
 * table, so it MUST stay sorted by .alg.
 * NOTE(review): this is an elided view — the per-entry braces and many
 * .alg lines are missing; only representative fields are visible.
 */
1866 /* Please keep this list sorted by algorithm name. */
1867 static const struct alg_test_desc alg_test_descs[] = {
1869 .alg = "ansi_cprng",
1870 .test = alg_test_cprng,
1874 .vecs = ansi_cprng_aes_tv_template,
1875 .count = ANSI_CPRNG_AES_TEST_VECTORS
1880 .test = alg_test_skcipher,
1885 .vecs = aes_cbc_enc_tv_template,
1886 .count = AES_CBC_ENC_TEST_VECTORS
1889 .vecs = aes_cbc_dec_tv_template,
1890 .count = AES_CBC_DEC_TEST_VECTORS
1895 .alg = "cbc(anubis)",
1896 .test = alg_test_skcipher,
1900 .vecs = anubis_cbc_enc_tv_template,
1901 .count = ANUBIS_CBC_ENC_TEST_VECTORS
1904 .vecs = anubis_cbc_dec_tv_template,
1905 .count = ANUBIS_CBC_DEC_TEST_VECTORS
1910 .alg = "cbc(blowfish)",
1911 .test = alg_test_skcipher,
1915 .vecs = bf_cbc_enc_tv_template,
1916 .count = BF_CBC_ENC_TEST_VECTORS
1919 .vecs = bf_cbc_dec_tv_template,
1920 .count = BF_CBC_DEC_TEST_VECTORS
1925 .alg = "cbc(camellia)",
1926 .test = alg_test_skcipher,
1930 .vecs = camellia_cbc_enc_tv_template,
1931 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
1934 .vecs = camellia_cbc_dec_tv_template,
1935 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
1941 .test = alg_test_skcipher,
1945 .vecs = des_cbc_enc_tv_template,
1946 .count = DES_CBC_ENC_TEST_VECTORS
1949 .vecs = des_cbc_dec_tv_template,
1950 .count = DES_CBC_DEC_TEST_VECTORS
1955 .alg = "cbc(des3_ede)",
1956 .test = alg_test_skcipher,
1961 .vecs = des3_ede_cbc_enc_tv_template,
1962 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
1965 .vecs = des3_ede_cbc_dec_tv_template,
1966 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
1971 .alg = "cbc(twofish)",
1972 .test = alg_test_skcipher,
1976 .vecs = tf_cbc_enc_tv_template,
1977 .count = TF_CBC_ENC_TEST_VECTORS
1980 .vecs = tf_cbc_dec_tv_template,
1981 .count = TF_CBC_DEC_TEST_VECTORS
/* AEAD suites (ccm/gcm) use the aead tester rather than skcipher. */
1987 .test = alg_test_aead,
1992 .vecs = aes_ccm_enc_tv_template,
1993 .count = AES_CCM_ENC_TEST_VECTORS
1996 .vecs = aes_ccm_dec_tv_template,
1997 .count = AES_CCM_DEC_TEST_VECTORS
2003 .test = alg_test_crc32c,
2007 .vecs = crc32c_tv_template,
2008 .count = CRC32C_TEST_VECTORS
2013 .test = alg_test_skcipher,
2018 .vecs = aes_ctr_enc_tv_template,
2019 .count = AES_CTR_ENC_TEST_VECTORS
2022 .vecs = aes_ctr_dec_tv_template,
2023 .count = AES_CTR_DEC_TEST_VECTORS
2028 .alg = "cts(cbc(aes))",
2029 .test = alg_test_skcipher,
2033 .vecs = cts_mode_enc_tv_template,
2034 .count = CTS_MODE_ENC_TEST_VECTORS
2037 .vecs = cts_mode_dec_tv_template,
2038 .count = CTS_MODE_DEC_TEST_VECTORS
2044 .test = alg_test_comp,
2048 .vecs = deflate_comp_tv_template,
2049 .count = DEFLATE_COMP_TEST_VECTORS
2052 .vecs = deflate_decomp_tv_template,
2053 .count = DEFLATE_DECOMP_TEST_VECTORS
/* ECB-mode entries for each supported block cipher follow. */
2059 .test = alg_test_skcipher,
2064 .vecs = aes_enc_tv_template,
2065 .count = AES_ENC_TEST_VECTORS
2068 .vecs = aes_dec_tv_template,
2069 .count = AES_DEC_TEST_VECTORS
2074 .alg = "ecb(anubis)",
2075 .test = alg_test_skcipher,
2079 .vecs = anubis_enc_tv_template,
2080 .count = ANUBIS_ENC_TEST_VECTORS
2083 .vecs = anubis_dec_tv_template,
2084 .count = ANUBIS_DEC_TEST_VECTORS
2090 .test = alg_test_skcipher,
2094 .vecs = arc4_enc_tv_template,
2095 .count = ARC4_ENC_TEST_VECTORS
2098 .vecs = arc4_dec_tv_template,
2099 .count = ARC4_DEC_TEST_VECTORS
2104 .alg = "ecb(blowfish)",
2105 .test = alg_test_skcipher,
2109 .vecs = bf_enc_tv_template,
2110 .count = BF_ENC_TEST_VECTORS
2113 .vecs = bf_dec_tv_template,
2114 .count = BF_DEC_TEST_VECTORS
2119 .alg = "ecb(camellia)",
2120 .test = alg_test_skcipher,
2124 .vecs = camellia_enc_tv_template,
2125 .count = CAMELLIA_ENC_TEST_VECTORS
2128 .vecs = camellia_dec_tv_template,
2129 .count = CAMELLIA_DEC_TEST_VECTORS
2134 .alg = "ecb(cast5)",
2135 .test = alg_test_skcipher,
2139 .vecs = cast5_enc_tv_template,
2140 .count = CAST5_ENC_TEST_VECTORS
2143 .vecs = cast5_dec_tv_template,
2144 .count = CAST5_DEC_TEST_VECTORS
2149 .alg = "ecb(cast6)",
2150 .test = alg_test_skcipher,
2154 .vecs = cast6_enc_tv_template,
2155 .count = CAST6_ENC_TEST_VECTORS
2158 .vecs = cast6_dec_tv_template,
2159 .count = CAST6_DEC_TEST_VECTORS
2165 .test = alg_test_skcipher,
2170 .vecs = des_enc_tv_template,
2171 .count = DES_ENC_TEST_VECTORS
2174 .vecs = des_dec_tv_template,
2175 .count = DES_DEC_TEST_VECTORS
2180 .alg = "ecb(des3_ede)",
2181 .test = alg_test_skcipher,
2186 .vecs = des3_ede_enc_tv_template,
2187 .count = DES3_EDE_ENC_TEST_VECTORS
2190 .vecs = des3_ede_dec_tv_template,
2191 .count = DES3_EDE_DEC_TEST_VECTORS
2196 .alg = "ecb(khazad)",
2197 .test = alg_test_skcipher,
2201 .vecs = khazad_enc_tv_template,
2202 .count = KHAZAD_ENC_TEST_VECTORS
2205 .vecs = khazad_dec_tv_template,
2206 .count = KHAZAD_DEC_TEST_VECTORS
2212 .test = alg_test_skcipher,
2216 .vecs = seed_enc_tv_template,
2217 .count = SEED_ENC_TEST_VECTORS
2220 .vecs = seed_dec_tv_template,
2221 .count = SEED_DEC_TEST_VECTORS
2226 .alg = "ecb(serpent)",
2227 .test = alg_test_skcipher,
2231 .vecs = serpent_enc_tv_template,
2232 .count = SERPENT_ENC_TEST_VECTORS
2235 .vecs = serpent_dec_tv_template,
2236 .count = SERPENT_DEC_TEST_VECTORS
2242 .test = alg_test_skcipher,
2246 .vecs = tea_enc_tv_template,
2247 .count = TEA_ENC_TEST_VECTORS
2250 .vecs = tea_dec_tv_template,
2251 .count = TEA_DEC_TEST_VECTORS
2256 .alg = "ecb(tnepres)",
2257 .test = alg_test_skcipher,
2261 .vecs = tnepres_enc_tv_template,
2262 .count = TNEPRES_ENC_TEST_VECTORS
2265 .vecs = tnepres_dec_tv_template,
2266 .count = TNEPRES_DEC_TEST_VECTORS
2271 .alg = "ecb(twofish)",
2272 .test = alg_test_skcipher,
2276 .vecs = tf_enc_tv_template,
2277 .count = TF_ENC_TEST_VECTORS
2280 .vecs = tf_dec_tv_template,
2281 .count = TF_DEC_TEST_VECTORS
2287 .test = alg_test_skcipher,
2291 .vecs = xeta_enc_tv_template,
2292 .count = XETA_ENC_TEST_VECTORS
2295 .vecs = xeta_dec_tv_template,
2296 .count = XETA_DEC_TEST_VECTORS
2302 .test = alg_test_skcipher,
2306 .vecs = xtea_enc_tv_template,
2307 .count = XTEA_ENC_TEST_VECTORS
2310 .vecs = xtea_dec_tv_template,
2311 .count = XTEA_DEC_TEST_VECTORS
2317 .test = alg_test_aead,
2322 .vecs = aes_gcm_enc_tv_template,
2323 .count = AES_GCM_ENC_TEST_VECTORS
2326 .vecs = aes_gcm_dec_tv_template,
2327 .count = AES_GCM_DEC_TEST_VECTORS
/* HMAC variants share the plain hash tester. */
2333 .test = alg_test_hash,
2336 .vecs = hmac_md5_tv_template,
2337 .count = HMAC_MD5_TEST_VECTORS
2341 .alg = "hmac(rmd128)",
2342 .test = alg_test_hash,
2345 .vecs = hmac_rmd128_tv_template,
2346 .count = HMAC_RMD128_TEST_VECTORS
2350 .alg = "hmac(rmd160)",
2351 .test = alg_test_hash,
2354 .vecs = hmac_rmd160_tv_template,
2355 .count = HMAC_RMD160_TEST_VECTORS
2359 .alg = "hmac(sha1)",
2360 .test = alg_test_hash,
2364 .vecs = hmac_sha1_tv_template,
2365 .count = HMAC_SHA1_TEST_VECTORS
2369 .alg = "hmac(sha224)",
2370 .test = alg_test_hash,
2374 .vecs = hmac_sha224_tv_template,
2375 .count = HMAC_SHA224_TEST_VECTORS
2379 .alg = "hmac(sha256)",
2380 .test = alg_test_hash,
2384 .vecs = hmac_sha256_tv_template,
2385 .count = HMAC_SHA256_TEST_VECTORS
2389 .alg = "hmac(sha384)",
2390 .test = alg_test_hash,
2394 .vecs = hmac_sha384_tv_template,
2395 .count = HMAC_SHA384_TEST_VECTORS
2399 .alg = "hmac(sha512)",
2400 .test = alg_test_hash,
2404 .vecs = hmac_sha512_tv_template,
2405 .count = HMAC_SHA512_TEST_VECTORS
/* lrw(aes) vectors only registered when no hardware AES is built in. */
2408 #if !defined(CONFIG_CRYPTO_DEV_AES) && !defined(CONFIG_CRYPTO_ASYNC_AES)
2411 .test = alg_test_skcipher,
2415 .vecs = aes_lrw_enc_tv_template,
2416 .count = AES_LRW_ENC_TEST_VECTORS
2419 .vecs = aes_lrw_dec_tv_template,
2420 .count = AES_LRW_DEC_TEST_VECTORS
2427 .test = alg_test_comp,
2431 .vecs = lzo_comp_tv_template,
2432 .count = LZO_COMP_TEST_VECTORS
2435 .vecs = lzo_decomp_tv_template,
2436 .count = LZO_DECOMP_TEST_VECTORS
2442 .test = alg_test_hash,
2445 .vecs = md4_tv_template,
2446 .count = MD4_TEST_VECTORS
2451 .test = alg_test_hash,
2454 .vecs = md5_tv_template,
2455 .count = MD5_TEST_VECTORS
2459 .alg = "michael_mic",
2460 .test = alg_test_hash,
2463 .vecs = michael_mic_tv_template,
2464 .count = MICHAEL_MIC_TEST_VECTORS
2468 .alg = "pcbc(fcrypt)",
2469 .test = alg_test_skcipher,
2473 .vecs = fcrypt_pcbc_enc_tv_template,
2474 .count = FCRYPT_ENC_TEST_VECTORS
2477 .vecs = fcrypt_pcbc_dec_tv_template,
2478 .count = FCRYPT_DEC_TEST_VECTORS
2484 .alg = "rfc3686(ctr(aes))",
2485 .test = alg_test_skcipher,
2490 .vecs = aes_ctr_rfc3686_enc_tv_template,
2491 .count = AES_CTR_3686_ENC_TEST_VECTORS
2494 .vecs = aes_ctr_rfc3686_dec_tv_template,
2495 .count = AES_CTR_3686_DEC_TEST_VECTORS
2500 .alg = "rfc4309(ccm(aes))",
2501 .test = alg_test_aead,
2506 .vecs = aes_ccm_rfc4309_enc_tv_template,
2507 .count = AES_CCM_4309_ENC_TEST_VECTORS
2510 .vecs = aes_ccm_rfc4309_dec_tv_template,
2511 .count = AES_CCM_4309_DEC_TEST_VECTORS
2517 .test = alg_test_hash,
2520 .vecs = rmd128_tv_template,
2521 .count = RMD128_TEST_VECTORS
2526 .test = alg_test_hash,
2529 .vecs = rmd160_tv_template,
2530 .count = RMD160_TEST_VECTORS
2535 .test = alg_test_hash,
2538 .vecs = rmd256_tv_template,
2539 .count = RMD256_TEST_VECTORS
2544 .test = alg_test_hash,
2547 .vecs = rmd320_tv_template,
2548 .count = RMD320_TEST_VECTORS
2553 .test = alg_test_skcipher,
2557 .vecs = salsa20_stream_enc_tv_template,
2558 .count = SALSA20_STREAM_ENC_TEST_VECTORS
2564 .test = alg_test_hash,
2568 .vecs = sha1_tv_template,
2569 .count = SHA1_TEST_VECTORS
2574 .test = alg_test_hash,
2578 .vecs = sha224_tv_template,
2579 .count = SHA224_TEST_VECTORS
2584 .test = alg_test_hash,
2588 .vecs = sha256_tv_template,
2589 .count = SHA256_TEST_VECTORS
2594 .test = alg_test_hash,
2598 .vecs = sha384_tv_template,
2599 .count = SHA384_TEST_VECTORS
2604 .test = alg_test_hash,
2608 .vecs = sha512_tv_template,
2609 .count = SHA512_TEST_VECTORS
2614 .test = alg_test_hash,
2617 .vecs = tgr128_tv_template,
2618 .count = TGR128_TEST_VECTORS
2623 .test = alg_test_hash,
2626 .vecs = tgr160_tv_template,
2627 .count = TGR160_TEST_VECTORS
2632 .test = alg_test_hash,
2635 .vecs = tgr192_tv_template,
2636 .count = TGR192_TEST_VECTORS
2641 .test = alg_test_hash,
2644 .vecs = aes_vmac128_tv_template,
2645 .count = VMAC_AES_TEST_VECTORS
2650 .test = alg_test_hash,
2653 .vecs = wp256_tv_template,
2654 .count = WP256_TEST_VECTORS
2659 .test = alg_test_hash,
2662 .vecs = wp384_tv_template,
2663 .count = WP384_TEST_VECTORS
2668 .test = alg_test_hash,
2671 .vecs = wp512_tv_template,
2672 .count = WP512_TEST_VECTORS
2677 .test = alg_test_hash,
2680 .vecs = aes_xcbc128_tv_template,
2681 .count = XCBC_AES_TEST_VECTORS
2687 .test = alg_test_skcipher,
2691 .vecs = aes_xts_enc_tv_template,
2692 .count = AES_XTS_ENC_TEST_VECTORS
2695 .vecs = aes_xts_dec_tv_template,
2696 .count = AES_XTS_DEC_TEST_VECTORS
2703 .test = alg_test_pcomp,
2707 .vecs = zlib_comp_tv_template,
2708 .count = ZLIB_COMP_TEST_VECTORS
2711 .vecs = zlib_decomp_tv_template,
2712 .count = ZLIB_DECOMP_TEST_VECTORS
/*
 * alg_find_test() - binary search alg_test_descs[] (sorted by .alg) for
 * @alg. The search-loop tail (start/end adjustment and the not-found
 * return) is elided in this view.
 */
2719 static int alg_find_test(const char *alg)
2722 int end = ARRAY_SIZE(alg_test_descs);
2724 while (start < end) {
2725 int i = (start + end) / 2;
2726 int diff = strcmp(alg_test_descs[i].alg, alg);
/*
 * ifx_alg_test() - self-test entry point: run the registered tests for
 * @alg as implemented by @driver. Plain CIPHER-type algorithms are
 * looked up under "ecb(<alg>)" and routed to alg_test_cipher();
 * otherwise both the generic alg name and the driver name are looked up
 * and their testers OR-ed into rc. In FIPS mode any failure panics.
 */
2744 static int ifx_alg_test(const char *driver, const char *alg, u32 type, u32 mask)
2750 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
2751 char nalg[CRYPTO_MAX_ALG_NAME];
/* Wrap the bare cipher name as "ecb(alg)" for the table lookup. */
2753 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
2755 return -ENAMETOOLONG;
2757 i = alg_find_test(nalg);
2761 if (fips_enabled && !alg_test_descs[i].fips_allowed)
2764 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
2768 i = alg_find_test(alg);
2769 j = alg_find_test(driver);
2773 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
2774 (j >= 0 && !alg_test_descs[j].fips_allowed)))
/* Run whichever of the two lookups matched; accumulate failures. */
2779 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
2782 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
2786 if (fips_enabled && rc)
2787 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
2789 if (fips_enabled && !rc)
2790 printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
2796 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
2803 /* Modified speed test for async block cipher mode*/
/*
 * ifx_alg_speed_test() - Lantiq addition: time @driver through the
 * ablkcipher interface in both directions using test_skcipher_speed(),
 * for each key size in @keysize. Module parameter "sec" selects
 * jiffies-based timing (sec > 0) vs. cycle counting (sec == 0),
 * presumably inside test_skcipher_speed() — confirm there.
 */
2805 static int ifx_alg_speed_test(const char *driver, const char *alg,
2807 struct cipher_speed_template *template,
2808 unsigned int tcount, u8 *keysize)
2813 int type = 0, mask = 0;
2814 struct crypto_ablkcipher *tfm;
2816 i = alg_find_test(alg);
2817 j = alg_find_test(driver);
2822 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
2823 (j >= 0 && !alg_test_descs[j].fips_allowed)))
2826 tfm = crypto_alloc_ablkcipher(driver, type, mask)
2829 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
2830 "%s: %ld\n", driver, PTR_ERR(tfm));
2831 return PTR_ERR(tfm);
/* Time encryption, then decryption, with the same template. */
2833 err = test_skcipher_speed(tfm, ENCRYPT, template,
2834 tcount, sec, keysize);
2838 err = test_skcipher_speed(tfm, DECRYPT, template,
2839 tcount, sec, keysize);
2849 if (fips_enabled && err)
2850 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
2852 if (fips_enabled && !err)
2853 printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
2856 crypto_free_ablkcipher(tfm);
/*
 * test_cipher_jiffies() - throughput benchmark: run enc/dec on @sg for
 * @sec seconds of wall time and report operations and bytes processed.
 */
2862 static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
2863 struct scatterlist *sg, int blen, int sec)
2865 unsigned long start, end;
2869 for (start = jiffies, end = start + sec * HZ, bcount = 0;
2870 time_before(jiffies, end); bcount++) {
2872 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
2874 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
2880 printk("%d operations in %d seconds (%ld bytes)\n",
2881 bcount, sec, (long)bcount * blen);
/*
 * test_cipher_cycles() - latency benchmark: 4 warm-up operations, then
 * 8 timed operations with IRQs disabled, reporting the rounded average.
 * The MIPS CP0 Count register (read_c0_count) replaces get_cycles(),
 * which the original comment says does not work on this MIPS target.
 */
2885 static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
2886 struct scatterlist *sg, int blen)
2888 unsigned long cycles = 0;
2889 unsigned long start, end;
2894 local_irq_disable();
/* Warm-up: prime caches/branch predictors before timing. */
2897 for (i = 0; i < 4; i++) {
2899 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
2901 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
2907 /* The real thing. */
2908 for (i = 0; i < 8; i++) {
2909 /* Original code to get cycles, does not work with MIPS
2910 * cycles_t start, end;
2911 * start = get_cycles();
2914 start = read_c0_count(); // LQ modified tcrypt
2917 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
2919 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
2921 /* Original code to get cycles, does not work with MIPS
2922 * end = get_cycles();
2925 end = read_c0_count(); //LQ modified tcrypt
2930 cycles += end - start;
/* +4 rounds to nearest when dividing the 8-iteration total. */
2938 printk("1 operation in %lu cycles (%d bytes)\n",
2939 (cycles + 4) / 8, blen);
/* Buffer sizes (bytes) iterated by the cipher speed tests; 0 terminates. */
2944 static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
/*
 * test_cipher_speed() - benchmark @algo (sync blkcipher path) across
 * every key size in @keysize and every entry of block_sizes[], timing
 * with jiffies (sec > 0) or cycle counts (sec == 0).
 * NOTE(review): "const char *key, iv[128];" const-qualifies iv[] as
 * well, yet it is memset() below — a const-correctness quirk inherited
 * from tcrypt; confirm whether the compiler warns here.
 */
2946 static void test_cipher_speed(const char *algo, int enc, unsigned int sec,
2947 struct cipher_speed_template *template,
2948 unsigned int tcount, u8 *keysize)
2950 unsigned int ret, i, j, iv_len;
2951 const char *key, iv[128];
2952 struct crypto_blkcipher *tfm;
2953 struct blkcipher_desc desc;
2962 printk("\n ******* testing speed of %s %s ******* \n", algo, e);
2964 tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);
2967 printk("failed to load transform for %s: %ld\n", algo,
2977 b_size = block_sizes;
2979 struct scatterlist sg[TVMEMSIZE];
/* Key + data must fit the tvmem page pool. */
2981 if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
2982 printk("template (%u) too big for "
2983 "tvmem (%lu)\n", *keysize + *b_size,
2984 TVMEMSIZE * PAGE_SIZE);
2988 printk("test %u (%d bit key, %d byte blocks): ", i,
2989 *keysize * 8, *b_size);
2991 memset(tvmem[0], 0xff, PAGE_SIZE);
2993 /* set key, plain text and IV */
/* Prefer a template key matching this keysize; else the 0xff fill. */
2995 for (j = 0; j < tcount; j++) {
2996 if (template[j].klen == *keysize) {
2997 key = template[j].key;
3002 ret = crypto_blkcipher_setkey(tfm, key, *keysize);
3004 printk("setkey() failed flags=%x\n",
3005 crypto_blkcipher_get_flags(tfm));
/* First sg entry skips the key bytes stashed at the page start. */
3009 sg_init_table(sg, TVMEMSIZE);
3010 sg_set_buf(sg, tvmem[0] + *keysize,
3011 PAGE_SIZE - *keysize);
3012 for (j = 1; j < TVMEMSIZE; j++) {
3013 sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
3014 memset (tvmem[j], 0xff, PAGE_SIZE);
3017 iv_len = crypto_blkcipher_ivsize(tfm);
3019 memset(&iv, 0xff, iv_len);
3020 crypto_blkcipher_set_iv(tfm, iv, iv_len);
/* sec selects wall-clock vs. cycle-count measurement. */
3024 ret = test_cipher_jiffies(&desc, enc, sg,
3027 ret = test_cipher_cycles(&desc, enc, sg,
3031 printk("%s() failed flags=%x\n", e, desc.flags);
3041 crypto_free_blkcipher(tfm);
/*
 * test_hash_jiffies_digest() - one-shot digest throughput: loop
 * crypto_hash_digest() for @sec seconds and report ops/sec and
 * bytes/sec.
 */
3044 static int test_hash_jiffies_digest(struct hash_desc *desc,
3045 struct scatterlist *sg, int blen,
3048 unsigned long start, end;
3052 for (start = jiffies, end = start + sec * HZ, bcount = 0;
3053 time_before(jiffies, end); bcount++) {
3054 ret = crypto_hash_digest(desc, sg, blen, out);
3059 printk("%6u opers/sec, %9lu bytes/sec\n",
3060 bcount / sec, ((long)bcount * blen) / sec);
/*
 * test_hash_jiffies() - chunked-update throughput: init/update(plen at
 * a time)/final for @sec seconds. Falls back to the one-shot digest
 * variant (elided condition, presumably blen == plen).
 */
3065 static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
3066 int blen, int plen, char *out, int sec)
3068 unsigned long start, end;
3073 return test_hash_jiffies_digest(desc, sg, blen, out, sec);
3075 for (start = jiffies, end = start + sec * HZ, bcount = 0;
3076 time_before(jiffies, end); bcount++) {
3077 ret = crypto_hash_init(desc);
3080 for (pcount = 0; pcount < blen; pcount += plen) {
3081 ret = crypto_hash_update(desc, sg, plen);
3085 /* we assume there is enough space in 'out' for the result */
3086 ret = crypto_hash_final(desc, out);
3091 printk("%6u opers/sec, %9lu bytes/sec\n",
3092 bcount / sec, ((long)bcount * blen) / sec);
/*
 * test_hash_cycles_digest() - one-shot digest latency: 4 warm-up runs
 * then 8 timed runs with IRQs off, using the MIPS CP0 Count register
 * in place of get_cycles() (per the retained original comments).
 */
3097 static int test_hash_cycles_digest(struct hash_desc *desc,
3098 struct scatterlist *sg, int blen, char *out)
3100 unsigned long cycles = 0;
3101 unsigned long start, end;
3106 local_irq_disable();
/* Warm-up iterations, untimed. */
3109 for (i = 0; i < 4; i++) {
3110 ret = crypto_hash_digest(desc, sg, blen, out);
3115 /* The real thing. */
3116 for (i = 0; i < 8; i++) {
3118 /* Original code to get cycles, does not work with MIPS
3119 * cycles_t start, end;
3120 * start = get_cycles();
3123 start = read_c0_count(); // LQ modified tcrypt
3125 ret = crypto_hash_digest(desc, sg, blen, out);
3129 /* Original code to get cycles, does not work with MIPS
3130 * end = get_cycles();
3133 end = read_c0_count(); // LQ modified tcrypt
3135 cycles += end - start;
3145 printk("%6lu cycles/operation, %4lu cycles/byte\n",
3146 cycles / 8, cycles / (8 * blen));
/*
 * test_hash_cycles() - chunked-update latency benchmark: same 4+8
 * warm-up/timed structure as test_hash_cycles_digest(), but exercising
 * the init/update/final sequence with @plen-sized updates. Falls back
 * to the digest variant (elided condition, presumably blen == plen).
 */
3151 static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
3152 int blen, int plen, char *out)
3154 unsigned long cycles = 0;
3155 unsigned long start, end;
3160 return test_hash_cycles_digest(desc, sg, blen, out);
3163 local_irq_disable();
/* Warm-up iterations, untimed. */
3166 for (i = 0; i < 4; i++) {
3167 ret = crypto_hash_init(desc);
3170 for (pcount = 0; pcount < blen; pcount += plen) {
3171 ret = crypto_hash_update(desc, sg, plen);
3175 ret = crypto_hash_final(desc, out);
3180 /* The real thing. */
3181 for (i = 0; i < 8; i++) {
3183 /* Original code for getting cycles, not working for MIPS
3184 * cycle_t start, end;
3185 * end = get_cycles();
3188 start = read_c0_count(); // LQ modified tcrypt
3190 ret = crypto_hash_init(desc);
3193 for (pcount = 0; pcount < blen; pcount += plen) {
3194 ret = crypto_hash_update(desc, sg, plen);
3198 ret = crypto_hash_final(desc, out);
3202 /* Original code for getting cycles, not working for MIPS
3203 * end = get_cycles();
3206 end = read_c0_count(); // LQ modified tcrypt
3208 cycles += end - start;
3218 printk("%6lu cycles/operation, %4lu cycles/byte\n",
3219 cycles / 8, cycles / (8 * blen));
/*
 * test_hash_speed() - benchmark hash @algo over the entries of @speed
 * (block length / update length pairs), using the tvmem page pool as
 * 0xff-filled input. sec > 0 selects jiffies timing, else cycle counts.
 */
3224 static void test_hash_speed(const char *algo, unsigned int sec,
3225 struct hash_speed *speed)
3227 struct scatterlist sg[TVMEMSIZE];
3228 struct crypto_hash *tfm;
3229 struct hash_desc desc;
/* Static digest buffer; size checked against digestsize below. */
3230 static char output[1024];
3234 printk(KERN_INFO "\ntesting speed of %s\n", algo);
3236 tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);
3239 printk(KERN_ERR "failed to load transform for %s: %ld\n", algo,
3247 if (crypto_hash_digestsize(tfm) > sizeof(output)) {
3248 printk(KERN_ERR "digestsize(%u) > outputbuffer(%zu)\n",
3249 crypto_hash_digestsize(tfm), sizeof(output));
3253 sg_init_table(sg, TVMEMSIZE);
3254 for (i = 0; i < TVMEMSIZE; i++) {
3255 sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
3256 memset(tvmem[i], 0xff, PAGE_SIZE);
/* speed[] is terminated by a zero blen entry. */
3259 for (i = 0; speed[i].blen != 0; i++) {
3260 if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
3262 "template (%u) too big for tvmem (%lu)\n",
3263 speed[i].blen, TVMEMSIZE * PAGE_SIZE);
3267 printk(KERN_INFO "test%3u "
3268 "(%5u byte blocks,%5u bytes per update,%4u updates): ",
3269 i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
3272 ret = test_hash_jiffies(&desc, sg, speed[i].blen,
3273 speed[i].plen, output, sec);
3275 ret = test_hash_cycles(&desc, sg, speed[i].blen,
3276 speed[i].plen, output);
3279 printk(KERN_ERR "hashing failed ret=%d\n", ret);
3285 crypto_free_hash(tfm);
/*
 * test_available() - walk the NULL-terminated check[] name list and
 * report whether each algorithm is registered with the crypto API.
 */
3289 static void test_available(void)
3291 char **name = check;
3294 printk("alg %s ", *name);
3295 printk(crypto_has_alg(*name, 0, 0) ?
3296 "found\n" : "not found\n");
/*
 * tcrypt_test() - run the self-test for @alg via ifx_alg_test(); in
 * FIPS mode an -EINVAL from a non-approved algorithm is tolerated
 * (remapped — the remap statement is elided here).
 */
3301 static inline int tcrypt_test(const char *alg)
3305 printk("Running test %s\n", alg);
3306 ret = ifx_alg_test(alg, alg, 0, 0);
3307 /* non-fips algs return -EINVAL in fips mode */
3308 if (fips_enabled && ret == -EINVAL)
/*
 * tcrypt_speedtest() - wrapper around ifx_alg_speed_test() using the
 * module-parameter "sec" for timing mode; same FIPS -EINVAL tolerance
 * as tcrypt_test().
 */
3313 static inline int tcrypt_speedtest(const char *alg,
3314 struct cipher_speed_template *template,
3315 unsigned int tcount, u8 *keysize)
3319 printk("[****** Running speedtest %s *******]\n", alg);
3320 ret = ifx_alg_speed_test(alg, alg, sec, template, tcount, keysize);
3321 if (fips_enabled && ret == -EINVAL)
/*
 * do_test() - dispatch on test mode @m. The case labels are elided in
 * this view; by the visible grouping, low modes run correctness tests
 * (mode 0 apparently recurses over 1..199), 200-range modes run
 * blkcipher speed tests, 300-range modes run hash speed tests, and the
 * final group runs the Lantiq async-cipher speed tests.
 */
3327 static int do_test(int m)
/* Mode 0: run every individual test mode. */
3334 for (i = 1; i < 200; i++)
3339 ret += tcrypt_test("md5");
3343 ret += tcrypt_test("sha1");
3347 ret += tcrypt_test("ecb(des)");
3348 ret += tcrypt_test("cbc(des)");
3352 ret += tcrypt_test("ecb(des3_ede)");
3353 ret += tcrypt_test("cbc(des3_ede)");
3357 ret += tcrypt_test("md4");
3361 ret += tcrypt_test("sha256");
3365 ret += tcrypt_test("ecb(blowfish)");
3366 ret += tcrypt_test("cbc(blowfish)");
3370 ret += tcrypt_test("ecb(twofish)");
3371 ret += tcrypt_test("cbc(twofish)");
3375 ret += tcrypt_test("ecb(serpent)");
3379 ret += tcrypt_test("ecb(aes)");
3380 ret += tcrypt_test("cbc(aes)");
3381 // ret += tcrypt_test("lrw(aes)");
3382 // ret += tcrypt_test("xts(aes)");
3383 ret += tcrypt_test("ctr(aes)");
3384 ret += tcrypt_test("rfc3686(ctr(aes))");
3388 ret += tcrypt_test("sha384");
3392 ret += tcrypt_test("sha512");
3396 ret += tcrypt_test("deflate");
3400 ret += tcrypt_test("ecb(cast5)");
3404 ret += tcrypt_test("ecb(cast6)");
3408 ret += tcrypt_test("ecb(arc4)");
3412 ret += tcrypt_test("michael_mic");
3416 ret += tcrypt_test("crc32c");
3420 ret += tcrypt_test("ecb(tea)");
3424 ret += tcrypt_test("ecb(xtea)");
3428 ret += tcrypt_test("ecb(khazad)");
3432 ret += tcrypt_test("wp512");
3436 ret += tcrypt_test("wp384");
3440 ret += tcrypt_test("wp256");
3444 ret += tcrypt_test("ecb(tnepres)");
3448 ret += tcrypt_test("ecb(anubis)");
3449 ret += tcrypt_test("cbc(anubis)");
3453 ret += tcrypt_test("tgr192");
3458 ret += tcrypt_test("tgr160");
3462 ret += tcrypt_test("tgr128");
3466 ret += tcrypt_test("ecb(xeta)");
3470 ret += tcrypt_test("pcbc(fcrypt)");
3474 ret += tcrypt_test("ecb(camellia)");
3475 ret += tcrypt_test("cbc(camellia)");
3478 ret += tcrypt_test("sha224");
3482 ret += tcrypt_test("salsa20");
3486 ret += tcrypt_test("gcm(aes)");
3490 ret += tcrypt_test("lzo");
3494 ret += tcrypt_test("ccm(aes)");
3498 ret += tcrypt_test("cts(cbc(aes))");
3502 ret += tcrypt_test("rmd128");
3506 ret += tcrypt_test("rmd160");
3510 ret += tcrypt_test("rmd256");
3514 ret += tcrypt_test("rmd320");
3518 ret += tcrypt_test("ecb(seed)");
3522 ret += tcrypt_test("zlib");
3526 ret += tcrypt_test("rfc4309(ccm(aes))");
3530 ret += tcrypt_test("hmac(md5)");
3534 ret += tcrypt_test("hmac(sha1)");
3538 ret += tcrypt_test("hmac(sha256)");
3542 ret += tcrypt_test("hmac(sha384)");
3546 ret += tcrypt_test("hmac(sha512)");
3550 ret += tcrypt_test("hmac(sha224)");
3554 ret += tcrypt_test("xcbc(aes)");
3558 ret += tcrypt_test("hmac(rmd128)");
3562 ret += tcrypt_test("hmac(rmd160)");
3566 ret += tcrypt_test("vmac(aes)");
3570 ret += tcrypt_test("ansi_cprng");
/* --- blkcipher speed-test modes (200 range) --- */
3574 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
3575 speed_template_16_24_32);
3576 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
3577 speed_template_16_24_32);
3578 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
3579 speed_template_16_24_32);
3580 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
3581 speed_template_16_24_32);
/* lrw/xts only when no hardware AES driver is configured. */
3582 #if !defined(CONFIG_CRYPTO_DEV_AES) && !defined(CONFIG_CRYPTO_ASYNC_AES)
3583 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
3584 speed_template_32_40_48);
3585 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
3586 speed_template_32_40_48);
3587 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
3588 speed_template_32_48_64);
3589 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
3590 speed_template_32_48_64);
3595 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
3596 des3_speed_template, DES3_SPEED_VECTORS,
3598 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
3599 des3_speed_template, DES3_SPEED_VECTORS,
3601 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
3602 des3_speed_template, DES3_SPEED_VECTORS,
3604 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
3605 des3_speed_template, DES3_SPEED_VECTORS,
3610 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
3611 speed_template_16_24_32);
3612 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
3613 speed_template_16_24_32);
3614 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
3615 speed_template_16_24_32);
3616 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
3617 speed_template_16_24_32);
3621 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
3622 speed_template_8_32);
3623 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
3624 speed_template_8_32);
3625 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
3626 speed_template_8_32);
3627 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
3628 speed_template_8_32);
3632 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
3634 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
3636 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
3638 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
3643 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
3644 speed_template_16_24_32);
3645 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
3646 speed_template_16_24_32);
3647 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
3648 speed_template_16_24_32);
3649 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
3650 speed_template_16_24_32);
3654 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
3655 speed_template_16_32);
/* --- hash speed-test modes (300 range) --- */
3662 test_hash_speed("md4", sec, generic_hash_speed_template);
3663 if (mode > 300 && mode < 400) break;
3666 test_hash_speed("md5", sec, generic_hash_speed_template);
3667 if (mode > 300 && mode < 400) break;
3670 test_hash_speed("sha1", sec, generic_hash_speed_template);
3671 if (mode > 300 && mode < 400) break;
3674 test_hash_speed("sha256", sec, generic_hash_speed_template);
3675 if (mode > 300 && mode < 400) break;
3678 test_hash_speed("sha384", sec, generic_hash_speed_template);
3679 if (mode > 300 && mode < 400) break;
3682 test_hash_speed("sha512", sec, generic_hash_speed_template);
3683 if (mode > 300 && mode < 400) break;
3686 test_hash_speed("wp256", sec, generic_hash_speed_template);
3687 if (mode > 300 && mode < 400) break;
3690 test_hash_speed("wp384", sec, generic_hash_speed_template);
3691 if (mode > 300 && mode < 400) break;
3694 test_hash_speed("wp512", sec, generic_hash_speed_template);
3695 if (mode > 300 && mode < 400) break;
3698 test_hash_speed("tgr128", sec, generic_hash_speed_template);
3699 if (mode > 300 && mode < 400) break;
3702 test_hash_speed("tgr160", sec, generic_hash_speed_template);
3703 if (mode > 300 && mode < 400) break;
3706 test_hash_speed("tgr192", sec, generic_hash_speed_template);
3707 if (mode > 300 && mode < 400) break;
3710 test_hash_speed("sha224", sec, generic_hash_speed_template);
3711 if (mode > 300 && mode < 400) break;
3714 test_hash_speed("rmd128", sec, generic_hash_speed_template);
3715 if (mode > 300 && mode < 400) break;
3718 test_hash_speed("rmd160", sec, generic_hash_speed_template);
3719 if (mode > 300 && mode < 400) break;
3722 test_hash_speed("rmd256", sec, generic_hash_speed_template);
3723 if (mode > 300 && mode < 400) break;
3726 test_hash_speed("rmd320", sec, generic_hash_speed_template);
3727 if (mode > 300 && mode < 400) break;
3732 /* Modified speed test for async block cipher mode */
3734 tcrypt_speedtest("ecb(aes)", NULL, 0,
3735 speed_template_16_24_32);
3736 tcrypt_speedtest("cbc(aes)", NULL, 0,
3737 speed_template_16_24_32);
3741 tcrypt_speedtest("ecb(des3_ede)", des3_speed_template,
3742 DES3_SPEED_VECTORS,speed_template_24);
3743 tcrypt_speedtest("cbc(des3_ede)", des3_speed_template,
3744 DES3_SPEED_VECTORS,speed_template_24);
3748 tcrypt_speedtest("ecb(des)", NULL, 0,
3750 tcrypt_speedtest("cbc(des)", NULL, 0,
3761 #if !defined(CONFIG_CRYPTO_DEV_DEU)
/*
 * do_alg_test() - non-DEU build: just probe registration of @alg.
 * "mask ?: CRYPTO_ALG_TYPE_MASK" substitutes the full type mask when
 * the caller passed 0.
 */
3762 static int do_alg_test(const char *alg, u32 type, u32 mask)
3764 return crypto_has_alg(alg, type, mask ?: CRYPTO_ALG_TYPE_MASK) ?
/*
 * tcrypt_mod_init() - module entry: allocate the tvmem page pool, then
 * run the do_test() mode for each DEU feature enabled in Kconfig
 * (md5/sha1/des/aes/arc4/hmacs), followed by the optional speed-test
 * modes. Pages are freed and an error-style code is returned at the
 * end (see the retained comment: -EAGAIN prevents the module from
 * staying loaded outside FIPS mode).
 */
3769 static int __init tcrypt_mod_init(void)
3774 printk("Starting Lantiq DEU Crypto TESTS . . . . . . .\n");
/* Back the scatterlists with TVMEMSIZE free pages. */
3776 for (i = 0; i < TVMEMSIZE; i++) {
3777 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
3782 #if defined(CONFIG_CRYPTO_DEV_DEU)
3783 #if defined(CONFIG_CRYPTO_DEV_MD5)
3784 mode = 1; // test md5 only
3785 err = do_test(mode);
3791 printk(KERN_ERR "md5: one or more tests failed!\n");
3795 #if defined(CONFIG_CRYPTO_DEV_SHA1)
3796 mode = 2; // test sha1 only
3797 err = do_test(mode);
3803 printk(KERN_ERR "sha1: one or more tests failed!\n");
3807 #if defined (CONFIG_CRYPTO_DEV_DES) || defined (CONFIG_CRYPTO_ASYNC_DES)
3808 mode = 3; // test des only
3809 err = do_test(mode);
3813 mode = 4; // test des3 only
3814 err = do_test(mode);
3820 printk(KERN_ERR "des3: one or more tests failed!\n");
3824 #if defined (CONFIG_CRYPTO_ASYNC_AES) || defined (CONFIG_CRYPTO_DEV_AES)
3825 mode = 10; // test aes only
3826 err = do_test(mode);
3832 printk(KERN_ERR "aes: one or more tests failed!\n");
3836 #if defined(CONFIG_CRYPTO_DEV_ARC4)
3838 err = do_test(mode);
3841 printk(KERN_ERR "arc4: one or more tests failed!\n");
3845 #if defined (CONFIG_CRYPTO_DEV_MD5_HMAC)
3847 err = do_test(mode);
3850 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
3854 #if defined (CONFIG_CRYPTO_DEV_SHA1_HMAC)
3856 err = do_test(mode);
3859 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
3864 /* Start Speed tests test modes */
3865 #if defined(CONFIG_CRYPTO_DEV_SPEED_TEST)
3866 #if defined(CONFIG_CRYPTO_DEV_AES)
3868 err = do_test(mode);
3872 #if defined (CONFIG_CRYPTO_DEV_DES)
3874 err = do_test(mode);
3879 err = do_test(mode);
3883 #if defined (CONFIG_CRYPTO_DEV_MD5)
3885 err = do_test(mode);
3889 #if defined (CONFIG_CRYPTO_DEV_SHA1)
3891 err = do_test(mode);
3895 printk("Speed tests finished successfully\n");
3899 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
3901 #endif /* CONFIG_CRYPTO_DEV_SPEED_TEST */
/* Non-DEU build: run the alg/mode selected via module parameters. */
3905 err = do_alg_test(alg, type, mask);
3907 err = do_test(mode);
3910 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
3913 #endif /* CONFIG_CRYPTO_DEV_DEU */
3916 /* We intentionaly return -EAGAIN to prevent keeping the module,
3917 * unless we're running in fips mode. It does all its work from
3918 * init() and doesn't offer any runtime functionality, but in
3919 * the fips case, checking for a successful load is helpful.
3920 * => we don't need it in the memory, do we?
/* Free only the pages that were successfully allocated. */
3927 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++ ){
3928 printk("Freeing page: %d\n", i);
3929 free_page((unsigned long)tvmem[i]);
3932 printk("Finished DEU testing . . . . . .\n");
3937  * If an init function is provided, an exit function must also be provided
3938  * to allow module unload.
/* Intentionally empty: all work (and cleanup) happens in tcrypt_mod_init(). */
3940 static void __exit tcrypt_mod_fini(void) {}
3943 module_init(tcrypt_mod_init);
3944 module_exit(tcrypt_mod_fini);
/*
 * Module parameters (all perm 0 — not visible in sysfs):
 *   alg  - algorithm name for the non-DEU do_alg_test() path
 *   type/mask - crypto_has_alg() type/mask for that path
 *   mode - test mode number passed to do_test()
 *   sec  - duration of each speed test, in seconds
 */
3946 module_param(alg, charp, 0);
3947 module_param(type, uint, 0);
3948 module_param(mask, uint, 0);
3949 module_param(mode, int, 0);
3950 module_param(sec, uint, 0);
3951 MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
3952 		      "(defaults to zero which uses CPU cycles instead)");
3954 MODULE_LICENSE("GPL");
3955 MODULE_DESCRIPTION("Quick & dirty crypto testing module");
3956 MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");