2 * Algorithm testing framework and tests.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * This program is free software; you can redistribute it and/or modify it
10 * under the terms of the GNU General Public License as published by the Free
11 * Software Foundation; either version 2 of the License, or (at your option)
16 #include <crypto/hash.h>
17 #include <linux/err.h>
18 #include <linux/module.h>
19 #include <linux/scatterlist.h>
20 #include <linux/slab.h>
21 #include <linux/string.h>
22 #include <crypto/rng.h>
28 * Need slab memory for testing (size in number of pages).
33 * Indexes into the xbuf to simulate cross-page access.
45 * Used by test_cipher()
50 struct tcrypt_result {
51 struct completion completion;
55 struct aead_test_suite {
57 struct aead_testvec *vecs;
62 struct cipher_test_suite {
64 struct cipher_testvec *vecs;
69 struct comp_test_suite {
71 struct comp_testvec *vecs;
76 struct pcomp_test_suite {
78 struct pcomp_testvec *vecs;
83 struct hash_test_suite {
84 struct hash_testvec *vecs;
88 struct cprng_test_suite {
89 struct cprng_testvec *vecs;
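/*
 * An alg_test_desc binds an algorithm name to the routine that tests it
 * and to the test-vector suite(s) that routine consumes; fips_allowed
 * marks algorithms that are permitted (and therefore must self-test)
 * when the kernel runs in FIPS mode.
 */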
93 struct alg_test_desc {
95 int (*test)(const struct alg_test_desc *desc, const char *driver,
97 int fips_allowed; /* set if alg is allowed in fips mode */
100 struct aead_test_suite aead;
101 struct cipher_test_suite cipher;
102 struct comp_test_suite comp;
103 struct pcomp_test_suite pcomp;
104 struct hash_test_suite hash;
105 struct cprng_test_suite cprng;
109 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
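/*
 * Each IDX[] value packs a page and an offset: IDX[k] >> PAGE_SHIFT
 * selects one of the xbuf pages and offset_in_page(IDX[k]) the position
 * within it, so chunked (".np") test vectors land at awkward,
 * page-crossing offsets.
 */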
111 static void hexdump(unsigned char *buf, unsigned int len)
113 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
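/*
 * Completion callback for asynchronous requests.  -EINPROGRESS only
 * signals that a backlogged request has been queued and is ignored;
 * any other status is recorded for the waiter before it is woken up.
 */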
118 static void tcrypt_complete(struct crypto_async_request *req, int err)
120 struct tcrypt_result *res = req->data;
122 if (err == -EINPROGRESS)
126 complete(&res->completion);
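/*
 * Allocate/free the XBUFSIZE single pages used as scatter-gather test
 * buffers.  On allocation failure the pages obtained so far are
 * released again and a non-zero value is returned.
 */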
129 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
133 for (i = 0; i < XBUFSIZE; i++) {
134 buf[i] = (void *)__get_free_page(GFP_KERNEL);
143 free_page((unsigned long)buf[i]);
148 static void testmgr_free_buf(char *buf[XBUFSIZE])
152 for (i = 0; i < XBUFSIZE; i++)
153 free_page((unsigned long)buf[i]);
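/*
 * Run the ahash test vectors in two passes: first each vector as a
 * single sg entry, then (for vectors with a non-zero .np) split across
 * the cross-page offsets in IDX[] to exercise scatterlist walking.
 * Keyed vectors (ksize != 0) go through crypto_ahash_setkey() first.
 */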
156 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
159 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
160 unsigned int i, j, k, temp;
161 struct scatterlist sg[8];
163 struct ahash_request *req;
164 struct tcrypt_result tresult;
166 char *xbuf[XBUFSIZE];
169 if (testmgr_alloc_buf(xbuf))
172 init_completion(&tresult.completion);
174 req = ahash_request_alloc(tfm, GFP_KERNEL);
176 printk(KERN_ERR "alg: hash: Failed to allocate request for "
180 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
181 tcrypt_complete, &tresult);
184 for (i = 0; i < tcount; i++) {
189 memset(result, 0, 64);
194 if (WARN_ON(template[i].psize > PAGE_SIZE))
197 memcpy(hash_buff, template[i].plaintext, template[i].psize);
198 sg_init_one(&sg[0], hash_buff, template[i].psize);
200 if (template[i].ksize) {
201 crypto_ahash_clear_flags(tfm, ~0);
202 ret = crypto_ahash_setkey(tfm, template[i].key,
205 printk(KERN_ERR "alg: hash: setkey failed on "
206 "test %d for %s: ret=%d\n", j, algo,
212 ahash_request_set_crypt(req, sg, result, template[i].psize);
213 ret = crypto_ahash_digest(req);
219 ret = wait_for_completion_interruptible(
220 &tresult.completion);
221 if (!ret && !(ret = tresult.err)) {
222 INIT_COMPLETION(tresult.completion);
227 printk(KERN_ERR "alg: hash: digest failed on test %d "
228 "for %s: ret=%d\n", j, algo, -ret);
232 if (memcmp(result, template[i].digest,
233 crypto_ahash_digestsize(tfm))) {
234 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
236 hexdump(result, crypto_ahash_digestsize(tfm));
243 for (i = 0; i < tcount; i++) {
244 if (template[i].np) {
246 memset(result, 0, 64);
249 sg_init_table(sg, template[i].np);
251 for (k = 0; k < template[i].np; k++) {
252 if (WARN_ON(offset_in_page(IDX[k]) +
253 template[i].tap[k] > PAGE_SIZE))
256 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
257 offset_in_page(IDX[k]),
258 template[i].plaintext + temp,
261 temp += template[i].tap[k];
264 if (template[i].ksize) {
265 crypto_ahash_clear_flags(tfm, ~0);
266 ret = crypto_ahash_setkey(tfm, template[i].key,
270 printk(KERN_ERR "alg: hash: setkey "
271 "failed on chunking test %d "
272 "for %s: ret=%d\n", j, algo,
278 ahash_request_set_crypt(req, sg, result,
280 ret = crypto_ahash_digest(req);
286 ret = wait_for_completion_interruptible(
287 &tresult.completion);
288 if (!ret && !(ret = tresult.err)) {
289 INIT_COMPLETION(tresult.completion);
294 printk(KERN_ERR "alg: hash: digest failed "
295 "on chunking test %d for %s: "
296 "ret=%d\n", j, algo, -ret);
300 if (memcmp(result, template[i].digest,
301 crypto_ahash_digestsize(tfm))) {
302 printk(KERN_ERR "alg: hash: Chunking test %d "
303 "failed for %s\n", j, algo);
304 hexdump(result, crypto_ahash_digestsize(tfm));
314 ahash_request_free(req);
316 testmgr_free_buf(xbuf);
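/*
 * Run AEAD vectors in the direction selected by 'enc'.  As with
 * test_hash() there are two passes: contiguous buffers first, then
 * chunked scatterlists for vectors with .np set.  Vectors marked
 * .novrfy are expected to fail verification with -EBADMSG.
 */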
321 static int test_aead(struct crypto_aead *tfm, int enc,
322 struct aead_testvec *template, unsigned int tcount)
324 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
325 unsigned int i, j, k, n, temp;
329 struct aead_request *req;
330 struct scatterlist sg[8];
331 struct scatterlist asg[8];
333 struct tcrypt_result result;
334 unsigned int authsize;
338 char *xbuf[XBUFSIZE];
339 char *axbuf[XBUFSIZE];
341 if (testmgr_alloc_buf(xbuf))
343 if (testmgr_alloc_buf(axbuf))
351 init_completion(&result.completion);
353 req = aead_request_alloc(tfm, GFP_KERNEL);
355 printk(KERN_ERR "alg: aead: Failed to allocate request for "
360 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
361 tcrypt_complete, &result);
363 for (i = 0, j = 0; i < tcount; i++) {
364 if (!template[i].np) {
367 /* some templates have no input data but they will
374 if (WARN_ON(template[i].ilen > PAGE_SIZE ||
375 template[i].alen > PAGE_SIZE))
378 memcpy(input, template[i].input, template[i].ilen);
379 memcpy(assoc, template[i].assoc, template[i].alen);
381 memcpy(iv, template[i].iv, MAX_IVLEN);
383 memset(iv, 0, MAX_IVLEN);
385 crypto_aead_clear_flags(tfm, ~0);
387 crypto_aead_set_flags(
388 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
390 key = template[i].key;
392 ret = crypto_aead_setkey(tfm, key,
394 if (!ret == template[i].fail) {
395 printk(KERN_ERR "alg: aead: setkey failed on "
396 "test %d for %s: flags=%x\n", j, algo,
397 crypto_aead_get_flags(tfm));
402 authsize = abs(template[i].rlen - template[i].ilen);
403 ret = crypto_aead_setauthsize(tfm, authsize);
405 printk(KERN_ERR "alg: aead: Failed to set "
406 "authsize to %u on test %d for %s\n",
411 sg_init_one(&sg[0], input,
412 template[i].ilen + (enc ? authsize : 0));
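/*
 * When encrypting, the buffer behind the single sg entry must also hold
 * the authentication tag that gets appended, hence the extra authsize
 * bytes; authsize itself is derived above as the difference between the
 * vector's result and input lengths.
 */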
414 sg_init_one(&asg[0], assoc, template[i].alen);
416 aead_request_set_crypt(req, sg, sg,
417 template[i].ilen, iv);
419 aead_request_set_assoc(req, asg, template[i].alen);
422 crypto_aead_encrypt(req) :
423 crypto_aead_decrypt(req);
427 if (template[i].novrfy) {
428 /* verification was supposed to fail */
429 printk(KERN_ERR "alg: aead: %s failed "
430 "on test %d for %s: ret was 0, "
431 "expected -EBADMSG\n",
433 /* so really, we got a bad message */
440 ret = wait_for_completion_interruptible(
442 if (!ret && !(ret = result.err)) {
443 INIT_COMPLETION(result.completion);
447 if (template[i].novrfy)
448 /* verification failure was expected */
452 printk(KERN_ERR "alg: aead: %s failed on test "
453 "%d for %s: ret=%d\n", e, j, algo, -ret);
458 if (memcmp(q, template[i].result, template[i].rlen)) {
459 printk(KERN_ERR "alg: aead: Test %d failed on "
460 "%s for %s\n", j, e, algo);
461 hexdump(q, template[i].rlen);
468 for (i = 0, j = 0; i < tcount; i++) {
469 if (template[i].np) {
473 memcpy(iv, template[i].iv, MAX_IVLEN);
475 memset(iv, 0, MAX_IVLEN);
477 crypto_aead_clear_flags(tfm, ~0);
479 crypto_aead_set_flags(
480 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
481 key = template[i].key;
483 ret = crypto_aead_setkey(tfm, key, template[i].klen);
484 if (!ret == template[i].fail) {
485 printk(KERN_ERR "alg: aead: setkey failed on "
486 "chunk test %d for %s: flags=%x\n", j,
487 algo, crypto_aead_get_flags(tfm));
492 authsize = abs(template[i].rlen - template[i].ilen);
495 sg_init_table(sg, template[i].np);
496 for (k = 0, temp = 0; k < template[i].np; k++) {
497 if (WARN_ON(offset_in_page(IDX[k]) +
498 template[i].tap[k] > PAGE_SIZE))
501 q = xbuf[IDX[k] >> PAGE_SHIFT] +
502 offset_in_page(IDX[k]);
504 memcpy(q, template[i].input + temp,
507 n = template[i].tap[k];
508 if (k == template[i].np - 1 && enc)
510 if (offset_in_page(q) + n < PAGE_SIZE)
513 sg_set_buf(&sg[k], q, template[i].tap[k]);
514 temp += template[i].tap[k];
517 ret = crypto_aead_setauthsize(tfm, authsize);
519 printk(KERN_ERR "alg: aead: Failed to set "
520 "authsize to %u on chunk test %d for "
521 "%s\n", authsize, j, algo);
526 if (WARN_ON(sg[k - 1].offset +
527 sg[k - 1].length + authsize >
533 sg[k - 1].length += authsize;
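/*
 * On encryption the last scatterlist entry is widened so the generated
 * tag fits behind the final chunk of ciphertext; the WARN_ON above
 * checks that this still stays within the page.
 */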
536 sg_init_table(asg, template[i].anp);
538 for (k = 0, temp = 0; k < template[i].anp; k++) {
539 if (WARN_ON(offset_in_page(IDX[k]) +
540 template[i].atap[k] > PAGE_SIZE))
543 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
544 offset_in_page(IDX[k]),
545 template[i].assoc + temp,
546 template[i].atap[k]),
547 template[i].atap[k]);
548 temp += template[i].atap[k];
551 aead_request_set_crypt(req, sg, sg,
555 aead_request_set_assoc(req, asg, template[i].alen);
558 crypto_aead_encrypt(req) :
559 crypto_aead_decrypt(req);
563 if (template[i].novrfy) {
564 /* verification was supposed to fail */
565 printk(KERN_ERR "alg: aead: %s failed "
566 "on chunk test %d for %s: ret "
567 "was 0, expected -EBADMSG\n",
569 /* so really, we got a bad message */
576 ret = wait_for_completion_interruptible(
578 if (!ret && !(ret = result.err)) {
579 INIT_COMPLETION(result.completion);
583 if (template[i].novrfy)
584 /* verification failure was expected */
588 printk(KERN_ERR "alg: aead: %s failed on "
589 "chunk test %d for %s: ret=%d\n", e, j,
595 for (k = 0, temp = 0; k < template[i].np; k++) {
596 q = xbuf[IDX[k] >> PAGE_SHIFT] +
597 offset_in_page(IDX[k]);
599 n = template[i].tap[k];
600 if (k == template[i].np - 1)
601 n += enc ? authsize : -authsize;
603 if (memcmp(q, template[i].result + temp, n)) {
604 printk(KERN_ERR "alg: aead: Chunk "
605 "test %d failed on %s at page "
606 "%u for %s\n", j, e, k, algo);
612 if (k == template[i].np - 1 && !enc) {
613 if (memcmp(q, template[i].input +
619 for (n = 0; offset_in_page(q + n) &&
624 printk(KERN_ERR "alg: aead: Result "
625 "buffer corruption in chunk "
626 "test %d on %s at page %u for "
627 "%s: %u bytes:\n", j, e, k,
633 temp += template[i].tap[k];
641 aead_request_free(req);
642 testmgr_free_buf(axbuf);
644 testmgr_free_buf(xbuf);
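/*
 * Exercise the plain synchronous cipher interface: each vector is
 * processed one block at a time with crypto_cipher_encrypt_one() /
 * crypto_cipher_decrypt_one() and the output compared against the
 * expected result.
 */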
649 static int test_cipher(struct crypto_cipher *tfm, int enc,
650 struct cipher_testvec *template, unsigned int tcount)
652 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
653 unsigned int i, j, k;
657 char *xbuf[XBUFSIZE];
660 if (testmgr_alloc_buf(xbuf))
669 for (i = 0; i < tcount; i++) {
676 if (WARN_ON(template[i].ilen > PAGE_SIZE))
680 memcpy(data, template[i].input, template[i].ilen);
682 crypto_cipher_clear_flags(tfm, ~0);
684 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
686 ret = crypto_cipher_setkey(tfm, template[i].key,
688 if (!ret == template[i].fail) {
689 printk(KERN_ERR "alg: cipher: setkey failed "
690 "on test %d for %s: flags=%x\n", j,
691 algo, crypto_cipher_get_flags(tfm));
696 for (k = 0; k < template[i].ilen;
697 k += crypto_cipher_blocksize(tfm)) {
699 crypto_cipher_encrypt_one(tfm, data + k,
702 crypto_cipher_decrypt_one(tfm, data + k,
707 if (memcmp(q, template[i].result, template[i].rlen)) {
708 printk(KERN_ERR "alg: cipher: Test %d failed "
709 "on %s for %s\n", j, e, algo);
710 hexdump(q, template[i].rlen);
719 testmgr_free_buf(xbuf);
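/*
 * Exercise the ablkcipher interface, again in two passes: whole-buffer
 * requests first, then chunked scatterlists for vectors with .np set.
 * The IV comes from the vector when one is supplied, otherwise a zeroed
 * IV is used.
 */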
724 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
725 struct cipher_testvec *template, unsigned int tcount)
728 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
729 unsigned int i, j, k, n, temp;
731 struct ablkcipher_request *req;
732 struct scatterlist sg[8];
734 struct tcrypt_result result;
737 char *xbuf[XBUFSIZE];
740 if (testmgr_alloc_buf(xbuf))
748 init_completion(&result.completion);
750 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
752 printk(KERN_ERR "alg: skcipher: Failed to allocate request "
757 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
758 tcrypt_complete, &result);
761 for (i = 0; i < tcount; i++) {
763 memcpy(iv, template[i].iv, MAX_IVLEN);
765 memset(iv, 0, MAX_IVLEN);
767 if (!(template[i].np)) {
771 if (WARN_ON(template[i].ilen > PAGE_SIZE))
775 memcpy(data, template[i].input, template[i].ilen);
777 crypto_ablkcipher_clear_flags(tfm, ~0);
779 crypto_ablkcipher_set_flags(
780 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
782 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
784 if (!ret == template[i].fail) {
785 printk(KERN_ERR "alg: skcipher: setkey failed "
786 "on test %d for %s: flags=%x\n", j,
787 algo, crypto_ablkcipher_get_flags(tfm));
792 sg_init_one(&sg[0], data, template[i].ilen);
794 ablkcipher_request_set_crypt(req, sg, sg,
795 template[i].ilen, iv);
797 crypto_ablkcipher_encrypt(req) :
798 crypto_ablkcipher_decrypt(req);
805 ret = wait_for_completion_interruptible(
807 if (!ret && !(ret = result.err)) {
808 INIT_COMPLETION(result.completion);
813 printk(KERN_ERR "alg: skcipher: %s failed on "
814 "test %d for %s: ret=%d\n", e, j, algo,
820 if (memcmp(q, template[i].result, template[i].rlen)) {
821 printk(KERN_ERR "alg: skcipher: Test %d "
822 "failed on %s for %s\n", j, e, algo);
823 hexdump(q, template[i].rlen);
831 for (i = 0; i < tcount; i++) {
834 memcpy(iv, template[i].iv, MAX_IVLEN);
836 memset(iv, 0, MAX_IVLEN);
838 if (template[i].np) {
841 crypto_ablkcipher_clear_flags(tfm, ~0);
843 crypto_ablkcipher_set_flags(
844 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
846 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
848 if (!ret == template[i].fail) {
849 printk(KERN_ERR "alg: skcipher: setkey failed "
850 "on chunk test %d for %s: flags=%x\n",
852 crypto_ablkcipher_get_flags(tfm));
859 sg_init_table(sg, template[i].np);
860 for (k = 0; k < template[i].np; k++) {
861 if (WARN_ON(offset_in_page(IDX[k]) +
862 template[i].tap[k] > PAGE_SIZE))
865 q = xbuf[IDX[k] >> PAGE_SHIFT] +
866 offset_in_page(IDX[k]);
868 memcpy(q, template[i].input + temp,
871 if (offset_in_page(q) + template[i].tap[k] <
873 q[template[i].tap[k]] = 0;
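/*
 * A zero guard byte is planted directly behind each chunk (when it
 * still fits in the page); the corruption check after the operation
 * walks these trailing bytes and complains if the cipher wrote past
 * its scatterlist element.
 */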
875 sg_set_buf(&sg[k], q, template[i].tap[k]);
877 temp += template[i].tap[k];
880 ablkcipher_request_set_crypt(req, sg, sg,
881 template[i].ilen, iv);
884 crypto_ablkcipher_encrypt(req) :
885 crypto_ablkcipher_decrypt(req);
892 ret = wait_for_completion_interruptible(
894 if (!ret && !(ret = result.err)) {
895 INIT_COMPLETION(result.completion);
900 printk(KERN_ERR "alg: skcipher: %s failed on "
901 "chunk test %d for %s: ret=%d\n", e, j,
908 for (k = 0; k < template[i].np; k++) {
909 q = xbuf[IDX[k] >> PAGE_SHIFT] +
910 offset_in_page(IDX[k]);
912 if (memcmp(q, template[i].result + temp,
913 template[i].tap[k])) {
914 printk(KERN_ERR "alg: skcipher: Chunk "
915 "test %d failed on %s at page "
916 "%u for %s\n", j, e, k, algo);
917 hexdump(q, template[i].tap[k]);
921 q += template[i].tap[k];
922 for (n = 0; offset_in_page(q + n) && q[n]; n++)
925 printk(KERN_ERR "alg: skcipher: "
926 "Result buffer corruption in "
927 "chunk test %d on %s at page "
928 "%u for %s: %u bytes:\n", j, e,
933 temp += template[i].tap[k];
941 ablkcipher_request_free(req);
942 testmgr_free_buf(xbuf);
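/*
 * Run synchronous compression and decompression vectors through the
 * crypto_comp interface and verify both the output length and the
 * output bytes.
 */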
947 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
948 struct comp_testvec *dtemplate, int ctcount, int dtcount)
950 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
952 char result[COMP_BUF_SIZE];
955 for (i = 0; i < ctcount; i++) {
957 unsigned int dlen = COMP_BUF_SIZE;
959 memset(result, 0, sizeof(result));
961 ilen = ctemplate[i].inlen;
962 ret = crypto_comp_compress(tfm, ctemplate[i].input,
963 ilen, result, &dlen);
965 printk(KERN_ERR "alg: comp: compression failed "
966 "on test %d for %s: ret=%d\n", i + 1, algo,
971 if (dlen != ctemplate[i].outlen) {
972 printk(KERN_ERR "alg: comp: Compression test %d "
973 "failed for %s: output len = %d\n", i + 1, algo,
979 if (memcmp(result, ctemplate[i].output, dlen)) {
980 printk(KERN_ERR "alg: comp: Compression test %d "
981 "failed for %s\n", i + 1, algo);
982 hexdump(result, dlen);
988 for (i = 0; i < dtcount; i++) {
990 unsigned int dlen = COMP_BUF_SIZE;
992 memset(result, 0, sizeof(result));
994 ilen = dtemplate[i].inlen;
995 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
996 ilen, result, &dlen);
998 printk(KERN_ERR "alg: comp: decompression failed "
999 "on test %d for %s: ret=%d\n", i + 1, algo,
1004 if (dlen != dtemplate[i].outlen) {
1005 printk(KERN_ERR "alg: comp: Decompression test %d "
1006 "failed for %s: output len = %d\n", i + 1, algo,
1012 if (memcmp(result, dtemplate[i].output, dlen)) {
1013 printk(KERN_ERR "alg: comp: Decompression test %d "
1014 "failed for %s\n", i + 1, algo);
1015 hexdump(result, dlen);
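/*
 * Run partial-update (pcomp) compression and decompression vectors.
 * Input and output space are deliberately handed over in two halves so
 * that the streaming update/final path is exercised rather than a
 * single one-shot call.
 */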
1027 static int test_pcomp(struct crypto_pcomp *tfm,
1028 struct pcomp_testvec *ctemplate,
1029 struct pcomp_testvec *dtemplate, int ctcount,
1032 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1034 char result[COMP_BUF_SIZE];
1037 for (i = 0; i < ctcount; i++) {
1038 struct comp_request req;
1039 unsigned int produced = 0;
1041 res = crypto_compress_setup(tfm, ctemplate[i].params,
1042 ctemplate[i].paramsize);
1044 pr_err("alg: pcomp: compression setup failed on test "
1045 "%d for %s: error=%d\n", i + 1, algo, res);
1049 res = crypto_compress_init(tfm);
1051 pr_err("alg: pcomp: compression init failed on test "
1052 "%d for %s: error=%d\n", i + 1, algo, res);
1056 memset(result, 0, sizeof(result));
1058 req.next_in = ctemplate[i].input;
1059 req.avail_in = ctemplate[i].inlen / 2;
1060 req.next_out = result;
1061 req.avail_out = ctemplate[i].outlen / 2;
1063 res = crypto_compress_update(tfm, &req);
1064 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1065 pr_err("alg: pcomp: compression update failed on test "
1066 "%d for %s: error=%d\n", i + 1, algo, res);
1072 /* Add remaining input data */
1073 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1075 res = crypto_compress_update(tfm, &req);
1076 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1077 pr_err("alg: pcomp: compression update failed on test "
1078 "%d for %s: error=%d\n", i + 1, algo, res);
1084 /* Provide remaining output space */
1085 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1087 res = crypto_compress_final(tfm, &req);
1089 pr_err("alg: pcomp: compression final failed on test "
1090 "%d for %s: error=%d\n", i + 1, algo, res);
1095 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1096 pr_err("alg: comp: Compression test %d failed for %s: "
1097 "output len = %d (expected %d)\n", i + 1, algo,
1098 COMP_BUF_SIZE - req.avail_out,
1099 ctemplate[i].outlen);
1103 if (produced != ctemplate[i].outlen) {
1104 pr_err("alg: pcomp: Compression test %d failed for %s: "
1105 "returned len = %u (expected %d)\n", i + 1,
1106 algo, produced, ctemplate[i].outlen);
1110 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1111 pr_err("alg: pcomp: Compression test %d failed for "
1112 "%s\n", i + 1, algo);
1113 hexdump(result, ctemplate[i].outlen);
1118 for (i = 0; i < dtcount; i++) {
1119 struct comp_request req;
1120 unsigned int produced = 0;
1122 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1123 dtemplate[i].paramsize);
1125 pr_err("alg: pcomp: decompression setup failed on "
1126 "test %d for %s: error=%d\n", i + 1, algo, res);
1130 res = crypto_decompress_init(tfm);
1132 pr_err("alg: pcomp: decompression init failed on test "
1133 "%d for %s: error=%d\n", i + 1, algo, res);
1137 memset(result, 0, sizeof(result));
1139 req.next_in = dtemplate[i].input;
1140 req.avail_in = dtemplate[i].inlen / 2;
1141 req.next_out = result;
1142 req.avail_out = dtemplate[i].outlen / 2;
1144 res = crypto_decompress_update(tfm, &req);
1145 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1146 pr_err("alg: pcomp: decompression update failed on "
1147 "test %d for %s: error=%d\n", i + 1, algo, res);
1153 /* Add remaining input data */
1154 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1156 res = crypto_decompress_update(tfm, &req);
1157 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1158 pr_err("alg: pcomp: decompression update failed on "
1159 "test %d for %s: error=%d\n", i + 1, algo, res);
1165 /* Provide remaining output space */
1166 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1168 res = crypto_decompress_final(tfm, &req);
1169 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1170 pr_err("alg: pcomp: decompression final failed on "
1171 "test %d for %s: error=%d\n", i + 1, algo, res);
1177 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1178 pr_err("alg: pcomp: Decompression test %d failed for "
1179 "%s: output len = %d (expected %d)\n", i + 1,
1180 algo, COMP_BUF_SIZE - req.avail_out,
1181 dtemplate[i].outlen);
1185 if (produced != dtemplate[i].outlen) {
1186 pr_err("alg: pcomp: Decompression test %d failed for "
1187 "%s: returned len = %u (expected %d)\n", i + 1,
1188 algo, produced, dtemplate[i].outlen);
1192 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1193 pr_err("alg: pcomp: Decompression test %d failed for "
1194 "%s\n", i + 1, algo);
1195 hexdump(result, dtemplate[i].outlen);
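/*
 * Exercise a deterministic RNG: reseed it from the vector's material,
 * pull .rlen bytes .loops times and compare the generated output
 * against the vector's expected result.
 */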
1204 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1205 unsigned int tcount)
1207 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1208 int err, i, j, seedsize;
1212 seedsize = crypto_rng_seedsize(tfm);
1214 seed = kmalloc(seedsize, GFP_KERNEL);
1216 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1221 for (i = 0; i < tcount; i++) {
1222 memset(result, 0, 32);
1224 memcpy(seed, template[i].v, template[i].vlen);
1225 memcpy(seed + template[i].vlen, template[i].key,
1227 memcpy(seed + template[i].vlen + template[i].klen,
1228 template[i].dt, template[i].dtlen);
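/*
 * The seed buffer handed to crypto_rng_reset() is the concatenation
 * V || key || DT, which is the layout the ansi_cprng vectors in this
 * file use; seedsize was queried from the transform above.
 */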
1230 err = crypto_rng_reset(tfm, seed, seedsize);
1232 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1237 for (j = 0; j < template[i].loops; j++) {
1238 err = crypto_rng_get_bytes(tfm, result,
1240 if (err != template[i].rlen) {
1241 printk(KERN_ERR "alg: cprng: Failed to obtain "
1242 "the correct amount of random data for "
1243 "%s (requested %d, got %d)\n", algo,
1244 template[i].rlen, err);
1249 err = memcmp(result, template[i].result,
1252 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1254 hexdump(result, template[i].rlen);
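/*
 * The alg_test_* wrappers below allocate a transform of the right type
 * for the requested driver, run the matching test routine over the
 * encryption and/or decryption vector sets, and free the transform
 * again.
 */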
1265 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1268 struct crypto_aead *tfm;
1271 tfm = crypto_alloc_aead(driver, type, mask);
1273 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1274 "%ld\n", driver, PTR_ERR(tfm));
1275 return PTR_ERR(tfm);
1278 if (desc->suite.aead.enc.vecs) {
1279 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1280 desc->suite.aead.enc.count);
1285 if (!err && desc->suite.aead.dec.vecs)
1286 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1287 desc->suite.aead.dec.count);
1290 crypto_free_aead(tfm);
1294 static int alg_test_cipher(const struct alg_test_desc *desc,
1295 const char *driver, u32 type, u32 mask)
1297 struct crypto_cipher *tfm;
1300 tfm = crypto_alloc_cipher(driver, type, mask);
1302 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1303 "%s: %ld\n", driver, PTR_ERR(tfm));
1304 return PTR_ERR(tfm);
1307 if (desc->suite.cipher.enc.vecs) {
1308 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1309 desc->suite.cipher.enc.count);
1314 if (desc->suite.cipher.dec.vecs)
1315 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1316 desc->suite.cipher.dec.count);
1319 crypto_free_cipher(tfm);
1323 static int alg_test_skcipher(const struct alg_test_desc *desc,
1324 const char *driver, u32 type, u32 mask)
1326 struct crypto_ablkcipher *tfm;
1329 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1331 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1332 "%s: %ld\n", driver, PTR_ERR(tfm));
1333 return PTR_ERR(tfm);
1336 if (desc->suite.cipher.enc.vecs) {
1337 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1338 desc->suite.cipher.enc.count);
1343 if (desc->suite.cipher.dec.vecs)
1344 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1345 desc->suite.cipher.dec.count);
1348 crypto_free_ablkcipher(tfm);
1352 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1355 struct crypto_comp *tfm;
1358 tfm = crypto_alloc_comp(driver, type, mask);
1360 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1361 "%ld\n", driver, PTR_ERR(tfm));
1362 return PTR_ERR(tfm);
1365 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1366 desc->suite.comp.decomp.vecs,
1367 desc->suite.comp.comp.count,
1368 desc->suite.comp.decomp.count);
1370 crypto_free_comp(tfm);
1374 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1377 struct crypto_pcomp *tfm;
1380 tfm = crypto_alloc_pcomp(driver, type, mask);
1382 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1383 driver, PTR_ERR(tfm));
1384 return PTR_ERR(tfm);
1387 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1388 desc->suite.pcomp.decomp.vecs,
1389 desc->suite.pcomp.comp.count,
1390 desc->suite.pcomp.decomp.count);
1392 crypto_free_pcomp(tfm);
1396 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1399 struct crypto_ahash *tfm;
1402 tfm = crypto_alloc_ahash(driver, type, mask);
1404 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1405 "%ld\n", driver, PTR_ERR(tfm));
1406 return PTR_ERR(tfm);
1409 err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count);
1411 crypto_free_ahash(tfm);
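/*
 * crc32c gets an extra check on top of the generic hash vectors: a
 * known value is loaded into the shash descriptor's CRC state and
 * crypto_shash_final() must produce its bitwise complement, catching
 * drivers that mishandle the partial (pre-inverted) CRC state.
 */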
1415 static int alg_test_crc32c(const struct alg_test_desc *desc,
1416 const char *driver, u32 type, u32 mask)
1418 struct crypto_shash *tfm;
1422 err = alg_test_hash(desc, driver, type, mask);
1426 tfm = crypto_alloc_shash(driver, type, mask);
1428 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1429 "%ld\n", driver, PTR_ERR(tfm));
1436 struct shash_desc shash;
1437 char ctx[crypto_shash_descsize(tfm)];
1440 sdesc.shash.tfm = tfm;
1441 sdesc.shash.flags = 0;
1443 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1444 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1446 printk(KERN_ERR "alg: crc32c: Operation failed for "
1447 "%s: %d\n", driver, err);
1451 if (val != ~420553207) {
1452 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1453 "%d\n", driver, val);
1458 crypto_free_shash(tfm);
1464 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1467 struct crypto_rng *rng;
1470 rng = crypto_alloc_rng(driver, type, mask);
1472 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1473 "%ld\n", driver, PTR_ERR(rng));
1474 return PTR_ERR(rng);
1477 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1479 crypto_free_rng(rng);
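/*
 * Master table mapping algorithm names to their test routine and test
 * vectors.  alg_find_test() does a binary search over it, which is why
 * the entries must stay sorted by .alg.
 */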
1484 /* Please keep this list sorted by algorithm name. */
1485 static const struct alg_test_desc alg_test_descs[] = {
1487 .alg = "ansi_cprng",
1488 .test = alg_test_cprng,
1492 .vecs = ansi_cprng_aes_tv_template,
1493 .count = ANSI_CPRNG_AES_TEST_VECTORS
1498 .test = alg_test_skcipher,
1503 .vecs = aes_cbc_enc_tv_template,
1504 .count = AES_CBC_ENC_TEST_VECTORS
1507 .vecs = aes_cbc_dec_tv_template,
1508 .count = AES_CBC_DEC_TEST_VECTORS
1513 .alg = "cbc(anubis)",
1514 .test = alg_test_skcipher,
1518 .vecs = anubis_cbc_enc_tv_template,
1519 .count = ANUBIS_CBC_ENC_TEST_VECTORS
1522 .vecs = anubis_cbc_dec_tv_template,
1523 .count = ANUBIS_CBC_DEC_TEST_VECTORS
1528 .alg = "cbc(blowfish)",
1529 .test = alg_test_skcipher,
1533 .vecs = bf_cbc_enc_tv_template,
1534 .count = BF_CBC_ENC_TEST_VECTORS
1537 .vecs = bf_cbc_dec_tv_template,
1538 .count = BF_CBC_DEC_TEST_VECTORS
1543 .alg = "cbc(camellia)",
1544 .test = alg_test_skcipher,
1548 .vecs = camellia_cbc_enc_tv_template,
1549 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
1552 .vecs = camellia_cbc_dec_tv_template,
1553 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
1559 .test = alg_test_skcipher,
1563 .vecs = des_cbc_enc_tv_template,
1564 .count = DES_CBC_ENC_TEST_VECTORS
1567 .vecs = des_cbc_dec_tv_template,
1568 .count = DES_CBC_DEC_TEST_VECTORS
1573 .alg = "cbc(des3_ede)",
1574 .test = alg_test_skcipher,
1579 .vecs = des3_ede_cbc_enc_tv_template,
1580 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
1583 .vecs = des3_ede_cbc_dec_tv_template,
1584 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
1589 .alg = "cbc(twofish)",
1590 .test = alg_test_skcipher,
1594 .vecs = tf_cbc_enc_tv_template,
1595 .count = TF_CBC_ENC_TEST_VECTORS
1598 .vecs = tf_cbc_dec_tv_template,
1599 .count = TF_CBC_DEC_TEST_VECTORS
1605 .test = alg_test_aead,
1610 .vecs = aes_ccm_enc_tv_template,
1611 .count = AES_CCM_ENC_TEST_VECTORS
1614 .vecs = aes_ccm_dec_tv_template,
1615 .count = AES_CCM_DEC_TEST_VECTORS
1621 .test = alg_test_crc32c,
1625 .vecs = crc32c_tv_template,
1626 .count = CRC32C_TEST_VECTORS
1631 .test = alg_test_skcipher,
1636 .vecs = aes_ctr_enc_tv_template,
1637 .count = AES_CTR_ENC_TEST_VECTORS
1640 .vecs = aes_ctr_dec_tv_template,
1641 .count = AES_CTR_DEC_TEST_VECTORS
1646 .alg = "cts(cbc(aes))",
1647 .test = alg_test_skcipher,
1651 .vecs = cts_mode_enc_tv_template,
1652 .count = CTS_MODE_ENC_TEST_VECTORS
1655 .vecs = cts_mode_dec_tv_template,
1656 .count = CTS_MODE_DEC_TEST_VECTORS
1662 .test = alg_test_comp,
1666 .vecs = deflate_comp_tv_template,
1667 .count = DEFLATE_COMP_TEST_VECTORS
1670 .vecs = deflate_decomp_tv_template,
1671 .count = DEFLATE_DECOMP_TEST_VECTORS
1677 .test = alg_test_skcipher,
1682 .vecs = aes_enc_tv_template,
1683 .count = AES_ENC_TEST_VECTORS
1686 .vecs = aes_dec_tv_template,
1687 .count = AES_DEC_TEST_VECTORS
1692 .alg = "ecb(anubis)",
1693 .test = alg_test_skcipher,
1697 .vecs = anubis_enc_tv_template,
1698 .count = ANUBIS_ENC_TEST_VECTORS
1701 .vecs = anubis_dec_tv_template,
1702 .count = ANUBIS_DEC_TEST_VECTORS
1708 .test = alg_test_skcipher,
1712 .vecs = arc4_enc_tv_template,
1713 .count = ARC4_ENC_TEST_VECTORS
1716 .vecs = arc4_dec_tv_template,
1717 .count = ARC4_DEC_TEST_VECTORS
1722 .alg = "ecb(blowfish)",
1723 .test = alg_test_skcipher,
1727 .vecs = bf_enc_tv_template,
1728 .count = BF_ENC_TEST_VECTORS
1731 .vecs = bf_dec_tv_template,
1732 .count = BF_DEC_TEST_VECTORS
1737 .alg = "ecb(camellia)",
1738 .test = alg_test_skcipher,
1742 .vecs = camellia_enc_tv_template,
1743 .count = CAMELLIA_ENC_TEST_VECTORS
1746 .vecs = camellia_dec_tv_template,
1747 .count = CAMELLIA_DEC_TEST_VECTORS
1752 .alg = "ecb(cast5)",
1753 .test = alg_test_skcipher,
1757 .vecs = cast5_enc_tv_template,
1758 .count = CAST5_ENC_TEST_VECTORS
1761 .vecs = cast5_dec_tv_template,
1762 .count = CAST5_DEC_TEST_VECTORS
1767 .alg = "ecb(cast6)",
1768 .test = alg_test_skcipher,
1772 .vecs = cast6_enc_tv_template,
1773 .count = CAST6_ENC_TEST_VECTORS
1776 .vecs = cast6_dec_tv_template,
1777 .count = CAST6_DEC_TEST_VECTORS
1783 .test = alg_test_skcipher,
1788 .vecs = des_enc_tv_template,
1789 .count = DES_ENC_TEST_VECTORS
1792 .vecs = des_dec_tv_template,
1793 .count = DES_DEC_TEST_VECTORS
1798 .alg = "ecb(des3_ede)",
1799 .test = alg_test_skcipher,
1804 .vecs = des3_ede_enc_tv_template,
1805 .count = DES3_EDE_ENC_TEST_VECTORS
1808 .vecs = des3_ede_dec_tv_template,
1809 .count = DES3_EDE_DEC_TEST_VECTORS
1814 .alg = "ecb(khazad)",
1815 .test = alg_test_skcipher,
1819 .vecs = khazad_enc_tv_template,
1820 .count = KHAZAD_ENC_TEST_VECTORS
1823 .vecs = khazad_dec_tv_template,
1824 .count = KHAZAD_DEC_TEST_VECTORS
1830 .test = alg_test_skcipher,
1834 .vecs = seed_enc_tv_template,
1835 .count = SEED_ENC_TEST_VECTORS
1838 .vecs = seed_dec_tv_template,
1839 .count = SEED_DEC_TEST_VECTORS
1844 .alg = "ecb(serpent)",
1845 .test = alg_test_skcipher,
1849 .vecs = serpent_enc_tv_template,
1850 .count = SERPENT_ENC_TEST_VECTORS
1853 .vecs = serpent_dec_tv_template,
1854 .count = SERPENT_DEC_TEST_VECTORS
1860 .test = alg_test_skcipher,
1864 .vecs = tea_enc_tv_template,
1865 .count = TEA_ENC_TEST_VECTORS
1868 .vecs = tea_dec_tv_template,
1869 .count = TEA_DEC_TEST_VECTORS
1874 .alg = "ecb(tnepres)",
1875 .test = alg_test_skcipher,
1879 .vecs = tnepres_enc_tv_template,
1880 .count = TNEPRES_ENC_TEST_VECTORS
1883 .vecs = tnepres_dec_tv_template,
1884 .count = TNEPRES_DEC_TEST_VECTORS
1889 .alg = "ecb(twofish)",
1890 .test = alg_test_skcipher,
1894 .vecs = tf_enc_tv_template,
1895 .count = TF_ENC_TEST_VECTORS
1898 .vecs = tf_dec_tv_template,
1899 .count = TF_DEC_TEST_VECTORS
1905 .test = alg_test_skcipher,
1909 .vecs = xeta_enc_tv_template,
1910 .count = XETA_ENC_TEST_VECTORS
1913 .vecs = xeta_dec_tv_template,
1914 .count = XETA_DEC_TEST_VECTORS
1920 .test = alg_test_skcipher,
1924 .vecs = xtea_enc_tv_template,
1925 .count = XTEA_ENC_TEST_VECTORS
1928 .vecs = xtea_dec_tv_template,
1929 .count = XTEA_DEC_TEST_VECTORS
1935 .test = alg_test_aead,
1940 .vecs = aes_gcm_enc_tv_template,
1941 .count = AES_GCM_ENC_TEST_VECTORS
1944 .vecs = aes_gcm_dec_tv_template,
1945 .count = AES_GCM_DEC_TEST_VECTORS
1951 .test = alg_test_hash,
1954 .vecs = hmac_md5_tv_template,
1955 .count = HMAC_MD5_TEST_VECTORS
1959 .alg = "hmac(rmd128)",
1960 .test = alg_test_hash,
1963 .vecs = hmac_rmd128_tv_template,
1964 .count = HMAC_RMD128_TEST_VECTORS
1968 .alg = "hmac(rmd160)",
1969 .test = alg_test_hash,
1972 .vecs = hmac_rmd160_tv_template,
1973 .count = HMAC_RMD160_TEST_VECTORS
1977 .alg = "hmac(sha1)",
1978 .test = alg_test_hash,
1982 .vecs = hmac_sha1_tv_template,
1983 .count = HMAC_SHA1_TEST_VECTORS
1987 .alg = "hmac(sha224)",
1988 .test = alg_test_hash,
1992 .vecs = hmac_sha224_tv_template,
1993 .count = HMAC_SHA224_TEST_VECTORS
1997 .alg = "hmac(sha256)",
1998 .test = alg_test_hash,
2002 .vecs = hmac_sha256_tv_template,
2003 .count = HMAC_SHA256_TEST_VECTORS
2007 .alg = "hmac(sha384)",
2008 .test = alg_test_hash,
2012 .vecs = hmac_sha384_tv_template,
2013 .count = HMAC_SHA384_TEST_VECTORS
2017 .alg = "hmac(sha512)",
2018 .test = alg_test_hash,
2022 .vecs = hmac_sha512_tv_template,
2023 .count = HMAC_SHA512_TEST_VECTORS
2028 .test = alg_test_skcipher,
2032 .vecs = aes_lrw_enc_tv_template,
2033 .count = AES_LRW_ENC_TEST_VECTORS
2036 .vecs = aes_lrw_dec_tv_template,
2037 .count = AES_LRW_DEC_TEST_VECTORS
2043 .test = alg_test_comp,
2047 .vecs = lzo_comp_tv_template,
2048 .count = LZO_COMP_TEST_VECTORS
2051 .vecs = lzo_decomp_tv_template,
2052 .count = LZO_DECOMP_TEST_VECTORS
2058 .test = alg_test_hash,
2061 .vecs = md4_tv_template,
2062 .count = MD4_TEST_VECTORS
2067 .test = alg_test_hash,
2070 .vecs = md5_tv_template,
2071 .count = MD5_TEST_VECTORS
2075 .alg = "michael_mic",
2076 .test = alg_test_hash,
2079 .vecs = michael_mic_tv_template,
2080 .count = MICHAEL_MIC_TEST_VECTORS
2084 .alg = "pcbc(fcrypt)",
2085 .test = alg_test_skcipher,
2089 .vecs = fcrypt_pcbc_enc_tv_template,
2090 .count = FCRYPT_ENC_TEST_VECTORS
2093 .vecs = fcrypt_pcbc_dec_tv_template,
2094 .count = FCRYPT_DEC_TEST_VECTORS
2099 .alg = "rfc3686(ctr(aes))",
2100 .test = alg_test_skcipher,
2105 .vecs = aes_ctr_rfc3686_enc_tv_template,
2106 .count = AES_CTR_3686_ENC_TEST_VECTORS
2109 .vecs = aes_ctr_rfc3686_dec_tv_template,
2110 .count = AES_CTR_3686_DEC_TEST_VECTORS
2115 .alg = "rfc4309(ccm(aes))",
2116 .test = alg_test_aead,
2121 .vecs = aes_ccm_rfc4309_enc_tv_template,
2122 .count = AES_CCM_4309_ENC_TEST_VECTORS
2125 .vecs = aes_ccm_rfc4309_dec_tv_template,
2126 .count = AES_CCM_4309_DEC_TEST_VECTORS
2132 .test = alg_test_hash,
2135 .vecs = rmd128_tv_template,
2136 .count = RMD128_TEST_VECTORS
2141 .test = alg_test_hash,
2144 .vecs = rmd160_tv_template,
2145 .count = RMD160_TEST_VECTORS
2150 .test = alg_test_hash,
2153 .vecs = rmd256_tv_template,
2154 .count = RMD256_TEST_VECTORS
2159 .test = alg_test_hash,
2162 .vecs = rmd320_tv_template,
2163 .count = RMD320_TEST_VECTORS
2168 .test = alg_test_skcipher,
2172 .vecs = salsa20_stream_enc_tv_template,
2173 .count = SALSA20_STREAM_ENC_TEST_VECTORS
2179 .test = alg_test_hash,
2183 .vecs = sha1_tv_template,
2184 .count = SHA1_TEST_VECTORS
2189 .test = alg_test_hash,
2193 .vecs = sha224_tv_template,
2194 .count = SHA224_TEST_VECTORS
2199 .test = alg_test_hash,
2203 .vecs = sha256_tv_template,
2204 .count = SHA256_TEST_VECTORS
2209 .test = alg_test_hash,
2213 .vecs = sha384_tv_template,
2214 .count = SHA384_TEST_VECTORS
2219 .test = alg_test_hash,
2223 .vecs = sha512_tv_template,
2224 .count = SHA512_TEST_VECTORS
2229 .test = alg_test_hash,
2232 .vecs = tgr128_tv_template,
2233 .count = TGR128_TEST_VECTORS
2238 .test = alg_test_hash,
2241 .vecs = tgr160_tv_template,
2242 .count = TGR160_TEST_VECTORS
2247 .test = alg_test_hash,
2250 .vecs = tgr192_tv_template,
2251 .count = TGR192_TEST_VECTORS
2256 .test = alg_test_hash,
2259 .vecs = wp256_tv_template,
2260 .count = WP256_TEST_VECTORS
2265 .test = alg_test_hash,
2268 .vecs = wp384_tv_template,
2269 .count = WP384_TEST_VECTORS
2274 .test = alg_test_hash,
2277 .vecs = wp512_tv_template,
2278 .count = WP512_TEST_VECTORS
2283 .test = alg_test_hash,
2286 .vecs = aes_xcbc128_tv_template,
2287 .count = XCBC_AES_TEST_VECTORS
2292 .test = alg_test_skcipher,
2296 .vecs = aes_xts_enc_tv_template,
2297 .count = AES_XTS_ENC_TEST_VECTORS
2300 .vecs = aes_xts_dec_tv_template,
2301 .count = AES_XTS_DEC_TEST_VECTORS
2307 .test = alg_test_pcomp,
2311 .vecs = zlib_comp_tv_template,
2312 .count = ZLIB_COMP_TEST_VECTORS
2315 .vecs = zlib_decomp_tv_template,
2316 .count = ZLIB_DECOMP_TEST_VECTORS
2323 static int alg_find_test(const char *alg)
2326 int end = ARRAY_SIZE(alg_test_descs);
2328 while (start < end) {
2329 int i = (start + end) / 2;
2330 int diff = strcmp(alg_test_descs[i].alg, alg);
2348 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
2353 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
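/*
 * Bare "cipher" algorithms have no test vectors of their own; they are
 * exercised through the corresponding "ecb(alg)" entry, so build that
 * name and look it up instead.
 */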
2354 char nalg[CRYPTO_MAX_ALG_NAME];
2356 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
2358 return -ENAMETOOLONG;
2360 i = alg_find_test(nalg);
2364 if (fips_enabled && !alg_test_descs[i].fips_allowed)
2367 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
2371 i = alg_find_test(alg);
2375 if (fips_enabled && !alg_test_descs[i].fips_allowed)
2378 rc = alg_test_descs[i].test(alg_test_descs + i, driver,
2381 if (fips_enabled && rc)
2382 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
2384 if (fips_enabled && !rc)
2385 printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
2391 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
2396 EXPORT_SYMBOL_GPL(alg_test);