2 * Algorithm testing framework and tests.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
23 #include <crypto/hash.h>
24 #include <linux/err.h>
25 #include <linux/module.h>
26 #include <linux/scatterlist.h>
27 #include <linux/slab.h>
28 #include <linux/string.h>
29 #include <crypto/rng.h>
33 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
36 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
46 * Need slab memory for testing (size in number of pages).
51 * Indexes into the xbuf to simulate cross-page access.
63 * Used by test_cipher()
/* Carries the completion used to wait for an async crypto request. */
68 struct tcrypt_result {
69 struct completion completion;
/* Per-algorithm-class containers pairing a test-vector array with its count
 * (count fields not visible in this view — presumably declared alongside). */
73 struct aead_test_suite {
75 struct aead_testvec *vecs;
80 struct cipher_test_suite {
82 struct cipher_testvec *vecs;
87 struct comp_test_suite {
89 struct comp_testvec *vecs;
94 struct pcomp_test_suite {
96 struct pcomp_testvec *vecs;
101 struct hash_test_suite {
102 struct hash_testvec *vecs;
106 struct cprng_test_suite {
107 struct cprng_testvec *vecs;
/* One entry of the alg_test_descs[] table: the algorithm name, the test
 * callback to run for it, and the union/struct of suite data it consumes. */
111 struct alg_test_desc {
113 int (*test)(const struct alg_test_desc *desc, const char *driver,
115 int fips_allowed; /* set if alg is allowed in fips mode */
118 struct aead_test_suite aead;
119 struct cipher_test_suite cipher;
120 struct comp_test_suite comp;
121 struct pcomp_test_suite pcomp;
122 struct hash_test_suite hash;
123 struct cprng_test_suite cprng;
/* Offsets into the xbuf pages used to scatter chunked test data across
 * page boundaries (see the IDX[k] >> PAGE_SHIFT / offset_in_page users). */
127 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
/* Dump @len bytes of @buf to the console via print_hex_dump() so a failing
 * test's actual output can be compared against the expected digest/result. */
129 static void hexdump(unsigned char *buf, unsigned int len)
131 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
/*
 * Async-request completion callback: records the outcome in the
 * tcrypt_result passed via req->data and wakes the waiter.
 * -EINPROGRESS is a backlog notification, not a final result, so it
 * must not complete the waiter (the early-return is outside this view).
 */
136 static void tcrypt_complete(struct crypto_async_request *req, int err)
138 struct tcrypt_result *res = req->data;
140 if (err == -EINPROGRESS)
144 complete(&res->completion);
/*
 * Allocate XBUFSIZE single pages into @buf for scatter/gather test data.
 * On allocation failure the already-allocated pages are freed (the
 * unwind loop's head is outside this view). Returns 0 on success.
 */
147 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
151 for (i = 0; i < XBUFSIZE; i++) {
152 buf[i] = (void *)__get_free_page(GFP_KERNEL);
161 free_page((unsigned long)buf[i]);
/* Release all XBUFSIZE pages previously obtained by testmgr_alloc_buf(). */
166 static void testmgr_free_buf(char *buf[XBUFSIZE])
170 for (i = 0; i < XBUFSIZE; i++)
171 free_page((unsigned long)buf[i]);
/*
 * Run one (possibly asynchronous) ahash operation to completion.
 * @ret is the immediate return of the crypto call: -EINPROGRESS/-EBUSY
 * mean the operation was queued, so wait on @tr->completion and re-arm
 * it for the next operation. Returns the operation's final status.
 */
174 static int do_one_async_hash_op(struct ahash_request *req,
175 struct tcrypt_result *tr,
178 if (ret == -EINPROGRESS || ret == -EBUSY) {
179 ret = wait_for_completion_interruptible(&tr->completion);
182 INIT_COMPLETION(tr->completion);
/*
 * Run the hash test vectors in @template against @tfm.
 *
 * Pass 1: each vector's plaintext is hashed from a single contiguous
 * buffer, either via one-shot crypto_ahash_digest() (@use_digest) or the
 * init/update/final sequence. Pass 2: vectors with a non-zero .np are
 * re-hashed from a scatterlist split across page boundaries per .tap[]
 * ("chunking" tests). Any mismatch against .digest is reported and the
 * offending output hexdumped. Returns 0 on success, -errno on failure.
 *
 * Fix: three messages below previously printed the prefix "alt: hash:";
 * corrected to "alg: hash:" to match every other testmgr log message.
 */
187 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
188 unsigned int tcount, bool use_digest)
190 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
191 unsigned int i, j, k, temp;
192 struct scatterlist sg[8];
194 struct ahash_request *req;
195 struct tcrypt_result tresult;
197 char *xbuf[XBUFSIZE];
200 if (testmgr_alloc_buf(xbuf))
203 init_completion(&tresult.completion);
205 req = ahash_request_alloc(tfm, GFP_KERNEL);
207 printk(KERN_ERR "alg: hash: Failed to allocate request for "
211 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
212 tcrypt_complete, &tresult);
/* Pass 1: contiguous-buffer tests. */
215 for (i = 0; i < tcount; i++) {
220 memset(result, 0, 64);
224 memcpy(hash_buff, template[i].plaintext, template[i].psize);
225 sg_init_one(&sg[0], hash_buff, template[i].psize);
227 if (template[i].ksize) {
228 crypto_ahash_clear_flags(tfm, ~0);
229 ret = crypto_ahash_setkey(tfm, template[i].key,
232 printk(KERN_ERR "alg: hash: setkey failed on "
233 "test %d for %s: ret=%d\n", j, algo,
239 ahash_request_set_crypt(req, sg, result, template[i].psize);
241 ret = do_one_async_hash_op(req, &tresult,
242 crypto_ahash_digest(req));
244 pr_err("alg: hash: digest failed on test %d "
245 "for %s: ret=%d\n", j, algo, -ret);
/* !use_digest: exercise the init/update/final path instead. */
249 ret = do_one_async_hash_op(req, &tresult,
250 crypto_ahash_init(req));
252 pr_err("alg: hash: init failed on test %d "
253 "for %s: ret=%d\n", j, algo, -ret);
256 ret = do_one_async_hash_op(req, &tresult,
257 crypto_ahash_update(req));
259 pr_err("alg: hash: update failed on test %d "
260 "for %s: ret=%d\n", j, algo, -ret);
263 ret = do_one_async_hash_op(req, &tresult,
264 crypto_ahash_final(req));
266 pr_err("alg: hash: final failed on test %d "
267 "for %s: ret=%d\n", j, algo, -ret);
272 if (memcmp(result, template[i].digest,
273 crypto_ahash_digestsize(tfm))) {
274 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
276 hexdump(result, crypto_ahash_digestsize(tfm));
/* Pass 2: chunking tests — plaintext scattered across pages per .tap[]. */
283 for (i = 0; i < tcount; i++) {
284 if (template[i].np) {
286 memset(result, 0, 64);
289 sg_init_table(sg, template[i].np);
291 for (k = 0; k < template[i].np; k++) {
292 if (WARN_ON(offset_in_page(IDX[k]) +
293 template[i].tap[k] > PAGE_SIZE))
296 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
297 offset_in_page(IDX[k]),
298 template[i].plaintext + temp,
301 temp += template[i].tap[k];
304 if (template[i].ksize) {
305 crypto_ahash_clear_flags(tfm, ~0);
306 ret = crypto_ahash_setkey(tfm, template[i].key,
310 printk(KERN_ERR "alg: hash: setkey "
311 "failed on chunking test %d "
312 "for %s: ret=%d\n", j, algo,
318 ahash_request_set_crypt(req, sg, result,
320 ret = crypto_ahash_digest(req);
326 ret = wait_for_completion_interruptible(
327 &tresult.completion);
328 if (!ret && !(ret = tresult.err)) {
329 INIT_COMPLETION(tresult.completion);
334 printk(KERN_ERR "alg: hash: digest failed "
335 "on chunking test %d for %s: "
336 "ret=%d\n", j, algo, -ret);
340 if (memcmp(result, template[i].digest,
341 crypto_ahash_digestsize(tfm))) {
342 printk(KERN_ERR "alg: hash: Chunking test %d "
343 "failed for %s\n", j, algo);
344 hexdump(result, crypto_ahash_digestsize(tfm));
354 ahash_request_free(req);
356 testmgr_free_buf(xbuf);
/*
 * Core AEAD test driver.
 *
 * Pass 1 runs each non-chunked vector (!.np) from contiguous buffers;
 * pass 2 runs chunked vectors (.np) with the ciphertext/plaintext and
 * associated data scattered across pages per .tap[]/.atap[]. @enc picks
 * encrypt vs decrypt; @diff_dst routes output into separate xoutbuf
 * pages; @align_offset deliberately misaligns the buffers. Vectors with
 * .novrfy expect decryption to fail with -EBADMSG. Returns 0 on success.
 */
361 static int __test_aead(struct crypto_aead *tfm, int enc,
362 struct aead_testvec *template, unsigned int tcount,
363 const bool diff_dst, const int align_offset)
365 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
366 unsigned int i, j, k, n, temp;
370 struct aead_request *req;
371 struct scatterlist *sg;
372 struct scatterlist *asg;
373 struct scatterlist *sgout;
375 struct tcrypt_result result;
376 unsigned int authsize;
381 char *xbuf[XBUFSIZE];
382 char *xoutbuf[XBUFSIZE];
383 char *axbuf[XBUFSIZE];
385 if (testmgr_alloc_buf(xbuf))
387 if (testmgr_alloc_buf(axbuf))
/* xoutbuf is only needed when output goes to a distinct destination. */
390 if (diff_dst && testmgr_alloc_buf(xoutbuf))
393 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
394 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 3 : 2), GFP_KERNEL);
410 init_completion(&result.completion);
412 req = aead_request_alloc(tfm, GFP_KERNEL);
414 pr_err("alg: aead%s: Failed to allocate request for %s\n",
419 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
420 tcrypt_complete, &result);
/* Pass 1: contiguous (non-chunked) vectors. */
422 for (i = 0, j = 0; i < tcount; i++) {
423 if (!template[i].np) {
426 /* some templates have no input data but they will
430 input += align_offset;
434 if (WARN_ON(align_offset + template[i].ilen >
435 PAGE_SIZE || template[i].alen > PAGE_SIZE))
438 memcpy(input, template[i].input, template[i].ilen);
439 memcpy(assoc, template[i].assoc, template[i].alen);
441 memcpy(iv, template[i].iv, MAX_IVLEN);
443 memset(iv, 0, MAX_IVLEN);
445 crypto_aead_clear_flags(tfm, ~0);
447 crypto_aead_set_flags(
448 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
450 key = template[i].key;
452 ret = crypto_aead_setkey(tfm, key,
/* Setkey outcome must match the vector's .fail expectation. */
454 if (!ret == template[i].fail) {
455 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
456 d, j, algo, crypto_aead_get_flags(tfm));
/* Auth tag length = |rlen - ilen| (added on encrypt, consumed on decrypt). */
461 authsize = abs(template[i].rlen - template[i].ilen);
462 ret = crypto_aead_setauthsize(tfm, authsize);
464 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
465 d, authsize, j, algo);
469 sg_init_one(&sg[0], input,
470 template[i].ilen + (enc ? authsize : 0));
474 output += align_offset;
475 sg_init_one(&sgout[0], output,
477 (enc ? authsize : 0));
482 sg_init_one(&asg[0], assoc, template[i].alen);
484 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
485 template[i].ilen, iv);
487 aead_request_set_assoc(req, asg, template[i].alen);
490 crypto_aead_encrypt(req) :
491 crypto_aead_decrypt(req);
495 if (template[i].novrfy) {
496 /* verification was supposed to fail */
497 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
499 /* so really, we got a bad message */
506 ret = wait_for_completion_interruptible(
508 if (!ret && !(ret = result.err)) {
509 INIT_COMPLETION(result.completion);
513 if (template[i].novrfy)
514 /* verification failure was expected */
518 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
519 d, e, j, algo, -ret);
524 if (memcmp(q, template[i].result, template[i].rlen)) {
525 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
527 hexdump(q, template[i].rlen);
/* Pass 2: chunked vectors spread across pages per .tap[]/.atap[]. */
534 for (i = 0, j = 0; i < tcount; i++) {
535 /* alignment tests are only done with continuous buffers */
536 if (align_offset != 0)
539 if (template[i].np) {
543 memcpy(iv, template[i].iv, MAX_IVLEN);
545 memset(iv, 0, MAX_IVLEN);
547 crypto_aead_clear_flags(tfm, ~0);
549 crypto_aead_set_flags(
550 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
551 key = template[i].key;
553 ret = crypto_aead_setkey(tfm, key, template[i].klen);
554 if (!ret == template[i].fail) {
555 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
556 d, j, algo, crypto_aead_get_flags(tfm));
561 authsize = abs(template[i].rlen - template[i].ilen);
/* Build the source (and optional destination) scatterlists. */
564 sg_init_table(sg, template[i].np);
566 sg_init_table(sgout, template[i].np);
567 for (k = 0, temp = 0; k < template[i].np; k++) {
568 if (WARN_ON(offset_in_page(IDX[k]) +
569 template[i].tap[k] > PAGE_SIZE))
572 q = xbuf[IDX[k] >> PAGE_SHIFT] +
573 offset_in_page(IDX[k]);
575 memcpy(q, template[i].input + temp,
578 n = template[i].tap[k];
579 if (k == template[i].np - 1 && enc)
581 if (offset_in_page(q) + n < PAGE_SIZE)
584 sg_set_buf(&sg[k], q, template[i].tap[k]);
587 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
588 offset_in_page(IDX[k]);
590 memset(q, 0, template[i].tap[k]);
591 if (offset_in_page(q) + n < PAGE_SIZE)
594 sg_set_buf(&sgout[k], q,
598 temp += template[i].tap[k];
601 ret = crypto_aead_setauthsize(tfm, authsize);
603 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
604 d, authsize, j, algo);
/* On encrypt, the last chunk must also hold the appended auth tag. */
609 if (WARN_ON(sg[k - 1].offset +
610 sg[k - 1].length + authsize >
616 sg[k - 1].length += authsize;
619 sgout[k - 1].length += authsize;
/* Scatter the associated data the same way, per .atap[]. */
622 sg_init_table(asg, template[i].anp);
624 for (k = 0, temp = 0; k < template[i].anp; k++) {
625 if (WARN_ON(offset_in_page(IDX[k]) +
626 template[i].atap[k] > PAGE_SIZE))
629 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
630 offset_in_page(IDX[k]),
631 template[i].assoc + temp,
632 template[i].atap[k]),
633 template[i].atap[k]);
634 temp += template[i].atap[k];
637 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
641 aead_request_set_assoc(req, asg, template[i].alen);
644 crypto_aead_encrypt(req) :
645 crypto_aead_decrypt(req);
649 if (template[i].novrfy) {
650 /* verification was supposed to fail */
651 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
653 /* so really, we got a bad message */
660 ret = wait_for_completion_interruptible(
662 if (!ret && !(ret = result.err)) {
663 INIT_COMPLETION(result.completion);
667 if (template[i].novrfy)
668 /* verification failure was expected */
672 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
673 d, e, j, algo, -ret);
/* Verify each output chunk and check for writes past the chunk end. */
678 for (k = 0, temp = 0; k < template[i].np; k++) {
680 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
681 offset_in_page(IDX[k]);
683 q = xbuf[IDX[k] >> PAGE_SHIFT] +
684 offset_in_page(IDX[k]);
686 n = template[i].tap[k];
687 if (k == template[i].np - 1)
688 n += enc ? authsize : -authsize;
690 if (memcmp(q, template[i].result + temp, n)) {
691 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
698 if (k == template[i].np - 1 && !enc) {
700 memcmp(q, template[i].input +
706 for (n = 0; offset_in_page(q + n) &&
711 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
712 d, j, e, k, algo, n);
717 temp += template[i].tap[k];
725 aead_request_free(req);
729 testmgr_free_buf(xoutbuf);
731 testmgr_free_buf(axbuf);
733 testmgr_free_buf(xbuf);
/*
 * AEAD test entry point: runs __test_aead() in four configurations —
 * in-place, separate destination, one-byte misalignment, and an offset
 * derived from the tfm's alignmask — so that driver alignment handling
 * is exercised, not just the happy path. Returns first failure.
 */
738 static int test_aead(struct crypto_aead *tfm, int enc,
739 struct aead_testvec *template, unsigned int tcount)
741 unsigned int alignmask;
744 /* test 'dst == src' case */
745 ret = __test_aead(tfm, enc, template, tcount, false, 0);
749 /* test 'dst != src' case */
750 ret = __test_aead(tfm, enc, template, tcount, true, 0);
754 /* test unaligned buffers, check with one byte offset */
755 ret = __test_aead(tfm, enc, template, tcount, true, 1);
759 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
761 /* Check if alignment mask for tfm is correctly set. */
762 ret = __test_aead(tfm, enc, template, tcount, true,
/*
 * Run single-block (synchronous crypto_cipher) test vectors: each
 * vector's input is copied into a page, processed one cipher block at a
 * time via crypto_cipher_{en,de}crypt_one(), and compared to .result.
 * Vectors with .fail expect setkey to be rejected. Returns 0 on success.
 */
771 static int test_cipher(struct crypto_cipher *tfm, int enc,
772 struct cipher_testvec *template, unsigned int tcount)
774 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
775 unsigned int i, j, k;
779 char *xbuf[XBUFSIZE];
782 if (testmgr_alloc_buf(xbuf))
791 for (i = 0; i < tcount; i++) {
798 if (WARN_ON(template[i].ilen > PAGE_SIZE))
802 memcpy(data, template[i].input, template[i].ilen);
804 crypto_cipher_clear_flags(tfm, ~0);
806 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
808 ret = crypto_cipher_setkey(tfm, template[i].key,
/* Setkey outcome must match the vector's .fail expectation. */
810 if (!ret == template[i].fail) {
811 printk(KERN_ERR "alg: cipher: setkey failed "
812 "on test %d for %s: flags=%x\n", j,
813 algo, crypto_cipher_get_flags(tfm));
/* Process the input in-place, one cipher block per iteration. */
818 for (k = 0; k < template[i].ilen;
819 k += crypto_cipher_blocksize(tfm)) {
821 crypto_cipher_encrypt_one(tfm, data + k,
824 crypto_cipher_decrypt_one(tfm, data + k,
829 if (memcmp(q, template[i].result, template[i].rlen)) {
830 printk(KERN_ERR "alg: cipher: Test %d failed "
831 "on %s for %s\n", j, e, algo);
832 hexdump(q, template[i].rlen);
841 testmgr_free_buf(xbuf);
/*
 * Core skcipher (ablkcipher) test driver, same structure as __test_aead:
 * pass 1 runs contiguous-buffer vectors (!.np, or .np with .also_non_np),
 * pass 2 runs chunked vectors with data scattered across pages per
 * .tap[]. @diff_dst writes output to separate pages; @align_offset
 * deliberately misaligns the buffers. Returns 0 on success.
 */
846 static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
847 struct cipher_testvec *template, unsigned int tcount,
848 const bool diff_dst, const int align_offset)
851 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
852 unsigned int i, j, k, n, temp;
854 struct ablkcipher_request *req;
855 struct scatterlist sg[8];
856 struct scatterlist sgout[8];
858 struct tcrypt_result result;
861 char *xbuf[XBUFSIZE];
862 char *xoutbuf[XBUFSIZE];
865 if (testmgr_alloc_buf(xbuf))
868 if (diff_dst && testmgr_alloc_buf(xoutbuf))
881 init_completion(&result.completion);
883 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
885 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
890 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
891 tcrypt_complete, &result);
/* Pass 1: contiguous-buffer vectors. */
894 for (i = 0; i < tcount; i++) {
896 memcpy(iv, template[i].iv, MAX_IVLEN);
898 memset(iv, 0, MAX_IVLEN);
900 if (!(template[i].np) || (template[i].also_non_np)) {
904 if (WARN_ON(align_offset + template[i].ilen >
909 data += align_offset;
910 memcpy(data, template[i].input, template[i].ilen);
912 crypto_ablkcipher_clear_flags(tfm, ~0);
914 crypto_ablkcipher_set_flags(
915 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
917 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
/* Setkey outcome must match the vector's .fail expectation. */
919 if (!ret == template[i].fail) {
920 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
922 crypto_ablkcipher_get_flags(tfm));
927 sg_init_one(&sg[0], data, template[i].ilen);
930 data += align_offset;
931 sg_init_one(&sgout[0], data, template[i].ilen);
934 ablkcipher_request_set_crypt(req, sg,
935 (diff_dst) ? sgout : sg,
936 template[i].ilen, iv);
938 crypto_ablkcipher_encrypt(req) :
939 crypto_ablkcipher_decrypt(req);
946 ret = wait_for_completion_interruptible(
948 if (!ret && !((ret = result.err))) {
949 INIT_COMPLETION(result.completion);
954 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
955 d, e, j, algo, -ret);
960 if (memcmp(q, template[i].result, template[i].rlen)) {
961 pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
963 hexdump(q, template[i].rlen);
/* Pass 2: chunked vectors spread across pages per .tap[]. */
971 for (i = 0; i < tcount; i++) {
972 /* alignment tests are only done with continuous buffers */
973 if (align_offset != 0)
977 memcpy(iv, template[i].iv, MAX_IVLEN);
979 memset(iv, 0, MAX_IVLEN);
981 if (template[i].np) {
984 crypto_ablkcipher_clear_flags(tfm, ~0);
986 crypto_ablkcipher_set_flags(
987 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
989 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
991 if (!ret == template[i].fail) {
992 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
994 crypto_ablkcipher_get_flags(tfm));
1001 sg_init_table(sg, template[i].np);
1003 sg_init_table(sgout, template[i].np);
1004 for (k = 0; k < template[i].np; k++) {
1005 if (WARN_ON(offset_in_page(IDX[k]) +
1006 template[i].tap[k] > PAGE_SIZE))
1009 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1010 offset_in_page(IDX[k]);
1012 memcpy(q, template[i].input + temp,
1013 template[i].tap[k]);
/* Sentinel NUL just past each chunk, used later to detect overruns. */
1015 if (offset_in_page(q) + template[i].tap[k] <
1017 q[template[i].tap[k]] = 0;
1019 sg_set_buf(&sg[k], q, template[i].tap[k]);
1021 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1022 offset_in_page(IDX[k]);
1024 sg_set_buf(&sgout[k], q,
1025 template[i].tap[k]);
1027 memset(q, 0, template[i].tap[k]);
1028 if (offset_in_page(q) +
1029 template[i].tap[k] < PAGE_SIZE)
1030 q[template[i].tap[k]] = 0;
1033 temp += template[i].tap[k];
1036 ablkcipher_request_set_crypt(req, sg,
1037 (diff_dst) ? sgout : sg,
1038 template[i].ilen, iv);
1041 crypto_ablkcipher_encrypt(req) :
1042 crypto_ablkcipher_decrypt(req);
1049 ret = wait_for_completion_interruptible(
1050 &result.completion);
1051 if (!ret && !((ret = result.err))) {
1052 INIT_COMPLETION(result.completion);
1057 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1058 d, e, j, algo, -ret);
/* Verify each output chunk, then scan past it for corruption. */
1064 for (k = 0; k < template[i].np; k++) {
1066 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1067 offset_in_page(IDX[k]);
1069 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1070 offset_in_page(IDX[k]);
1072 if (memcmp(q, template[i].result + temp,
1073 template[i].tap[k])) {
1074 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1076 hexdump(q, template[i].tap[k]);
1080 q += template[i].tap[k];
1081 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1084 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1085 d, j, e, k, algo, n);
1089 temp += template[i].tap[k];
1097 ablkcipher_request_free(req);
1099 testmgr_free_buf(xoutbuf);
1101 testmgr_free_buf(xbuf);
/*
 * skcipher test entry point: runs __test_skcipher() in four
 * configurations — in-place, separate destination, one-byte
 * misalignment, and an offset derived from the tfm's alignmask —
 * mirroring test_aead(). Returns first failure.
 */
1106 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
1107 struct cipher_testvec *template, unsigned int tcount)
1109 unsigned int alignmask;
1112 /* test 'dst == src' case */
1113 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1117 /* test 'dst != src' case */
1118 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1122 /* test unaligned buffers, check with one byte offset */
1123 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1127 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1129 /* Check if alignment mask for tfm is correctly set. */
1130 ret = __test_skcipher(tfm, enc, template, tcount, true,
/*
 * Run synchronous compressor tests: each @ctemplate vector is compressed
 * and each @dtemplate vector decompressed in one shot, then both the
 * output length and the output bytes are checked against the vector.
 * Returns 0 on success, -errno/-EINVAL-style failure otherwise.
 */
1139 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1140 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1142 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1144 char result[COMP_BUF_SIZE];
/* Compression vectors. */
1147 for (i = 0; i < ctcount; i++) {
1149 unsigned int dlen = COMP_BUF_SIZE;
1151 memset(result, 0, sizeof (result));
1153 ilen = ctemplate[i].inlen;
1154 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1155 ilen, result, &dlen);
1157 printk(KERN_ERR "alg: comp: compression failed "
1158 "on test %d for %s: ret=%d\n", i + 1, algo,
1163 if (dlen != ctemplate[i].outlen) {
1164 printk(KERN_ERR "alg: comp: Compression test %d "
1165 "failed for %s: output len = %d\n", i + 1, algo,
1171 if (memcmp(result, ctemplate[i].output, dlen)) {
1172 printk(KERN_ERR "alg: comp: Compression test %d "
1173 "failed for %s\n", i + 1, algo);
1174 hexdump(result, dlen);
/* Decompression vectors. */
1180 for (i = 0; i < dtcount; i++) {
1182 unsigned int dlen = COMP_BUF_SIZE;
1184 memset(result, 0, sizeof (result));
1186 ilen = dtemplate[i].inlen;
1187 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1188 ilen, result, &dlen);
1190 printk(KERN_ERR "alg: comp: decompression failed "
1191 "on test %d for %s: ret=%d\n", i + 1, algo,
1196 if (dlen != dtemplate[i].outlen) {
1197 printk(KERN_ERR "alg: comp: Decompression test %d "
1198 "failed for %s: output len = %d\n", i + 1, algo,
1204 if (memcmp(result, dtemplate[i].output, dlen)) {
1205 printk(KERN_ERR "alg: comp: Decompression test %d "
1206 "failed for %s\n", i + 1, algo);
1207 hexdump(result, dlen);
/*
 * Run partial (streaming) compressor tests. Each vector is fed through
 * setup/init, then update is called twice — first with only half the
 * input and half the output space, then with the remainder — before
 * final() flushes the stream. Output length, reported produced count,
 * and output bytes are all checked against the vector. Returns 0 on
 * success.
 *
 * Fix: four result-check messages below previously printed the prefix
 * "alg: comp:"; corrected to "alg: pcomp:" to match every other message
 * emitted by this function.
 */
1219 static int test_pcomp(struct crypto_pcomp *tfm,
1220 struct pcomp_testvec *ctemplate,
1221 struct pcomp_testvec *dtemplate, int ctcount,
1224 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1226 char result[COMP_BUF_SIZE];
/* Compression vectors. */
1229 for (i = 0; i < ctcount; i++) {
1230 struct comp_request req;
1231 unsigned int produced = 0;
1233 res = crypto_compress_setup(tfm, ctemplate[i].params,
1234 ctemplate[i].paramsize);
1236 pr_err("alg: pcomp: compression setup failed on test "
1237 "%d for %s: error=%d\n", i + 1, algo, res);
1241 res = crypto_compress_init(tfm);
1243 pr_err("alg: pcomp: compression init failed on test "
1244 "%d for %s: error=%d\n", i + 1, algo, res);
1248 memset(result, 0, sizeof(result));
/* First update: only half the input / half the output space. */
1250 req.next_in = ctemplate[i].input;
1251 req.avail_in = ctemplate[i].inlen / 2;
1252 req.next_out = result;
1253 req.avail_out = ctemplate[i].outlen / 2;
1255 res = crypto_compress_update(tfm, &req);
1256 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1257 pr_err("alg: pcomp: compression update failed on test "
1258 "%d for %s: error=%d\n", i + 1, algo, res);
1264 /* Add remaining input data */
1265 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1267 res = crypto_compress_update(tfm, &req);
1268 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1269 pr_err("alg: pcomp: compression update failed on test "
1270 "%d for %s: error=%d\n", i + 1, algo, res);
1276 /* Provide remaining output space */
1277 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1279 res = crypto_compress_final(tfm, &req);
1281 pr_err("alg: pcomp: compression final failed on test "
1282 "%d for %s: error=%d\n", i + 1, algo, res);
1287 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1288 pr_err("alg: pcomp: Compression test %d failed for %s: "
1289 "output len = %d (expected %d)\n", i + 1, algo,
1290 COMP_BUF_SIZE - req.avail_out,
1291 ctemplate[i].outlen);
1295 if (produced != ctemplate[i].outlen) {
1296 pr_err("alg: pcomp: Compression test %d failed for %s: "
1297 "returned len = %u (expected %d)\n", i + 1,
1298 algo, produced, ctemplate[i].outlen);
1302 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1303 pr_err("alg: pcomp: Compression test %d failed for "
1304 "%s\n", i + 1, algo);
1305 hexdump(result, ctemplate[i].outlen);
/* Decompression vectors, same two-stage feeding pattern. */
1310 for (i = 0; i < dtcount; i++) {
1311 struct comp_request req;
1312 unsigned int produced = 0;
1314 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1315 dtemplate[i].paramsize);
1317 pr_err("alg: pcomp: decompression setup failed on "
1318 "test %d for %s: error=%d\n", i + 1, algo, res);
1322 res = crypto_decompress_init(tfm);
1324 pr_err("alg: pcomp: decompression init failed on test "
1325 "%d for %s: error=%d\n", i + 1, algo, res);
1329 memset(result, 0, sizeof(result));
1331 req.next_in = dtemplate[i].input;
1332 req.avail_in = dtemplate[i].inlen / 2;
1333 req.next_out = result;
1334 req.avail_out = dtemplate[i].outlen / 2;
1336 res = crypto_decompress_update(tfm, &req);
1337 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1338 pr_err("alg: pcomp: decompression update failed on "
1339 "test %d for %s: error=%d\n", i + 1, algo, res);
1345 /* Add remaining input data */
1346 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1348 res = crypto_decompress_update(tfm, &req);
1349 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1350 pr_err("alg: pcomp: decompression update failed on "
1351 "test %d for %s: error=%d\n", i + 1, algo, res);
1357 /* Provide remaining output space */
1358 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1360 res = crypto_decompress_final(tfm, &req);
1361 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1362 pr_err("alg: pcomp: decompression final failed on "
1363 "test %d for %s: error=%d\n", i + 1, algo, res);
1369 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1370 pr_err("alg: pcomp: Decompression test %d failed for "
1371 "%s: output len = %d (expected %d)\n", i + 1,
1372 algo, COMP_BUF_SIZE - req.avail_out,
1373 dtemplate[i].outlen);
1377 if (produced != dtemplate[i].outlen) {
1378 pr_err("alg: pcomp: Decompression test %d failed for "
1379 "%s: returned len = %u (expected %d)\n", i + 1,
1380 algo, produced, dtemplate[i].outlen);
1384 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1385 pr_err("alg: pcomp: Decompression test %d failed for "
1386 "%s\n", i + 1, algo);
1387 hexdump(result, dtemplate[i].outlen);
/*
 * Run deterministic CPRNG test vectors: seed the rng with the
 * concatenation of .v, .key and .dt, then draw .rlen bytes .loops times
 * and compare against the expected .result. crypto_rng_get_bytes()
 * returns the number of bytes obtained, hence the err != rlen check.
 * Returns 0 on success.
 */
1396 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1397 unsigned int tcount)
1399 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1400 int err = 0, i, j, seedsize;
1404 seedsize = crypto_rng_seedsize(tfm);
1406 seed = kmalloc(seedsize, GFP_KERNEL);
1408 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1413 for (i = 0; i < tcount; i++) {
1414 memset(result, 0, 32);
/* Seed layout: V || key || DT, packed back to back. */
1416 memcpy(seed, template[i].v, template[i].vlen);
1417 memcpy(seed + template[i].vlen, template[i].key,
1419 memcpy(seed + template[i].vlen + template[i].klen,
1420 template[i].dt, template[i].dtlen);
1422 err = crypto_rng_reset(tfm, seed, seedsize);
1424 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1429 for (j = 0; j < template[i].loops; j++) {
1430 err = crypto_rng_get_bytes(tfm, result,
1432 if (err != template[i].rlen) {
1433 printk(KERN_ERR "alg: cprng: Failed to obtain "
1434 "the correct amount of random data for "
1435 "%s (requested %d, got %d)\n", algo,
1436 template[i].rlen, err);
1441 err = memcmp(result, template[i].result,
1444 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1446 hexdump(result, template[i].rlen);
/*
 * alg_test_desc callback for AEADs: allocate the transform by @driver
 * name, run the encrypt suite then (only if it passed) the decrypt
 * suite, and free the transform. Returns the first test failure.
 */
1457 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1460 struct crypto_aead *tfm;
1463 tfm = crypto_alloc_aead(driver, type, mask);
1465 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1466 "%ld\n", driver, PTR_ERR(tfm));
1467 return PTR_ERR(tfm);
1470 if (desc->suite.aead.enc.vecs) {
1471 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1472 desc->suite.aead.enc.count);
1477 if (!err && desc->suite.aead.dec.vecs)
1478 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1479 desc->suite.aead.dec.count);
1482 crypto_free_aead(tfm);
/*
 * alg_test_desc callback for single-block ciphers: allocate the
 * transform, run the encrypt then decrypt vector suites via
 * test_cipher(), and free the transform. Returns first failure.
 */
1486 static int alg_test_cipher(const struct alg_test_desc *desc,
1487 const char *driver, u32 type, u32 mask)
1489 struct crypto_cipher *tfm;
1492 tfm = crypto_alloc_cipher(driver, type, mask);
1494 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1495 "%s: %ld\n", driver, PTR_ERR(tfm));
1496 return PTR_ERR(tfm);
1499 if (desc->suite.cipher.enc.vecs) {
1500 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1501 desc->suite.cipher.enc.count);
1506 if (desc->suite.cipher.dec.vecs)
1507 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1508 desc->suite.cipher.dec.count);
1511 crypto_free_cipher(tfm);
/*
 * alg_test_desc callback for skciphers: allocate an ablkcipher
 * transform, run the encrypt then decrypt vector suites via
 * test_skcipher(), and free the transform. Returns first failure.
 */
1515 static int alg_test_skcipher(const struct alg_test_desc *desc,
1516 const char *driver, u32 type, u32 mask)
1518 struct crypto_ablkcipher *tfm;
1521 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1523 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1524 "%s: %ld\n", driver, PTR_ERR(tfm));
1525 return PTR_ERR(tfm);
1528 if (desc->suite.cipher.enc.vecs) {
1529 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1530 desc->suite.cipher.enc.count);
1535 if (desc->suite.cipher.dec.vecs)
1536 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1537 desc->suite.cipher.dec.count);
1540 crypto_free_ablkcipher(tfm);
/*
 * alg_test_desc callback for synchronous compressors: allocate the
 * transform, hand both suites to test_comp(), free the transform.
 */
1544 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1547 struct crypto_comp *tfm;
1550 tfm = crypto_alloc_comp(driver, type, mask);
1552 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1553 "%ld\n", driver, PTR_ERR(tfm));
1554 return PTR_ERR(tfm);
1557 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1558 desc->suite.comp.decomp.vecs,
1559 desc->suite.comp.comp.count,
1560 desc->suite.comp.decomp.count);
1562 crypto_free_comp(tfm);
/*
 * alg_test_desc callback for partial (streaming) compressors: allocate
 * the transform, hand both suites to test_pcomp(), free the transform.
 */
1566 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1569 struct crypto_pcomp *tfm;
1572 tfm = crypto_alloc_pcomp(driver, type, mask);
1574 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1575 driver, PTR_ERR(tfm));
1576 return PTR_ERR(tfm);
1579 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1580 desc->suite.pcomp.decomp.vecs,
1581 desc->suite.pcomp.comp.count,
1582 desc->suite.pcomp.decomp.count);
1584 crypto_free_pcomp(tfm);
/*
 * alg_test_desc callback for hashes: allocate an ahash transform and
 * run the vector suite twice — once via the one-shot digest path
 * (use_digest = true) and once via init/update/final (false) — so both
 * code paths of the driver are covered. Frees the transform when done.
 */
1588 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1591 struct crypto_ahash *tfm;
1594 tfm = crypto_alloc_ahash(driver, type, mask);
1596 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1597 "%ld\n", driver, PTR_ERR(tfm));
1598 return PTR_ERR(tfm);
1601 err = test_hash(tfm, desc->suite.hash.vecs,
1602 desc->suite.hash.count, true);
1604 err = test_hash(tfm, desc->suite.hash.vecs,
1605 desc->suite.hash.count, false);
1607 crypto_free_ahash(tfm);
/*
 * crc32c gets the standard hash tests plus an extra shash check: load
 * a known 32-bit state (420553207) directly into the descriptor
 * context, run final(), and verify the result is the bitwise complement
 * of that state — exercising the driver's partial-state import behavior.
 */
1611 static int alg_test_crc32c(const struct alg_test_desc *desc,
1612 const char *driver, u32 type, u32 mask)
1614 struct crypto_shash *tfm;
1618 err = alg_test_hash(desc, driver, type, mask);
1622 tfm = crypto_alloc_shash(driver, type, mask);
1624 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1625 "%ld\n", driver, PTR_ERR(tfm));
/* On-stack shash descriptor with trailing VLA context storage. */
1632 struct shash_desc shash;
1633 char ctx[crypto_shash_descsize(tfm)];
1636 sdesc.shash.tfm = tfm;
1637 sdesc.shash.flags = 0;
1639 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1640 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1642 printk(KERN_ERR "alg: crc32c: Operation failed for "
1643 "%s: %d\n", driver, err);
1647 if (val != ~420553207) {
1648 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1649 "%d\n", driver, val);
1654 crypto_free_shash(tfm);
/*
 * alg_test_desc callback for CPRNGs: allocate the rng by @driver name,
 * run the vector suite via test_cprng(), and free the rng.
 */
1660 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1663 struct crypto_rng *rng;
1666 rng = crypto_alloc_rng(driver, type, mask);
1668 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1669 "%ld\n", driver, PTR_ERR(rng));
1670 return PTR_ERR(rng);
1673 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1675 crypto_free_rng(rng);
/*
 * No-op test callback for internal helper algorithms (the "__driver-*"
 * table entries below) that are never used directly and need no vectors.
 */
1680 static int alg_test_null(const struct alg_test_desc *desc,
1681 const char *driver, u32 type, u32 mask)
/*
 * Master table mapping algorithm names to their test method and test
 * vectors. alg_find_test() binary-searches this array by .alg, so the
 * entries MUST stay sorted by algorithm name (enforced at runtime by
 * alg_test_descs_check_order()).
 *
 * NOTE(review): entry braces, .fips_allowed flags and some .alg lines
 * are elided in this view; only the visible fields are annotated.
 */
1686 /* Please keep this list sorted by algorithm name. */
1687 static const struct alg_test_desc alg_test_descs[] = {
	/*
	 * "__"-prefixed entries are internal per-CPU helper instances;
	 * they are exercised via their public wrappers, so they get the
	 * no-op test.
	 */
1689 		.alg = "__cbc-cast5-avx",
1690 		.test = alg_test_null,
1692 		.alg = "__cbc-cast6-avx",
1693 		.test = alg_test_null,
1695 		.alg = "__cbc-serpent-avx",
1696 		.test = alg_test_null,
1698 		.alg = "__cbc-serpent-avx2",
1699 		.test = alg_test_null,
1701 		.alg = "__cbc-serpent-sse2",
1702 		.test = alg_test_null,
1704 		.alg = "__cbc-twofish-avx",
1705 		.test = alg_test_null,
1707 		.alg = "__driver-cbc-aes-aesni",
1708 		.test = alg_test_null,
1711 		.alg = "__driver-cbc-camellia-aesni",
1712 		.test = alg_test_null,
1714 		.alg = "__driver-cbc-camellia-aesni-avx2",
1715 		.test = alg_test_null,
1717 		.alg = "__driver-cbc-cast5-avx",
1718 		.test = alg_test_null,
1720 		.alg = "__driver-cbc-cast6-avx",
1721 		.test = alg_test_null,
1723 		.alg = "__driver-cbc-serpent-avx",
1724 		.test = alg_test_null,
1726 		.alg = "__driver-cbc-serpent-avx2",
1727 		.test = alg_test_null,
1729 		.alg = "__driver-cbc-serpent-sse2",
1730 		.test = alg_test_null,
1732 		.alg = "__driver-cbc-twofish-avx",
1733 		.test = alg_test_null,
1735 		.alg = "__driver-ecb-aes-aesni",
1736 		.test = alg_test_null,
1739 		.alg = "__driver-ecb-camellia-aesni",
1740 		.test = alg_test_null,
1742 		.alg = "__driver-ecb-camellia-aesni-avx2",
1743 		.test = alg_test_null,
1745 		.alg = "__driver-ecb-cast5-avx",
1746 		.test = alg_test_null,
1748 		.alg = "__driver-ecb-cast6-avx",
1749 		.test = alg_test_null,
1751 		.alg = "__driver-ecb-serpent-avx",
1752 		.test = alg_test_null,
1754 		.alg = "__driver-ecb-serpent-avx2",
1755 		.test = alg_test_null,
1757 		.alg = "__driver-ecb-serpent-sse2",
1758 		.test = alg_test_null,
1760 		.alg = "__driver-ecb-twofish-avx",
1761 		.test = alg_test_null,
1763 		.alg = "__ghash-pclmulqdqni",
1764 		.test = alg_test_null,
	/* Deterministic RNG (ANSI X9.31 CPRNG) known-answer test. */
1767 		.alg = "ansi_cprng",
1768 		.test = alg_test_cprng,
1772 				.vecs = ansi_cprng_aes_tv_template,
1773 				.count = ANSI_CPRNG_AES_TEST_VECTORS
	/* AEAD composites: HMAC-authenticated CBC-AES. */
1777 		.alg = "authenc(hmac(sha1),cbc(aes))",
1778 		.test = alg_test_aead,
1783 					.vecs = hmac_sha1_aes_cbc_enc_tv_template,
1784 					.count = HMAC_SHA1_AES_CBC_ENC_TEST_VECTORS
1789 		.alg = "authenc(hmac(sha256),cbc(aes))",
1790 		.test = alg_test_aead,
1795 					.vecs = hmac_sha256_aes_cbc_enc_tv_template,
1796 					.count = HMAC_SHA256_AES_CBC_ENC_TEST_VECTORS
1801 		.alg = "authenc(hmac(sha512),cbc(aes))",
1802 		.test = alg_test_aead,
1807 					.vecs = hmac_sha512_aes_cbc_enc_tv_template,
1808 					.count = HMAC_SHA512_AES_CBC_ENC_TEST_VECTORS
	/* CBC-mode block ciphers: paired enc/dec vector sets. */
1814 		.test = alg_test_skcipher,
1819 					.vecs = aes_cbc_enc_tv_template,
1820 					.count = AES_CBC_ENC_TEST_VECTORS
1823 					.vecs = aes_cbc_dec_tv_template,
1824 					.count = AES_CBC_DEC_TEST_VECTORS
1829 		.alg = "cbc(anubis)",
1830 		.test = alg_test_skcipher,
1834 					.vecs = anubis_cbc_enc_tv_template,
1835 					.count = ANUBIS_CBC_ENC_TEST_VECTORS
1838 					.vecs = anubis_cbc_dec_tv_template,
1839 					.count = ANUBIS_CBC_DEC_TEST_VECTORS
1844 		.alg = "cbc(blowfish)",
1845 		.test = alg_test_skcipher,
1849 					.vecs = bf_cbc_enc_tv_template,
1850 					.count = BF_CBC_ENC_TEST_VECTORS
1853 					.vecs = bf_cbc_dec_tv_template,
1854 					.count = BF_CBC_DEC_TEST_VECTORS
1859 		.alg = "cbc(camellia)",
1860 		.test = alg_test_skcipher,
1864 					.vecs = camellia_cbc_enc_tv_template,
1865 					.count = CAMELLIA_CBC_ENC_TEST_VECTORS
1868 					.vecs = camellia_cbc_dec_tv_template,
1869 					.count = CAMELLIA_CBC_DEC_TEST_VECTORS
1874 		.alg = "cbc(cast5)",
1875 		.test = alg_test_skcipher,
1879 					.vecs = cast5_cbc_enc_tv_template,
1880 					.count = CAST5_CBC_ENC_TEST_VECTORS
1883 					.vecs = cast5_cbc_dec_tv_template,
1884 					.count = CAST5_CBC_DEC_TEST_VECTORS
1889 		.alg = "cbc(cast6)",
1890 		.test = alg_test_skcipher,
1894 					.vecs = cast6_cbc_enc_tv_template,
1895 					.count = CAST6_CBC_ENC_TEST_VECTORS
1898 					.vecs = cast6_cbc_dec_tv_template,
1899 					.count = CAST6_CBC_DEC_TEST_VECTORS
1905 		.test = alg_test_skcipher,
1909 					.vecs = des_cbc_enc_tv_template,
1910 					.count = DES_CBC_ENC_TEST_VECTORS
1913 					.vecs = des_cbc_dec_tv_template,
1914 					.count = DES_CBC_DEC_TEST_VECTORS
1919 		.alg = "cbc(des3_ede)",
1920 		.test = alg_test_skcipher,
1925 					.vecs = des3_ede_cbc_enc_tv_template,
1926 					.count = DES3_EDE_CBC_ENC_TEST_VECTORS
1929 					.vecs = des3_ede_cbc_dec_tv_template,
1930 					.count = DES3_EDE_CBC_DEC_TEST_VECTORS
1935 		.alg = "cbc(serpent)",
1936 		.test = alg_test_skcipher,
1940 					.vecs = serpent_cbc_enc_tv_template,
1941 					.count = SERPENT_CBC_ENC_TEST_VECTORS
1944 					.vecs = serpent_cbc_dec_tv_template,
1945 					.count = SERPENT_CBC_DEC_TEST_VECTORS
1950 		.alg = "cbc(twofish)",
1951 		.test = alg_test_skcipher,
1955 					.vecs = tf_cbc_enc_tv_template,
1956 					.count = TF_CBC_ENC_TEST_VECTORS
1959 					.vecs = tf_cbc_dec_tv_template,
1960 					.count = TF_CBC_DEC_TEST_VECTORS
	/* CCM / CMAC / CRC family. */
1966 		.test = alg_test_aead,
1971 					.vecs = aes_ccm_enc_tv_template,
1972 					.count = AES_CCM_ENC_TEST_VECTORS
1975 					.vecs = aes_ccm_dec_tv_template,
1976 					.count = AES_CCM_DEC_TEST_VECTORS
1982 		.test = alg_test_hash,
1985 				.vecs = aes_cmac128_tv_template,
1986 				.count = CMAC_AES_TEST_VECTORS
1990 		.alg = "cmac(des3_ede)",
1991 		.test = alg_test_hash,
1994 				.vecs = des3_ede_cmac64_tv_template,
1995 				.count = CMAC_DES3_EDE_TEST_VECTORS
1999 		.alg = "compress_null",
2000 		.test = alg_test_null,
	/* crc32c gets the extended context-seeding check above. */
2003 		.test = alg_test_crc32c,
2007 				.vecs = crc32c_tv_template,
2008 				.count = CRC32C_TEST_VECTORS
2013 		.test = alg_test_hash,
2017 				.vecs = crct10dif_tv_template,
2018 				.count = CRCT10DIF_TEST_VECTORS
	/* cryptd()-wrapped internal instances: no direct test. */
2022 		.alg = "cryptd(__driver-cbc-aes-aesni)",
2023 		.test = alg_test_null,
2026 		.alg = "cryptd(__driver-cbc-camellia-aesni)",
2027 		.test = alg_test_null,
2029 		.alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2030 		.test = alg_test_null,
2032 		.alg = "cryptd(__driver-cbc-serpent-avx2)",
2033 		.test = alg_test_null,
2035 		.alg = "cryptd(__driver-ecb-aes-aesni)",
2036 		.test = alg_test_null,
2039 		.alg = "cryptd(__driver-ecb-camellia-aesni)",
2040 		.test = alg_test_null,
2042 		.alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2043 		.test = alg_test_null,
2045 		.alg = "cryptd(__driver-ecb-cast5-avx)",
2046 		.test = alg_test_null,
2048 		.alg = "cryptd(__driver-ecb-cast6-avx)",
2049 		.test = alg_test_null,
2051 		.alg = "cryptd(__driver-ecb-serpent-avx)",
2052 		.test = alg_test_null,
2054 		.alg = "cryptd(__driver-ecb-serpent-avx2)",
2055 		.test = alg_test_null,
2057 		.alg = "cryptd(__driver-ecb-serpent-sse2)",
2058 		.test = alg_test_null,
2060 		.alg = "cryptd(__driver-ecb-twofish-avx)",
2061 		.test = alg_test_null,
2063 		.alg = "cryptd(__driver-gcm-aes-aesni)",
2064 		.test = alg_test_null,
2067 		.alg = "cryptd(__ghash-pclmulqdqni)",
2068 		.test = alg_test_null,
	/* CTR-mode block ciphers. */
2072 		.test = alg_test_skcipher,
2077 					.vecs = aes_ctr_enc_tv_template,
2078 					.count = AES_CTR_ENC_TEST_VECTORS
2081 					.vecs = aes_ctr_dec_tv_template,
2082 					.count = AES_CTR_DEC_TEST_VECTORS
2087 		.alg = "ctr(blowfish)",
2088 		.test = alg_test_skcipher,
2092 					.vecs = bf_ctr_enc_tv_template,
2093 					.count = BF_CTR_ENC_TEST_VECTORS
2096 					.vecs = bf_ctr_dec_tv_template,
2097 					.count = BF_CTR_DEC_TEST_VECTORS
2102 		.alg = "ctr(camellia)",
2103 		.test = alg_test_skcipher,
2107 					.vecs = camellia_ctr_enc_tv_template,
2108 					.count = CAMELLIA_CTR_ENC_TEST_VECTORS
2111 					.vecs = camellia_ctr_dec_tv_template,
2112 					.count = CAMELLIA_CTR_DEC_TEST_VECTORS
2117 		.alg = "ctr(cast5)",
2118 		.test = alg_test_skcipher,
2122 					.vecs = cast5_ctr_enc_tv_template,
2123 					.count = CAST5_CTR_ENC_TEST_VECTORS
2126 					.vecs = cast5_ctr_dec_tv_template,
2127 					.count = CAST5_CTR_DEC_TEST_VECTORS
2132 		.alg = "ctr(cast6)",
2133 		.test = alg_test_skcipher,
2137 					.vecs = cast6_ctr_enc_tv_template,
2138 					.count = CAST6_CTR_ENC_TEST_VECTORS
2141 					.vecs = cast6_ctr_dec_tv_template,
2142 					.count = CAST6_CTR_DEC_TEST_VECTORS
2148 		.test = alg_test_skcipher,
2152 					.vecs = des_ctr_enc_tv_template,
2153 					.count = DES_CTR_ENC_TEST_VECTORS
2156 					.vecs = des_ctr_dec_tv_template,
2157 					.count = DES_CTR_DEC_TEST_VECTORS
2162 		.alg = "ctr(des3_ede)",
2163 		.test = alg_test_skcipher,
2167 					.vecs = des3_ede_ctr_enc_tv_template,
2168 					.count = DES3_EDE_CTR_ENC_TEST_VECTORS
2171 					.vecs = des3_ede_ctr_dec_tv_template,
2172 					.count = DES3_EDE_CTR_DEC_TEST_VECTORS
2177 		.alg = "ctr(serpent)",
2178 		.test = alg_test_skcipher,
2182 					.vecs = serpent_ctr_enc_tv_template,
2183 					.count = SERPENT_CTR_ENC_TEST_VECTORS
2186 					.vecs = serpent_ctr_dec_tv_template,
2187 					.count = SERPENT_CTR_DEC_TEST_VECTORS
2192 		.alg = "ctr(twofish)",
2193 		.test = alg_test_skcipher,
2197 					.vecs = tf_ctr_enc_tv_template,
2198 					.count = TF_CTR_ENC_TEST_VECTORS
2201 					.vecs = tf_ctr_dec_tv_template,
2202 					.count = TF_CTR_DEC_TEST_VECTORS
2207 		.alg = "cts(cbc(aes))",
2208 		.test = alg_test_skcipher,
2212 					.vecs = cts_mode_enc_tv_template,
2213 					.count = CTS_MODE_ENC_TEST_VECTORS
2216 					.vecs = cts_mode_dec_tv_template,
2217 					.count = CTS_MODE_DEC_TEST_VECTORS
	/* Compression algorithms: paired comp/decomp vector sets. */
2223 		.test = alg_test_comp,
2228 					.vecs = deflate_comp_tv_template,
2229 					.count = DEFLATE_COMP_TEST_VECTORS
2232 					.vecs = deflate_decomp_tv_template,
2233 					.count = DEFLATE_DECOMP_TEST_VECTORS
2238 		.alg = "digest_null",
2239 		.test = alg_test_null,
2241 		.alg = "ecb(__aes-aesni)",
2242 		.test = alg_test_null,
	/* ECB-mode block ciphers (single-block known-answer tests). */
2246 		.test = alg_test_skcipher,
2251 					.vecs = aes_enc_tv_template,
2252 					.count = AES_ENC_TEST_VECTORS
2255 					.vecs = aes_dec_tv_template,
2256 					.count = AES_DEC_TEST_VECTORS
2261 		.alg = "ecb(anubis)",
2262 		.test = alg_test_skcipher,
2266 					.vecs = anubis_enc_tv_template,
2267 					.count = ANUBIS_ENC_TEST_VECTORS
2270 					.vecs = anubis_dec_tv_template,
2271 					.count = ANUBIS_DEC_TEST_VECTORS
2277 		.test = alg_test_skcipher,
2281 					.vecs = arc4_enc_tv_template,
2282 					.count = ARC4_ENC_TEST_VECTORS
2285 					.vecs = arc4_dec_tv_template,
2286 					.count = ARC4_DEC_TEST_VECTORS
2291 		.alg = "ecb(blowfish)",
2292 		.test = alg_test_skcipher,
2296 					.vecs = bf_enc_tv_template,
2297 					.count = BF_ENC_TEST_VECTORS
2300 					.vecs = bf_dec_tv_template,
2301 					.count = BF_DEC_TEST_VECTORS
2306 		.alg = "ecb(camellia)",
2307 		.test = alg_test_skcipher,
2311 					.vecs = camellia_enc_tv_template,
2312 					.count = CAMELLIA_ENC_TEST_VECTORS
2315 					.vecs = camellia_dec_tv_template,
2316 					.count = CAMELLIA_DEC_TEST_VECTORS
2321 		.alg = "ecb(cast5)",
2322 		.test = alg_test_skcipher,
2326 					.vecs = cast5_enc_tv_template,
2327 					.count = CAST5_ENC_TEST_VECTORS
2330 					.vecs = cast5_dec_tv_template,
2331 					.count = CAST5_DEC_TEST_VECTORS
2336 		.alg = "ecb(cast6)",
2337 		.test = alg_test_skcipher,
2341 					.vecs = cast6_enc_tv_template,
2342 					.count = CAST6_ENC_TEST_VECTORS
2345 					.vecs = cast6_dec_tv_template,
2346 					.count = CAST6_DEC_TEST_VECTORS
2351 		.alg = "ecb(cipher_null)",
2352 		.test = alg_test_null,
2355 		.test = alg_test_skcipher,
2360 					.vecs = des_enc_tv_template,
2361 					.count = DES_ENC_TEST_VECTORS
2364 					.vecs = des_dec_tv_template,
2365 					.count = DES_DEC_TEST_VECTORS
2370 		.alg = "ecb(des3_ede)",
2371 		.test = alg_test_skcipher,
2376 					.vecs = des3_ede_enc_tv_template,
2377 					.count = DES3_EDE_ENC_TEST_VECTORS
2380 					.vecs = des3_ede_dec_tv_template,
2381 					.count = DES3_EDE_DEC_TEST_VECTORS
	/* fcrypt reuses the pcbc vector tables; counts elided here. */
2386 		.alg = "ecb(fcrypt)",
2387 		.test = alg_test_skcipher,
2391 					.vecs = fcrypt_pcbc_enc_tv_template,
2395 					.vecs = fcrypt_pcbc_dec_tv_template,
2401 		.alg = "ecb(khazad)",
2402 		.test = alg_test_skcipher,
2406 					.vecs = khazad_enc_tv_template,
2407 					.count = KHAZAD_ENC_TEST_VECTORS
2410 					.vecs = khazad_dec_tv_template,
2411 					.count = KHAZAD_DEC_TEST_VECTORS
2417 		.test = alg_test_skcipher,
2421 					.vecs = seed_enc_tv_template,
2422 					.count = SEED_ENC_TEST_VECTORS
2425 					.vecs = seed_dec_tv_template,
2426 					.count = SEED_DEC_TEST_VECTORS
2431 		.alg = "ecb(serpent)",
2432 		.test = alg_test_skcipher,
2436 					.vecs = serpent_enc_tv_template,
2437 					.count = SERPENT_ENC_TEST_VECTORS
2440 					.vecs = serpent_dec_tv_template,
2441 					.count = SERPENT_DEC_TEST_VECTORS
2447 		.test = alg_test_skcipher,
2451 					.vecs = tea_enc_tv_template,
2452 					.count = TEA_ENC_TEST_VECTORS
2455 					.vecs = tea_dec_tv_template,
2456 					.count = TEA_DEC_TEST_VECTORS
2461 		.alg = "ecb(tnepres)",
2462 		.test = alg_test_skcipher,
2466 					.vecs = tnepres_enc_tv_template,
2467 					.count = TNEPRES_ENC_TEST_VECTORS
2470 					.vecs = tnepres_dec_tv_template,
2471 					.count = TNEPRES_DEC_TEST_VECTORS
2476 		.alg = "ecb(twofish)",
2477 		.test = alg_test_skcipher,
2481 					.vecs = tf_enc_tv_template,
2482 					.count = TF_ENC_TEST_VECTORS
2485 					.vecs = tf_dec_tv_template,
2486 					.count = TF_DEC_TEST_VECTORS
2492 		.test = alg_test_skcipher,
2496 					.vecs = xeta_enc_tv_template,
2497 					.count = XETA_ENC_TEST_VECTORS
2500 					.vecs = xeta_dec_tv_template,
2501 					.count = XETA_DEC_TEST_VECTORS
2507 		.test = alg_test_skcipher,
2511 					.vecs = xtea_enc_tv_template,
2512 					.count = XTEA_ENC_TEST_VECTORS
2515 					.vecs = xtea_dec_tv_template,
2516 					.count = XTEA_DEC_TEST_VECTORS
	/* GCM AEAD and GHASH. */
2522 		.test = alg_test_aead,
2527 					.vecs = aes_gcm_enc_tv_template,
2528 					.count = AES_GCM_ENC_TEST_VECTORS
2531 					.vecs = aes_gcm_dec_tv_template,
2532 					.count = AES_GCM_DEC_TEST_VECTORS
2538 		.test = alg_test_hash,
2542 				.vecs = ghash_tv_template,
2543 				.count = GHASH_TEST_VECTORS
	/* HMAC family (plus the Blackfin hmac(crc32) hardware variant). */
2547 		.alg = "hmac(crc32)",
2548 		.test = alg_test_hash,
2551 				.vecs = bfin_crc_tv_template,
2552 				.count = BFIN_CRC_TEST_VECTORS
2557 		.test = alg_test_hash,
2560 				.vecs = hmac_md5_tv_template,
2561 				.count = HMAC_MD5_TEST_VECTORS
2565 		.alg = "hmac(rmd128)",
2566 		.test = alg_test_hash,
2569 				.vecs = hmac_rmd128_tv_template,
2570 				.count = HMAC_RMD128_TEST_VECTORS
2574 		.alg = "hmac(rmd160)",
2575 		.test = alg_test_hash,
2578 				.vecs = hmac_rmd160_tv_template,
2579 				.count = HMAC_RMD160_TEST_VECTORS
2583 		.alg = "hmac(sha1)",
2584 		.test = alg_test_hash,
2588 				.vecs = hmac_sha1_tv_template,
2589 				.count = HMAC_SHA1_TEST_VECTORS
2593 		.alg = "hmac(sha224)",
2594 		.test = alg_test_hash,
2598 				.vecs = hmac_sha224_tv_template,
2599 				.count = HMAC_SHA224_TEST_VECTORS
2603 		.alg = "hmac(sha256)",
2604 		.test = alg_test_hash,
2608 				.vecs = hmac_sha256_tv_template,
2609 				.count = HMAC_SHA256_TEST_VECTORS
2613 		.alg = "hmac(sha384)",
2614 		.test = alg_test_hash,
2618 				.vecs = hmac_sha384_tv_template,
2619 				.count = HMAC_SHA384_TEST_VECTORS
2623 		.alg = "hmac(sha512)",
2624 		.test = alg_test_hash,
2628 				.vecs = hmac_sha512_tv_template,
2629 				.count = HMAC_SHA512_TEST_VECTORS
	/* LRW tweakable-cipher mode. */
2634 		.test = alg_test_skcipher,
2638 					.vecs = aes_lrw_enc_tv_template,
2639 					.count = AES_LRW_ENC_TEST_VECTORS
2642 					.vecs = aes_lrw_dec_tv_template,
2643 					.count = AES_LRW_DEC_TEST_VECTORS
2648 		.alg = "lrw(camellia)",
2649 		.test = alg_test_skcipher,
2653 					.vecs = camellia_lrw_enc_tv_template,
2654 					.count = CAMELLIA_LRW_ENC_TEST_VECTORS
2657 					.vecs = camellia_lrw_dec_tv_template,
2658 					.count = CAMELLIA_LRW_DEC_TEST_VECTORS
2663 		.alg = "lrw(cast6)",
2664 		.test = alg_test_skcipher,
2668 					.vecs = cast6_lrw_enc_tv_template,
2669 					.count = CAST6_LRW_ENC_TEST_VECTORS
2672 					.vecs = cast6_lrw_dec_tv_template,
2673 					.count = CAST6_LRW_DEC_TEST_VECTORS
2678 		.alg = "lrw(serpent)",
2679 		.test = alg_test_skcipher,
2683 					.vecs = serpent_lrw_enc_tv_template,
2684 					.count = SERPENT_LRW_ENC_TEST_VECTORS
2687 					.vecs = serpent_lrw_dec_tv_template,
2688 					.count = SERPENT_LRW_DEC_TEST_VECTORS
2693 		.alg = "lrw(twofish)",
2694 		.test = alg_test_skcipher,
2698 					.vecs = tf_lrw_enc_tv_template,
2699 					.count = TF_LRW_ENC_TEST_VECTORS
2702 					.vecs = tf_lrw_dec_tv_template,
2703 					.count = TF_LRW_DEC_TEST_VECTORS
2709 		.test = alg_test_comp,
2714 					.vecs = lzo_comp_tv_template,
2715 					.count = LZO_COMP_TEST_VECTORS
2718 					.vecs = lzo_decomp_tv_template,
2719 					.count = LZO_DECOMP_TEST_VECTORS
	/* Plain digests. */
2725 		.test = alg_test_hash,
2728 				.vecs = md4_tv_template,
2729 				.count = MD4_TEST_VECTORS
2734 		.test = alg_test_hash,
2737 				.vecs = md5_tv_template,
2738 				.count = MD5_TEST_VECTORS
2742 		.alg = "michael_mic",
2743 		.test = alg_test_hash,
2746 				.vecs = michael_mic_tv_template,
2747 				.count = MICHAEL_MIC_TEST_VECTORS
2752 		.test = alg_test_skcipher,
2757 					.vecs = aes_ofb_enc_tv_template,
2758 					.count = AES_OFB_ENC_TEST_VECTORS
2761 					.vecs = aes_ofb_dec_tv_template,
2762 					.count = AES_OFB_DEC_TEST_VECTORS
2767 		.alg = "pcbc(fcrypt)",
2768 		.test = alg_test_skcipher,
2772 					.vecs = fcrypt_pcbc_enc_tv_template,
2773 					.count = FCRYPT_ENC_TEST_VECTORS
2776 					.vecs = fcrypt_pcbc_dec_tv_template,
2777 					.count = FCRYPT_DEC_TEST_VECTORS
	/* IPsec RFC wrappers around CTR/GCM/CCM. */
2782 		.alg = "rfc3686(ctr(aes))",
2783 		.test = alg_test_skcipher,
2788 					.vecs = aes_ctr_rfc3686_enc_tv_template,
2789 					.count = AES_CTR_3686_ENC_TEST_VECTORS
2792 					.vecs = aes_ctr_rfc3686_dec_tv_template,
2793 					.count = AES_CTR_3686_DEC_TEST_VECTORS
2798 		.alg = "rfc4106(gcm(aes))",
2799 		.test = alg_test_aead,
2803 					.vecs = aes_gcm_rfc4106_enc_tv_template,
2804 					.count = AES_GCM_4106_ENC_TEST_VECTORS
2807 					.vecs = aes_gcm_rfc4106_dec_tv_template,
2808 					.count = AES_GCM_4106_DEC_TEST_VECTORS
2813 		.alg = "rfc4309(ccm(aes))",
2814 		.test = alg_test_aead,
2819 					.vecs = aes_ccm_rfc4309_enc_tv_template,
2820 					.count = AES_CCM_4309_ENC_TEST_VECTORS
2823 					.vecs = aes_ccm_rfc4309_dec_tv_template,
2824 					.count = AES_CCM_4309_DEC_TEST_VECTORS
2829 		.alg = "rfc4543(gcm(aes))",
2830 		.test = alg_test_aead,
2834 					.vecs = aes_gcm_rfc4543_enc_tv_template,
2835 					.count = AES_GCM_4543_ENC_TEST_VECTORS
2838 					.vecs = aes_gcm_rfc4543_dec_tv_template,
2839 					.count = AES_GCM_4543_DEC_TEST_VECTORS
	/* RIPEMD digests. */
2845 		.test = alg_test_hash,
2848 				.vecs = rmd128_tv_template,
2849 				.count = RMD128_TEST_VECTORS
2854 		.test = alg_test_hash,
2857 				.vecs = rmd160_tv_template,
2858 				.count = RMD160_TEST_VECTORS
2863 		.test = alg_test_hash,
2866 				.vecs = rmd256_tv_template,
2867 				.count = RMD256_TEST_VECTORS
2872 		.test = alg_test_hash,
2875 				.vecs = rmd320_tv_template,
2876 				.count = RMD320_TEST_VECTORS
	/* Stream cipher: salsa20 has encryption vectors only. */
2881 		.test = alg_test_skcipher,
2885 					.vecs = salsa20_stream_enc_tv_template,
2886 					.count = SALSA20_STREAM_ENC_TEST_VECTORS
	/* SHA family. */
2892 		.test = alg_test_hash,
2896 				.vecs = sha1_tv_template,
2897 				.count = SHA1_TEST_VECTORS
2902 		.test = alg_test_hash,
2906 				.vecs = sha224_tv_template,
2907 				.count = SHA224_TEST_VECTORS
2912 		.test = alg_test_hash,
2916 				.vecs = sha256_tv_template,
2917 				.count = SHA256_TEST_VECTORS
2922 		.test = alg_test_hash,
2926 				.vecs = sha384_tv_template,
2927 				.count = SHA384_TEST_VECTORS
2932 		.test = alg_test_hash,
2936 				.vecs = sha512_tv_template,
2937 				.count = SHA512_TEST_VECTORS
	/* Tiger digests. */
2942 		.test = alg_test_hash,
2945 				.vecs = tgr128_tv_template,
2946 				.count = TGR128_TEST_VECTORS
2951 		.test = alg_test_hash,
2954 				.vecs = tgr160_tv_template,
2955 				.count = TGR160_TEST_VECTORS
2960 		.test = alg_test_hash,
2963 				.vecs = tgr192_tv_template,
2964 				.count = TGR192_TEST_VECTORS
2969 		.test = alg_test_hash,
2972 				.vecs = aes_vmac128_tv_template,
2973 				.count = VMAC_AES_TEST_VECTORS
	/* Whirlpool digests. */
2978 		.test = alg_test_hash,
2981 				.vecs = wp256_tv_template,
2982 				.count = WP256_TEST_VECTORS
2987 		.test = alg_test_hash,
2990 				.vecs = wp384_tv_template,
2991 				.count = WP384_TEST_VECTORS
2996 		.test = alg_test_hash,
2999 				.vecs = wp512_tv_template,
3000 				.count = WP512_TEST_VECTORS
3005 		.test = alg_test_hash,
3008 				.vecs = aes_xcbc128_tv_template,
3009 				.count = XCBC_AES_TEST_VECTORS
	/* XTS tweakable-cipher mode. */
3014 		.test = alg_test_skcipher,
3019 					.vecs = aes_xts_enc_tv_template,
3020 					.count = AES_XTS_ENC_TEST_VECTORS
3023 					.vecs = aes_xts_dec_tv_template,
3024 					.count = AES_XTS_DEC_TEST_VECTORS
3029 		.alg = "xts(camellia)",
3030 		.test = alg_test_skcipher,
3034 					.vecs = camellia_xts_enc_tv_template,
3035 					.count = CAMELLIA_XTS_ENC_TEST_VECTORS
3038 					.vecs = camellia_xts_dec_tv_template,
3039 					.count = CAMELLIA_XTS_DEC_TEST_VECTORS
3044 		.alg = "xts(cast6)",
3045 		.test = alg_test_skcipher,
3049 					.vecs = cast6_xts_enc_tv_template,
3050 					.count = CAST6_XTS_ENC_TEST_VECTORS
3053 					.vecs = cast6_xts_dec_tv_template,
3054 					.count = CAST6_XTS_DEC_TEST_VECTORS
3059 		.alg = "xts(serpent)",
3060 		.test = alg_test_skcipher,
3064 					.vecs = serpent_xts_enc_tv_template,
3065 					.count = SERPENT_XTS_ENC_TEST_VECTORS
3068 					.vecs = serpent_xts_dec_tv_template,
3069 					.count = SERPENT_XTS_DEC_TEST_VECTORS
3074 		.alg = "xts(twofish)",
3075 		.test = alg_test_skcipher,
3079 					.vecs = tf_xts_enc_tv_template,
3080 					.count = TF_XTS_ENC_TEST_VECTORS
3083 					.vecs = tf_xts_dec_tv_template,
3084 					.count = TF_XTS_DEC_TEST_VECTORS
	/* zlib uses the partial-(de)compression pcomp interface. */
3090 		.test = alg_test_pcomp,
3095 					.vecs = zlib_comp_tv_template,
3096 					.count = ZLIB_COMP_TEST_VECTORS
3099 					.vecs = zlib_decomp_tv_template,
3100 					.count = ZLIB_DECOMP_TEST_VECTORS
/* Set once the table ordering has been validated; guards re-checking. */
3107 static bool alg_test_descs_checked;

/*
 * alg_test_descs_check_order - one-time runtime verification that
 * alg_test_descs[] is strictly sorted by name with no duplicates,
 * since alg_find_test() relies on binary search over it.
 * Misordered or duplicate entries trigger WARN_ON plus a pr_warn.
 *
 * NOTE(review): loop braces and the early-return line are elided in
 * this view.
 */
3109 static void alg_test_descs_check_order(void)
3113 	/* only check once */
3114 	if (alg_test_descs_checked)
3117 	alg_test_descs_checked = true;
3119 	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3120 		int diff = strcmp(alg_test_descs[i - 1].alg,
3121 				  alg_test_descs[i].alg);
	/* diff > 0 means the previous name sorts after this one. */
3123 		if (WARN_ON(diff > 0)) {
3124 			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3125 				alg_test_descs[i - 1].alg,
3126 				alg_test_descs[i].alg);
	/* diff == 0 means two entries share one algorithm name. */
3129 		if (WARN_ON(diff == 0)) {
3130 			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3131 				alg_test_descs[i].alg);
/*
 * alg_find_test - binary search alg_test_descs[] for @alg.
 * Presumably returns the matching index or a negative value when not
 * found (the comparison/narrowing steps and return are elided in this
 * view — confirm against full source).
 */
3136 static int alg_find_test(const char *alg)
3139 	int end = ARRAY_SIZE(alg_test_descs);
3141 	while (start < end) {
3142 		int i = (start + end) / 2;
3143 		int diff = strcmp(alg_test_descs[i].alg, alg);
/*
 * alg_test - entry point used by the crypto manager to self-test an
 * algorithm implementation.
 * @driver: implementation (driver) name
 * @alg:    generic algorithm name
 * @type/@mask: crypto type flags used for lookup/allocation
 *
 * Bare ciphers (CRYPTO_ALG_TYPE_CIPHER) are tested through their
 * "ecb(%s)" wrapping. Otherwise both the generic name and the driver
 * name are looked up and any matching test routines are run; results
 * are OR-ed together. In FIPS mode a failure panics the kernel and a
 * pass is logged; non-FIPS behavior on failure is elided in this view.
 *
 * NOTE(review): many interior lines (declarations, branch/return
 * structure) are elided here.
 */
3161 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
	/* Validate table ordering before the first binary search. */
3167 	alg_test_descs_check_order();
3169 	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3170 		char nalg[CRYPTO_MAX_ALG_NAME];
	/* Bare ciphers are exercised via their ecb() template. */
3172 		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3174 			return -ENAMETOOLONG;
3176 		i = alg_find_test(nalg);
	/* In FIPS mode, skip algorithms not on the allowed list. */
3180 		if (fips_enabled && !alg_test_descs[i].fips_allowed)
3183 		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
	/* Non-cipher path: test under both generic and driver names. */
3187 	i = alg_find_test(alg);
3188 	j = alg_find_test(driver);
3192 	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3193 			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
3198 		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3201 		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
	/* FIPS mode: self-test failure is fatal; success is logged. */
3205 	if (fips_enabled && rc)
3206 		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3208 	if (fips_enabled && !rc)
3209 		printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
3215 	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3221 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

/* Exported (GPL-only) so the crypto manager module can run the tests. */
3223 EXPORT_SYMBOL_GPL(alg_test);