2 * Algorithm testing framework and tests.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <linux/err.h>
26 #include <linux/fips.h>
27 #include <linux/module.h>
28 #include <linux/scatterlist.h>
29 #include <linux/slab.h>
30 #include <linux/string.h>
31 #include <crypto/rng.h>
32 #include <crypto/drbg.h>
36 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
39 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
49 * Need slab memory for testing (size in number of pages).
54 * Indexes into the xbuf to simulate cross-page access.
66 * Used by test_cipher()
/*
 * Per-request completion context: async crypto operations signal completion
 * through this (see tcrypt_complete()).
 */
71 struct tcrypt_result {
72 struct completion completion;
/* One test-vector suite per algorithm class; each pairs a vector array
 * with a count (count fields not visible in this chunk). */
76 struct aead_test_suite {
78 struct aead_testvec *vecs;
83 struct cipher_test_suite {
85 struct cipher_testvec *vecs;
90 struct comp_test_suite {
92 struct comp_testvec *vecs;
97 struct pcomp_test_suite {
99 struct pcomp_testvec *vecs;
104 struct hash_test_suite {
105 struct hash_testvec *vecs;
109 struct cprng_test_suite {
110 struct cprng_testvec *vecs;
114 struct drbg_test_suite {
115 struct drbg_testvec *vecs;
/*
 * Descriptor binding an algorithm name to its test function and the
 * union/struct of suites it consumes.
 */
119 struct alg_test_desc {
121 int (*test)(const struct alg_test_desc *desc, const char *driver,
123 int fips_allowed; /* set if alg is allowed in fips mode */
126 struct aead_test_suite aead;
127 struct cipher_test_suite cipher;
128 struct comp_test_suite comp;
129 struct pcomp_test_suite pcomp;
130 struct hash_test_suite hash;
131 struct cprng_test_suite cprng;
132 struct drbg_test_suite drbg;
/* Byte offsets used to scatter test data across xbuf pages so that
 * chunked (scatterlist) tests exercise cross-page access. */
136 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
/* Dump @len bytes of @buf to the kernel log for failed-test diagnostics. */
138 static void hexdump(unsigned char *buf, unsigned int len)
140 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
/*
 * Async completion callback: stash the final error and wake the waiter.
 * -EINPROGRESS is a progress notification, not completion, so it is
 * filtered out before complete() is reached.
 */
145 static void tcrypt_complete(struct crypto_async_request *req, int err)
147 struct tcrypt_result *res = req->data;
149 if (err == -EINPROGRESS)
153 complete(&res->completion);
/*
 * Allocate XBUFSIZE single pages into @buf.  On allocation failure the
 * pages obtained so far are released (free_page below is the unwind path).
 * Returns 0 on success, nonzero on failure.
 */
156 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
160 for (i = 0; i < XBUFSIZE; i++) {
161 buf[i] = (void *)__get_free_page(GFP_KERNEL);
170 free_page((unsigned long)buf[i]);
/* Release all XBUFSIZE pages previously obtained by testmgr_alloc_buf(). */
175 static void testmgr_free_buf(char *buf[XBUFSIZE])
179 for (i = 0; i < XBUFSIZE; i++)
180 free_page((unsigned long)buf[i]);
/*
 * Wait for an async crypto op to finish.  @ret is the submit status:
 * -EINPROGRESS/-EBUSY mean "queued", so block on the completion and
 * re-arm it for the next operation on the same request.
 */
183 static int wait_async_op(struct tcrypt_result *tr, int ret)
185 if (ret == -EINPROGRESS || ret == -EBUSY) {
186 wait_for_completion(&tr->completion);
187 reinit_completion(&tr->completion);
/*
 * Run @template[0..tcount) through @tfm and compare digests.
 * @use_digest selects the one-shot digest() path vs. init/update/final;
 * @align_offset shifts the input inside its page to exercise unaligned
 * buffers (alignment tests use contiguous buffers only).  A second pass
 * scatters the plaintext across pages per template[i].tap[] to test
 * chunked scatterlist input.  Returns 0 on success, -errno on failure.
 *
 * FIX: three pr_err() messages below said "alt: hash:" instead of the
 * "alg: hash:" prefix used everywhere else; corrected so log grep works.
 */
193 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
194 unsigned int tcount, bool use_digest,
195 const int align_offset)
197 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
198 unsigned int i, j, k, temp;
199 struct scatterlist sg[8];
202 struct ahash_request *req;
203 struct tcrypt_result tresult;
205 char *xbuf[XBUFSIZE];
208 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
211 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
214 if (testmgr_alloc_buf(xbuf))
217 init_completion(&tresult.completion);
219 req = ahash_request_alloc(tfm, GFP_KERNEL);
221 printk(KERN_ERR "alg: hash: Failed to allocate request for "
225 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
226 tcrypt_complete, &tresult);
/* Pass 1: contiguous buffers (optionally misaligned by align_offset). */
229 for (i = 0; i < tcount; i++) {
234 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
238 memset(result, 0, MAX_DIGEST_SIZE);
241 hash_buff += align_offset;
243 memcpy(hash_buff, template[i].plaintext, template[i].psize);
244 sg_init_one(&sg[0], hash_buff, template[i].psize);
246 if (template[i].ksize) {
247 crypto_ahash_clear_flags(tfm, ~0);
248 if (template[i].ksize > MAX_KEYLEN) {
249 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
250 j, algo, template[i].ksize, MAX_KEYLEN);
254 memcpy(key, template[i].key, template[i].ksize);
255 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
257 printk(KERN_ERR "alg: hash: setkey failed on "
258 "test %d for %s: ret=%d\n", j, algo,
264 ahash_request_set_crypt(req, sg, result, template[i].psize);
266 ret = wait_async_op(&tresult, crypto_ahash_digest(req));
268 pr_err("alg: hash: digest failed on test %d "
269 "for %s: ret=%d\n", j, algo, -ret);
273 ret = wait_async_op(&tresult, crypto_ahash_init(req));
275 pr_err("alg: hash: init failed on test %d "
276 "for %s: ret=%d\n", j, algo, -ret);
279 ret = wait_async_op(&tresult, crypto_ahash_update(req));
281 pr_err("alg: hash: update failed on test %d "
282 "for %s: ret=%d\n", j, algo, -ret);
285 ret = wait_async_op(&tresult, crypto_ahash_final(req));
287 pr_err("alg: hash: final failed on test %d "
288 "for %s: ret=%d\n", j, algo, -ret);
293 if (memcmp(result, template[i].digest,
294 crypto_ahash_digestsize(tfm))) {
295 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
297 hexdump(result, crypto_ahash_digestsize(tfm));
/* Pass 2: chunked scatterlist input per template[i].tap[]. */
304 for (i = 0; i < tcount; i++) {
305 /* alignment tests are only done with continuous buffers */
306 if (align_offset != 0)
313 memset(result, 0, MAX_DIGEST_SIZE);
316 sg_init_table(sg, template[i].np);
318 for (k = 0; k < template[i].np; k++) {
319 if (WARN_ON(offset_in_page(IDX[k]) +
320 template[i].tap[k] > PAGE_SIZE))
323 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
324 offset_in_page(IDX[k]),
325 template[i].plaintext + temp,
328 temp += template[i].tap[k];
331 if (template[i].ksize) {
332 if (template[i].ksize > MAX_KEYLEN) {
333 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
334 j, algo, template[i].ksize, MAX_KEYLEN);
338 crypto_ahash_clear_flags(tfm, ~0);
339 memcpy(key, template[i].key, template[i].ksize);
340 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
343 printk(KERN_ERR "alg: hash: setkey "
344 "failed on chunking test %d "
345 "for %s: ret=%d\n", j, algo, -ret);
350 ahash_request_set_crypt(req, sg, result, template[i].psize);
351 ret = crypto_ahash_digest(req);
357 wait_for_completion(&tresult.completion);
358 reinit_completion(&tresult.completion);
364 printk(KERN_ERR "alg: hash: digest failed "
365 "on chunking test %d for %s: "
366 "ret=%d\n", j, algo, -ret);
370 if (memcmp(result, template[i].digest,
371 crypto_ahash_digestsize(tfm))) {
372 printk(KERN_ERR "alg: hash: Chunking test %d "
373 "failed for %s\n", j, algo);
374 hexdump(result, crypto_ahash_digestsize(tfm));
383 ahash_request_free(req);
385 testmgr_free_buf(xbuf);
/*
 * Driver for __test_hash(): runs the vectors aligned, then with a
 * one-byte misalignment, then (per the visible tail) offset by the tfm's
 * alignment mask to verify the mask is honored.
 */
392 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
393 unsigned int tcount, bool use_digest)
395 unsigned int alignmask;
398 ret = __test_hash(tfm, template, tcount, use_digest, 0);
402 /* test unaligned buffers, check with one byte offset */
403 ret = __test_hash(tfm, template, tcount, use_digest, 1);
407 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
409 /* Check if alignment mask for tfm is correctly set. */
410 ret = __test_hash(tfm, template, tcount, use_digest,
/*
 * Core AEAD test: encrypt or decrypt (@enc) each template vector and
 * compare against the expected result.  @diff_dst selects separate
 * source/destination scatterlists; @align_offset misaligns contiguous
 * buffers.  Pass 1 uses single contiguous buffers, pass 2 scatters input
 * (and associated data) across pages per tap[]/atap[].  For decrypt
 * vectors with .novrfy set, -EBADMSG is the expected outcome.
 * Returns 0 on success, -errno on failure.
 */
419 static int __test_aead(struct crypto_aead *tfm, int enc,
420 struct aead_testvec *template, unsigned int tcount,
421 const bool diff_dst, const int align_offset)
423 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
424 unsigned int i, j, k, n, temp;
428 struct aead_request *req;
429 struct scatterlist *sg;
430 struct scatterlist *asg;
431 struct scatterlist *sgout;
433 struct tcrypt_result result;
434 unsigned int authsize, iv_len;
439 char *xbuf[XBUFSIZE];
440 char *xoutbuf[XBUFSIZE];
441 char *axbuf[XBUFSIZE];
443 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
446 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
449 if (testmgr_alloc_buf(xbuf))
451 if (testmgr_alloc_buf(axbuf))
453 if (diff_dst && testmgr_alloc_buf(xoutbuf))
456 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
457 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 3 : 2), GFP_KERNEL);
473 init_completion(&result.completion);
475 req = aead_request_alloc(tfm, GFP_KERNEL);
477 pr_err("alg: aead%s: Failed to allocate request for %s\n",
482 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
483 tcrypt_complete, &result);
/* Pass 1: contiguous buffers. */
485 for (i = 0, j = 0; i < tcount; i++) {
491 /* some templates have no input data but they will
495 input += align_offset;
499 if (WARN_ON(align_offset + template[i].ilen >
500 PAGE_SIZE || template[i].alen > PAGE_SIZE))
503 memcpy(input, template[i].input, template[i].ilen);
504 memcpy(assoc, template[i].assoc, template[i].alen);
505 iv_len = crypto_aead_ivsize(tfm);
507 memcpy(iv, template[i].iv, iv_len);
509 memset(iv, 0, iv_len);
511 crypto_aead_clear_flags(tfm, ~0);
513 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
515 if (template[i].klen > MAX_KEYLEN) {
516 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
517 d, j, algo, template[i].klen,
522 memcpy(key, template[i].key, template[i].klen);
524 ret = crypto_aead_setkey(tfm, key, template[i].klen);
/* !ret (setkey succeeded) must match the vector's .fail expectation. */
525 if (!ret == template[i].fail) {
526 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
527 d, j, algo, crypto_aead_get_flags(tfm));
/* Tag length is the difference between result and input lengths. */
532 authsize = abs(template[i].rlen - template[i].ilen);
533 ret = crypto_aead_setauthsize(tfm, authsize);
535 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
536 d, authsize, j, algo);
542 output += align_offset;
543 sg_init_one(&sg[0], input, template[i].ilen);
544 sg_init_one(&sgout[0], output, template[i].rlen);
546 sg_init_one(&sg[0], input,
547 template[i].ilen + (enc ? authsize : 0));
551 sg_init_one(&asg[0], assoc, template[i].alen);
553 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
554 template[i].ilen, iv);
556 aead_request_set_assoc(req, asg, template[i].alen);
558 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
562 if (template[i].novrfy) {
563 /* verification was supposed to fail */
564 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
566 /* so really, we got a bad message */
573 wait_for_completion(&result.completion);
574 reinit_completion(&result.completion);
579 if (template[i].novrfy)
580 /* verification failure was expected */
584 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
585 d, e, j, algo, -ret);
590 if (memcmp(q, template[i].result, template[i].rlen)) {
591 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
593 hexdump(q, template[i].rlen);
/* Pass 2: scatterlist input split per tap[]/atap[]. */
599 for (i = 0, j = 0; i < tcount; i++) {
600 /* alignment tests are only done with continuous buffers */
601 if (align_offset != 0)
610 memcpy(iv, template[i].iv, MAX_IVLEN);
612 memset(iv, 0, MAX_IVLEN);
614 crypto_aead_clear_flags(tfm, ~0);
616 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
617 if (template[i].klen > MAX_KEYLEN) {
618 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
619 d, j, algo, template[i].klen, MAX_KEYLEN);
623 memcpy(key, template[i].key, template[i].klen);
625 ret = crypto_aead_setkey(tfm, key, template[i].klen);
626 if (!ret == template[i].fail) {
627 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
628 d, j, algo, crypto_aead_get_flags(tfm));
633 authsize = abs(template[i].rlen - template[i].ilen);
636 sg_init_table(sg, template[i].np);
638 sg_init_table(sgout, template[i].np);
639 for (k = 0, temp = 0; k < template[i].np; k++) {
640 if (WARN_ON(offset_in_page(IDX[k]) +
641 template[i].tap[k] > PAGE_SIZE))
644 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
645 memcpy(q, template[i].input + temp, template[i].tap[k]);
646 sg_set_buf(&sg[k], q, template[i].tap[k]);
649 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
650 offset_in_page(IDX[k]);
652 memset(q, 0, template[i].tap[k]);
654 sg_set_buf(&sgout[k], q, template[i].tap[k]);
657 n = template[i].tap[k];
658 if (k == template[i].np - 1 && enc)
660 if (offset_in_page(q) + n < PAGE_SIZE)
663 temp += template[i].tap[k];
666 ret = crypto_aead_setauthsize(tfm, authsize);
668 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
669 d, authsize, j, algo);
/* Last segment must also hold the auth tag when encrypting. */
674 if (WARN_ON(sg[k - 1].offset +
675 sg[k - 1].length + authsize >
682 sgout[k - 1].length += authsize;
684 sg[k - 1].length += authsize;
687 sg_init_table(asg, template[i].anp);
689 for (k = 0, temp = 0; k < template[i].anp; k++) {
690 if (WARN_ON(offset_in_page(IDX[k]) +
691 template[i].atap[k] > PAGE_SIZE))
694 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
695 offset_in_page(IDX[k]),
696 template[i].assoc + temp,
697 template[i].atap[k]),
698 template[i].atap[k]);
699 temp += template[i].atap[k];
702 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
706 aead_request_set_assoc(req, asg, template[i].alen);
708 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
712 if (template[i].novrfy) {
713 /* verification was supposed to fail */
714 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
716 /* so really, we got a bad message */
723 wait_for_completion(&result.completion);
724 reinit_completion(&result.completion);
729 if (template[i].novrfy)
730 /* verification failure was expected */
734 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
735 d, e, j, algo, -ret);
/* Verify each output chunk against the expected result. */
740 for (k = 0, temp = 0; k < template[i].np; k++) {
742 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
743 offset_in_page(IDX[k]);
745 q = xbuf[IDX[k] >> PAGE_SHIFT] +
746 offset_in_page(IDX[k]);
748 n = template[i].tap[k];
749 if (k == template[i].np - 1)
750 n += enc ? authsize : -authsize;
752 if (memcmp(q, template[i].result + temp, n)) {
753 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
760 if (k == template[i].np - 1 && !enc) {
762 memcmp(q, template[i].input +
/* Scan past the end of the chunk for stray nonzero bytes. */
768 for (n = 0; offset_in_page(q + n) && q[n]; n++)
772 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
773 d, j, e, k, algo, n);
778 temp += template[i].tap[k];
785 aead_request_free(req);
789 testmgr_free_buf(xoutbuf);
791 testmgr_free_buf(axbuf);
793 testmgr_free_buf(xbuf);
/*
 * Driver for __test_aead(): dst==src, dst!=src, one-byte misalignment,
 * then offset by the tfm's alignment mask to check the mask is honored.
 */
800 static int test_aead(struct crypto_aead *tfm, int enc,
801 struct aead_testvec *template, unsigned int tcount)
803 unsigned int alignmask;
806 /* test 'dst == src' case */
807 ret = __test_aead(tfm, enc, template, tcount, false, 0);
811 /* test 'dst != src' case */
812 ret = __test_aead(tfm, enc, template, tcount, true, 0);
816 /* test unaligned buffers, check with one byte offset */
817 ret = __test_aead(tfm, enc, template, tcount, true, 1);
821 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
823 /* Check if alignment mask for tfm is correctly set. */
824 ret = __test_aead(tfm, enc, template, tcount, true,
/*
 * Synchronous single-block cipher test: setkey, then encrypt or decrypt
 * (@enc) the input one blocksize at a time in place, and compare against
 * the expected result.  Returns 0 on success, -errno on failure.
 */
833 static int test_cipher(struct crypto_cipher *tfm, int enc,
834 struct cipher_testvec *template, unsigned int tcount)
836 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
837 unsigned int i, j, k;
841 char *xbuf[XBUFSIZE];
844 if (testmgr_alloc_buf(xbuf))
853 for (i = 0; i < tcount; i++) {
860 if (WARN_ON(template[i].ilen > PAGE_SIZE))
864 memcpy(data, template[i].input, template[i].ilen);
866 crypto_cipher_clear_flags(tfm, ~0);
868 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
870 ret = crypto_cipher_setkey(tfm, template[i].key,
/* setkey success (!ret) must match the vector's .fail expectation. */
872 if (!ret == template[i].fail) {
873 printk(KERN_ERR "alg: cipher: setkey failed "
874 "on test %d for %s: flags=%x\n", j,
875 algo, crypto_cipher_get_flags(tfm));
/* Process the input block by block, in place. */
880 for (k = 0; k < template[i].ilen;
881 k += crypto_cipher_blocksize(tfm)) {
883 crypto_cipher_encrypt_one(tfm, data + k,
886 crypto_cipher_decrypt_one(tfm, data + k,
891 if (memcmp(q, template[i].result, template[i].rlen)) {
892 printk(KERN_ERR "alg: cipher: Test %d failed "
893 "on %s for %s\n", j, e, algo);
894 hexdump(q, template[i].rlen);
903 testmgr_free_buf(xbuf);
/*
 * Core skcipher test over an ablkcipher tfm.  Pass 1 runs each vector on
 * a contiguous buffer (optionally misaligned by @align_offset, optionally
 * with a separate destination when @diff_dst); pass 2 scatters the input
 * across pages per tap[] and re-checks each chunk, also scanning for
 * writes past the expected output.  Returns 0 on success, -errno on
 * failure.
 */
908 static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
909 struct cipher_testvec *template, unsigned int tcount,
910 const bool diff_dst, const int align_offset)
913 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
914 unsigned int i, j, k, n, temp;
916 struct ablkcipher_request *req;
917 struct scatterlist sg[8];
918 struct scatterlist sgout[8];
920 struct tcrypt_result result;
923 char *xbuf[XBUFSIZE];
924 char *xoutbuf[XBUFSIZE];
927 if (testmgr_alloc_buf(xbuf))
930 if (diff_dst && testmgr_alloc_buf(xoutbuf))
943 init_completion(&result.completion);
945 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
947 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
952 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
953 tcrypt_complete, &result);
/* Pass 1: contiguous buffers. */
956 for (i = 0; i < tcount; i++) {
/* Vectors marked np-only are skipped here unless also_non_np is set. */
957 if (template[i].np && !template[i].also_non_np)
961 memcpy(iv, template[i].iv, MAX_IVLEN);
963 memset(iv, 0, MAX_IVLEN);
967 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
971 data += align_offset;
972 memcpy(data, template[i].input, template[i].ilen);
974 crypto_ablkcipher_clear_flags(tfm, ~0);
976 crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
978 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
980 if (!ret == template[i].fail) {
981 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
982 d, j, algo, crypto_ablkcipher_get_flags(tfm));
987 sg_init_one(&sg[0], data, template[i].ilen);
990 data += align_offset;
991 sg_init_one(&sgout[0], data, template[i].ilen);
994 ablkcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
995 template[i].ilen, iv);
996 ret = enc ? crypto_ablkcipher_encrypt(req) :
997 crypto_ablkcipher_decrypt(req);
1004 wait_for_completion(&result.completion);
1005 reinit_completion(&result.completion);
1011 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1012 d, e, j, algo, -ret);
1017 if (memcmp(q, template[i].result, template[i].rlen)) {
1018 pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
1020 hexdump(q, template[i].rlen);
/* Pass 2: chunked scatterlist input per tap[]. */
1027 for (i = 0; i < tcount; i++) {
1028 /* alignment tests are only done with continuous buffers */
1029 if (align_offset != 0)
1032 if (!template[i].np)
1036 memcpy(iv, template[i].iv, MAX_IVLEN);
1038 memset(iv, 0, MAX_IVLEN);
1041 crypto_ablkcipher_clear_flags(tfm, ~0);
1043 crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1045 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
1047 if (!ret == template[i].fail) {
1048 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1049 d, j, algo, crypto_ablkcipher_get_flags(tfm));
1056 sg_init_table(sg, template[i].np);
1058 sg_init_table(sgout, template[i].np);
1059 for (k = 0; k < template[i].np; k++) {
1060 if (WARN_ON(offset_in_page(IDX[k]) +
1061 template[i].tap[k] > PAGE_SIZE))
1064 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1066 memcpy(q, template[i].input + temp, template[i].tap[k]);
/* NUL-terminate so the later corruption scan has a sentinel. */
1068 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1069 q[template[i].tap[k]] = 0;
1071 sg_set_buf(&sg[k], q, template[i].tap[k]);
1073 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1074 offset_in_page(IDX[k]);
1076 sg_set_buf(&sgout[k], q, template[i].tap[k]);
1078 memset(q, 0, template[i].tap[k]);
1079 if (offset_in_page(q) +
1080 template[i].tap[k] < PAGE_SIZE)
1081 q[template[i].tap[k]] = 0;
1084 temp += template[i].tap[k];
1087 ablkcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1088 template[i].ilen, iv);
1090 ret = enc ? crypto_ablkcipher_encrypt(req) :
1091 crypto_ablkcipher_decrypt(req);
1098 wait_for_completion(&result.completion);
1099 reinit_completion(&result.completion);
1105 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1106 d, e, j, algo, -ret);
/* Compare each output chunk and scan for out-of-bounds writes. */
1112 for (k = 0; k < template[i].np; k++) {
1114 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1115 offset_in_page(IDX[k]);
1117 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1118 offset_in_page(IDX[k]);
1120 if (memcmp(q, template[i].result + temp,
1121 template[i].tap[k])) {
1122 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1124 hexdump(q, template[i].tap[k]);
1128 q += template[i].tap[k];
1129 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1132 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1133 d, j, e, k, algo, n);
1137 temp += template[i].tap[k];
1144 ablkcipher_request_free(req);
1146 testmgr_free_buf(xoutbuf);
1148 testmgr_free_buf(xbuf);
/*
 * Driver for __test_skcipher(): dst==src, dst!=src, one-byte
 * misalignment, then offset by the tfm's alignment mask.
 */
1153 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
1154 struct cipher_testvec *template, unsigned int tcount)
1156 unsigned int alignmask;
1159 /* test 'dst == src' case */
1160 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1164 /* test 'dst != src' case */
1165 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1169 /* test unaligned buffers, check with one byte offset */
1170 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1174 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1176 /* Check if alignment mask for tfm is correctly set. */
1177 ret = __test_skcipher(tfm, enc, template, tcount, true,
/*
 * One-shot compressor test: compress each ctemplate vector and
 * decompress each dtemplate vector, checking both the output length and
 * the output bytes.  Returns 0 on success, -errno on failure.
 */
1186 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1187 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1189 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1191 char result[COMP_BUF_SIZE];
1194 for (i = 0; i < ctcount; i++) {
1196 unsigned int dlen = COMP_BUF_SIZE;
1198 memset(result, 0, sizeof (result));
1200 ilen = ctemplate[i].inlen;
1201 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1202 ilen, result, &dlen);
1204 printk(KERN_ERR "alg: comp: compression failed "
1205 "on test %d for %s: ret=%d\n", i + 1, algo,
1210 if (dlen != ctemplate[i].outlen) {
1211 printk(KERN_ERR "alg: comp: Compression test %d "
1212 "failed for %s: output len = %d\n", i + 1, algo,
1218 if (memcmp(result, ctemplate[i].output, dlen)) {
1219 printk(KERN_ERR "alg: comp: Compression test %d "
1220 "failed for %s\n", i + 1, algo);
1221 hexdump(result, dlen);
/* Decompression direction. */
1227 for (i = 0; i < dtcount; i++) {
1229 unsigned int dlen = COMP_BUF_SIZE;
1231 memset(result, 0, sizeof (result));
1233 ilen = dtemplate[i].inlen;
1234 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1235 ilen, result, &dlen);
1237 printk(KERN_ERR "alg: comp: decompression failed "
1238 "on test %d for %s: ret=%d\n", i + 1, algo,
1243 if (dlen != dtemplate[i].outlen) {
1244 printk(KERN_ERR "alg: comp: Decompression test %d "
1245 "failed for %s: output len = %d\n", i + 1, algo,
1251 if (memcmp(result, dtemplate[i].output, dlen)) {
1252 printk(KERN_ERR "alg: comp: Decompression test %d "
1253 "failed for %s\n", i + 1, algo);
1254 hexdump(result, dlen);
/*
 * Partial (streaming) compressor test: feed each vector in two halves
 * through setup/init/update/update/final, then verify both produced
 * length and output bytes; repeat for decompression.  -EAGAIN with no
 * remaining input is tolerated from update (stream needs more data).
 * Returns 0 on success, -errno on failure.
 */
1266 static int test_pcomp(struct crypto_pcomp *tfm,
1267 struct pcomp_testvec *ctemplate,
1268 struct pcomp_testvec *dtemplate, int ctcount,
1271 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1273 char result[COMP_BUF_SIZE];
1276 for (i = 0; i < ctcount; i++) {
1277 struct comp_request req;
1278 unsigned int produced = 0;
1280 res = crypto_compress_setup(tfm, ctemplate[i].params,
1281 ctemplate[i].paramsize);
1283 pr_err("alg: pcomp: compression setup failed on test "
1284 "%d for %s: error=%d\n", i + 1, algo, res);
1288 res = crypto_compress_init(tfm);
1290 pr_err("alg: pcomp: compression init failed on test "
1291 "%d for %s: error=%d\n", i + 1, algo, res);
1295 memset(result, 0, sizeof(result));
/* First half of the input, first half of the output space. */
1297 req.next_in = ctemplate[i].input;
1298 req.avail_in = ctemplate[i].inlen / 2;
1299 req.next_out = result;
1300 req.avail_out = ctemplate[i].outlen / 2;
1302 res = crypto_compress_update(tfm, &req);
1303 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1304 pr_err("alg: pcomp: compression update failed on test "
1305 "%d for %s: error=%d\n", i + 1, algo, res);
1311 /* Add remaining input data */
1312 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1314 res = crypto_compress_update(tfm, &req);
1315 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1316 pr_err("alg: pcomp: compression update failed on test "
1317 "%d for %s: error=%d\n", i + 1, algo, res);
1323 /* Provide remaining output space */
1324 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1326 res = crypto_compress_final(tfm, &req);
1328 pr_err("alg: pcomp: compression final failed on test "
1329 "%d for %s: error=%d\n", i + 1, algo, res);
1334 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1335 pr_err("alg: comp: Compression test %d failed for %s: "
1336 "output len = %d (expected %d)\n", i + 1, algo,
1337 COMP_BUF_SIZE - req.avail_out,
1338 ctemplate[i].outlen);
1342 if (produced != ctemplate[i].outlen) {
1343 pr_err("alg: comp: Compression test %d failed for %s: "
1344 "returned len = %u (expected %d)\n", i + 1,
1345 algo, produced, ctemplate[i].outlen);
1349 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1350 pr_err("alg: pcomp: Compression test %d failed for "
1351 "%s\n", i + 1, algo);
1352 hexdump(result, ctemplate[i].outlen);
/* Decompression direction: same two-half streaming pattern. */
1357 for (i = 0; i < dtcount; i++) {
1358 struct comp_request req;
1359 unsigned int produced = 0;
1361 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1362 dtemplate[i].paramsize);
1364 pr_err("alg: pcomp: decompression setup failed on "
1365 "test %d for %s: error=%d\n", i + 1, algo, res);
1369 res = crypto_decompress_init(tfm);
1371 pr_err("alg: pcomp: decompression init failed on test "
1372 "%d for %s: error=%d\n", i + 1, algo, res);
1376 memset(result, 0, sizeof(result));
1378 req.next_in = dtemplate[i].input;
1379 req.avail_in = dtemplate[i].inlen / 2;
1380 req.next_out = result;
1381 req.avail_out = dtemplate[i].outlen / 2;
1383 res = crypto_decompress_update(tfm, &req);
1384 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1385 pr_err("alg: pcomp: decompression update failed on "
1386 "test %d for %s: error=%d\n", i + 1, algo, res);
1392 /* Add remaining input data */
1393 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1395 res = crypto_decompress_update(tfm, &req);
1396 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1397 pr_err("alg: pcomp: decompression update failed on "
1398 "test %d for %s: error=%d\n", i + 1, algo, res);
1404 /* Provide remaining output space */
1405 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1407 res = crypto_decompress_final(tfm, &req);
1408 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1409 pr_err("alg: pcomp: decompression final failed on "
1410 "test %d for %s: error=%d\n", i + 1, algo, res);
1416 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1417 pr_err("alg: comp: Decompression test %d failed for "
1418 "%s: output len = %d (expected %d)\n", i + 1,
1419 algo, COMP_BUF_SIZE - req.avail_out,
1420 dtemplate[i].outlen);
1424 if (produced != dtemplate[i].outlen) {
1425 pr_err("alg: comp: Decompression test %d failed for "
1426 "%s: returned len = %u (expected %d)\n", i + 1,
1427 algo, produced, dtemplate[i].outlen);
1431 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1432 pr_err("alg: pcomp: Decompression test %d failed for "
1433 "%s\n", i + 1, algo);
1434 hexdump(result, dtemplate[i].outlen);
/*
 * Deterministic PRNG test: seed the rng with V || key || DT from each
 * vector, pull template[i].loops blocks of random data, and compare the
 * final block against the expected result.  Returns 0 on success,
 * -errno / nonzero on failure.
 */
1443 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1444 unsigned int tcount)
1446 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1447 int err = 0, i, j, seedsize;
1451 seedsize = crypto_rng_seedsize(tfm);
1453 seed = kmalloc(seedsize, GFP_KERNEL);
1455 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1460 for (i = 0; i < tcount; i++) {
1461 memset(result, 0, 32);
/* Seed layout: V, then key, then DT, concatenated. */
1463 memcpy(seed, template[i].v, template[i].vlen);
1464 memcpy(seed + template[i].vlen, template[i].key,
1466 memcpy(seed + template[i].vlen + template[i].klen,
1467 template[i].dt, template[i].dtlen);
1469 err = crypto_rng_reset(tfm, seed, seedsize);
1471 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1476 for (j = 0; j < template[i].loops; j++) {
1477 err = crypto_rng_get_bytes(tfm, result,
1480 printk(KERN_ERR "alg: cprng: Failed to obtain "
1481 "the correct amount of random data for "
1482 "%s (requested %d)\n", algo,
1488 err = memcmp(result, template[i].result,
1491 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1493 hexdump(result, template[i].rlen);
/*
 * alg_test_desc->test hook for AEADs: allocate the tfm, run the encrypt
 * suite, then the decrypt suite if the encrypt pass succeeded.
 */
1504 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1507 struct crypto_aead *tfm;
1510 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask);
1512 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1513 "%ld\n", driver, PTR_ERR(tfm));
1514 return PTR_ERR(tfm);
1517 if (desc->suite.aead.enc.vecs) {
1518 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1519 desc->suite.aead.enc.count);
1524 if (!err && desc->suite.aead.dec.vecs)
1525 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1526 desc->suite.aead.dec.count);
1529 crypto_free_aead(tfm);
/*
 * alg_test_desc->test hook for simple (single-block) ciphers:
 * allocate the tfm and run the encrypt and decrypt suites.
 */
1533 static int alg_test_cipher(const struct alg_test_desc *desc,
1534 const char *driver, u32 type, u32 mask)
1536 struct crypto_cipher *tfm;
1539 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1541 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1542 "%s: %ld\n", driver, PTR_ERR(tfm));
1543 return PTR_ERR(tfm);
1546 if (desc->suite.cipher.enc.vecs) {
1547 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1548 desc->suite.cipher.enc.count);
1553 if (desc->suite.cipher.dec.vecs)
1554 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1555 desc->suite.cipher.dec.count);
1558 crypto_free_cipher(tfm);
/*
 * alg_test_desc->test hook for skciphers (ablkcipher interface):
 * allocate the tfm and run the encrypt and decrypt suites.
 */
1562 static int alg_test_skcipher(const struct alg_test_desc *desc,
1563 const char *driver, u32 type, u32 mask)
1565 struct crypto_ablkcipher *tfm;
1568 tfm = crypto_alloc_ablkcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1570 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1571 "%s: %ld\n", driver, PTR_ERR(tfm));
1572 return PTR_ERR(tfm);
1575 if (desc->suite.cipher.enc.vecs) {
1576 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1577 desc->suite.cipher.enc.count);
1582 if (desc->suite.cipher.dec.vecs)
1583 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1584 desc->suite.cipher.dec.count);
1587 crypto_free_ablkcipher(tfm);
/*
 * alg_test_desc->test hook for one-shot compressors: allocate the tfm
 * and run both compression and decompression vector suites.
 */
1591 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1594 struct crypto_comp *tfm;
1597 tfm = crypto_alloc_comp(driver, type, mask);
1599 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1600 "%ld\n", driver, PTR_ERR(tfm));
1601 return PTR_ERR(tfm);
1604 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1605 desc->suite.comp.decomp.vecs,
1606 desc->suite.comp.comp.count,
1607 desc->suite.comp.decomp.count);
1609 crypto_free_comp(tfm);
/*
 * alg_test_desc->test hook for partial (streaming) compressors:
 * allocate the tfm and run both pcomp vector suites.
 */
1613 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1616 struct crypto_pcomp *tfm;
1619 tfm = crypto_alloc_pcomp(driver, type, mask);
1621 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1622 driver, PTR_ERR(tfm));
1623 return PTR_ERR(tfm);
1626 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1627 desc->suite.pcomp.decomp.vecs,
1628 desc->suite.pcomp.comp.count,
1629 desc->suite.pcomp.decomp.count);
1631 crypto_free_pcomp(tfm);
/*
 * alg_test_desc->test hook for hashes: run the suite twice — once via
 * the one-shot digest() path (use_digest=true), once via
 * init/update/final (use_digest=false).
 */
1635 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1638 struct crypto_ahash *tfm;
1641 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1643 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1644 "%ld\n", driver, PTR_ERR(tfm));
1645 return PTR_ERR(tfm);
1648 err = test_hash(tfm, desc->suite.hash.vecs,
1649 desc->suite.hash.count, true);
1651 err = test_hash(tfm, desc->suite.hash.vecs,
1652 desc->suite.hash.count, false);
1654 crypto_free_ahash(tfm);
/*
 * crc32c-specific test: after the generic hash tests, verify via the
 * shash interface that seeding the partial-result context with the
 * magic constant 420553207 and finalizing yields its bitwise complement
 * (checks the driver honors an externally-set intermediate state).
 */
1658 static int alg_test_crc32c(const struct alg_test_desc *desc,
1659 const char *driver, u32 type, u32 mask)
1661 struct crypto_shash *tfm;
1665 err = alg_test_hash(desc, driver, type, mask);
1669 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1671 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1672 "%ld\n", driver, PTR_ERR(tfm));
1678 SHASH_DESC_ON_STACK(shash, tfm);
1679 u32 *ctx = (u32 *)shash_desc_ctx(shash);
/* NOTE(review): le32_to_cpu() on a host-order constant looks
 * endianness-suspect; confirm against the crc32c ctx byte order. */
1684 *ctx = le32_to_cpu(420553207);
1685 err = crypto_shash_final(shash, (u8 *)&val);
1687 printk(KERN_ERR "alg: crc32c: Operation failed for "
1688 "%s: %d\n", driver, err);
1692 if (val != ~420553207) {
1693 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1694 "%d\n", driver, val);
1699 crypto_free_shash(tfm);
/*
 * alg_test_cprng() - run deterministic-RNG known-answer tests.
 *
 * Allocates the RNG transform (including internal-only implementations)
 * and hands the suite's cprng vectors to test_cprng(); frees the handle
 * afterwards.
 *
 * NOTE(review): elided dump — IS_ERR() braces/return not visible; code
 * lines kept byte-identical.
 */
1705 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1708 	struct crypto_rng *rng;
1711 	rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1713 		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1714 		       "%ld\n", driver, PTR_ERR(rng));
1715 		return PTR_ERR(rng);
1718 	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1720 	crypto_free_rng(rng);
/*
 * drbg_cavs_test() - run one NIST CAVS-style known-answer test vector
 * against a DRBG instance.
 *
 * @test:   a single drbg_testvec (entropy, personalization string,
 *          additional-input strings, and the expected output)
 * @pr:     non-zero for prediction-resistance vectors; selects the
 *          *_test generate calls that inject fresh test entropy before
 *          each generate (visible in the two if/else pairs below)
 *
 * Flow (per the visible lines): allocate an output buffer of
 * test->expectedlen, allocate the DRBG, instantiate it with the test
 * entropy + personalization string via crypto_drbg_reset_test(), generate
 * twice (each time with the vector's additional input; PR path feeds
 * entpra/entprb as per-request entropy), then memcmp the second output
 * against test->expected.
 *
 * NOTE(review): elided dump — kzalloc NULL-check, IS_ERR() branches,
 * if (pr)/else braces, goto-cleanup labels and kfree(buf) are not visible
 * here; code lines kept byte-identical.
 */
1726 static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1727 			  const char *driver, u32 type, u32 mask)
1730 	struct crypto_rng *drng;
1731 	struct drbg_test_data test_data;
1732 	struct drbg_string addtl, pers, testentropy;
1733 	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1738 	drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1740 		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
/* Instantiate with the vector's entropy and personalization string. */
1746 	test_data.testentropy = &testentropy;
1747 	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1748 	drbg_string_fill(&pers, test->pers, test->perslen);
1749 	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1751 		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
/* First generate: additional input A; PR path also supplies entropy A. */
1755 	drbg_string_fill(&addtl, test->addtla, test->addtllen);
1757 		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1758 		ret = crypto_drbg_get_bytes_addtl_test(drng,
1759 			buf, test->expectedlen, &addtl, &test_data);
1761 		ret = crypto_drbg_get_bytes_addtl(drng,
1762 			buf, test->expectedlen, &addtl);
1765 		printk(KERN_ERR "alg: drbg: could not obtain random data for "
1766 		       "driver %s\n", driver);
/* Second generate: additional input B; PR path supplies entropy B. */
1770 	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1772 		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1773 		ret = crypto_drbg_get_bytes_addtl_test(drng,
1774 			buf, test->expectedlen, &addtl, &test_data);
1776 		ret = crypto_drbg_get_bytes_addtl(drng,
1777 			buf, test->expectedlen, &addtl);
1780 		printk(KERN_ERR "alg: drbg: could not obtain random data for "
1781 		       "driver %s\n", driver);
/* CAVS compares only the final generate output against the vector. */
1785 	ret = memcmp(test->expected, buf, test->expectedlen);
1788 	crypto_free_rng(drng);
/*
 * alg_test_drbg() - iterate all DRBG test vectors for one driver.
 *
 * Detects prediction-resistance mode from the driver-name prefix
 * "drbg_pr_" (memcmp of the first 8 bytes) and runs drbg_cavs_test()
 * once per vector, logging the failing vector index on error.
 *
 * NOTE(review): elided dump — pr initialization, loop braces and final
 * return are not visible; code lines kept byte-identical.
 */
1794 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1800 	struct drbg_testvec *template = desc->suite.drbg.vecs;
1801 	unsigned int tcount = desc->suite.drbg.count;
/* "drbg_pr_*" drivers get the prediction-resistance test path. */
1803 	if (0 == memcmp(driver, "drbg_pr_", 8))
1806 	for (i = 0; i < tcount; i++) {
1807 		err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1809 			printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
/*
 * alg_test_null() - no-op test stub.
 *
 * Used as the .test hook for table entries that need no self-test
 * (internal helper implementations, null algorithms, and DRBG variants
 * covered by a sibling entry).  Body (return 0) is elided in this dump.
 */
1819 static int alg_test_null(const struct alg_test_desc *desc,
1820 			     const char *driver, u32 type, u32 mask)
/*
 * alg_test_descs[] - master table mapping algorithm/driver names to their
 * self-test routine and test-vector suite.
 *
 * Looked up by alg_find_test() via BINARY SEARCH, hence the sorted-order
 * requirement below (enforced at runtime by alg_test_descs_check_order()).
 * Entry conventions visible in this table:
 *   - alg_test_null: no vectors; entry exists only so the name is "known"
 *     (internal __driver/cryptd helpers, null algs, covered DRBG variants).
 *   - alg_test_skcipher / alg_test_aead: separate enc and dec vector sets.
 *   - alg_test_hash / alg_test_cprng / alg_test_drbg: single vector set.
 *   - alg_test_comp / alg_test_pcomp: comp and decomp vector sets.
 *
 * NOTE(review): this dump elides the per-entry braces ("}, {"), the
 * .fips_allowed flags, and the .suite sub-struct designators between the
 * sampled lines; the lines below are kept byte-identical to the dump.
 */
1825 /* Please keep this list sorted by algorithm name. */
1826 static const struct alg_test_desc alg_test_descs[] = {
1828 		.alg = "__cbc-cast5-avx",
1829 		.test = alg_test_null,
1831 		.alg = "__cbc-cast6-avx",
1832 		.test = alg_test_null,
1834 		.alg = "__cbc-serpent-avx",
1835 		.test = alg_test_null,
1837 		.alg = "__cbc-serpent-avx2",
1838 		.test = alg_test_null,
1840 		.alg = "__cbc-serpent-sse2",
1841 		.test = alg_test_null,
1843 		.alg = "__cbc-twofish-avx",
1844 		.test = alg_test_null,
1846 		.alg = "__driver-cbc-aes-aesni",
1847 		.test = alg_test_null,
1850 		.alg = "__driver-cbc-camellia-aesni",
1851 		.test = alg_test_null,
1853 		.alg = "__driver-cbc-camellia-aesni-avx2",
1854 		.test = alg_test_null,
1856 		.alg = "__driver-cbc-cast5-avx",
1857 		.test = alg_test_null,
1859 		.alg = "__driver-cbc-cast6-avx",
1860 		.test = alg_test_null,
1862 		.alg = "__driver-cbc-serpent-avx",
1863 		.test = alg_test_null,
1865 		.alg = "__driver-cbc-serpent-avx2",
1866 		.test = alg_test_null,
1868 		.alg = "__driver-cbc-serpent-sse2",
1869 		.test = alg_test_null,
1871 		.alg = "__driver-cbc-twofish-avx",
1872 		.test = alg_test_null,
1874 		.alg = "__driver-ecb-aes-aesni",
1875 		.test = alg_test_null,
1878 		.alg = "__driver-ecb-camellia-aesni",
1879 		.test = alg_test_null,
1881 		.alg = "__driver-ecb-camellia-aesni-avx2",
1882 		.test = alg_test_null,
1884 		.alg = "__driver-ecb-cast5-avx",
1885 		.test = alg_test_null,
1887 		.alg = "__driver-ecb-cast6-avx",
1888 		.test = alg_test_null,
1890 		.alg = "__driver-ecb-serpent-avx",
1891 		.test = alg_test_null,
1893 		.alg = "__driver-ecb-serpent-avx2",
1894 		.test = alg_test_null,
1896 		.alg = "__driver-ecb-serpent-sse2",
1897 		.test = alg_test_null,
1899 		.alg = "__driver-ecb-twofish-avx",
1900 		.test = alg_test_null,
1902 		.alg = "__ghash-pclmulqdqni",
1903 		.test = alg_test_null,
1906 		.alg = "ansi_cprng",
1907 		.test = alg_test_cprng,
1911 				.vecs = ansi_cprng_aes_tv_template,
1912 				.count = ANSI_CPRNG_AES_TEST_VECTORS
1916 		.alg = "authenc(hmac(md5),ecb(cipher_null))",
1917 		.test = alg_test_aead,
1922 					.vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
1923 					.count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
1926 					.vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
1927 					.count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
1932 		.alg = "authenc(hmac(sha1),cbc(aes))",
1933 		.test = alg_test_aead,
1939 						hmac_sha1_aes_cbc_enc_tv_temp,
1941 						HMAC_SHA1_AES_CBC_ENC_TEST_VEC
1946 		.alg = "authenc(hmac(sha1),cbc(des))",
1947 		.test = alg_test_aead,
1953 						hmac_sha1_des_cbc_enc_tv_temp,
1955 						HMAC_SHA1_DES_CBC_ENC_TEST_VEC
1960 		.alg = "authenc(hmac(sha1),cbc(des3_ede))",
1961 		.test = alg_test_aead,
1967 						hmac_sha1_des3_ede_cbc_enc_tv_temp,
1969 						HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
1974 		.alg = "authenc(hmac(sha1),ecb(cipher_null))",
1975 		.test = alg_test_aead,
1981 						hmac_sha1_ecb_cipher_null_enc_tv_temp,
1983 						HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
1987 						hmac_sha1_ecb_cipher_null_dec_tv_temp,
1989 						HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
1994 		.alg = "authenc(hmac(sha224),cbc(des))",
1995 		.test = alg_test_aead,
2001 						hmac_sha224_des_cbc_enc_tv_temp,
2003 						HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2008 		.alg = "authenc(hmac(sha224),cbc(des3_ede))",
2009 		.test = alg_test_aead,
2015 						hmac_sha224_des3_ede_cbc_enc_tv_temp,
2017 						HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2022 		.alg = "authenc(hmac(sha256),cbc(aes))",
2023 		.test = alg_test_aead,
2029 						hmac_sha256_aes_cbc_enc_tv_temp,
2031 						HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2036 		.alg = "authenc(hmac(sha256),cbc(des))",
2037 		.test = alg_test_aead,
2043 						hmac_sha256_des_cbc_enc_tv_temp,
2045 						HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2050 		.alg = "authenc(hmac(sha256),cbc(des3_ede))",
2051 		.test = alg_test_aead,
2057 						hmac_sha256_des3_ede_cbc_enc_tv_temp,
2059 						HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2064 		.alg = "authenc(hmac(sha384),cbc(des))",
2065 		.test = alg_test_aead,
2071 						hmac_sha384_des_cbc_enc_tv_temp,
2073 						HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2078 		.alg = "authenc(hmac(sha384),cbc(des3_ede))",
2079 		.test = alg_test_aead,
2085 						hmac_sha384_des3_ede_cbc_enc_tv_temp,
2087 						HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2092 		.alg = "authenc(hmac(sha512),cbc(aes))",
2093 		.test = alg_test_aead,
2099 						hmac_sha512_aes_cbc_enc_tv_temp,
2101 						HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2106 		.alg = "authenc(hmac(sha512),cbc(des))",
2107 		.test = alg_test_aead,
2113 						hmac_sha512_des_cbc_enc_tv_temp,
2115 						HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2120 		.alg = "authenc(hmac(sha512),cbc(des3_ede))",
2121 		.test = alg_test_aead,
2127 						hmac_sha512_des3_ede_cbc_enc_tv_temp,
2129 						HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2135 		.test = alg_test_skcipher,
2140 					.vecs = aes_cbc_enc_tv_template,
2141 					.count = AES_CBC_ENC_TEST_VECTORS
2144 					.vecs = aes_cbc_dec_tv_template,
2145 					.count = AES_CBC_DEC_TEST_VECTORS
2150 		.alg = "cbc(anubis)",
2151 		.test = alg_test_skcipher,
2155 					.vecs = anubis_cbc_enc_tv_template,
2156 					.count = ANUBIS_CBC_ENC_TEST_VECTORS
2159 					.vecs = anubis_cbc_dec_tv_template,
2160 					.count = ANUBIS_CBC_DEC_TEST_VECTORS
2165 		.alg = "cbc(blowfish)",
2166 		.test = alg_test_skcipher,
2170 					.vecs = bf_cbc_enc_tv_template,
2171 					.count = BF_CBC_ENC_TEST_VECTORS
2174 					.vecs = bf_cbc_dec_tv_template,
2175 					.count = BF_CBC_DEC_TEST_VECTORS
2180 		.alg = "cbc(camellia)",
2181 		.test = alg_test_skcipher,
2185 					.vecs = camellia_cbc_enc_tv_template,
2186 					.count = CAMELLIA_CBC_ENC_TEST_VECTORS
2189 					.vecs = camellia_cbc_dec_tv_template,
2190 					.count = CAMELLIA_CBC_DEC_TEST_VECTORS
2195 		.alg = "cbc(cast5)",
2196 		.test = alg_test_skcipher,
2200 					.vecs = cast5_cbc_enc_tv_template,
2201 					.count = CAST5_CBC_ENC_TEST_VECTORS
2204 					.vecs = cast5_cbc_dec_tv_template,
2205 					.count = CAST5_CBC_DEC_TEST_VECTORS
2210 		.alg = "cbc(cast6)",
2211 		.test = alg_test_skcipher,
2215 					.vecs = cast6_cbc_enc_tv_template,
2216 					.count = CAST6_CBC_ENC_TEST_VECTORS
2219 					.vecs = cast6_cbc_dec_tv_template,
2220 					.count = CAST6_CBC_DEC_TEST_VECTORS
2226 		.test = alg_test_skcipher,
2230 					.vecs = des_cbc_enc_tv_template,
2231 					.count = DES_CBC_ENC_TEST_VECTORS
2234 					.vecs = des_cbc_dec_tv_template,
2235 					.count = DES_CBC_DEC_TEST_VECTORS
2240 		.alg = "cbc(des3_ede)",
2241 		.test = alg_test_skcipher,
2246 					.vecs = des3_ede_cbc_enc_tv_template,
2247 					.count = DES3_EDE_CBC_ENC_TEST_VECTORS
2250 					.vecs = des3_ede_cbc_dec_tv_template,
2251 					.count = DES3_EDE_CBC_DEC_TEST_VECTORS
2256 		.alg = "cbc(serpent)",
2257 		.test = alg_test_skcipher,
2261 					.vecs = serpent_cbc_enc_tv_template,
2262 					.count = SERPENT_CBC_ENC_TEST_VECTORS
2265 					.vecs = serpent_cbc_dec_tv_template,
2266 					.count = SERPENT_CBC_DEC_TEST_VECTORS
2271 		.alg = "cbc(twofish)",
2272 		.test = alg_test_skcipher,
2276 					.vecs = tf_cbc_enc_tv_template,
2277 					.count = TF_CBC_ENC_TEST_VECTORS
2280 					.vecs = tf_cbc_dec_tv_template,
2281 					.count = TF_CBC_DEC_TEST_VECTORS
2287 		.test = alg_test_aead,
2292 					.vecs = aes_ccm_enc_tv_template,
2293 					.count = AES_CCM_ENC_TEST_VECTORS
2296 					.vecs = aes_ccm_dec_tv_template,
2297 					.count = AES_CCM_DEC_TEST_VECTORS
2303 		.test = alg_test_hash,
2306 				.vecs = aes_cmac128_tv_template,
2307 				.count = CMAC_AES_TEST_VECTORS
2311 		.alg = "cmac(des3_ede)",
2312 		.test = alg_test_hash,
2315 				.vecs = des3_ede_cmac64_tv_template,
2316 				.count = CMAC_DES3_EDE_TEST_VECTORS
2320 		.alg = "compress_null",
2321 		.test = alg_test_null,
2324 		.test = alg_test_hash,
2327 				.vecs = crc32_tv_template,
2328 				.count = CRC32_TEST_VECTORS
2333 		.test = alg_test_crc32c,
2337 				.vecs = crc32c_tv_template,
2338 				.count = CRC32C_TEST_VECTORS
2343 		.test = alg_test_hash,
2347 				.vecs = crct10dif_tv_template,
2348 				.count = CRCT10DIF_TEST_VECTORS
2352 		.alg = "cryptd(__driver-cbc-aes-aesni)",
2353 		.test = alg_test_null,
2356 		.alg = "cryptd(__driver-cbc-camellia-aesni)",
2357 		.test = alg_test_null,
2359 		.alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2360 		.test = alg_test_null,
2362 		.alg = "cryptd(__driver-cbc-serpent-avx2)",
2363 		.test = alg_test_null,
2365 		.alg = "cryptd(__driver-ecb-aes-aesni)",
2366 		.test = alg_test_null,
2369 		.alg = "cryptd(__driver-ecb-camellia-aesni)",
2370 		.test = alg_test_null,
2372 		.alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2373 		.test = alg_test_null,
2375 		.alg = "cryptd(__driver-ecb-cast5-avx)",
2376 		.test = alg_test_null,
2378 		.alg = "cryptd(__driver-ecb-cast6-avx)",
2379 		.test = alg_test_null,
2381 		.alg = "cryptd(__driver-ecb-serpent-avx)",
2382 		.test = alg_test_null,
2384 		.alg = "cryptd(__driver-ecb-serpent-avx2)",
2385 		.test = alg_test_null,
2387 		.alg = "cryptd(__driver-ecb-serpent-sse2)",
2388 		.test = alg_test_null,
2390 		.alg = "cryptd(__driver-ecb-twofish-avx)",
2391 		.test = alg_test_null,
2393 		.alg = "cryptd(__driver-gcm-aes-aesni)",
2394 		.test = alg_test_null,
2397 		.alg = "cryptd(__ghash-pclmulqdqni)",
2398 		.test = alg_test_null,
2402 		.test = alg_test_skcipher,
2407 					.vecs = aes_ctr_enc_tv_template,
2408 					.count = AES_CTR_ENC_TEST_VECTORS
2411 					.vecs = aes_ctr_dec_tv_template,
2412 					.count = AES_CTR_DEC_TEST_VECTORS
2417 		.alg = "ctr(blowfish)",
2418 		.test = alg_test_skcipher,
2422 					.vecs = bf_ctr_enc_tv_template,
2423 					.count = BF_CTR_ENC_TEST_VECTORS
2426 					.vecs = bf_ctr_dec_tv_template,
2427 					.count = BF_CTR_DEC_TEST_VECTORS
2432 		.alg = "ctr(camellia)",
2433 		.test = alg_test_skcipher,
2437 					.vecs = camellia_ctr_enc_tv_template,
2438 					.count = CAMELLIA_CTR_ENC_TEST_VECTORS
2441 					.vecs = camellia_ctr_dec_tv_template,
2442 					.count = CAMELLIA_CTR_DEC_TEST_VECTORS
2447 		.alg = "ctr(cast5)",
2448 		.test = alg_test_skcipher,
2452 					.vecs = cast5_ctr_enc_tv_template,
2453 					.count = CAST5_CTR_ENC_TEST_VECTORS
2456 					.vecs = cast5_ctr_dec_tv_template,
2457 					.count = CAST5_CTR_DEC_TEST_VECTORS
2462 		.alg = "ctr(cast6)",
2463 		.test = alg_test_skcipher,
2467 					.vecs = cast6_ctr_enc_tv_template,
2468 					.count = CAST6_CTR_ENC_TEST_VECTORS
2471 					.vecs = cast6_ctr_dec_tv_template,
2472 					.count = CAST6_CTR_DEC_TEST_VECTORS
2478 		.test = alg_test_skcipher,
2482 					.vecs = des_ctr_enc_tv_template,
2483 					.count = DES_CTR_ENC_TEST_VECTORS
2486 					.vecs = des_ctr_dec_tv_template,
2487 					.count = DES_CTR_DEC_TEST_VECTORS
2492 		.alg = "ctr(des3_ede)",
2493 		.test = alg_test_skcipher,
2497 					.vecs = des3_ede_ctr_enc_tv_template,
2498 					.count = DES3_EDE_CTR_ENC_TEST_VECTORS
2501 					.vecs = des3_ede_ctr_dec_tv_template,
2502 					.count = DES3_EDE_CTR_DEC_TEST_VECTORS
2507 		.alg = "ctr(serpent)",
2508 		.test = alg_test_skcipher,
2512 					.vecs = serpent_ctr_enc_tv_template,
2513 					.count = SERPENT_CTR_ENC_TEST_VECTORS
2516 					.vecs = serpent_ctr_dec_tv_template,
2517 					.count = SERPENT_CTR_DEC_TEST_VECTORS
2522 		.alg = "ctr(twofish)",
2523 		.test = alg_test_skcipher,
2527 					.vecs = tf_ctr_enc_tv_template,
2528 					.count = TF_CTR_ENC_TEST_VECTORS
2531 					.vecs = tf_ctr_dec_tv_template,
2532 					.count = TF_CTR_DEC_TEST_VECTORS
2537 		.alg = "cts(cbc(aes))",
2538 		.test = alg_test_skcipher,
2542 					.vecs = cts_mode_enc_tv_template,
2543 					.count = CTS_MODE_ENC_TEST_VECTORS
2546 					.vecs = cts_mode_dec_tv_template,
2547 					.count = CTS_MODE_DEC_TEST_VECTORS
2553 		.test = alg_test_comp,
2558 					.vecs = deflate_comp_tv_template,
2559 					.count = DEFLATE_COMP_TEST_VECTORS
2562 					.vecs = deflate_decomp_tv_template,
2563 					.count = DEFLATE_DECOMP_TEST_VECTORS
2568 		.alg = "digest_null",
2569 		.test = alg_test_null,
2571 		.alg = "drbg_nopr_ctr_aes128",
2572 		.test = alg_test_drbg,
2576 				.vecs = drbg_nopr_ctr_aes128_tv_template,
2577 				.count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2581 		.alg = "drbg_nopr_ctr_aes192",
2582 		.test = alg_test_drbg,
2586 				.vecs = drbg_nopr_ctr_aes192_tv_template,
2587 				.count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2591 		.alg = "drbg_nopr_ctr_aes256",
2592 		.test = alg_test_drbg,
2596 				.vecs = drbg_nopr_ctr_aes256_tv_template,
2597 				.count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2602 		 * There is no need to specifically test the DRBG with every
2603 		 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2605 		.alg = "drbg_nopr_hmac_sha1",
2607 		.test = alg_test_null,
2609 		.alg = "drbg_nopr_hmac_sha256",
2610 		.test = alg_test_drbg,
2614 				.vecs = drbg_nopr_hmac_sha256_tv_template,
2616 					ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2620 		/* covered by drbg_nopr_hmac_sha256 test */
2621 		.alg = "drbg_nopr_hmac_sha384",
2623 		.test = alg_test_null,
2625 		.alg = "drbg_nopr_hmac_sha512",
2626 		.test = alg_test_null,
2629 		.alg = "drbg_nopr_sha1",
2631 		.test = alg_test_null,
2633 		.alg = "drbg_nopr_sha256",
2634 		.test = alg_test_drbg,
2638 				.vecs = drbg_nopr_sha256_tv_template,
2639 				.count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2643 		/* covered by drbg_nopr_sha256 test */
2644 		.alg = "drbg_nopr_sha384",
2646 		.test = alg_test_null,
2648 		.alg = "drbg_nopr_sha512",
2650 		.test = alg_test_null,
2652 		.alg = "drbg_pr_ctr_aes128",
2653 		.test = alg_test_drbg,
2657 				.vecs = drbg_pr_ctr_aes128_tv_template,
2658 				.count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2662 		/* covered by drbg_pr_ctr_aes128 test */
2663 		.alg = "drbg_pr_ctr_aes192",
2665 		.test = alg_test_null,
2667 		.alg = "drbg_pr_ctr_aes256",
2669 		.test = alg_test_null,
2671 		.alg = "drbg_pr_hmac_sha1",
2673 		.test = alg_test_null,
2675 		.alg = "drbg_pr_hmac_sha256",
2676 		.test = alg_test_drbg,
2680 				.vecs = drbg_pr_hmac_sha256_tv_template,
2681 				.count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2685 		/* covered by drbg_pr_hmac_sha256 test */
2686 		.alg = "drbg_pr_hmac_sha384",
2688 		.test = alg_test_null,
2690 		.alg = "drbg_pr_hmac_sha512",
2691 		.test = alg_test_null,
2694 		.alg = "drbg_pr_sha1",
2696 		.test = alg_test_null,
2698 		.alg = "drbg_pr_sha256",
2699 		.test = alg_test_drbg,
2703 				.vecs = drbg_pr_sha256_tv_template,
2704 				.count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
2708 		/* covered by drbg_pr_sha256 test */
2709 		.alg = "drbg_pr_sha384",
2711 		.test = alg_test_null,
2713 		.alg = "drbg_pr_sha512",
2715 		.test = alg_test_null,
2717 		.alg = "ecb(__aes-aesni)",
2718 		.test = alg_test_null,
2722 		.test = alg_test_skcipher,
2727 					.vecs = aes_enc_tv_template,
2728 					.count = AES_ENC_TEST_VECTORS
2731 					.vecs = aes_dec_tv_template,
2732 					.count = AES_DEC_TEST_VECTORS
2737 		.alg = "ecb(anubis)",
2738 		.test = alg_test_skcipher,
2742 					.vecs = anubis_enc_tv_template,
2743 					.count = ANUBIS_ENC_TEST_VECTORS
2746 					.vecs = anubis_dec_tv_template,
2747 					.count = ANUBIS_DEC_TEST_VECTORS
2753 		.test = alg_test_skcipher,
2757 					.vecs = arc4_enc_tv_template,
2758 					.count = ARC4_ENC_TEST_VECTORS
2761 					.vecs = arc4_dec_tv_template,
2762 					.count = ARC4_DEC_TEST_VECTORS
2767 		.alg = "ecb(blowfish)",
2768 		.test = alg_test_skcipher,
2772 					.vecs = bf_enc_tv_template,
2773 					.count = BF_ENC_TEST_VECTORS
2776 					.vecs = bf_dec_tv_template,
2777 					.count = BF_DEC_TEST_VECTORS
2782 		.alg = "ecb(camellia)",
2783 		.test = alg_test_skcipher,
2787 					.vecs = camellia_enc_tv_template,
2788 					.count = CAMELLIA_ENC_TEST_VECTORS
2791 					.vecs = camellia_dec_tv_template,
2792 					.count = CAMELLIA_DEC_TEST_VECTORS
2797 		.alg = "ecb(cast5)",
2798 		.test = alg_test_skcipher,
2802 					.vecs = cast5_enc_tv_template,
2803 					.count = CAST5_ENC_TEST_VECTORS
2806 					.vecs = cast5_dec_tv_template,
2807 					.count = CAST5_DEC_TEST_VECTORS
2812 		.alg = "ecb(cast6)",
2813 		.test = alg_test_skcipher,
2817 					.vecs = cast6_enc_tv_template,
2818 					.count = CAST6_ENC_TEST_VECTORS
2821 					.vecs = cast6_dec_tv_template,
2822 					.count = CAST6_DEC_TEST_VECTORS
2827 		.alg = "ecb(cipher_null)",
2828 		.test = alg_test_null,
2831 		.test = alg_test_skcipher,
2836 					.vecs = des_enc_tv_template,
2837 					.count = DES_ENC_TEST_VECTORS
2840 					.vecs = des_dec_tv_template,
2841 					.count = DES_DEC_TEST_VECTORS
2846 		.alg = "ecb(des3_ede)",
2847 		.test = alg_test_skcipher,
2852 					.vecs = des3_ede_enc_tv_template,
2853 					.count = DES3_EDE_ENC_TEST_VECTORS
2856 					.vecs = des3_ede_dec_tv_template,
2857 					.count = DES3_EDE_DEC_TEST_VECTORS
2862 		.alg = "ecb(fcrypt)",
2863 		.test = alg_test_skcipher,
2867 					.vecs = fcrypt_pcbc_enc_tv_template,
2871 					.vecs = fcrypt_pcbc_dec_tv_template,
2877 		.alg = "ecb(khazad)",
2878 		.test = alg_test_skcipher,
2882 					.vecs = khazad_enc_tv_template,
2883 					.count = KHAZAD_ENC_TEST_VECTORS
2886 					.vecs = khazad_dec_tv_template,
2887 					.count = KHAZAD_DEC_TEST_VECTORS
2893 		.test = alg_test_skcipher,
2897 					.vecs = seed_enc_tv_template,
2898 					.count = SEED_ENC_TEST_VECTORS
2901 					.vecs = seed_dec_tv_template,
2902 					.count = SEED_DEC_TEST_VECTORS
2907 		.alg = "ecb(serpent)",
2908 		.test = alg_test_skcipher,
2912 					.vecs = serpent_enc_tv_template,
2913 					.count = SERPENT_ENC_TEST_VECTORS
2916 					.vecs = serpent_dec_tv_template,
2917 					.count = SERPENT_DEC_TEST_VECTORS
2923 		.test = alg_test_skcipher,
2927 					.vecs = tea_enc_tv_template,
2928 					.count = TEA_ENC_TEST_VECTORS
2931 					.vecs = tea_dec_tv_template,
2932 					.count = TEA_DEC_TEST_VECTORS
2937 		.alg = "ecb(tnepres)",
2938 		.test = alg_test_skcipher,
2942 					.vecs = tnepres_enc_tv_template,
2943 					.count = TNEPRES_ENC_TEST_VECTORS
2946 					.vecs = tnepres_dec_tv_template,
2947 					.count = TNEPRES_DEC_TEST_VECTORS
2952 		.alg = "ecb(twofish)",
2953 		.test = alg_test_skcipher,
2957 					.vecs = tf_enc_tv_template,
2958 					.count = TF_ENC_TEST_VECTORS
2961 					.vecs = tf_dec_tv_template,
2962 					.count = TF_DEC_TEST_VECTORS
2968 		.test = alg_test_skcipher,
2972 					.vecs = xeta_enc_tv_template,
2973 					.count = XETA_ENC_TEST_VECTORS
2976 					.vecs = xeta_dec_tv_template,
2977 					.count = XETA_DEC_TEST_VECTORS
2983 		.test = alg_test_skcipher,
2987 					.vecs = xtea_enc_tv_template,
2988 					.count = XTEA_ENC_TEST_VECTORS
2991 					.vecs = xtea_dec_tv_template,
2992 					.count = XTEA_DEC_TEST_VECTORS
2998 		.test = alg_test_aead,
3003 					.vecs = aes_gcm_enc_tv_template,
3004 					.count = AES_GCM_ENC_TEST_VECTORS
3007 					.vecs = aes_gcm_dec_tv_template,
3008 					.count = AES_GCM_DEC_TEST_VECTORS
3014 		.test = alg_test_hash,
3018 				.vecs = ghash_tv_template,
3019 				.count = GHASH_TEST_VECTORS
3023 		.alg = "hmac(crc32)",
3024 		.test = alg_test_hash,
3027 				.vecs = bfin_crc_tv_template,
3028 				.count = BFIN_CRC_TEST_VECTORS
3033 		.test = alg_test_hash,
3036 				.vecs = hmac_md5_tv_template,
3037 				.count = HMAC_MD5_TEST_VECTORS
3041 		.alg = "hmac(rmd128)",
3042 		.test = alg_test_hash,
3045 				.vecs = hmac_rmd128_tv_template,
3046 				.count = HMAC_RMD128_TEST_VECTORS
3050 		.alg = "hmac(rmd160)",
3051 		.test = alg_test_hash,
3054 				.vecs = hmac_rmd160_tv_template,
3055 				.count = HMAC_RMD160_TEST_VECTORS
3059 		.alg = "hmac(sha1)",
3060 		.test = alg_test_hash,
3064 				.vecs = hmac_sha1_tv_template,
3065 				.count = HMAC_SHA1_TEST_VECTORS
3069 		.alg = "hmac(sha224)",
3070 		.test = alg_test_hash,
3074 				.vecs = hmac_sha224_tv_template,
3075 				.count = HMAC_SHA224_TEST_VECTORS
3079 		.alg = "hmac(sha256)",
3080 		.test = alg_test_hash,
3084 				.vecs = hmac_sha256_tv_template,
3085 				.count = HMAC_SHA256_TEST_VECTORS
3089 		.alg = "hmac(sha384)",
3090 		.test = alg_test_hash,
3094 				.vecs = hmac_sha384_tv_template,
3095 				.count = HMAC_SHA384_TEST_VECTORS
3099 		.alg = "hmac(sha512)",
3100 		.test = alg_test_hash,
3104 				.vecs = hmac_sha512_tv_template,
3105 				.count = HMAC_SHA512_TEST_VECTORS
3110 		.test = alg_test_skcipher,
3114 					.vecs = aes_lrw_enc_tv_template,
3115 					.count = AES_LRW_ENC_TEST_VECTORS
3118 					.vecs = aes_lrw_dec_tv_template,
3119 					.count = AES_LRW_DEC_TEST_VECTORS
3124 		.alg = "lrw(camellia)",
3125 		.test = alg_test_skcipher,
3129 					.vecs = camellia_lrw_enc_tv_template,
3130 					.count = CAMELLIA_LRW_ENC_TEST_VECTORS
3133 					.vecs = camellia_lrw_dec_tv_template,
3134 					.count = CAMELLIA_LRW_DEC_TEST_VECTORS
3139 		.alg = "lrw(cast6)",
3140 		.test = alg_test_skcipher,
3144 					.vecs = cast6_lrw_enc_tv_template,
3145 					.count = CAST6_LRW_ENC_TEST_VECTORS
3148 					.vecs = cast6_lrw_dec_tv_template,
3149 					.count = CAST6_LRW_DEC_TEST_VECTORS
3154 		.alg = "lrw(serpent)",
3155 		.test = alg_test_skcipher,
3159 					.vecs = serpent_lrw_enc_tv_template,
3160 					.count = SERPENT_LRW_ENC_TEST_VECTORS
3163 					.vecs = serpent_lrw_dec_tv_template,
3164 					.count = SERPENT_LRW_DEC_TEST_VECTORS
3169 		.alg = "lrw(twofish)",
3170 		.test = alg_test_skcipher,
3174 					.vecs = tf_lrw_enc_tv_template,
3175 					.count = TF_LRW_ENC_TEST_VECTORS
3178 					.vecs = tf_lrw_dec_tv_template,
3179 					.count = TF_LRW_DEC_TEST_VECTORS
3185 		.test = alg_test_comp,
3190 					.vecs = lz4_comp_tv_template,
3191 					.count = LZ4_COMP_TEST_VECTORS
3194 					.vecs = lz4_decomp_tv_template,
3195 					.count = LZ4_DECOMP_TEST_VECTORS
3201 		.test = alg_test_comp,
3206 					.vecs = lz4hc_comp_tv_template,
3207 					.count = LZ4HC_COMP_TEST_VECTORS
3210 					.vecs = lz4hc_decomp_tv_template,
3211 					.count = LZ4HC_DECOMP_TEST_VECTORS
3217 		.test = alg_test_comp,
3222 					.vecs = lzo_comp_tv_template,
3223 					.count = LZO_COMP_TEST_VECTORS
3226 					.vecs = lzo_decomp_tv_template,
3227 					.count = LZO_DECOMP_TEST_VECTORS
3233 		.test = alg_test_hash,
3236 				.vecs = md4_tv_template,
3237 				.count = MD4_TEST_VECTORS
3242 		.test = alg_test_hash,
3245 				.vecs = md5_tv_template,
3246 				.count = MD5_TEST_VECTORS
3250 		.alg = "michael_mic",
3251 		.test = alg_test_hash,
3254 				.vecs = michael_mic_tv_template,
3255 				.count = MICHAEL_MIC_TEST_VECTORS
3260 		.test = alg_test_skcipher,
3265 					.vecs = aes_ofb_enc_tv_template,
3266 					.count = AES_OFB_ENC_TEST_VECTORS
3269 					.vecs = aes_ofb_dec_tv_template,
3270 					.count = AES_OFB_DEC_TEST_VECTORS
3275 		.alg = "pcbc(fcrypt)",
3276 		.test = alg_test_skcipher,
3280 					.vecs = fcrypt_pcbc_enc_tv_template,
3281 					.count = FCRYPT_ENC_TEST_VECTORS
3284 					.vecs = fcrypt_pcbc_dec_tv_template,
3285 					.count = FCRYPT_DEC_TEST_VECTORS
3290 		.alg = "rfc3686(ctr(aes))",
3291 		.test = alg_test_skcipher,
3296 					.vecs = aes_ctr_rfc3686_enc_tv_template,
3297 					.count = AES_CTR_3686_ENC_TEST_VECTORS
3300 					.vecs = aes_ctr_rfc3686_dec_tv_template,
3301 					.count = AES_CTR_3686_DEC_TEST_VECTORS
3306 		.alg = "rfc4106(gcm(aes))",
3307 		.test = alg_test_aead,
3312 					.vecs = aes_gcm_rfc4106_enc_tv_template,
3313 					.count = AES_GCM_4106_ENC_TEST_VECTORS
3316 					.vecs = aes_gcm_rfc4106_dec_tv_template,
3317 					.count = AES_GCM_4106_DEC_TEST_VECTORS
3322 		.alg = "rfc4309(ccm(aes))",
3323 		.test = alg_test_aead,
3328 					.vecs = aes_ccm_rfc4309_enc_tv_template,
3329 					.count = AES_CCM_4309_ENC_TEST_VECTORS
3332 					.vecs = aes_ccm_rfc4309_dec_tv_template,
3333 					.count = AES_CCM_4309_DEC_TEST_VECTORS
3338 		.alg = "rfc4543(gcm(aes))",
3339 		.test = alg_test_aead,
3343 					.vecs = aes_gcm_rfc4543_enc_tv_template,
3344 					.count = AES_GCM_4543_ENC_TEST_VECTORS
3347 					.vecs = aes_gcm_rfc4543_dec_tv_template,
3348 					.count = AES_GCM_4543_DEC_TEST_VECTORS
3354 		.test = alg_test_hash,
3357 				.vecs = rmd128_tv_template,
3358 				.count = RMD128_TEST_VECTORS
3363 		.test = alg_test_hash,
3366 				.vecs = rmd160_tv_template,
3367 				.count = RMD160_TEST_VECTORS
3372 		.test = alg_test_hash,
3375 				.vecs = rmd256_tv_template,
3376 				.count = RMD256_TEST_VECTORS
3381 		.test = alg_test_hash,
3384 				.vecs = rmd320_tv_template,
3385 				.count = RMD320_TEST_VECTORS
3390 		.test = alg_test_skcipher,
3394 					.vecs = salsa20_stream_enc_tv_template,
3395 					.count = SALSA20_STREAM_ENC_TEST_VECTORS
3401 		.test = alg_test_hash,
3405 				.vecs = sha1_tv_template,
3406 				.count = SHA1_TEST_VECTORS
3411 		.test = alg_test_hash,
3415 				.vecs = sha224_tv_template,
3416 				.count = SHA224_TEST_VECTORS
3421 		.test = alg_test_hash,
3425 				.vecs = sha256_tv_template,
3426 				.count = SHA256_TEST_VECTORS
3431 		.test = alg_test_hash,
3435 				.vecs = sha384_tv_template,
3436 				.count = SHA384_TEST_VECTORS
3441 		.test = alg_test_hash,
3445 				.vecs = sha512_tv_template,
3446 				.count = SHA512_TEST_VECTORS
3451 		.test = alg_test_hash,
3454 				.vecs = tgr128_tv_template,
3455 				.count = TGR128_TEST_VECTORS
3460 		.test = alg_test_hash,
3463 				.vecs = tgr160_tv_template,
3464 				.count = TGR160_TEST_VECTORS
3469 		.test = alg_test_hash,
3472 				.vecs = tgr192_tv_template,
3473 				.count = TGR192_TEST_VECTORS
3478 		.test = alg_test_hash,
3481 				.vecs = aes_vmac128_tv_template,
3482 				.count = VMAC_AES_TEST_VECTORS
3487 		.test = alg_test_hash,
3490 				.vecs = wp256_tv_template,
3491 				.count = WP256_TEST_VECTORS
3496 		.test = alg_test_hash,
3499 				.vecs = wp384_tv_template,
3500 				.count = WP384_TEST_VECTORS
3505 		.test = alg_test_hash,
3508 				.vecs = wp512_tv_template,
3509 				.count = WP512_TEST_VECTORS
3514 		.test = alg_test_hash,
3517 				.vecs = aes_xcbc128_tv_template,
3518 				.count = XCBC_AES_TEST_VECTORS
3523 		.test = alg_test_skcipher,
3528 					.vecs = aes_xts_enc_tv_template,
3529 					.count = AES_XTS_ENC_TEST_VECTORS
3532 					.vecs = aes_xts_dec_tv_template,
3533 					.count = AES_XTS_DEC_TEST_VECTORS
3538 		.alg = "xts(camellia)",
3539 		.test = alg_test_skcipher,
3543 					.vecs = camellia_xts_enc_tv_template,
3544 					.count = CAMELLIA_XTS_ENC_TEST_VECTORS
3547 					.vecs = camellia_xts_dec_tv_template,
3548 					.count = CAMELLIA_XTS_DEC_TEST_VECTORS
3553 		.alg = "xts(cast6)",
3554 		.test = alg_test_skcipher,
3558 					.vecs = cast6_xts_enc_tv_template,
3559 					.count = CAST6_XTS_ENC_TEST_VECTORS
3562 					.vecs = cast6_xts_dec_tv_template,
3563 					.count = CAST6_XTS_DEC_TEST_VECTORS
3568 		.alg = "xts(serpent)",
3569 		.test = alg_test_skcipher,
3573 					.vecs = serpent_xts_enc_tv_template,
3574 					.count = SERPENT_XTS_ENC_TEST_VECTORS
3577 					.vecs = serpent_xts_dec_tv_template,
3578 					.count = SERPENT_XTS_DEC_TEST_VECTORS
3583 		.alg = "xts(twofish)",
3584 		.test = alg_test_skcipher,
3588 					.vecs = tf_xts_enc_tv_template,
3589 					.count = TF_XTS_ENC_TEST_VECTORS
3592 					.vecs = tf_xts_dec_tv_template,
3593 					.count = TF_XTS_DEC_TEST_VECTORS
3599 		.test = alg_test_pcomp,
3604 					.vecs = zlib_comp_tv_template,
3605 					.count = ZLIB_COMP_TEST_VECTORS
3608 					.vecs = zlib_decomp_tv_template,
3609 					.count = ZLIB_DECOMP_TEST_VECTORS
/* One-shot guard so the order check below runs only on the first alg_test(). */
3616 static bool alg_test_descs_checked;
/*
 * alg_test_descs_check_order() - verify alg_test_descs[] is strictly sorted.
 *
 * The lookup in alg_find_test() is a binary search, so out-of-order or
 * duplicate entries would silently make algorithms untestable.  Walk
 * adjacent pairs and WARN (with a descriptive pr_warn) on any pair that is
 * misordered (diff > 0) or duplicated (diff == 0).
 */
3618 static void alg_test_descs_check_order(void)
3622 	/* only check once */
3623 	if (alg_test_descs_checked)
3626 	alg_test_descs_checked = true;
3628 	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3629 		int diff = strcmp(alg_test_descs[i - 1].alg,
3630 				  alg_test_descs[i].alg);
3632 		if (WARN_ON(diff > 0)) {
3633 			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3634 				alg_test_descs[i - 1].alg,
3635 				alg_test_descs[i].alg);
3638 		if (WARN_ON(diff == 0)) {
3639 			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3640 				alg_test_descs[i].alg);
/*
 * alg_find_test() - binary-search alg_test_descs[] for @alg.
 *
 * Relies on the table being sorted (see alg_test_descs_check_order()).
 * Returns the matching index, or a negative value when not found
 * (the not-found return path is elided in this dump).
 */
3645 static int alg_find_test(const char *alg)
3648 	int end = ARRAY_SIZE(alg_test_descs);
3650 	while (start < end) {
3651 		int i = (start + end) / 2;
3652 		int diff = strcmp(alg_test_descs[i].alg, alg);
/*
 * alg_test() - entry point: self-test a registered algorithm instance.
 *
 * @driver: implementation (driver) name;  @alg: generic algorithm name.
 *
 * Plain ciphers (CRYPTO_ALG_TYPE_CIPHER) are tested through their "ecb(alg)"
 * table entry.  Otherwise the table is searched for both the generic name
 * and the driver name, and each matching entry's .test hook runs (the driver
 * entry is skipped when it resolves to the same index as the generic one).
 * In FIPS mode: algorithms without .fips_allowed are rejected, any test
 * failure panics the kernel, and successes are logged.
 *
 * NOTE(review): elided dump — the branch braces, "test_done:" style labels
 * and the found/not-found fall-through between the two halves are not
 * visible here; code lines kept byte-identical.
 */
3670 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
/* Validate table ordering once before the first binary search. */
3676 	alg_test_descs_check_order();
3678 	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3679 		char nalg[CRYPTO_MAX_ALG_NAME];
/* Bare ciphers are looked up under their ecb() wrapping. */
3681 		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3683 			return -ENAMETOOLONG;
3685 		i = alg_find_test(nalg);
3689 		if (fips_enabled && !alg_test_descs[i].fips_allowed)
3692 		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
/* Non-cipher path: test under both the generic and the driver name. */
3696 	i = alg_find_test(alg);
3697 	j = alg_find_test(driver);
3701 	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3702 			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
3707 		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
/* Avoid running the same entry twice when driver == alg lookup. */
3709 	if (j >= 0 && j != i)
3710 		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
/* FIPS requires a hard failure on any self-test error. */
3714 	if (fips_enabled && rc)
3715 		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3717 	if (fips_enabled && !rc)
3718 		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
3723 	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3729 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3731 EXPORT_SYMBOL_GPL(alg_test);