/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <linux/err.h>
26 #include <linux/fips.h>
27 #include <linux/module.h>
28 #include <linux/scatterlist.h>
29 #include <linux/slab.h>
30 #include <linux/string.h>
31 #include <crypto/rng.h>
32 #include <crypto/drbg.h>
36 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
39 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
49 * Need slab memory for testing (size in number of pages).
54 * Indexes into the xbuf to simulate cross-page access.
66 * Used by test_cipher()
71 struct tcrypt_result {
72 struct completion completion;
/* Encrypt and decrypt vector sets for one AEAD algorithm. */
struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};
/* Encrypt and decrypt vector sets for one (block/skcipher) algorithm. */
struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};
/* Compression and decompression vector sets for one comp algorithm. */
struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};
/* Compression and decompression vector sets for one pcomp algorithm. */
struct pcomp_test_suite {
	struct {
		struct pcomp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};
/* Test vector set for one hash algorithm. */
struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};
/* Test vector set for one deterministic RNG (ANSI X9.31 style). */
struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};
/* Test vector set for one SP800-90A DRBG. */
struct drbg_test_suite {
	struct drbg_testvec *vecs;
	unsigned int count;
};
119 struct alg_test_desc {
121 int (*test)(const struct alg_test_desc *desc, const char *driver,
123 int fips_allowed; /* set if alg is allowed in fips mode */
126 struct aead_test_suite aead;
127 struct cipher_test_suite cipher;
128 struct comp_test_suite comp;
129 struct pcomp_test_suite pcomp;
130 struct hash_test_suite hash;
131 struct cprng_test_suite cprng;
132 struct drbg_test_suite drbg;
136 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
138 static void hexdump(unsigned char *buf, unsigned int len)
140 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
145 static void tcrypt_complete(struct crypto_async_request *req, int err)
147 struct tcrypt_result *res = req->data;
149 if (err == -EINPROGRESS)
153 complete(&res->completion);
156 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
160 for (i = 0; i < XBUFSIZE; i++) {
161 buf[i] = (void *)__get_free_page(GFP_KERNEL);
170 free_page((unsigned long)buf[i]);
175 static void testmgr_free_buf(char *buf[XBUFSIZE])
179 for (i = 0; i < XBUFSIZE; i++)
180 free_page((unsigned long)buf[i]);
183 static int wait_async_op(struct tcrypt_result *tr, int ret)
185 if (ret == -EINPROGRESS || ret == -EBUSY) {
186 wait_for_completion(&tr->completion);
187 reinit_completion(&tr->completion);
/*
 * __test_hash() — run one set of hash test vectors against an ahash tfm:
 * first over linear buffers (one-shot digest, or init/update/final when
 * use_digest is false), then over scatter-gather chunks laid out by
 * template[i].tap[] across the IDX page offsets.  align_offset misaligns
 * the linear buffers to exercise unaligned input handling.
 * NOTE(review): this extract is missing many interior lines (error paths,
 * goto labels, closing braces) and each line carries a stray leading
 * line-number token; code below is kept byte-for-byte as found.
 */
193 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
194 unsigned int tcount, bool use_digest,
195 const int align_offset)
197 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
198 unsigned int i, j, k, temp;
199 struct scatterlist sg[8];
202 struct ahash_request *req;
203 struct tcrypt_result tresult;
205 char *xbuf[XBUFSIZE];
/* Working buffers: digest output, bounce key, and page pool for sg data. */
208 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
211 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
214 if (testmgr_alloc_buf(xbuf))
217 init_completion(&tresult.completion);
219 req = ahash_request_alloc(tfm, GFP_KERNEL);
221 printk(KERN_ERR "alg: hash: Failed to allocate request for "
225 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
226 tcrypt_complete, &tresult);
/* Part one: linear buffers, optionally misaligned by align_offset. */
229 for (i = 0; i < tcount; i++) {
234 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
238 memset(result, 0, MAX_DIGEST_SIZE);
241 hash_buff += align_offset;
243 memcpy(hash_buff, template[i].plaintext, template[i].psize);
244 sg_init_one(&sg[0], hash_buff, template[i].psize);
246 if (template[i].ksize) {
247 crypto_ahash_clear_flags(tfm, ~0);
248 if (template[i].ksize > MAX_KEYLEN) {
249 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
250 j, algo, template[i].ksize, MAX_KEYLEN);
254 memcpy(key, template[i].key, template[i].ksize);
255 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
257 printk(KERN_ERR "alg: hash: setkey failed on "
258 "test %d for %s: ret=%d\n", j, algo,
264 ahash_request_set_crypt(req, sg, result, template[i].psize);
/* One-shot digest path vs. explicit init/update/final path. */
266 ret = wait_async_op(&tresult, crypto_ahash_digest(req));
268 pr_err("alg: hash: digest failed on test %d "
269 "for %s: ret=%d\n", j, algo, -ret);
273 ret = wait_async_op(&tresult, crypto_ahash_init(req));
275 pr_err("alt: hash: init failed on test %d "
276 "for %s: ret=%d\n", j, algo, -ret);
279 ret = wait_async_op(&tresult, crypto_ahash_update(req));
281 pr_err("alt: hash: update failed on test %d "
282 "for %s: ret=%d\n", j, algo, -ret);
285 ret = wait_async_op(&tresult, crypto_ahash_final(req));
287 pr_err("alt: hash: final failed on test %d "
288 "for %s: ret=%d\n", j, algo, -ret);
293 if (memcmp(result, template[i].digest,
294 crypto_ahash_digestsize(tfm))) {
295 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
297 hexdump(result, crypto_ahash_digestsize(tfm));
/* Part two: chunked (scatter-gather) tests across page boundaries. */
304 for (i = 0; i < tcount; i++) {
305 /* alignment tests are only done with continuous buffers */
306 if (align_offset != 0)
313 memset(result, 0, MAX_DIGEST_SIZE);
316 sg_init_table(sg, template[i].np);
318 for (k = 0; k < template[i].np; k++) {
319 if (WARN_ON(offset_in_page(IDX[k]) +
320 template[i].tap[k] > PAGE_SIZE))
323 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
324 offset_in_page(IDX[k]),
325 template[i].plaintext + temp,
328 temp += template[i].tap[k];
331 if (template[i].ksize) {
332 if (template[i].ksize > MAX_KEYLEN) {
333 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
334 j, algo, template[i].ksize, MAX_KEYLEN);
338 crypto_ahash_clear_flags(tfm, ~0);
339 memcpy(key, template[i].key, template[i].ksize);
340 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
343 printk(KERN_ERR "alg: hash: setkey "
344 "failed on chunking test %d "
345 "for %s: ret=%d\n", j, algo, -ret);
350 ahash_request_set_crypt(req, sg, result, template[i].psize);
351 ret = crypto_ahash_digest(req);
/* Async request: block until tcrypt_complete() signals, then re-arm. */
357 wait_for_completion(&tresult.completion);
358 reinit_completion(&tresult.completion);
364 printk(KERN_ERR "alg: hash: digest failed "
365 "on chunking test %d for %s: "
366 "ret=%d\n", j, algo, -ret);
370 if (memcmp(result, template[i].digest,
371 crypto_ahash_digestsize(tfm))) {
372 printk(KERN_ERR "alg: hash: Chunking test %d "
373 "failed for %s\n", j, algo);
374 hexdump(result, crypto_ahash_digestsize(tfm));
/* Common cleanup: request, page pool, key and result buffers. */
383 ahash_request_free(req);
385 testmgr_free_buf(xbuf);
392 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
393 unsigned int tcount, bool use_digest)
395 unsigned int alignmask;
398 ret = __test_hash(tfm, template, tcount, use_digest, 0);
402 /* test unaligned buffers, check with one byte offset */
403 ret = __test_hash(tfm, template, tcount, use_digest, 1);
407 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
409 /* Check if alignment mask for tfm is correctly set. */
410 ret = __test_hash(tfm, template, tcount, use_digest,
/*
 * __test_aead() — run AEAD test vectors (encrypt when enc != 0, otherwise
 * decrypt) against one tfm.  Two passes: linear buffers (optionally with a
 * separate destination when diff_dst, optionally misaligned by
 * align_offset), then scatter-gather chunking with the associated data
 * split by atap[] and the payload split by tap[] across the IDX offsets.
 * novrfy vectors are expected to fail authentication with -EBADMSG.
 * NOTE(review): the extract is missing many interior lines (allocation
 * checks, goto labels, closing braces) and each line carries a stray
 * leading line-number token; code below is kept byte-for-byte as found.
 */
419 static int __test_aead(struct crypto_aead *tfm, int enc,
420 struct aead_testvec *template, unsigned int tcount,
421 const bool diff_dst, const int align_offset)
423 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
424 unsigned int i, j, k, n, temp;
428 struct aead_request *req;
429 struct scatterlist *sg;
430 struct scatterlist *sgout;
432 struct tcrypt_result result;
433 unsigned int authsize, iv_len;
438 char *xbuf[XBUFSIZE];
439 char *xoutbuf[XBUFSIZE];
440 char *axbuf[XBUFSIZE];
/* Working storage: IV, bounce key, and page pools for src/dst/assoc. */
442 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
445 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
448 if (testmgr_alloc_buf(xbuf))
450 if (testmgr_alloc_buf(axbuf))
452 if (diff_dst && testmgr_alloc_buf(xoutbuf))
455 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
456 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
471 init_completion(&result.completion);
473 req = aead_request_alloc(tfm, GFP_KERNEL);
475 pr_err("alg: aead%s: Failed to allocate request for %s\n",
480 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
481 tcrypt_complete, &result);
/* Pass one: linear buffers, optionally misaligned / split src-dst. */
483 for (i = 0, j = 0; i < tcount; i++) {
489 /* some templates have no input data but they will
493 input += align_offset;
497 if (WARN_ON(align_offset + template[i].ilen >
498 PAGE_SIZE || template[i].alen > PAGE_SIZE))
501 memcpy(input, template[i].input, template[i].ilen);
502 memcpy(assoc, template[i].assoc, template[i].alen);
503 iv_len = crypto_aead_ivsize(tfm);
505 memcpy(iv, template[i].iv, iv_len);
507 memset(iv, 0, iv_len);
509 crypto_aead_clear_flags(tfm, ~0);
511 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
513 if (template[i].klen > MAX_KEYLEN) {
514 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
515 d, j, algo, template[i].klen,
520 memcpy(key, template[i].key, template[i].klen);
522 ret = crypto_aead_setkey(tfm, key, template[i].klen);
/* Vectors with .fail expect setkey to be rejected. */
523 if (!ret == template[i].fail) {
524 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
525 d, j, algo, crypto_aead_get_flags(tfm));
/* Tag length is the |rlen - ilen| difference of the vector. */
530 authsize = abs(template[i].rlen - template[i].ilen);
531 ret = crypto_aead_setauthsize(tfm, authsize);
533 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
534 d, authsize, j, algo);
538 k = !!template[i].alen;
539 sg_init_table(sg, k + 1);
540 sg_set_buf(&sg[0], assoc, template[i].alen);
541 sg_set_buf(&sg[k], input,
542 template[i].ilen + (enc ? authsize : 0));
546 sg_init_table(sgout, k + 1);
547 sg_set_buf(&sgout[0], assoc, template[i].alen);
550 output += align_offset;
551 sg_set_buf(&sgout[k], output,
552 template[i].rlen + (enc ? 0 : authsize));
555 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
556 template[i].ilen, iv);
558 aead_request_set_ad(req, template[i].alen);
560 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
564 if (template[i].novrfy) {
565 /* verification was supposed to fail */
566 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
568 /* so really, we got a bad message */
/* Async request: wait for tcrypt_complete(), then re-arm. */
575 wait_for_completion(&result.completion);
576 reinit_completion(&result.completion);
581 if (template[i].novrfy)
582 /* verification failure was expected */
586 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
587 d, e, j, algo, -ret);
592 if (memcmp(q, template[i].result, template[i].rlen)) {
593 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
595 hexdump(q, template[i].rlen);
/* Pass two: scatter-gather chunking across page-crossing offsets. */
601 for (i = 0, j = 0; i < tcount; i++) {
602 /* alignment tests are only done with continuous buffers */
603 if (align_offset != 0)
612 memcpy(iv, template[i].iv, MAX_IVLEN);
614 memset(iv, 0, MAX_IVLEN);
616 crypto_aead_clear_flags(tfm, ~0);
618 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
619 if (template[i].klen > MAX_KEYLEN) {
620 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
621 d, j, algo, template[i].klen, MAX_KEYLEN);
625 memcpy(key, template[i].key, template[i].klen);
627 ret = crypto_aead_setkey(tfm, key, template[i].klen);
628 if (!ret == template[i].fail) {
629 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
630 d, j, algo, crypto_aead_get_flags(tfm));
635 authsize = abs(template[i].rlen - template[i].ilen);
638 sg_init_table(sg, template[i].anp + template[i].np);
640 sg_init_table(sgout, template[i].anp + template[i].np);
/* Lay out associated data according to atap[]. */
643 for (k = 0, temp = 0; k < template[i].anp; k++) {
644 if (WARN_ON(offset_in_page(IDX[k]) +
645 template[i].atap[k] > PAGE_SIZE))
648 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
649 offset_in_page(IDX[k]),
650 template[i].assoc + temp,
651 template[i].atap[k]),
652 template[i].atap[k]);
654 sg_set_buf(&sgout[k],
655 axbuf[IDX[k] >> PAGE_SHIFT] +
656 offset_in_page(IDX[k]),
657 template[i].atap[k]);
658 temp += template[i].atap[k];
/* Lay out the payload according to tap[], src and (optionally) dst. */
661 for (k = 0, temp = 0; k < template[i].np; k++) {
662 if (WARN_ON(offset_in_page(IDX[k]) +
663 template[i].tap[k] > PAGE_SIZE))
666 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
667 memcpy(q, template[i].input + temp, template[i].tap[k]);
668 sg_set_buf(&sg[template[i].anp + k],
669 q, template[i].tap[k]);
672 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
673 offset_in_page(IDX[k]);
675 memset(q, 0, template[i].tap[k]);
677 sg_set_buf(&sgout[template[i].anp + k],
678 q, template[i].tap[k]);
681 n = template[i].tap[k];
682 if (k == template[i].np - 1 && enc)
684 if (offset_in_page(q) + n < PAGE_SIZE)
687 temp += template[i].tap[k];
690 ret = crypto_aead_setauthsize(tfm, authsize);
692 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
693 d, authsize, j, algo);
/* On encrypt the last sg entry must also hold the appended tag. */
698 if (WARN_ON(sg[template[i].anp + k - 1].offset +
699 sg[template[i].anp + k - 1].length +
700 authsize > PAGE_SIZE)) {
706 sgout[template[i].anp + k - 1].length +=
708 sg[template[i].anp + k - 1].length += authsize;
711 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
715 aead_request_set_ad(req, template[i].alen);
717 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
721 if (template[i].novrfy) {
722 /* verification was supposed to fail */
723 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
725 /* so really, we got a bad message */
732 wait_for_completion(&result.completion);
733 reinit_completion(&result.completion);
738 if (template[i].novrfy)
739 /* verification failure was expected */
743 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
744 d, e, j, algo, -ret);
/* Verify each chunk and check the guard bytes past it weren't clobbered. */
749 for (k = 0, temp = 0; k < template[i].np; k++) {
751 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
752 offset_in_page(IDX[k]);
754 q = xbuf[IDX[k] >> PAGE_SHIFT] +
755 offset_in_page(IDX[k]);
757 n = template[i].tap[k];
758 if (k == template[i].np - 1)
759 n += enc ? authsize : -authsize;
761 if (memcmp(q, template[i].result + temp, n)) {
762 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
769 if (k == template[i].np - 1 && !enc) {
771 memcmp(q, template[i].input +
777 for (n = 0; offset_in_page(q + n) && q[n]; n++)
781 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
782 d, j, e, k, algo, n);
787 temp += template[i].tap[k];
/* Common cleanup of request and page pools. */
794 aead_request_free(req);
798 testmgr_free_buf(xoutbuf);
800 testmgr_free_buf(axbuf);
802 testmgr_free_buf(xbuf);
809 static int test_aead(struct crypto_aead *tfm, int enc,
810 struct aead_testvec *template, unsigned int tcount)
812 unsigned int alignmask;
815 /* test 'dst == src' case */
816 ret = __test_aead(tfm, enc, template, tcount, false, 0);
820 /* test 'dst != src' case */
821 ret = __test_aead(tfm, enc, template, tcount, true, 0);
825 /* test unaligned buffers, check with one byte offset */
826 ret = __test_aead(tfm, enc, template, tcount, true, 1);
830 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
832 /* Check if alignment mask for tfm is correctly set. */
833 ret = __test_aead(tfm, enc, template, tcount, true,
/*
 * test_cipher() — exercise a single-block (crypto_cipher) transform:
 * for each vector, set the key (honouring .fail/.wk expectations), run
 * encrypt_one/decrypt_one block by block over the linear buffer, and
 * compare against the expected result.
 * NOTE(review): the extract is missing interior lines (loop close braces,
 * goto labels) and each line carries a stray leading line-number token;
 * code below is kept byte-for-byte as found.
 */
842 static int test_cipher(struct crypto_cipher *tfm, int enc,
843 struct cipher_testvec *template, unsigned int tcount)
845 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
846 unsigned int i, j, k;
850 char *xbuf[XBUFSIZE];
853 if (testmgr_alloc_buf(xbuf))
862 for (i = 0; i < tcount; i++) {
869 if (WARN_ON(template[i].ilen > PAGE_SIZE))
873 memcpy(data, template[i].input, template[i].ilen);
875 crypto_cipher_clear_flags(tfm, ~0);
877 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
879 ret = crypto_cipher_setkey(tfm, template[i].key,
/* Vectors with .fail expect setkey to be rejected. */
881 if (!ret == template[i].fail) {
882 printk(KERN_ERR "alg: cipher: setkey failed "
883 "on test %d for %s: flags=%x\n", j,
884 algo, crypto_cipher_get_flags(tfm));
/* Process the buffer one cipher block at a time, in place. */
889 for (k = 0; k < template[i].ilen;
890 k += crypto_cipher_blocksize(tfm)) {
892 crypto_cipher_encrypt_one(tfm, data + k,
895 crypto_cipher_decrypt_one(tfm, data + k,
900 if (memcmp(q, template[i].result, template[i].rlen)) {
901 printk(KERN_ERR "alg: cipher: Test %d failed "
902 "on %s for %s\n", j, e, algo);
903 hexdump(q, template[i].rlen);
912 testmgr_free_buf(xbuf);
/*
 * __test_skcipher() — run skcipher test vectors (encrypt when enc != 0)
 * against one ablkcipher tfm.  Two passes: linear buffers (optionally
 * out-of-place when diff_dst, optionally misaligned by align_offset),
 * then scatter-gather chunking split by tap[] across the IDX offsets
 * with guard bytes checked past each chunk.
 * NOTE(review): the extract is missing many interior lines (allocation
 * checks, goto labels, closing braces) and each line carries a stray
 * leading line-number token; code below is kept byte-for-byte as found.
 */
917 static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
918 struct cipher_testvec *template, unsigned int tcount,
919 const bool diff_dst, const int align_offset)
922 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
923 unsigned int i, j, k, n, temp;
925 struct ablkcipher_request *req;
926 struct scatterlist sg[8];
927 struct scatterlist sgout[8];
929 struct tcrypt_result result;
932 char *xbuf[XBUFSIZE];
933 char *xoutbuf[XBUFSIZE];
936 if (testmgr_alloc_buf(xbuf))
939 if (diff_dst && testmgr_alloc_buf(xoutbuf))
952 init_completion(&result.completion);
954 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
956 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
961 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
962 tcrypt_complete, &result);
/* Pass one: linear buffers; chunk-only vectors are skipped here. */
965 for (i = 0; i < tcount; i++) {
966 if (template[i].np && !template[i].also_non_np)
970 memcpy(iv, template[i].iv, MAX_IVLEN);
972 memset(iv, 0, MAX_IVLEN);
976 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
980 data += align_offset;
981 memcpy(data, template[i].input, template[i].ilen);
983 crypto_ablkcipher_clear_flags(tfm, ~0);
985 crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
987 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
/* Vectors with .fail expect setkey to be rejected. */
989 if (!ret == template[i].fail) {
990 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
991 d, j, algo, crypto_ablkcipher_get_flags(tfm));
996 sg_init_one(&sg[0], data, template[i].ilen);
999 data += align_offset;
1000 sg_init_one(&sgout[0], data, template[i].ilen);
1003 ablkcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1004 template[i].ilen, iv);
1005 ret = enc ? crypto_ablkcipher_encrypt(req) :
1006 crypto_ablkcipher_decrypt(req);
/* Async request: wait for tcrypt_complete(), then re-arm. */
1013 wait_for_completion(&result.completion);
1014 reinit_completion(&result.completion);
1020 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1021 d, e, j, algo, -ret);
1026 if (memcmp(q, template[i].result, template[i].rlen)) {
1027 pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
1029 hexdump(q, template[i].rlen);
/* Pass two: scatter-gather chunking across page-crossing offsets. */
1036 for (i = 0; i < tcount; i++) {
1037 /* alignment tests are only done with continuous buffers */
1038 if (align_offset != 0)
1041 if (!template[i].np)
1045 memcpy(iv, template[i].iv, MAX_IVLEN);
1047 memset(iv, 0, MAX_IVLEN);
1050 crypto_ablkcipher_clear_flags(tfm, ~0);
1052 crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1054 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
1056 if (!ret == template[i].fail) {
1057 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1058 d, j, algo, crypto_ablkcipher_get_flags(tfm));
1065 sg_init_table(sg, template[i].np);
1067 sg_init_table(sgout, template[i].np);
/* Copy each tap[] chunk into its IDX page slot and plant a guard byte. */
1068 for (k = 0; k < template[i].np; k++) {
1069 if (WARN_ON(offset_in_page(IDX[k]) +
1070 template[i].tap[k] > PAGE_SIZE))
1073 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1075 memcpy(q, template[i].input + temp, template[i].tap[k]);
1077 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1078 q[template[i].tap[k]] = 0;
1080 sg_set_buf(&sg[k], q, template[i].tap[k]);
1082 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1083 offset_in_page(IDX[k]);
1085 sg_set_buf(&sgout[k], q, template[i].tap[k]);
1087 memset(q, 0, template[i].tap[k]);
1088 if (offset_in_page(q) +
1089 template[i].tap[k] < PAGE_SIZE)
1090 q[template[i].tap[k]] = 0;
1093 temp += template[i].tap[k];
1096 ablkcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1097 template[i].ilen, iv);
1099 ret = enc ? crypto_ablkcipher_encrypt(req) :
1100 crypto_ablkcipher_decrypt(req);
1107 wait_for_completion(&result.completion);
1108 reinit_completion(&result.completion);
1114 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1115 d, e, j, algo, -ret);
/* Verify each chunk and that the guard bytes past it stayed zero. */
1121 for (k = 0; k < template[i].np; k++) {
1123 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1124 offset_in_page(IDX[k]);
1126 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1127 offset_in_page(IDX[k]);
1129 if (memcmp(q, template[i].result + temp,
1130 template[i].tap[k])) {
1131 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1133 hexdump(q, template[i].tap[k]);
1137 q += template[i].tap[k];
1138 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1141 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1142 d, j, e, k, algo, n);
1146 temp += template[i].tap[k];
/* Common cleanup of request and page pools. */
1153 ablkcipher_request_free(req);
1155 testmgr_free_buf(xoutbuf);
1157 testmgr_free_buf(xbuf);
1162 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
1163 struct cipher_testvec *template, unsigned int tcount)
1165 unsigned int alignmask;
1168 /* test 'dst == src' case */
1169 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1173 /* test 'dst != src' case */
1174 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1178 /* test unaligned buffers, check with one byte offset */
1179 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1183 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1185 /* Check if alignment mask for tfm is correctly set. */
1186 ret = __test_skcipher(tfm, enc, template, tcount, true,
/*
 * test_comp() — run synchronous compression vectors (ctemplate) and
 * decompression vectors (dtemplate) against one crypto_comp tfm,
 * checking both the produced length and the bytes.
 * NOTE(review): the extract is missing interior lines (error paths,
 * closing braces) and each line carries a stray leading line-number
 * token; code below is kept byte-for-byte as found.
 */
1195 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1196 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1198 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1200 char result[COMP_BUF_SIZE];
/* Compression vectors. */
1203 for (i = 0; i < ctcount; i++) {
1205 unsigned int dlen = COMP_BUF_SIZE;
1207 memset(result, 0, sizeof (result));
1209 ilen = ctemplate[i].inlen;
1210 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1211 ilen, result, &dlen);
1213 printk(KERN_ERR "alg: comp: compression failed "
1214 "on test %d for %s: ret=%d\n", i + 1, algo,
1219 if (dlen != ctemplate[i].outlen) {
1220 printk(KERN_ERR "alg: comp: Compression test %d "
1221 "failed for %s: output len = %d\n", i + 1, algo,
1227 if (memcmp(result, ctemplate[i].output, dlen)) {
1228 printk(KERN_ERR "alg: comp: Compression test %d "
1229 "failed for %s\n", i + 1, algo);
1230 hexdump(result, dlen);
/* Decompression vectors. */
1236 for (i = 0; i < dtcount; i++) {
1238 unsigned int dlen = COMP_BUF_SIZE;
1240 memset(result, 0, sizeof (result));
1242 ilen = dtemplate[i].inlen;
1243 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1244 ilen, result, &dlen);
1246 printk(KERN_ERR "alg: comp: decompression failed "
1247 "on test %d for %s: ret=%d\n", i + 1, algo,
1252 if (dlen != dtemplate[i].outlen) {
1253 printk(KERN_ERR "alg: comp: Decompression test %d "
1254 "failed for %s: output len = %d\n", i + 1, algo,
1260 if (memcmp(result, dtemplate[i].output, dlen)) {
1261 printk(KERN_ERR "alg: comp: Decompression test %d "
1262 "failed for %s\n", i + 1, algo);
1263 hexdump(result, dlen);
/*
 * test_pcomp() — run partial (streaming) compression/decompression
 * vectors against one crypto_pcomp tfm.  Each vector is deliberately fed
 * in two halves of input and output space to exercise the update/final
 * streaming interface; length, returned byte count ("produced") and the
 * bytes themselves are all checked.
 * NOTE(review): the extract is missing interior lines (produced +=
 * accounting, error returns, closing braces) and each line carries a
 * stray leading line-number token; code below is kept byte-for-byte as
 * found.
 */
1275 static int test_pcomp(struct crypto_pcomp *tfm,
1276 struct pcomp_testvec *ctemplate,
1277 struct pcomp_testvec *dtemplate, int ctcount,
1280 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1282 char result[COMP_BUF_SIZE];
/* Streaming compression vectors. */
1285 for (i = 0; i < ctcount; i++) {
1286 struct comp_request req;
1287 unsigned int produced = 0;
1289 res = crypto_compress_setup(tfm, ctemplate[i].params,
1290 ctemplate[i].paramsize);
1292 pr_err("alg: pcomp: compression setup failed on test "
1293 "%d for %s: error=%d\n", i + 1, algo, res);
1297 res = crypto_compress_init(tfm);
1299 pr_err("alg: pcomp: compression init failed on test "
1300 "%d for %s: error=%d\n", i + 1, algo, res);
1304 memset(result, 0, sizeof(result));
/* Feed only the first half of input/output to force streaming. */
1306 req.next_in = ctemplate[i].input;
1307 req.avail_in = ctemplate[i].inlen / 2;
1308 req.next_out = result;
1309 req.avail_out = ctemplate[i].outlen / 2;
1311 res = crypto_compress_update(tfm, &req);
1312 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1313 pr_err("alg: pcomp: compression update failed on test "
1314 "%d for %s: error=%d\n", i + 1, algo, res);
1320 /* Add remaining input data */
1321 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1323 res = crypto_compress_update(tfm, &req);
1324 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1325 pr_err("alg: pcomp: compression update failed on test "
1326 "%d for %s: error=%d\n", i + 1, algo, res);
1332 /* Provide remaining output space */
1333 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1335 res = crypto_compress_final(tfm, &req);
1337 pr_err("alg: pcomp: compression final failed on test "
1338 "%d for %s: error=%d\n", i + 1, algo, res);
1343 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1344 pr_err("alg: comp: Compression test %d failed for %s: "
1345 "output len = %d (expected %d)\n", i + 1, algo,
1346 COMP_BUF_SIZE - req.avail_out,
1347 ctemplate[i].outlen);
1351 if (produced != ctemplate[i].outlen) {
1352 pr_err("alg: comp: Compression test %d failed for %s: "
1353 "returned len = %u (expected %d)\n", i + 1,
1354 algo, produced, ctemplate[i].outlen);
1358 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1359 pr_err("alg: pcomp: Compression test %d failed for "
1360 "%s\n", i + 1, algo);
1361 hexdump(result, ctemplate[i].outlen);
/* Streaming decompression vectors — mirror of the pass above. */
1366 for (i = 0; i < dtcount; i++) {
1367 struct comp_request req;
1368 unsigned int produced = 0;
1370 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1371 dtemplate[i].paramsize);
1373 pr_err("alg: pcomp: decompression setup failed on "
1374 "test %d for %s: error=%d\n", i + 1, algo, res);
1378 res = crypto_decompress_init(tfm);
1380 pr_err("alg: pcomp: decompression init failed on test "
1381 "%d for %s: error=%d\n", i + 1, algo, res);
1385 memset(result, 0, sizeof(result));
1387 req.next_in = dtemplate[i].input;
1388 req.avail_in = dtemplate[i].inlen / 2;
1389 req.next_out = result;
1390 req.avail_out = dtemplate[i].outlen / 2;
1392 res = crypto_decompress_update(tfm, &req);
1393 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1394 pr_err("alg: pcomp: decompression update failed on "
1395 "test %d for %s: error=%d\n", i + 1, algo, res);
1401 /* Add remaining input data */
1402 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1404 res = crypto_decompress_update(tfm, &req);
1405 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1406 pr_err("alg: pcomp: decompression update failed on "
1407 "test %d for %s: error=%d\n", i + 1, algo, res);
1413 /* Provide remaining output space */
1414 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1416 res = crypto_decompress_final(tfm, &req);
1417 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1418 pr_err("alg: pcomp: decompression final failed on "
1419 "test %d for %s: error=%d\n", i + 1, algo, res);
1425 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1426 pr_err("alg: comp: Decompression test %d failed for "
1427 "%s: output len = %d (expected %d)\n", i + 1,
1428 algo, COMP_BUF_SIZE - req.avail_out,
1429 dtemplate[i].outlen);
1433 if (produced != dtemplate[i].outlen) {
1434 pr_err("alg: comp: Decompression test %d failed for "
1435 "%s: returned len = %u (expected %d)\n", i + 1,
1436 algo, produced, dtemplate[i].outlen);
1440 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1441 pr_err("alg: pcomp: Decompression test %d failed for "
1442 "%s\n", i + 1, algo);
1443 hexdump(result, dtemplate[i].outlen);
/*
 * test_cprng() — run ANSI X9.31-style PRNG vectors: seed the RNG with the
 * concatenation V || key || DT from the vector, pull template[i].loops
 * batches of random bytes, and compare the final batch to the expected
 * result.
 * NOTE(review): the extract is missing interior lines (error returns,
 * closing braces, the 32-byte result declaration) and each line carries a
 * stray leading line-number token; code below is kept byte-for-byte as
 * found.
 */
1452 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1453 unsigned int tcount)
1455 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1456 int err = 0, i, j, seedsize;
1460 seedsize = crypto_rng_seedsize(tfm);
1462 seed = kmalloc(seedsize, GFP_KERNEL);
1464 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1469 for (i = 0; i < tcount; i++) {
1470 memset(result, 0, 32);
/* Seed layout is V || key || DT, per the vector's lengths. */
1472 memcpy(seed, template[i].v, template[i].vlen);
1473 memcpy(seed + template[i].vlen, template[i].key,
1475 memcpy(seed + template[i].vlen + template[i].klen,
1476 template[i].dt, template[i].dtlen);
1478 err = crypto_rng_reset(tfm, seed, seedsize);
1480 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1485 for (j = 0; j < template[i].loops; j++) {
1486 err = crypto_rng_get_bytes(tfm, result,
1489 printk(KERN_ERR "alg: cprng: Failed to obtain "
1490 "the correct amount of random data for "
1491 "%s (requested %d)\n", algo,
1497 err = memcmp(result, template[i].result,
1500 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1502 hexdump(result, template[i].rlen);
1513 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1516 struct crypto_aead *tfm;
1519 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask);
1521 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1522 "%ld\n", driver, PTR_ERR(tfm));
1523 return PTR_ERR(tfm);
1526 if (desc->suite.aead.enc.vecs) {
1527 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1528 desc->suite.aead.enc.count);
1533 if (!err && desc->suite.aead.dec.vecs)
1534 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1535 desc->suite.aead.dec.count);
1538 crypto_free_aead(tfm);
1542 static int alg_test_cipher(const struct alg_test_desc *desc,
1543 const char *driver, u32 type, u32 mask)
1545 struct crypto_cipher *tfm;
1548 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1550 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1551 "%s: %ld\n", driver, PTR_ERR(tfm));
1552 return PTR_ERR(tfm);
1555 if (desc->suite.cipher.enc.vecs) {
1556 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1557 desc->suite.cipher.enc.count);
1562 if (desc->suite.cipher.dec.vecs)
1563 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1564 desc->suite.cipher.dec.count);
1567 crypto_free_cipher(tfm);
1571 static int alg_test_skcipher(const struct alg_test_desc *desc,
1572 const char *driver, u32 type, u32 mask)
1574 struct crypto_ablkcipher *tfm;
1577 tfm = crypto_alloc_ablkcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1579 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1580 "%s: %ld\n", driver, PTR_ERR(tfm));
1581 return PTR_ERR(tfm);
1584 if (desc->suite.cipher.enc.vecs) {
1585 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1586 desc->suite.cipher.enc.count);
1591 if (desc->suite.cipher.dec.vecs)
1592 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1593 desc->suite.cipher.dec.count);
1596 crypto_free_ablkcipher(tfm);
1600 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1603 struct crypto_comp *tfm;
1606 tfm = crypto_alloc_comp(driver, type, mask);
1608 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1609 "%ld\n", driver, PTR_ERR(tfm));
1610 return PTR_ERR(tfm);
1613 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1614 desc->suite.comp.decomp.vecs,
1615 desc->suite.comp.comp.count,
1616 desc->suite.comp.decomp.count);
1618 crypto_free_comp(tfm);
1622 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1625 struct crypto_pcomp *tfm;
1628 tfm = crypto_alloc_pcomp(driver, type, mask);
1630 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1631 driver, PTR_ERR(tfm));
1632 return PTR_ERR(tfm);
1635 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1636 desc->suite.pcomp.decomp.vecs,
1637 desc->suite.pcomp.comp.count,
1638 desc->suite.pcomp.decomp.count);
1640 crypto_free_pcomp(tfm);
/*
 * alg_test_hash() - run the hash test vectors for @driver through the
 * ahash interface.  The vectors are run twice, with the final boolean
 * flipped between runs; presumably this selects the one-shot digest()
 * path vs. the incremental update/final path -- confirm against
 * test_hash()'s definition, which is not visible in this excerpt.
 */
1644 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1647 struct crypto_ahash *tfm;
1650 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1652 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1653 "%ld\n", driver, PTR_ERR(tfm));
1654 return PTR_ERR(tfm);
1657 err = test_hash(tfm, desc->suite.hash.vecs,
1658 desc->suite.hash.count, true);
1660 err = test_hash(tfm, desc->suite.hash.vecs,
1661 desc->suite.hash.count, false);
1663 crypto_free_ahash(tfm);
/*
 * alg_test_crc32c() - crc32c gets the generic hash tests plus an extra
 * shash-level sanity check: the internal CRC state is seeded directly
 * with the magic constant 420553207, then final() is expected to yield
 * its bitwise complement -- presumably verifying the output-inversion
 * step of the crc32c finalisation; confirm against the crc32c shash
 * implementation.
 * NOTE(review): elided excerpt -- the do/while scaffolding and error
 * branches around these lines are not visible here.
 */
1667 static int alg_test_crc32c(const struct alg_test_desc *desc,
1668 const char *driver, u32 type, u32 mask)
1670 struct crypto_shash *tfm;
/* Standard vector-driven hash tests run first. */
1674 err = alg_test_hash(desc, driver, type, mask);
1678 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1680 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1681 "%ld\n", driver, PTR_ERR(tfm));
1687 SHASH_DESC_ON_STACK(shash, tfm);
1688 u32 *ctx = (u32 *)shash_desc_ctx(shash);
/* Seed the CRC state directly, bypassing init/update. */
1693 *ctx = le32_to_cpu(420553207);
1694 err = crypto_shash_final(shash, (u8 *)&val);
1696 printk(KERN_ERR "alg: crc32c: Operation failed for "
1697 "%s: %d\n", driver, err);
1701 if (val != ~420553207) {
1702 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1703 "%d\n", driver, val);
1708 crypto_free_shash(tfm);
/*
 * alg_test_cprng() - run deterministic-RNG test vectors for @driver
 * (e.g. ansi_cprng) via the crypto_rng interface.
 * NOTE(review): elided excerpt -- error-path braces not visible here.
 */
1714 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1717 struct crypto_rng *rng;
1720 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1722 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1723 "%ld\n", driver, PTR_ERR(rng));
1724 return PTR_ERR(rng);
1727 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1729 crypto_free_rng(rng);
/*
 * drbg_cavs_test() - run one DRBG CAVS-style vector against @driver.
 * Instantiates the DRBG with the vector's test entropy and
 * personalization string, then generates two blocks of output with
 * additional input.  When @pr (prediction resistance) is set, the
 * *_test variants are used so fresh test entropy can be injected per
 * request; otherwise the normal generate path is used.  The second
 * generated block is compared against the vector's expected output.
 * NOTE(review): elided excerpt -- NULL-check on the kzalloc'd buffer,
 * the pr/non-pr if/else scaffolding and the cleanup labels are not
 * visible here.
 */
1735 static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1736 const char *driver, u32 type, u32 mask)
1739 struct crypto_rng *drng;
1740 struct drbg_test_data test_data;
1741 struct drbg_string addtl, pers, testentropy;
/* Output buffer sized to the vector's expected output length. */
1742 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1747 drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1749 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
/* Instantiate with deterministic test entropy + personalization. */
1755 test_data.testentropy = &testentropy;
1756 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1757 drbg_string_fill(&pers, test->pers, test->perslen);
1758 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1760 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
/* First generate call: additional input A (test entropy A if pr). */
1764 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1766 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1767 ret = crypto_drbg_get_bytes_addtl_test(drng,
1768 buf, test->expectedlen, &addtl, &test_data);
1770 ret = crypto_drbg_get_bytes_addtl(drng,
1771 buf, test->expectedlen, &addtl);
1774 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1775 "driver %s\n", driver);
/* Second generate call: additional input B (test entropy B if pr). */
1779 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1781 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1782 ret = crypto_drbg_get_bytes_addtl_test(drng,
1783 buf, test->expectedlen, &addtl, &test_data);
1785 ret = crypto_drbg_get_bytes_addtl(drng,
1786 buf, test->expectedlen, &addtl);
1789 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1790 "driver %s\n", driver);
/* Pass iff the second output block matches the expected vector. */
1794 ret = memcmp(test->expected, buf, test->expectedlen);
1797 crypto_free_rng(drng);
/*
 * alg_test_drbg() - run all DRBG vectors for @driver.  Prediction
 * resistance is inferred from the driver name: anything prefixed
 * "drbg_pr_" gets pr = 1 (the assignment itself is elided from this
 * excerpt), which drbg_cavs_test() uses to pick the test-entropy
 * generate path.
 */
1803 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1809 struct drbg_testvec *template = desc->suite.drbg.vecs;
1810 unsigned int tcount = desc->suite.drbg.count;
1812 if (0 == memcmp(driver, "drbg_pr_", 8))
1815 for (i = 0; i < tcount; i++) {
1816 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1818 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
/*
 * alg_test_null() - placeholder test used by table entries that need
 * no vectors (helper/internal implementations, null algorithms,
 * DRBG variants covered by a sibling entry).  Body is elided here;
 * presumably it just returns success -- confirm in the full source.
 */
1828 static int alg_test_null(const struct alg_test_desc *desc,
1829 const char *driver, u32 type, u32 mask)
/*
 * alg_test_descs[] - the master registry mapping algorithm names to
 * their test routine and test-vector suites.  alg_find_test() binary-
 * searches this array by .alg, so it MUST stay sorted by algorithm
 * name (alg_test_descs_check_order() warns at runtime if it is not).
 * Entries using alg_test_null are deliberately untested here: internal
 * helper implementations (__*, cryptd(*)), null algorithms, and DRBG
 * variants whose backend is already covered by a sibling entry.
 * NOTE(review): elided excerpt -- the per-entry braces and .suite
 * nesting lines of the original initializer are not visible here.
 */
1834 /* Please keep this list sorted by algorithm name. */
1835 static const struct alg_test_desc alg_test_descs[] = {
1837 .alg = "__cbc-cast5-avx",
1838 .test = alg_test_null,
1840 .alg = "__cbc-cast6-avx",
1841 .test = alg_test_null,
1843 .alg = "__cbc-serpent-avx",
1844 .test = alg_test_null,
1846 .alg = "__cbc-serpent-avx2",
1847 .test = alg_test_null,
1849 .alg = "__cbc-serpent-sse2",
1850 .test = alg_test_null,
1852 .alg = "__cbc-twofish-avx",
1853 .test = alg_test_null,
1855 .alg = "__driver-cbc-aes-aesni",
1856 .test = alg_test_null,
1859 .alg = "__driver-cbc-camellia-aesni",
1860 .test = alg_test_null,
1862 .alg = "__driver-cbc-camellia-aesni-avx2",
1863 .test = alg_test_null,
1865 .alg = "__driver-cbc-cast5-avx",
1866 .test = alg_test_null,
1868 .alg = "__driver-cbc-cast6-avx",
1869 .test = alg_test_null,
1871 .alg = "__driver-cbc-serpent-avx",
1872 .test = alg_test_null,
1874 .alg = "__driver-cbc-serpent-avx2",
1875 .test = alg_test_null,
1877 .alg = "__driver-cbc-serpent-sse2",
1878 .test = alg_test_null,
1880 .alg = "__driver-cbc-twofish-avx",
1881 .test = alg_test_null,
1883 .alg = "__driver-ecb-aes-aesni",
1884 .test = alg_test_null,
1887 .alg = "__driver-ecb-camellia-aesni",
1888 .test = alg_test_null,
1890 .alg = "__driver-ecb-camellia-aesni-avx2",
1891 .test = alg_test_null,
1893 .alg = "__driver-ecb-cast5-avx",
1894 .test = alg_test_null,
1896 .alg = "__driver-ecb-cast6-avx",
1897 .test = alg_test_null,
1899 .alg = "__driver-ecb-serpent-avx",
1900 .test = alg_test_null,
1902 .alg = "__driver-ecb-serpent-avx2",
1903 .test = alg_test_null,
1905 .alg = "__driver-ecb-serpent-sse2",
1906 .test = alg_test_null,
1908 .alg = "__driver-ecb-twofish-avx",
1909 .test = alg_test_null,
1911 .alg = "__ghash-pclmulqdqni",
1912 .test = alg_test_null,
1915 .alg = "ansi_cprng",
1916 .test = alg_test_cprng,
1920 .vecs = ansi_cprng_aes_tv_template,
1921 .count = ANSI_CPRNG_AES_TEST_VECTORS
1925 .alg = "authenc(hmac(md5),ecb(cipher_null))",
1926 .test = alg_test_aead,
1931 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
1932 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
1935 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
1936 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
1941 .alg = "authenc(hmac(sha1),cbc(aes))",
1942 .test = alg_test_aead,
1948 hmac_sha1_aes_cbc_enc_tv_temp,
1950 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
1955 .alg = "authenc(hmac(sha1),cbc(des))",
1956 .test = alg_test_aead,
1962 hmac_sha1_des_cbc_enc_tv_temp,
1964 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
1969 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
1970 .test = alg_test_aead,
1976 hmac_sha1_des3_ede_cbc_enc_tv_temp,
1978 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
1983 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
1984 .test = alg_test_aead,
1990 hmac_sha1_ecb_cipher_null_enc_tv_temp,
1992 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
1996 hmac_sha1_ecb_cipher_null_dec_tv_temp,
1998 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2003 .alg = "authenc(hmac(sha224),cbc(des))",
2004 .test = alg_test_aead,
2010 hmac_sha224_des_cbc_enc_tv_temp,
2012 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2017 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2018 .test = alg_test_aead,
2024 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2026 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2031 .alg = "authenc(hmac(sha256),cbc(aes))",
2032 .test = alg_test_aead,
2038 hmac_sha256_aes_cbc_enc_tv_temp,
2040 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2045 .alg = "authenc(hmac(sha256),cbc(des))",
2046 .test = alg_test_aead,
2052 hmac_sha256_des_cbc_enc_tv_temp,
2054 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2059 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2060 .test = alg_test_aead,
2066 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2068 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2073 .alg = "authenc(hmac(sha384),cbc(des))",
2074 .test = alg_test_aead,
2080 hmac_sha384_des_cbc_enc_tv_temp,
2082 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2087 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2088 .test = alg_test_aead,
2094 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2096 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2101 .alg = "authenc(hmac(sha512),cbc(aes))",
2102 .test = alg_test_aead,
2108 hmac_sha512_aes_cbc_enc_tv_temp,
2110 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2115 .alg = "authenc(hmac(sha512),cbc(des))",
2116 .test = alg_test_aead,
2122 hmac_sha512_des_cbc_enc_tv_temp,
2124 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2129 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2130 .test = alg_test_aead,
2136 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2138 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2144 .test = alg_test_skcipher,
2149 .vecs = aes_cbc_enc_tv_template,
2150 .count = AES_CBC_ENC_TEST_VECTORS
2153 .vecs = aes_cbc_dec_tv_template,
2154 .count = AES_CBC_DEC_TEST_VECTORS
2159 .alg = "cbc(anubis)",
2160 .test = alg_test_skcipher,
2164 .vecs = anubis_cbc_enc_tv_template,
2165 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2168 .vecs = anubis_cbc_dec_tv_template,
2169 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2174 .alg = "cbc(blowfish)",
2175 .test = alg_test_skcipher,
2179 .vecs = bf_cbc_enc_tv_template,
2180 .count = BF_CBC_ENC_TEST_VECTORS
2183 .vecs = bf_cbc_dec_tv_template,
2184 .count = BF_CBC_DEC_TEST_VECTORS
2189 .alg = "cbc(camellia)",
2190 .test = alg_test_skcipher,
2194 .vecs = camellia_cbc_enc_tv_template,
2195 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2198 .vecs = camellia_cbc_dec_tv_template,
2199 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2204 .alg = "cbc(cast5)",
2205 .test = alg_test_skcipher,
2209 .vecs = cast5_cbc_enc_tv_template,
2210 .count = CAST5_CBC_ENC_TEST_VECTORS
2213 .vecs = cast5_cbc_dec_tv_template,
2214 .count = CAST5_CBC_DEC_TEST_VECTORS
2219 .alg = "cbc(cast6)",
2220 .test = alg_test_skcipher,
2224 .vecs = cast6_cbc_enc_tv_template,
2225 .count = CAST6_CBC_ENC_TEST_VECTORS
2228 .vecs = cast6_cbc_dec_tv_template,
2229 .count = CAST6_CBC_DEC_TEST_VECTORS
2235 .test = alg_test_skcipher,
2239 .vecs = des_cbc_enc_tv_template,
2240 .count = DES_CBC_ENC_TEST_VECTORS
2243 .vecs = des_cbc_dec_tv_template,
2244 .count = DES_CBC_DEC_TEST_VECTORS
2249 .alg = "cbc(des3_ede)",
2250 .test = alg_test_skcipher,
2255 .vecs = des3_ede_cbc_enc_tv_template,
2256 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2259 .vecs = des3_ede_cbc_dec_tv_template,
2260 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2265 .alg = "cbc(serpent)",
2266 .test = alg_test_skcipher,
2270 .vecs = serpent_cbc_enc_tv_template,
2271 .count = SERPENT_CBC_ENC_TEST_VECTORS
2274 .vecs = serpent_cbc_dec_tv_template,
2275 .count = SERPENT_CBC_DEC_TEST_VECTORS
2280 .alg = "cbc(twofish)",
2281 .test = alg_test_skcipher,
2285 .vecs = tf_cbc_enc_tv_template,
2286 .count = TF_CBC_ENC_TEST_VECTORS
2289 .vecs = tf_cbc_dec_tv_template,
2290 .count = TF_CBC_DEC_TEST_VECTORS
2296 .test = alg_test_aead,
2301 .vecs = aes_ccm_enc_tv_template,
2302 .count = AES_CCM_ENC_TEST_VECTORS
2305 .vecs = aes_ccm_dec_tv_template,
2306 .count = AES_CCM_DEC_TEST_VECTORS
/* NOTE: stream cipher -- the decryption suite reuses the encryption vectors. */
2312 .test = alg_test_skcipher,
2316 .vecs = chacha20_enc_tv_template,
2317 .count = CHACHA20_ENC_TEST_VECTORS
2320 .vecs = chacha20_enc_tv_template,
2321 .count = CHACHA20_ENC_TEST_VECTORS
2327 .test = alg_test_hash,
2330 .vecs = aes_cmac128_tv_template,
2331 .count = CMAC_AES_TEST_VECTORS
2335 .alg = "cmac(des3_ede)",
2336 .test = alg_test_hash,
2339 .vecs = des3_ede_cmac64_tv_template,
2340 .count = CMAC_DES3_EDE_TEST_VECTORS
2344 .alg = "compress_null",
2345 .test = alg_test_null,
2348 .test = alg_test_hash,
2351 .vecs = crc32_tv_template,
2352 .count = CRC32_TEST_VECTORS
2357 .test = alg_test_crc32c,
2361 .vecs = crc32c_tv_template,
2362 .count = CRC32C_TEST_VECTORS
2367 .test = alg_test_hash,
2371 .vecs = crct10dif_tv_template,
2372 .count = CRCT10DIF_TEST_VECTORS
2376 .alg = "cryptd(__driver-cbc-aes-aesni)",
2377 .test = alg_test_null,
2380 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2381 .test = alg_test_null,
2383 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2384 .test = alg_test_null,
2386 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2387 .test = alg_test_null,
2389 .alg = "cryptd(__driver-ecb-aes-aesni)",
2390 .test = alg_test_null,
2393 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2394 .test = alg_test_null,
2396 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2397 .test = alg_test_null,
2399 .alg = "cryptd(__driver-ecb-cast5-avx)",
2400 .test = alg_test_null,
2402 .alg = "cryptd(__driver-ecb-cast6-avx)",
2403 .test = alg_test_null,
2405 .alg = "cryptd(__driver-ecb-serpent-avx)",
2406 .test = alg_test_null,
2408 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2409 .test = alg_test_null,
2411 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2412 .test = alg_test_null,
2414 .alg = "cryptd(__driver-ecb-twofish-avx)",
2415 .test = alg_test_null,
2417 .alg = "cryptd(__driver-gcm-aes-aesni)",
2418 .test = alg_test_null,
2421 .alg = "cryptd(__ghash-pclmulqdqni)",
2422 .test = alg_test_null,
2426 .test = alg_test_skcipher,
2431 .vecs = aes_ctr_enc_tv_template,
2432 .count = AES_CTR_ENC_TEST_VECTORS
2435 .vecs = aes_ctr_dec_tv_template,
2436 .count = AES_CTR_DEC_TEST_VECTORS
2441 .alg = "ctr(blowfish)",
2442 .test = alg_test_skcipher,
2446 .vecs = bf_ctr_enc_tv_template,
2447 .count = BF_CTR_ENC_TEST_VECTORS
2450 .vecs = bf_ctr_dec_tv_template,
2451 .count = BF_CTR_DEC_TEST_VECTORS
2456 .alg = "ctr(camellia)",
2457 .test = alg_test_skcipher,
2461 .vecs = camellia_ctr_enc_tv_template,
2462 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2465 .vecs = camellia_ctr_dec_tv_template,
2466 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2471 .alg = "ctr(cast5)",
2472 .test = alg_test_skcipher,
2476 .vecs = cast5_ctr_enc_tv_template,
2477 .count = CAST5_CTR_ENC_TEST_VECTORS
2480 .vecs = cast5_ctr_dec_tv_template,
2481 .count = CAST5_CTR_DEC_TEST_VECTORS
2486 .alg = "ctr(cast6)",
2487 .test = alg_test_skcipher,
2491 .vecs = cast6_ctr_enc_tv_template,
2492 .count = CAST6_CTR_ENC_TEST_VECTORS
2495 .vecs = cast6_ctr_dec_tv_template,
2496 .count = CAST6_CTR_DEC_TEST_VECTORS
2502 .test = alg_test_skcipher,
2506 .vecs = des_ctr_enc_tv_template,
2507 .count = DES_CTR_ENC_TEST_VECTORS
2510 .vecs = des_ctr_dec_tv_template,
2511 .count = DES_CTR_DEC_TEST_VECTORS
2516 .alg = "ctr(des3_ede)",
2517 .test = alg_test_skcipher,
2521 .vecs = des3_ede_ctr_enc_tv_template,
2522 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2525 .vecs = des3_ede_ctr_dec_tv_template,
2526 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2531 .alg = "ctr(serpent)",
2532 .test = alg_test_skcipher,
2536 .vecs = serpent_ctr_enc_tv_template,
2537 .count = SERPENT_CTR_ENC_TEST_VECTORS
2540 .vecs = serpent_ctr_dec_tv_template,
2541 .count = SERPENT_CTR_DEC_TEST_VECTORS
2546 .alg = "ctr(twofish)",
2547 .test = alg_test_skcipher,
2551 .vecs = tf_ctr_enc_tv_template,
2552 .count = TF_CTR_ENC_TEST_VECTORS
2555 .vecs = tf_ctr_dec_tv_template,
2556 .count = TF_CTR_DEC_TEST_VECTORS
2561 .alg = "cts(cbc(aes))",
2562 .test = alg_test_skcipher,
2566 .vecs = cts_mode_enc_tv_template,
2567 .count = CTS_MODE_ENC_TEST_VECTORS
2570 .vecs = cts_mode_dec_tv_template,
2571 .count = CTS_MODE_DEC_TEST_VECTORS
2577 .test = alg_test_comp,
2582 .vecs = deflate_comp_tv_template,
2583 .count = DEFLATE_COMP_TEST_VECTORS
2586 .vecs = deflate_decomp_tv_template,
2587 .count = DEFLATE_DECOMP_TEST_VECTORS
2592 .alg = "digest_null",
2593 .test = alg_test_null,
2595 .alg = "drbg_nopr_ctr_aes128",
2596 .test = alg_test_drbg,
2600 .vecs = drbg_nopr_ctr_aes128_tv_template,
2601 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2605 .alg = "drbg_nopr_ctr_aes192",
2606 .test = alg_test_drbg,
2610 .vecs = drbg_nopr_ctr_aes192_tv_template,
2611 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2615 .alg = "drbg_nopr_ctr_aes256",
2616 .test = alg_test_drbg,
2620 .vecs = drbg_nopr_ctr_aes256_tv_template,
2621 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2626 * There is no need to specifically test the DRBG with every
2627 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2629 .alg = "drbg_nopr_hmac_sha1",
2631 .test = alg_test_null,
2633 .alg = "drbg_nopr_hmac_sha256",
2634 .test = alg_test_drbg,
2638 .vecs = drbg_nopr_hmac_sha256_tv_template,
2640 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2644 /* covered by drbg_nopr_hmac_sha256 test */
2645 .alg = "drbg_nopr_hmac_sha384",
2647 .test = alg_test_null,
2649 .alg = "drbg_nopr_hmac_sha512",
2650 .test = alg_test_null,
2653 .alg = "drbg_nopr_sha1",
2655 .test = alg_test_null,
2657 .alg = "drbg_nopr_sha256",
2658 .test = alg_test_drbg,
2662 .vecs = drbg_nopr_sha256_tv_template,
2663 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2667 /* covered by drbg_nopr_sha256 test */
2668 .alg = "drbg_nopr_sha384",
2670 .test = alg_test_null,
2672 .alg = "drbg_nopr_sha512",
2674 .test = alg_test_null,
2676 .alg = "drbg_pr_ctr_aes128",
2677 .test = alg_test_drbg,
2681 .vecs = drbg_pr_ctr_aes128_tv_template,
2682 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2686 /* covered by drbg_pr_ctr_aes128 test */
2687 .alg = "drbg_pr_ctr_aes192",
2689 .test = alg_test_null,
2691 .alg = "drbg_pr_ctr_aes256",
2693 .test = alg_test_null,
2695 .alg = "drbg_pr_hmac_sha1",
2697 .test = alg_test_null,
2699 .alg = "drbg_pr_hmac_sha256",
2700 .test = alg_test_drbg,
2704 .vecs = drbg_pr_hmac_sha256_tv_template,
2705 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2709 /* covered by drbg_pr_hmac_sha256 test */
2710 .alg = "drbg_pr_hmac_sha384",
2712 .test = alg_test_null,
2714 .alg = "drbg_pr_hmac_sha512",
2715 .test = alg_test_null,
2718 .alg = "drbg_pr_sha1",
2720 .test = alg_test_null,
2722 .alg = "drbg_pr_sha256",
2723 .test = alg_test_drbg,
2727 .vecs = drbg_pr_sha256_tv_template,
2728 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
2732 /* covered by drbg_pr_sha256 test */
2733 .alg = "drbg_pr_sha384",
2735 .test = alg_test_null,
2737 .alg = "drbg_pr_sha512",
2739 .test = alg_test_null,
2741 .alg = "ecb(__aes-aesni)",
2742 .test = alg_test_null,
2746 .test = alg_test_skcipher,
2751 .vecs = aes_enc_tv_template,
2752 .count = AES_ENC_TEST_VECTORS
2755 .vecs = aes_dec_tv_template,
2756 .count = AES_DEC_TEST_VECTORS
2761 .alg = "ecb(anubis)",
2762 .test = alg_test_skcipher,
2766 .vecs = anubis_enc_tv_template,
2767 .count = ANUBIS_ENC_TEST_VECTORS
2770 .vecs = anubis_dec_tv_template,
2771 .count = ANUBIS_DEC_TEST_VECTORS
2777 .test = alg_test_skcipher,
2781 .vecs = arc4_enc_tv_template,
2782 .count = ARC4_ENC_TEST_VECTORS
2785 .vecs = arc4_dec_tv_template,
2786 .count = ARC4_DEC_TEST_VECTORS
2791 .alg = "ecb(blowfish)",
2792 .test = alg_test_skcipher,
2796 .vecs = bf_enc_tv_template,
2797 .count = BF_ENC_TEST_VECTORS
2800 .vecs = bf_dec_tv_template,
2801 .count = BF_DEC_TEST_VECTORS
2806 .alg = "ecb(camellia)",
2807 .test = alg_test_skcipher,
2811 .vecs = camellia_enc_tv_template,
2812 .count = CAMELLIA_ENC_TEST_VECTORS
2815 .vecs = camellia_dec_tv_template,
2816 .count = CAMELLIA_DEC_TEST_VECTORS
2821 .alg = "ecb(cast5)",
2822 .test = alg_test_skcipher,
2826 .vecs = cast5_enc_tv_template,
2827 .count = CAST5_ENC_TEST_VECTORS
2830 .vecs = cast5_dec_tv_template,
2831 .count = CAST5_DEC_TEST_VECTORS
2836 .alg = "ecb(cast6)",
2837 .test = alg_test_skcipher,
2841 .vecs = cast6_enc_tv_template,
2842 .count = CAST6_ENC_TEST_VECTORS
2845 .vecs = cast6_dec_tv_template,
2846 .count = CAST6_DEC_TEST_VECTORS
2851 .alg = "ecb(cipher_null)",
2852 .test = alg_test_null,
2855 .test = alg_test_skcipher,
2860 .vecs = des_enc_tv_template,
2861 .count = DES_ENC_TEST_VECTORS
2864 .vecs = des_dec_tv_template,
2865 .count = DES_DEC_TEST_VECTORS
2870 .alg = "ecb(des3_ede)",
2871 .test = alg_test_skcipher,
2876 .vecs = des3_ede_enc_tv_template,
2877 .count = DES3_EDE_ENC_TEST_VECTORS
2880 .vecs = des3_ede_dec_tv_template,
2881 .count = DES3_EDE_DEC_TEST_VECTORS
2886 .alg = "ecb(fcrypt)",
2887 .test = alg_test_skcipher,
2891 .vecs = fcrypt_pcbc_enc_tv_template,
2895 .vecs = fcrypt_pcbc_dec_tv_template,
2901 .alg = "ecb(khazad)",
2902 .test = alg_test_skcipher,
2906 .vecs = khazad_enc_tv_template,
2907 .count = KHAZAD_ENC_TEST_VECTORS
2910 .vecs = khazad_dec_tv_template,
2911 .count = KHAZAD_DEC_TEST_VECTORS
2917 .test = alg_test_skcipher,
2921 .vecs = seed_enc_tv_template,
2922 .count = SEED_ENC_TEST_VECTORS
2925 .vecs = seed_dec_tv_template,
2926 .count = SEED_DEC_TEST_VECTORS
2931 .alg = "ecb(serpent)",
2932 .test = alg_test_skcipher,
2936 .vecs = serpent_enc_tv_template,
2937 .count = SERPENT_ENC_TEST_VECTORS
2940 .vecs = serpent_dec_tv_template,
2941 .count = SERPENT_DEC_TEST_VECTORS
2947 .test = alg_test_skcipher,
2951 .vecs = tea_enc_tv_template,
2952 .count = TEA_ENC_TEST_VECTORS
2955 .vecs = tea_dec_tv_template,
2956 .count = TEA_DEC_TEST_VECTORS
2961 .alg = "ecb(tnepres)",
2962 .test = alg_test_skcipher,
2966 .vecs = tnepres_enc_tv_template,
2967 .count = TNEPRES_ENC_TEST_VECTORS
2970 .vecs = tnepres_dec_tv_template,
2971 .count = TNEPRES_DEC_TEST_VECTORS
2976 .alg = "ecb(twofish)",
2977 .test = alg_test_skcipher,
2981 .vecs = tf_enc_tv_template,
2982 .count = TF_ENC_TEST_VECTORS
2985 .vecs = tf_dec_tv_template,
2986 .count = TF_DEC_TEST_VECTORS
2992 .test = alg_test_skcipher,
2996 .vecs = xeta_enc_tv_template,
2997 .count = XETA_ENC_TEST_VECTORS
3000 .vecs = xeta_dec_tv_template,
3001 .count = XETA_DEC_TEST_VECTORS
3007 .test = alg_test_skcipher,
3011 .vecs = xtea_enc_tv_template,
3012 .count = XTEA_ENC_TEST_VECTORS
3015 .vecs = xtea_dec_tv_template,
3016 .count = XTEA_DEC_TEST_VECTORS
3022 .test = alg_test_aead,
3027 .vecs = aes_gcm_enc_tv_template,
3028 .count = AES_GCM_ENC_TEST_VECTORS
3031 .vecs = aes_gcm_dec_tv_template,
3032 .count = AES_GCM_DEC_TEST_VECTORS
3038 .test = alg_test_hash,
3042 .vecs = ghash_tv_template,
3043 .count = GHASH_TEST_VECTORS
3047 .alg = "hmac(crc32)",
3048 .test = alg_test_hash,
3051 .vecs = bfin_crc_tv_template,
3052 .count = BFIN_CRC_TEST_VECTORS
3057 .test = alg_test_hash,
3060 .vecs = hmac_md5_tv_template,
3061 .count = HMAC_MD5_TEST_VECTORS
3065 .alg = "hmac(rmd128)",
3066 .test = alg_test_hash,
3069 .vecs = hmac_rmd128_tv_template,
3070 .count = HMAC_RMD128_TEST_VECTORS
3074 .alg = "hmac(rmd160)",
3075 .test = alg_test_hash,
3078 .vecs = hmac_rmd160_tv_template,
3079 .count = HMAC_RMD160_TEST_VECTORS
3083 .alg = "hmac(sha1)",
3084 .test = alg_test_hash,
3088 .vecs = hmac_sha1_tv_template,
3089 .count = HMAC_SHA1_TEST_VECTORS
3093 .alg = "hmac(sha224)",
3094 .test = alg_test_hash,
3098 .vecs = hmac_sha224_tv_template,
3099 .count = HMAC_SHA224_TEST_VECTORS
3103 .alg = "hmac(sha256)",
3104 .test = alg_test_hash,
3108 .vecs = hmac_sha256_tv_template,
3109 .count = HMAC_SHA256_TEST_VECTORS
3113 .alg = "hmac(sha384)",
3114 .test = alg_test_hash,
3118 .vecs = hmac_sha384_tv_template,
3119 .count = HMAC_SHA384_TEST_VECTORS
3123 .alg = "hmac(sha512)",
3124 .test = alg_test_hash,
3128 .vecs = hmac_sha512_tv_template,
3129 .count = HMAC_SHA512_TEST_VECTORS
3133 .alg = "jitterentropy_rng",
3135 .test = alg_test_null,
3138 .test = alg_test_skcipher,
3142 .vecs = aes_lrw_enc_tv_template,
3143 .count = AES_LRW_ENC_TEST_VECTORS
3146 .vecs = aes_lrw_dec_tv_template,
3147 .count = AES_LRW_DEC_TEST_VECTORS
3152 .alg = "lrw(camellia)",
3153 .test = alg_test_skcipher,
3157 .vecs = camellia_lrw_enc_tv_template,
3158 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
3161 .vecs = camellia_lrw_dec_tv_template,
3162 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
3167 .alg = "lrw(cast6)",
3168 .test = alg_test_skcipher,
3172 .vecs = cast6_lrw_enc_tv_template,
3173 .count = CAST6_LRW_ENC_TEST_VECTORS
3176 .vecs = cast6_lrw_dec_tv_template,
3177 .count = CAST6_LRW_DEC_TEST_VECTORS
3182 .alg = "lrw(serpent)",
3183 .test = alg_test_skcipher,
3187 .vecs = serpent_lrw_enc_tv_template,
3188 .count = SERPENT_LRW_ENC_TEST_VECTORS
3191 .vecs = serpent_lrw_dec_tv_template,
3192 .count = SERPENT_LRW_DEC_TEST_VECTORS
3197 .alg = "lrw(twofish)",
3198 .test = alg_test_skcipher,
3202 .vecs = tf_lrw_enc_tv_template,
3203 .count = TF_LRW_ENC_TEST_VECTORS
3206 .vecs = tf_lrw_dec_tv_template,
3207 .count = TF_LRW_DEC_TEST_VECTORS
3213 .test = alg_test_comp,
3218 .vecs = lz4_comp_tv_template,
3219 .count = LZ4_COMP_TEST_VECTORS
3222 .vecs = lz4_decomp_tv_template,
3223 .count = LZ4_DECOMP_TEST_VECTORS
3229 .test = alg_test_comp,
3234 .vecs = lz4hc_comp_tv_template,
3235 .count = LZ4HC_COMP_TEST_VECTORS
3238 .vecs = lz4hc_decomp_tv_template,
3239 .count = LZ4HC_DECOMP_TEST_VECTORS
3245 .test = alg_test_comp,
3250 .vecs = lzo_comp_tv_template,
3251 .count = LZO_COMP_TEST_VECTORS
3254 .vecs = lzo_decomp_tv_template,
3255 .count = LZO_DECOMP_TEST_VECTORS
3261 .test = alg_test_hash,
3264 .vecs = md4_tv_template,
3265 .count = MD4_TEST_VECTORS
3270 .test = alg_test_hash,
3273 .vecs = md5_tv_template,
3274 .count = MD5_TEST_VECTORS
3278 .alg = "michael_mic",
3279 .test = alg_test_hash,
3282 .vecs = michael_mic_tv_template,
3283 .count = MICHAEL_MIC_TEST_VECTORS
3288 .test = alg_test_skcipher,
3293 .vecs = aes_ofb_enc_tv_template,
3294 .count = AES_OFB_ENC_TEST_VECTORS
3297 .vecs = aes_ofb_dec_tv_template,
3298 .count = AES_OFB_DEC_TEST_VECTORS
3303 .alg = "pcbc(fcrypt)",
3304 .test = alg_test_skcipher,
3308 .vecs = fcrypt_pcbc_enc_tv_template,
3309 .count = FCRYPT_ENC_TEST_VECTORS
3312 .vecs = fcrypt_pcbc_dec_tv_template,
3313 .count = FCRYPT_DEC_TEST_VECTORS
3318 .alg = "rfc3686(ctr(aes))",
3319 .test = alg_test_skcipher,
3324 .vecs = aes_ctr_rfc3686_enc_tv_template,
3325 .count = AES_CTR_3686_ENC_TEST_VECTORS
3328 .vecs = aes_ctr_rfc3686_dec_tv_template,
3329 .count = AES_CTR_3686_DEC_TEST_VECTORS
3334 .alg = "rfc4106(gcm(aes))",
3335 .test = alg_test_aead,
3340 .vecs = aes_gcm_rfc4106_enc_tv_template,
3341 .count = AES_GCM_4106_ENC_TEST_VECTORS
3344 .vecs = aes_gcm_rfc4106_dec_tv_template,
3345 .count = AES_GCM_4106_DEC_TEST_VECTORS
3350 .alg = "rfc4309(ccm(aes))",
3351 .test = alg_test_aead,
3356 .vecs = aes_ccm_rfc4309_enc_tv_template,
3357 .count = AES_CCM_4309_ENC_TEST_VECTORS
3360 .vecs = aes_ccm_rfc4309_dec_tv_template,
3361 .count = AES_CCM_4309_DEC_TEST_VECTORS
3366 .alg = "rfc4543(gcm(aes))",
3367 .test = alg_test_aead,
3371 .vecs = aes_gcm_rfc4543_enc_tv_template,
3372 .count = AES_GCM_4543_ENC_TEST_VECTORS
3375 .vecs = aes_gcm_rfc4543_dec_tv_template,
3376 .count = AES_GCM_4543_DEC_TEST_VECTORS
3382 .test = alg_test_hash,
3385 .vecs = rmd128_tv_template,
3386 .count = RMD128_TEST_VECTORS
3391 .test = alg_test_hash,
3394 .vecs = rmd160_tv_template,
3395 .count = RMD160_TEST_VECTORS
3400 .test = alg_test_hash,
3403 .vecs = rmd256_tv_template,
3404 .count = RMD256_TEST_VECTORS
3409 .test = alg_test_hash,
3412 .vecs = rmd320_tv_template,
3413 .count = RMD320_TEST_VECTORS
3418 .test = alg_test_skcipher,
3422 .vecs = salsa20_stream_enc_tv_template,
3423 .count = SALSA20_STREAM_ENC_TEST_VECTORS
3429 .test = alg_test_hash,
3433 .vecs = sha1_tv_template,
3434 .count = SHA1_TEST_VECTORS
3439 .test = alg_test_hash,
3443 .vecs = sha224_tv_template,
3444 .count = SHA224_TEST_VECTORS
3449 .test = alg_test_hash,
3453 .vecs = sha256_tv_template,
3454 .count = SHA256_TEST_VECTORS
3459 .test = alg_test_hash,
3463 .vecs = sha384_tv_template,
3464 .count = SHA384_TEST_VECTORS
3469 .test = alg_test_hash,
3473 .vecs = sha512_tv_template,
3474 .count = SHA512_TEST_VECTORS
3479 .test = alg_test_hash,
3482 .vecs = tgr128_tv_template,
3483 .count = TGR128_TEST_VECTORS
3488 .test = alg_test_hash,
3491 .vecs = tgr160_tv_template,
3492 .count = TGR160_TEST_VECTORS
3497 .test = alg_test_hash,
3500 .vecs = tgr192_tv_template,
3501 .count = TGR192_TEST_VECTORS
3506 .test = alg_test_hash,
3509 .vecs = aes_vmac128_tv_template,
3510 .count = VMAC_AES_TEST_VECTORS
3515 .test = alg_test_hash,
3518 .vecs = wp256_tv_template,
3519 .count = WP256_TEST_VECTORS
3524 .test = alg_test_hash,
3527 .vecs = wp384_tv_template,
3528 .count = WP384_TEST_VECTORS
3533 .test = alg_test_hash,
3536 .vecs = wp512_tv_template,
3537 .count = WP512_TEST_VECTORS
3542 .test = alg_test_hash,
3545 .vecs = aes_xcbc128_tv_template,
3546 .count = XCBC_AES_TEST_VECTORS
3551 .test = alg_test_skcipher,
3556 .vecs = aes_xts_enc_tv_template,
3557 .count = AES_XTS_ENC_TEST_VECTORS
3560 .vecs = aes_xts_dec_tv_template,
3561 .count = AES_XTS_DEC_TEST_VECTORS
3566 .alg = "xts(camellia)",
3567 .test = alg_test_skcipher,
3571 .vecs = camellia_xts_enc_tv_template,
3572 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
3575 .vecs = camellia_xts_dec_tv_template,
3576 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
3581 .alg = "xts(cast6)",
3582 .test = alg_test_skcipher,
3586 .vecs = cast6_xts_enc_tv_template,
3587 .count = CAST6_XTS_ENC_TEST_VECTORS
3590 .vecs = cast6_xts_dec_tv_template,
3591 .count = CAST6_XTS_DEC_TEST_VECTORS
3596 .alg = "xts(serpent)",
3597 .test = alg_test_skcipher,
3601 .vecs = serpent_xts_enc_tv_template,
3602 .count = SERPENT_XTS_ENC_TEST_VECTORS
3605 .vecs = serpent_xts_dec_tv_template,
3606 .count = SERPENT_XTS_DEC_TEST_VECTORS
3611 .alg = "xts(twofish)",
3612 .test = alg_test_skcipher,
3616 .vecs = tf_xts_enc_tv_template,
3617 .count = TF_XTS_ENC_TEST_VECTORS
3620 .vecs = tf_xts_dec_tv_template,
3621 .count = TF_XTS_DEC_TEST_VECTORS
3627 .test = alg_test_pcomp,
3632 .vecs = zlib_comp_tv_template,
3633 .count = ZLIB_COMP_TEST_VECTORS
3636 .vecs = zlib_decomp_tv_template,
3637 .count = ZLIB_DECOMP_TEST_VECTORS
/* One-shot guard so the sort check below runs only on first use. */
3644 static bool alg_test_descs_checked;
/*
 * alg_test_descs_check_order() - verify at runtime that
 * alg_test_descs[] is strictly sorted by .alg with no duplicates,
 * since alg_find_test() relies on binary search.  Violations are
 * reported via WARN_ON + pr_warn but do not abort.
 */
3646 static void alg_test_descs_check_order(void)
3650 /* only check once */
3651 if (alg_test_descs_checked)
3654 alg_test_descs_checked = true;
3656 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3657 int diff = strcmp(alg_test_descs[i - 1].alg,
3658 alg_test_descs[i].alg);
3660 if (WARN_ON(diff > 0)) {
3661 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3662 alg_test_descs[i - 1].alg,
3663 alg_test_descs[i].alg);
3666 if (WARN_ON(diff == 0)) {
3667 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3668 alg_test_descs[i].alg);
/*
 * alg_find_test() - binary-search alg_test_descs[] for @alg.
 * Requires the table to be sorted (see alg_test_descs_check_order()).
 * NOTE(review): the branch on diff and the not-found return value are
 * elided from this excerpt; presumably returns the matching index or
 * a negative value when absent -- confirm in the full source.
 */
3673 static int alg_find_test(const char *alg)
3676 int end = ARRAY_SIZE(alg_test_descs);
3678 while (start < end) {
3679 int i = (start + end) / 2;
3680 int diff = strcmp(alg_test_descs[i].alg, alg);
/*
 * alg_test() - entry point used by the crypto manager to self-test a
 * newly registered implementation.  Bare ciphers (CRYPTO_ALG_TYPE_CIPHER)
 * are looked up under a synthesized "ecb(%s)" name and tested through
 * alg_test_cipher().  Otherwise both the generic algorithm name and the
 * driver name are looked up and any matching test routines run; their
 * results are ORed together.  In FIPS mode a failure panics the kernel,
 * a pass is logged, and algorithms not flagged fips_allowed are
 * handled by the fips branch (the elided lines presumably reject them).
 */
3698 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3704 alg_test_descs_check_order();
3706 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3707 char nalg[CRYPTO_MAX_ALG_NAME];
/* Guard against truncation when wrapping the name in "ecb(...)". */
3709 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3711 return -ENAMETOOLONG;
3713 i = alg_find_test(nalg);
3717 if (fips_enabled && !alg_test_descs[i].fips_allowed)
3720 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3724 i = alg_find_test(alg);
3725 j = alg_find_test(driver);
3729 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3730 (j >= 0 && !alg_test_descs[j].fips_allowed)))
3735 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
/* Avoid running the same entry twice when alg and driver collide. */
3737 if (j >= 0 && j != i)
3738 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3742 if (fips_enabled && rc)
3743 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3745 if (fips_enabled && !rc)
3746 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
3751 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3757 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3759 EXPORT_SYMBOL_GPL(alg_test);