crypto/testmgr.c
1 /*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
15 *
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
20 *
21 */
22
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <crypto/skcipher.h>
26 #include <linux/err.h>
27 #include <linux/fips.h>
28 #include <linux/module.h>
29 #include <linux/scatterlist.h>
30 #include <linux/slab.h>
31 #include <linux/string.h>
32 #include <crypto/rng.h>
33 #include <crypto/drbg.h>
34 #include <crypto/akcipher.h>
35
36 #include "internal.h"
37
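/*
 * The self-tests can be disabled with this module parameter.  When
 * testmgr is built into the crypto manager this is typically set on the
 * kernel command line (e.g. "cryptomgr.notests=1"); the exact parameter
 * spelling depends on how the code is built, see
 * Documentation/kernel-parameters.txt.
 */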
38 static bool notests;
39 module_param(notests, bool, 0644);
40 MODULE_PARM_DESC(notests, "disable crypto self-tests");
41
42 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
43
44 /* a perfect nop */
45 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
46 {
47 return 0;
48 }
49
50 #else
51
52 #include "testmgr.h"
53
54 /*
55 * Need slab memory for testing (size in number of pages).
56 */
57 #define XBUFSIZE 8
58
59 /*
60 * Indexes into the xbuf to simulate cross-page access.
61 */
62 #define IDX1 32
63 #define IDX2 32400
64 #define IDX3 1
65 #define IDX4 8193
66 #define IDX5 22222
67 #define IDX6 17101
68 #define IDX7 27333
69 #define IDX8 3000
70
71 /*
72 * Used by test_cipher()
73 */
74 #define ENCRYPT 1
75 #define DECRYPT 0
76
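/*
 * Per-request completion tracking for the asynchronous crypto APIs used
 * below: a tcrypt_result is handed to each request as callback data,
 * tcrypt_complete() records the final status and signals the completion,
 * and the test code sleeps on it (directly or via wait_async_op()).
 */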
77 struct tcrypt_result {
78 struct completion completion;
79 int err;
80 };
81
82 struct aead_test_suite {
83 struct {
84 struct aead_testvec *vecs;
85 unsigned int count;
86 } enc, dec;
87 };
88
89 struct cipher_test_suite {
90 struct {
91 struct cipher_testvec *vecs;
92 unsigned int count;
93 } enc, dec;
94 };
95
96 struct comp_test_suite {
97 struct {
98 struct comp_testvec *vecs;
99 unsigned int count;
100 } comp, decomp;
101 };
102
103 struct hash_test_suite {
104 struct hash_testvec *vecs;
105 unsigned int count;
106 };
107
108 struct cprng_test_suite {
109 struct cprng_testvec *vecs;
110 unsigned int count;
111 };
112
113 struct drbg_test_suite {
114 struct drbg_testvec *vecs;
115 unsigned int count;
116 };
117
118 struct akcipher_test_suite {
119 struct akcipher_testvec *vecs;
120 unsigned int count;
121 };
122
123 struct alg_test_desc {
124 const char *alg;
125 int (*test)(const struct alg_test_desc *desc, const char *driver,
126 u32 type, u32 mask);
127 int fips_allowed; /* set if alg is allowed in fips mode */
128
129 union {
130 struct aead_test_suite aead;
131 struct cipher_test_suite cipher;
132 struct comp_test_suite comp;
133 struct hash_test_suite hash;
134 struct cprng_test_suite cprng;
135 struct drbg_test_suite drbg;
136 struct akcipher_test_suite akcipher;
137 } suite;
138 };
139
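/*
 * Scatter-gather chunk offsets: entry k is consumed as page
 * xbuf[IDX[k] >> PAGE_SHIFT] plus byte offset offset_in_page(IDX[k]),
 * so chunked test vectors land on different pages and offsets.
 */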
140 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
141
142 static void hexdump(unsigned char *buf, unsigned int len)
143 {
144 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
145 16, 1,
146 buf, len, false);
147 }
148
149 static void tcrypt_complete(struct crypto_async_request *req, int err)
150 {
151 struct tcrypt_result *res = req->data;
152
153 if (err == -EINPROGRESS)
154 return;
155
156 res->err = err;
157 complete(&res->completion);
158 }
159
160 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
161 {
162 int i;
163
164 for (i = 0; i < XBUFSIZE; i++) {
165 buf[i] = (void *)__get_free_page(GFP_KERNEL);
166 if (!buf[i])
167 goto err_free_buf;
168 }
169
170 return 0;
171
172 err_free_buf:
173 while (i-- > 0)
174 free_page((unsigned long)buf[i]);
175
176 return -ENOMEM;
177 }
178
179 static void testmgr_free_buf(char *buf[XBUFSIZE])
180 {
181 int i;
182
183 for (i = 0; i < XBUFSIZE; i++)
184 free_page((unsigned long)buf[i]);
185 }
186
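/*
 * Issue an asynchronous crypto operation and, if the driver returned
 * -EINPROGRESS or -EBUSY, sleep until tcrypt_complete() fires, then
 * return the final status.  Typical use:
 *
 *	ret = wait_async_op(&tresult, crypto_ahash_digest(req));
 */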
187 static int wait_async_op(struct tcrypt_result *tr, int ret)
188 {
189 if (ret == -EINPROGRESS || ret == -EBUSY) {
190 wait_for_completion(&tr->completion);
191 reinit_completion(&tr->completion);
192 ret = tr->err;
193 }
194 return ret;
195 }
196
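/*
 * Feed one more chunk of the test vector through the export()/import()
 * path: save the hash state, free and reallocate the request, restore
 * the state into the new request and run an update() over chunk k.
 * On success the caller's request pointer is replaced with the new one.
 */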
197 static int ahash_partial_update(struct ahash_request **preq,
198 struct crypto_ahash *tfm, struct hash_testvec *template,
199 void *hash_buff, int k, int temp, struct scatterlist *sg,
200 const char *algo, char *result, struct tcrypt_result *tresult)
201 {
202 char *state;
203 struct ahash_request *req;
204 int statesize, ret = -EINVAL;
205
206 req = *preq;
207 statesize = crypto_ahash_statesize(
208 crypto_ahash_reqtfm(req));
209 state = kmalloc(statesize, GFP_KERNEL);
210 if (!state) {
211 pr_err("alt: hash: Failed to alloc state for %s\n", algo);
212 goto out_nostate;
213 }
214 ret = crypto_ahash_export(req, state);
215 if (ret) {
216 pr_err("alt: hash: Failed to export() for %s\n", algo);
217 goto out;
218 }
219 ahash_request_free(req);
220 req = ahash_request_alloc(tfm, GFP_KERNEL);
221 if (!req) {
222 pr_err("alg: hash: Failed to alloc request for %s\n", algo);
223 goto out_noreq;
224 }
225 ahash_request_set_callback(req,
226 CRYPTO_TFM_REQ_MAY_BACKLOG,
227 tcrypt_complete, tresult);
228
229 memcpy(hash_buff, template->plaintext + temp,
230 template->tap[k]);
231 sg_init_one(&sg[0], hash_buff, template->tap[k]);
232 ahash_request_set_crypt(req, sg, result, template->tap[k]);
233 ret = crypto_ahash_import(req, state);
234 if (ret) {
235 pr_err("alg: hash: Failed to import() for %s\n", algo);
236 goto out;
237 }
238 ret = wait_async_op(tresult, crypto_ahash_update(req));
239 if (ret)
240 goto out;
241 *preq = req;
242 ret = 0;
243 goto out_noreq;
244 out:
245 ahash_request_free(req);
246 out_noreq:
247 kfree(state);
248 out_nostate:
249 return ret;
250 }
251
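/*
 * Core hash test.  Three passes over the template array: plain digest()
 * (or init/update/final when use_digest is false) on a linear buffer at
 * the requested alignment offset, then chunked scatterlist vectors
 * (template->np pieces spread via IDX[]), then a partial-update pass
 * that exercises export()/import() between chunks.
 */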
252 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
253 unsigned int tcount, bool use_digest,
254 const int align_offset)
255 {
256 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
257 unsigned int i, j, k, temp;
258 struct scatterlist sg[8];
259 char *result;
260 char *key;
261 struct ahash_request *req;
262 struct tcrypt_result tresult;
263 void *hash_buff;
264 char *xbuf[XBUFSIZE];
265 int ret = -ENOMEM;
266
267 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
268 if (!result)
269 return ret;
270 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
271 if (!key)
272 goto out_nobuf;
273 if (testmgr_alloc_buf(xbuf))
274 goto out_nobuf;
275
276 init_completion(&tresult.completion);
277
278 req = ahash_request_alloc(tfm, GFP_KERNEL);
279 if (!req) {
280 printk(KERN_ERR "alg: hash: Failed to allocate request for "
281 "%s\n", algo);
282 goto out_noreq;
283 }
284 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
285 tcrypt_complete, &tresult);
286
287 j = 0;
288 for (i = 0; i < tcount; i++) {
289 if (template[i].np)
290 continue;
291
292 ret = -EINVAL;
293 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
294 goto out;
295
296 j++;
297 memset(result, 0, MAX_DIGEST_SIZE);
298
299 hash_buff = xbuf[0];
300 hash_buff += align_offset;
301
302 memcpy(hash_buff, template[i].plaintext, template[i].psize);
303 sg_init_one(&sg[0], hash_buff, template[i].psize);
304
305 if (template[i].ksize) {
306 crypto_ahash_clear_flags(tfm, ~0);
307 if (template[i].ksize > MAX_KEYLEN) {
308 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
309 j, algo, template[i].ksize, MAX_KEYLEN);
310 ret = -EINVAL;
311 goto out;
312 }
313 memcpy(key, template[i].key, template[i].ksize);
314 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
315 if (ret) {
316 printk(KERN_ERR "alg: hash: setkey failed on "
317 "test %d for %s: ret=%d\n", j, algo,
318 -ret);
319 goto out;
320 }
321 }
322
323 ahash_request_set_crypt(req, sg, result, template[i].psize);
324 if (use_digest) {
325 ret = wait_async_op(&tresult, crypto_ahash_digest(req));
326 if (ret) {
327 pr_err("alg: hash: digest failed on test %d "
328 "for %s: ret=%d\n", j, algo, -ret);
329 goto out;
330 }
331 } else {
332 ret = wait_async_op(&tresult, crypto_ahash_init(req));
333 if (ret) {
334 pr_err("alt: hash: init failed on test %d "
335 "for %s: ret=%d\n", j, algo, -ret);
336 goto out;
337 }
338 ret = wait_async_op(&tresult, crypto_ahash_update(req));
339 if (ret) {
340 pr_err("alt: hash: update failed on test %d "
341 "for %s: ret=%d\n", j, algo, -ret);
342 goto out;
343 }
344 ret = wait_async_op(&tresult, crypto_ahash_final(req));
345 if (ret) {
346 pr_err("alt: hash: final failed on test %d "
347 "for %s: ret=%d\n", j, algo, -ret);
348 goto out;
349 }
350 }
351
352 if (memcmp(result, template[i].digest,
353 crypto_ahash_digestsize(tfm))) {
354 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
355 j, algo);
356 hexdump(result, crypto_ahash_digestsize(tfm));
357 ret = -EINVAL;
358 goto out;
359 }
360 }
361
362 j = 0;
363 for (i = 0; i < tcount; i++) {
364 /* alignment tests are only done with contiguous buffers */
365 if (align_offset != 0)
366 break;
367
368 if (!template[i].np)
369 continue;
370
371 j++;
372 memset(result, 0, MAX_DIGEST_SIZE);
373
374 temp = 0;
375 sg_init_table(sg, template[i].np);
376 ret = -EINVAL;
377 for (k = 0; k < template[i].np; k++) {
378 if (WARN_ON(offset_in_page(IDX[k]) +
379 template[i].tap[k] > PAGE_SIZE))
380 goto out;
381 sg_set_buf(&sg[k],
382 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
383 offset_in_page(IDX[k]),
384 template[i].plaintext + temp,
385 template[i].tap[k]),
386 template[i].tap[k]);
387 temp += template[i].tap[k];
388 }
389
390 if (template[i].ksize) {
391 if (template[i].ksize > MAX_KEYLEN) {
392 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
393 j, algo, template[i].ksize, MAX_KEYLEN);
394 ret = -EINVAL;
395 goto out;
396 }
397 crypto_ahash_clear_flags(tfm, ~0);
398 memcpy(key, template[i].key, template[i].ksize);
399 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
400
401 if (ret) {
402 printk(KERN_ERR "alg: hash: setkey "
403 "failed on chunking test %d "
404 "for %s: ret=%d\n", j, algo, -ret);
405 goto out;
406 }
407 }
408
409 ahash_request_set_crypt(req, sg, result, template[i].psize);
410 ret = crypto_ahash_digest(req);
411 switch (ret) {
412 case 0:
413 break;
414 case -EINPROGRESS:
415 case -EBUSY:
416 wait_for_completion(&tresult.completion);
417 reinit_completion(&tresult.completion);
418 ret = tresult.err;
419 if (!ret)
420 break;
421 /* fall through */
422 default:
423 printk(KERN_ERR "alg: hash: digest failed "
424 "on chunking test %d for %s: "
425 "ret=%d\n", j, algo, -ret);
426 goto out;
427 }
428
429 if (memcmp(result, template[i].digest,
430 crypto_ahash_digestsize(tfm))) {
431 printk(KERN_ERR "alg: hash: Chunking test %d "
432 "failed for %s\n", j, algo);
433 hexdump(result, crypto_ahash_digestsize(tfm));
434 ret = -EINVAL;
435 goto out;
436 }
437 }
438
439 /* partial update exercise */
440 j = 0;
441 for (i = 0; i < tcount; i++) {
442 /* alignment tests are only done with contiguous buffers */
443 if (align_offset != 0)
444 break;
445
446 if (template[i].np < 2)
447 continue;
448
449 j++;
450 memset(result, 0, MAX_DIGEST_SIZE);
451
452 ret = -EINVAL;
453 hash_buff = xbuf[0];
454 memcpy(hash_buff, template[i].plaintext,
455 template[i].tap[0]);
456 sg_init_one(&sg[0], hash_buff, template[i].tap[0]);
457
458 if (template[i].ksize) {
459 crypto_ahash_clear_flags(tfm, ~0);
460 if (template[i].ksize > MAX_KEYLEN) {
461 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
462 j, algo, template[i].ksize, MAX_KEYLEN);
463 ret = -EINVAL;
464 goto out;
465 }
466 memcpy(key, template[i].key, template[i].ksize);
467 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
468 if (ret) {
469 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
470 j, algo, -ret);
471 goto out;
472 }
473 }
474
475 ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
476 ret = wait_async_op(&tresult, crypto_ahash_init(req));
477 if (ret) {
478 pr_err("alt: hash: init failed on test %d for %s: ret=%d\n",
479 j, algo, -ret);
480 goto out;
481 }
482 ret = wait_async_op(&tresult, crypto_ahash_update(req));
483 if (ret) {
484 pr_err("alt: hash: update failed on test %d for %s: ret=%d\n",
485 j, algo, -ret);
486 goto out;
487 }
488
489 temp = template[i].tap[0];
490 for (k = 1; k < template[i].np; k++) {
491 ret = ahash_partial_update(&req, tfm, &template[i],
492 hash_buff, k, temp, &sg[0], algo, result,
493 &tresult);
494 if (ret) {
495 pr_err("hash: partial update failed on test %d for %s: ret=%d\n",
496 j, algo, -ret);
497 goto out_noreq;
498 }
499 temp += template[i].tap[k];
500 }
501 ret = wait_async_op(&tresult, crypto_ahash_final(req));
502 if (ret) {
503 pr_err("alt: hash: final failed on test %d for %s: ret=%d\n",
504 j, algo, -ret);
505 goto out;
506 }
507 if (memcmp(result, template[i].digest,
508 crypto_ahash_digestsize(tfm))) {
509 pr_err("alg: hash: Partial Test %d failed for %s\n",
510 j, algo);
511 hexdump(result, crypto_ahash_digestsize(tfm));
512 ret = -EINVAL;
513 goto out;
514 }
515 }
516
517 ret = 0;
518
519 out:
520 ahash_request_free(req);
521 out_noreq:
522 testmgr_free_buf(xbuf);
523 out_nobuf:
524 kfree(key);
525 kfree(result);
526 return ret;
527 }
528
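/*
 * Run __test_hash() with an aligned buffer, a one-byte misaligned
 * buffer, and (if the algorithm advertises an alignmask) an offset of
 * alignmask + 1, to catch drivers that mishandle alignment.
 */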
529 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
530 unsigned int tcount, bool use_digest)
531 {
532 unsigned int alignmask;
533 int ret;
534
535 ret = __test_hash(tfm, template, tcount, use_digest, 0);
536 if (ret)
537 return ret;
538
539 /* test unaligned buffers, check with one byte offset */
540 ret = __test_hash(tfm, template, tcount, use_digest, 1);
541 if (ret)
542 return ret;
543
544 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
545 if (alignmask) {
546 /* Check if alignment mask for tfm is correctly set. */
547 ret = __test_hash(tfm, template, tcount, use_digest,
548 alignmask + 1);
549 if (ret)
550 return ret;
551 }
552
553 return 0;
554 }
555
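/*
 * Core AEAD test.  First pass: non-chunked vectors copied into a single
 * (possibly misaligned) buffer, with encrypt or decrypt results and
 * expected verification failures (novrfy) checked.  Second pass:
 * chunked vectors spread across pages via the IDX[] offsets, including
 * a check for writes beyond the expected output ("result buffer
 * corruption").
 */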
556 static int __test_aead(struct crypto_aead *tfm, int enc,
557 struct aead_testvec *template, unsigned int tcount,
558 const bool diff_dst, const int align_offset)
559 {
560 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
561 unsigned int i, j, k, n, temp;
562 int ret = -ENOMEM;
563 char *q;
564 char *key;
565 struct aead_request *req;
566 struct scatterlist *sg;
567 struct scatterlist *sgout;
568 const char *e, *d;
569 struct tcrypt_result result;
570 unsigned int authsize, iv_len;
571 void *input;
572 void *output;
573 void *assoc;
574 char *iv;
575 char *xbuf[XBUFSIZE];
576 char *xoutbuf[XBUFSIZE];
577 char *axbuf[XBUFSIZE];
578
579 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
580 if (!iv)
581 return ret;
582 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
583 if (!key)
584 goto out_noxbuf;
585 if (testmgr_alloc_buf(xbuf))
586 goto out_noxbuf;
587 if (testmgr_alloc_buf(axbuf))
588 goto out_noaxbuf;
589 if (diff_dst && testmgr_alloc_buf(xoutbuf))
590 goto out_nooutbuf;
591
592 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
593 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
594 if (!sg)
595 goto out_nosg;
596 sgout = &sg[16];
597
598 if (diff_dst)
599 d = "-ddst";
600 else
601 d = "";
602
603 if (enc == ENCRYPT)
604 e = "encryption";
605 else
606 e = "decryption";
607
608 init_completion(&result.completion);
609
610 req = aead_request_alloc(tfm, GFP_KERNEL);
611 if (!req) {
612 pr_err("alg: aead%s: Failed to allocate request for %s\n",
613 d, algo);
614 goto out;
615 }
616
617 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
618 tcrypt_complete, &result);
619
620 iv_len = crypto_aead_ivsize(tfm);
621
622 for (i = 0, j = 0; i < tcount; i++) {
623 if (template[i].np)
624 continue;
625
626 j++;
627
628 /* some templates have no input data but they will
629 * touch input
630 */
631 input = xbuf[0];
632 input += align_offset;
633 assoc = axbuf[0];
634
635 ret = -EINVAL;
636 if (WARN_ON(align_offset + template[i].ilen >
637 PAGE_SIZE || template[i].alen > PAGE_SIZE))
638 goto out;
639
640 memcpy(input, template[i].input, template[i].ilen);
641 memcpy(assoc, template[i].assoc, template[i].alen);
642 if (template[i].iv)
643 memcpy(iv, template[i].iv, iv_len);
644 else
645 memset(iv, 0, iv_len);
646
647 crypto_aead_clear_flags(tfm, ~0);
648 if (template[i].wk)
649 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
650
651 if (template[i].klen > MAX_KEYLEN) {
652 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
653 d, j, algo, template[i].klen,
654 MAX_KEYLEN);
655 ret = -EINVAL;
656 goto out;
657 }
658 memcpy(key, template[i].key, template[i].klen);
659
660 ret = crypto_aead_setkey(tfm, key, template[i].klen);
661 if (!ret == template[i].fail) {
662 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
663 d, j, algo, crypto_aead_get_flags(tfm));
664 goto out;
665 } else if (ret)
666 continue;
667
668 authsize = abs(template[i].rlen - template[i].ilen);
669 ret = crypto_aead_setauthsize(tfm, authsize);
670 if (ret) {
671 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
672 d, authsize, j, algo);
673 goto out;
674 }
675
676 k = !!template[i].alen;
677 sg_init_table(sg, k + 1);
678 sg_set_buf(&sg[0], assoc, template[i].alen);
679 sg_set_buf(&sg[k], input,
680 template[i].ilen + (enc ? authsize : 0));
681 output = input;
682
683 if (diff_dst) {
684 sg_init_table(sgout, k + 1);
685 sg_set_buf(&sgout[0], assoc, template[i].alen);
686
687 output = xoutbuf[0];
688 output += align_offset;
689 sg_set_buf(&sgout[k], output,
690 template[i].rlen + (enc ? 0 : authsize));
691 }
692
693 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
694 template[i].ilen, iv);
695
696 aead_request_set_ad(req, template[i].alen);
697
698 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
699
700 switch (ret) {
701 case 0:
702 if (template[i].novrfy) {
703 /* verification was supposed to fail */
704 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
705 d, e, j, algo);
706 /* so really, we got a bad message */
707 ret = -EBADMSG;
708 goto out;
709 }
710 break;
711 case -EINPROGRESS:
712 case -EBUSY:
713 wait_for_completion(&result.completion);
714 reinit_completion(&result.completion);
715 ret = result.err;
716 if (!ret)
717 break;
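/* fall through */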
718 case -EBADMSG:
719 if (template[i].novrfy)
720 /* verification failure was expected */
721 continue;
722 /* fall through */
723 default:
724 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
725 d, e, j, algo, -ret);
726 goto out;
727 }
728
729 q = output;
730 if (memcmp(q, template[i].result, template[i].rlen)) {
731 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
732 d, j, e, algo);
733 hexdump(q, template[i].rlen);
734 ret = -EINVAL;
735 goto out;
736 }
737 }
738
739 for (i = 0, j = 0; i < tcount; i++) {
740 /* alignment tests are only done with contiguous buffers */
741 if (align_offset != 0)
742 break;
743
744 if (!template[i].np)
745 continue;
746
747 j++;
748
749 if (template[i].iv)
750 memcpy(iv, template[i].iv, iv_len);
751 else
752 memset(iv, 0, MAX_IVLEN);
753
754 crypto_aead_clear_flags(tfm, ~0);
755 if (template[i].wk)
756 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
757 if (template[i].klen > MAX_KEYLEN) {
758 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
759 d, j, algo, template[i].klen, MAX_KEYLEN);
760 ret = -EINVAL;
761 goto out;
762 }
763 memcpy(key, template[i].key, template[i].klen);
764
765 ret = crypto_aead_setkey(tfm, key, template[i].klen);
766 if (!ret == template[i].fail) {
767 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
768 d, j, algo, crypto_aead_get_flags(tfm));
769 goto out;
770 } else if (ret)
771 continue;
772
773 authsize = abs(template[i].rlen - template[i].ilen);
774
775 ret = -EINVAL;
776 sg_init_table(sg, template[i].anp + template[i].np);
777 if (diff_dst)
778 sg_init_table(sgout, template[i].anp + template[i].np);
779
780 ret = -EINVAL;
781 for (k = 0, temp = 0; k < template[i].anp; k++) {
782 if (WARN_ON(offset_in_page(IDX[k]) +
783 template[i].atap[k] > PAGE_SIZE))
784 goto out;
785 sg_set_buf(&sg[k],
786 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
787 offset_in_page(IDX[k]),
788 template[i].assoc + temp,
789 template[i].atap[k]),
790 template[i].atap[k]);
791 if (diff_dst)
792 sg_set_buf(&sgout[k],
793 axbuf[IDX[k] >> PAGE_SHIFT] +
794 offset_in_page(IDX[k]),
795 template[i].atap[k]);
796 temp += template[i].atap[k];
797 }
798
799 for (k = 0, temp = 0; k < template[i].np; k++) {
800 if (WARN_ON(offset_in_page(IDX[k]) +
801 template[i].tap[k] > PAGE_SIZE))
802 goto out;
803
804 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
805 memcpy(q, template[i].input + temp, template[i].tap[k]);
806 sg_set_buf(&sg[template[i].anp + k],
807 q, template[i].tap[k]);
808
809 if (diff_dst) {
810 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
811 offset_in_page(IDX[k]);
812
813 memset(q, 0, template[i].tap[k]);
814
815 sg_set_buf(&sgout[template[i].anp + k],
816 q, template[i].tap[k]);
817 }
818
819 n = template[i].tap[k];
820 if (k == template[i].np - 1 && enc)
821 n += authsize;
822 if (offset_in_page(q) + n < PAGE_SIZE)
823 q[n] = 0;
824
825 temp += template[i].tap[k];
826 }
827
828 ret = crypto_aead_setauthsize(tfm, authsize);
829 if (ret) {
830 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
831 d, authsize, j, algo);
832 goto out;
833 }
834
835 if (enc) {
836 if (WARN_ON(sg[template[i].anp + k - 1].offset +
837 sg[template[i].anp + k - 1].length +
838 authsize > PAGE_SIZE)) {
839 ret = -EINVAL;
840 goto out;
841 }
842
843 if (diff_dst)
844 sgout[template[i].anp + k - 1].length +=
845 authsize;
846 sg[template[i].anp + k - 1].length += authsize;
847 }
848
849 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
850 template[i].ilen,
851 iv);
852
853 aead_request_set_ad(req, template[i].alen);
854
855 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
856
857 switch (ret) {
858 case 0:
859 if (template[i].novrfy) {
860 /* verification was supposed to fail */
861 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
862 d, e, j, algo);
863 /* so really, we got a bad message */
864 ret = -EBADMSG;
865 goto out;
866 }
867 break;
868 case -EINPROGRESS:
869 case -EBUSY:
870 wait_for_completion(&result.completion);
871 reinit_completion(&result.completion);
872 ret = result.err;
873 if (!ret)
874 break;
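/* fall through */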
875 case -EBADMSG:
876 if (template[i].novrfy)
877 /* verification failure was expected */
878 continue;
879 /* fall through */
880 default:
881 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
882 d, e, j, algo, -ret);
883 goto out;
884 }
885
886 ret = -EINVAL;
887 for (k = 0, temp = 0; k < template[i].np; k++) {
888 if (diff_dst)
889 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
890 offset_in_page(IDX[k]);
891 else
892 q = xbuf[IDX[k] >> PAGE_SHIFT] +
893 offset_in_page(IDX[k]);
894
895 n = template[i].tap[k];
896 if (k == template[i].np - 1)
897 n += enc ? authsize : -authsize;
898
899 if (memcmp(q, template[i].result + temp, n)) {
900 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
901 d, j, e, k, algo);
902 hexdump(q, n);
903 goto out;
904 }
905
906 q += n;
907 if (k == template[i].np - 1 && !enc) {
908 if (!diff_dst &&
909 memcmp(q, template[i].input +
910 temp + n, authsize))
911 n = authsize;
912 else
913 n = 0;
914 } else {
915 for (n = 0; offset_in_page(q + n) && q[n]; n++)
916 ;
917 }
918 if (n) {
919 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
920 d, j, e, k, algo, n);
921 hexdump(q, n);
922 goto out;
923 }
924
925 temp += template[i].tap[k];
926 }
927 }
928
929 ret = 0;
930
931 out:
932 aead_request_free(req);
933 kfree(sg);
934 out_nosg:
935 if (diff_dst)
936 testmgr_free_buf(xoutbuf);
937 out_nooutbuf:
938 testmgr_free_buf(axbuf);
939 out_noaxbuf:
940 testmgr_free_buf(xbuf);
941 out_noxbuf:
942 kfree(key);
943 kfree(iv);
944 return ret;
945 }
946
947 static int test_aead(struct crypto_aead *tfm, int enc,
948 struct aead_testvec *template, unsigned int tcount)
949 {
950 unsigned int alignmask;
951 int ret;
952
953 /* test 'dst == src' case */
954 ret = __test_aead(tfm, enc, template, tcount, false, 0);
955 if (ret)
956 return ret;
957
958 /* test 'dst != src' case */
959 ret = __test_aead(tfm, enc, template, tcount, true, 0);
960 if (ret)
961 return ret;
962
963 /* test unaligned buffers, check with one byte offset */
964 ret = __test_aead(tfm, enc, template, tcount, true, 1);
965 if (ret)
966 return ret;
967
968 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
969 if (alignmask) {
970 /* Check if alignment mask for tfm is correctly set. */
971 ret = __test_aead(tfm, enc, template, tcount, true,
972 alignmask + 1);
973 if (ret)
974 return ret;
975 }
976
977 return 0;
978 }
979
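/*
 * Single-block cipher test (crypto_cipher): only non-chunked vectors
 * are used; the input is encrypted or decrypted in place one blocksize
 * at a time with crypto_cipher_encrypt_one()/_decrypt_one() and
 * compared against the expected result.
 */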
980 static int test_cipher(struct crypto_cipher *tfm, int enc,
981 struct cipher_testvec *template, unsigned int tcount)
982 {
983 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
984 unsigned int i, j, k;
985 char *q;
986 const char *e;
987 void *data;
988 char *xbuf[XBUFSIZE];
989 int ret = -ENOMEM;
990
991 if (testmgr_alloc_buf(xbuf))
992 goto out_nobuf;
993
994 if (enc == ENCRYPT)
995 e = "encryption";
996 else
997 e = "decryption";
998
999 j = 0;
1000 for (i = 0; i < tcount; i++) {
1001 if (template[i].np)
1002 continue;
1003
1004 j++;
1005
1006 ret = -EINVAL;
1007 if (WARN_ON(template[i].ilen > PAGE_SIZE))
1008 goto out;
1009
1010 data = xbuf[0];
1011 memcpy(data, template[i].input, template[i].ilen);
1012
1013 crypto_cipher_clear_flags(tfm, ~0);
1014 if (template[i].wk)
1015 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1016
1017 ret = crypto_cipher_setkey(tfm, template[i].key,
1018 template[i].klen);
1019 if (!ret == template[i].fail) {
1020 printk(KERN_ERR "alg: cipher: setkey failed "
1021 "on test %d for %s: flags=%x\n", j,
1022 algo, crypto_cipher_get_flags(tfm));
1023 goto out;
1024 } else if (ret)
1025 continue;
1026
1027 for (k = 0; k < template[i].ilen;
1028 k += crypto_cipher_blocksize(tfm)) {
1029 if (enc)
1030 crypto_cipher_encrypt_one(tfm, data + k,
1031 data + k);
1032 else
1033 crypto_cipher_decrypt_one(tfm, data + k,
1034 data + k);
1035 }
1036
1037 q = data;
1038 if (memcmp(q, template[i].result, template[i].rlen)) {
1039 printk(KERN_ERR "alg: cipher: Test %d failed "
1040 "on %s for %s\n", j, e, algo);
1041 hexdump(q, template[i].rlen);
1042 ret = -EINVAL;
1043 goto out;
1044 }
1045 }
1046
1047 ret = 0;
1048
1049 out:
1050 testmgr_free_buf(xbuf);
1051 out_nobuf:
1052 return ret;
1053 }
1054
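/*
 * Core skcipher test, structured like __test_aead(): a linear-buffer
 * pass (which also checks the output IV when the vector provides
 * iv_out) followed by a chunked scatterlist pass with a trailing-byte
 * corruption check.
 */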
1055 static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
1056 struct cipher_testvec *template, unsigned int tcount,
1057 const bool diff_dst, const int align_offset)
1058 {
1059 const char *algo =
1060 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
1061 unsigned int i, j, k, n, temp;
1062 char *q;
1063 struct skcipher_request *req;
1064 struct scatterlist sg[8];
1065 struct scatterlist sgout[8];
1066 const char *e, *d;
1067 struct tcrypt_result result;
1068 void *data;
1069 char iv[MAX_IVLEN];
1070 char *xbuf[XBUFSIZE];
1071 char *xoutbuf[XBUFSIZE];
1072 int ret = -ENOMEM;
1073 unsigned int ivsize = crypto_skcipher_ivsize(tfm);
1074
1075 if (testmgr_alloc_buf(xbuf))
1076 goto out_nobuf;
1077
1078 if (diff_dst && testmgr_alloc_buf(xoutbuf))
1079 goto out_nooutbuf;
1080
1081 if (diff_dst)
1082 d = "-ddst";
1083 else
1084 d = "";
1085
1086 if (enc == ENCRYPT)
1087 e = "encryption";
1088 else
1089 e = "decryption";
1090
1091 init_completion(&result.completion);
1092
1093 req = skcipher_request_alloc(tfm, GFP_KERNEL);
1094 if (!req) {
1095 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
1096 d, algo);
1097 goto out;
1098 }
1099
1100 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1101 tcrypt_complete, &result);
1102
1103 j = 0;
1104 for (i = 0; i < tcount; i++) {
1105 if (template[i].np && !template[i].also_non_np)
1106 continue;
1107
1108 if (template[i].iv)
1109 memcpy(iv, template[i].iv, ivsize);
1110 else
1111 memset(iv, 0, MAX_IVLEN);
1112
1113 j++;
1114 ret = -EINVAL;
1115 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
1116 goto out;
1117
1118 data = xbuf[0];
1119 data += align_offset;
1120 memcpy(data, template[i].input, template[i].ilen);
1121
1122 crypto_skcipher_clear_flags(tfm, ~0);
1123 if (template[i].wk)
1124 crypto_skcipher_set_flags(tfm,
1125 CRYPTO_TFM_REQ_WEAK_KEY);
1126
1127 ret = crypto_skcipher_setkey(tfm, template[i].key,
1128 template[i].klen);
1129 if (!ret == template[i].fail) {
1130 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1131 d, j, algo, crypto_skcipher_get_flags(tfm));
1132 goto out;
1133 } else if (ret)
1134 continue;
1135
1136 sg_init_one(&sg[0], data, template[i].ilen);
1137 if (diff_dst) {
1138 data = xoutbuf[0];
1139 data += align_offset;
1140 sg_init_one(&sgout[0], data, template[i].ilen);
1141 }
1142
1143 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1144 template[i].ilen, iv);
1145 ret = enc ? crypto_skcipher_encrypt(req) :
1146 crypto_skcipher_decrypt(req);
1147
1148 switch (ret) {
1149 case 0:
1150 break;
1151 case -EINPROGRESS:
1152 case -EBUSY:
1153 wait_for_completion(&result.completion);
1154 reinit_completion(&result.completion);
1155 ret = result.err;
1156 if (!ret)
1157 break;
1158 /* fall through */
1159 default:
1160 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1161 d, e, j, algo, -ret);
1162 goto out;
1163 }
1164
1165 q = data;
1166 if (memcmp(q, template[i].result, template[i].rlen)) {
1167 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1168 d, j, e, algo);
1169 hexdump(q, template[i].rlen);
1170 ret = -EINVAL;
1171 goto out;
1172 }
1173
1174 if (template[i].iv_out &&
1175 memcmp(iv, template[i].iv_out,
1176 crypto_skcipher_ivsize(tfm))) {
1177 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1178 d, j, e, algo);
1179 hexdump(iv, crypto_skcipher_ivsize(tfm));
1180 ret = -EINVAL;
1181 goto out;
1182 }
1183 }
1184
1185 j = 0;
1186 for (i = 0; i < tcount; i++) {
1187 /* alignment tests are only done with contiguous buffers */
1188 if (align_offset != 0)
1189 break;
1190
1191 if (!template[i].np)
1192 continue;
1193
1194 if (template[i].iv)
1195 memcpy(iv, template[i].iv, ivsize);
1196 else
1197 memset(iv, 0, MAX_IVLEN);
1198
1199 j++;
1200 crypto_skcipher_clear_flags(tfm, ~0);
1201 if (template[i].wk)
1202 crypto_skcipher_set_flags(tfm,
1203 CRYPTO_TFM_REQ_WEAK_KEY);
1204
1205 ret = crypto_skcipher_setkey(tfm, template[i].key,
1206 template[i].klen);
1207 if (!ret == template[i].fail) {
1208 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1209 d, j, algo, crypto_skcipher_get_flags(tfm));
1210 goto out;
1211 } else if (ret)
1212 continue;
1213
1214 temp = 0;
1215 ret = -EINVAL;
1216 sg_init_table(sg, template[i].np);
1217 if (diff_dst)
1218 sg_init_table(sgout, template[i].np);
1219 for (k = 0; k < template[i].np; k++) {
1220 if (WARN_ON(offset_in_page(IDX[k]) +
1221 template[i].tap[k] > PAGE_SIZE))
1222 goto out;
1223
1224 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1225
1226 memcpy(q, template[i].input + temp, template[i].tap[k]);
1227
1228 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1229 q[template[i].tap[k]] = 0;
1230
1231 sg_set_buf(&sg[k], q, template[i].tap[k]);
1232 if (diff_dst) {
1233 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1234 offset_in_page(IDX[k]);
1235
1236 sg_set_buf(&sgout[k], q, template[i].tap[k]);
1237
1238 memset(q, 0, template[i].tap[k]);
1239 if (offset_in_page(q) +
1240 template[i].tap[k] < PAGE_SIZE)
1241 q[template[i].tap[k]] = 0;
1242 }
1243
1244 temp += template[i].tap[k];
1245 }
1246
1247 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1248 template[i].ilen, iv);
1249
1250 ret = enc ? crypto_skcipher_encrypt(req) :
1251 crypto_skcipher_decrypt(req);
1252
1253 switch (ret) {
1254 case 0:
1255 break;
1256 case -EINPROGRESS:
1257 case -EBUSY:
1258 wait_for_completion(&result.completion);
1259 reinit_completion(&result.completion);
1260 ret = result.err;
1261 if (!ret)
1262 break;
1263 /* fall through */
1264 default:
1265 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1266 d, e, j, algo, -ret);
1267 goto out;
1268 }
1269
1270 temp = 0;
1271 ret = -EINVAL;
1272 for (k = 0; k < template[i].np; k++) {
1273 if (diff_dst)
1274 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1275 offset_in_page(IDX[k]);
1276 else
1277 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1278 offset_in_page(IDX[k]);
1279
1280 if (memcmp(q, template[i].result + temp,
1281 template[i].tap[k])) {
1282 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1283 d, j, e, k, algo);
1284 hexdump(q, template[i].tap[k]);
1285 goto out;
1286 }
1287
1288 q += template[i].tap[k];
1289 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1290 ;
1291 if (n) {
1292 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1293 d, j, e, k, algo, n);
1294 hexdump(q, n);
1295 goto out;
1296 }
1297 temp += template[i].tap[k];
1298 }
1299 }
1300
1301 ret = 0;
1302
1303 out:
1304 skcipher_request_free(req);
1305 if (diff_dst)
1306 testmgr_free_buf(xoutbuf);
1307 out_nooutbuf:
1308 testmgr_free_buf(xbuf);
1309 out_nobuf:
1310 return ret;
1311 }
1312
1313 static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1314 struct cipher_testvec *template, unsigned int tcount)
1315 {
1316 unsigned int alignmask;
1317 int ret;
1318
1319 /* test 'dst == src' case */
1320 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1321 if (ret)
1322 return ret;
1323
1324 /* test 'dst != src' case */
1325 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1326 if (ret)
1327 return ret;
1328
1329 /* test unaligned buffers, check with one byte offset */
1330 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1331 if (ret)
1332 return ret;
1333
1334 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1335 if (alignmask) {
1336 /* Check if alignment mask for tfm is correctly set. */
1337 ret = __test_skcipher(tfm, enc, template, tcount, true,
1338 alignmask + 1);
1339 if (ret)
1340 return ret;
1341 }
1342
1343 return 0;
1344 }
1345
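/*
 * Compression test: each vector is (de)compressed into a COMP_BUF_SIZE
 * buffer and both the produced length and the bytes are compared with
 * the expected output.
 */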
1346 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1347 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1348 {
1349 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1350 unsigned int i;
1351 char result[COMP_BUF_SIZE];
1352 int ret;
1353
1354 for (i = 0; i < ctcount; i++) {
1355 int ilen;
1356 unsigned int dlen = COMP_BUF_SIZE;
1357
1358 memset(result, 0, sizeof (result));
1359
1360 ilen = ctemplate[i].inlen;
1361 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1362 ilen, result, &dlen);
1363 if (ret) {
1364 printk(KERN_ERR "alg: comp: compression failed "
1365 "on test %d for %s: ret=%d\n", i + 1, algo,
1366 -ret);
1367 goto out;
1368 }
1369
1370 if (dlen != ctemplate[i].outlen) {
1371 printk(KERN_ERR "alg: comp: Compression test %d "
1372 "failed for %s: output len = %d\n", i + 1, algo,
1373 dlen);
1374 ret = -EINVAL;
1375 goto out;
1376 }
1377
1378 if (memcmp(result, ctemplate[i].output, dlen)) {
1379 printk(KERN_ERR "alg: comp: Compression test %d "
1380 "failed for %s\n", i + 1, algo);
1381 hexdump(result, dlen);
1382 ret = -EINVAL;
1383 goto out;
1384 }
1385 }
1386
1387 for (i = 0; i < dtcount; i++) {
1388 int ilen;
1389 unsigned int dlen = COMP_BUF_SIZE;
1390
1391 memset(result, 0, sizeof (result));
1392
1393 ilen = dtemplate[i].inlen;
1394 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1395 ilen, result, &dlen);
1396 if (ret) {
1397 printk(KERN_ERR "alg: comp: decompression failed "
1398 "on test %d for %s: ret=%d\n", i + 1, algo,
1399 -ret);
1400 goto out;
1401 }
1402
1403 if (dlen != dtemplate[i].outlen) {
1404 printk(KERN_ERR "alg: comp: Decompression test %d "
1405 "failed for %s: output len = %d\n", i + 1, algo,
1406 dlen);
1407 ret = -EINVAL;
1408 goto out;
1409 }
1410
1411 if (memcmp(result, dtemplate[i].output, dlen)) {
1412 printk(KERN_ERR "alg: comp: Decompression test %d "
1413 "failed for %s\n", i + 1, algo);
1414 hexdump(result, dlen);
1415 ret = -EINVAL;
1416 goto out;
1417 }
1418 }
1419
1420 ret = 0;
1421
1422 out:
1423 return ret;
1424 }
1425
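/*
 * CPRNG test (currently used for ansi_cprng): the seed passed to
 * crypto_rng_reset() is the concatenation of the vector's V, key and DT
 * fields; the RNG is then read 'loops' times and the final block is
 * compared with the expected output.
 */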
1426 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1427 unsigned int tcount)
1428 {
1429 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1430 int err = 0, i, j, seedsize;
1431 u8 *seed;
1432 char result[32];
1433
1434 seedsize = crypto_rng_seedsize(tfm);
1435
1436 seed = kmalloc(seedsize, GFP_KERNEL);
1437 if (!seed) {
1438 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1439 "for %s\n", algo);
1440 return -ENOMEM;
1441 }
1442
1443 for (i = 0; i < tcount; i++) {
1444 memset(result, 0, 32);
1445
1446 memcpy(seed, template[i].v, template[i].vlen);
1447 memcpy(seed + template[i].vlen, template[i].key,
1448 template[i].klen);
1449 memcpy(seed + template[i].vlen + template[i].klen,
1450 template[i].dt, template[i].dtlen);
1451
1452 err = crypto_rng_reset(tfm, seed, seedsize);
1453 if (err) {
1454 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1455 "for %s\n", algo);
1456 goto out;
1457 }
1458
1459 for (j = 0; j < template[i].loops; j++) {
1460 err = crypto_rng_get_bytes(tfm, result,
1461 template[i].rlen);
1462 if (err < 0) {
1463 printk(KERN_ERR "alg: cprng: Failed to obtain "
1464 "the correct amount of random data for "
1465 "%s (requested %d)\n", algo,
1466 template[i].rlen);
1467 goto out;
1468 }
1469 }
1470
1471 err = memcmp(result, template[i].result,
1472 template[i].rlen);
1473 if (err) {
1474 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1475 i, algo);
1476 hexdump(result, template[i].rlen);
1477 err = -EINVAL;
1478 goto out;
1479 }
1480 }
1481
1482 out:
1483 kfree(seed);
1484 return err;
1485 }
1486
1487 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1488 u32 type, u32 mask)
1489 {
1490 struct crypto_aead *tfm;
1491 int err = 0;
1492
1493 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask);
1494 if (IS_ERR(tfm)) {
1495 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1496 "%ld\n", driver, PTR_ERR(tfm));
1497 return PTR_ERR(tfm);
1498 }
1499
1500 if (desc->suite.aead.enc.vecs) {
1501 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1502 desc->suite.aead.enc.count);
1503 if (err)
1504 goto out;
1505 }
1506
1507 if (!err && desc->suite.aead.dec.vecs)
1508 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1509 desc->suite.aead.dec.count);
1510
1511 out:
1512 crypto_free_aead(tfm);
1513 return err;
1514 }
1515
1516 static int alg_test_cipher(const struct alg_test_desc *desc,
1517 const char *driver, u32 type, u32 mask)
1518 {
1519 struct crypto_cipher *tfm;
1520 int err = 0;
1521
1522 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1523 if (IS_ERR(tfm)) {
1524 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1525 "%s: %ld\n", driver, PTR_ERR(tfm));
1526 return PTR_ERR(tfm);
1527 }
1528
1529 if (desc->suite.cipher.enc.vecs) {
1530 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1531 desc->suite.cipher.enc.count);
1532 if (err)
1533 goto out;
1534 }
1535
1536 if (desc->suite.cipher.dec.vecs)
1537 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1538 desc->suite.cipher.dec.count);
1539
1540 out:
1541 crypto_free_cipher(tfm);
1542 return err;
1543 }
1544
1545 static int alg_test_skcipher(const struct alg_test_desc *desc,
1546 const char *driver, u32 type, u32 mask)
1547 {
1548 struct crypto_skcipher *tfm;
1549 int err = 0;
1550
1551 tfm = crypto_alloc_skcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1552 if (IS_ERR(tfm)) {
1553 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1554 "%s: %ld\n", driver, PTR_ERR(tfm));
1555 return PTR_ERR(tfm);
1556 }
1557
1558 if (desc->suite.cipher.enc.vecs) {
1559 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1560 desc->suite.cipher.enc.count);
1561 if (err)
1562 goto out;
1563 }
1564
1565 if (desc->suite.cipher.dec.vecs)
1566 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1567 desc->suite.cipher.dec.count);
1568
1569 out:
1570 crypto_free_skcipher(tfm);
1571 return err;
1572 }
1573
1574 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1575 u32 type, u32 mask)
1576 {
1577 struct crypto_comp *tfm;
1578 int err;
1579
1580 tfm = crypto_alloc_comp(driver, type, mask);
1581 if (IS_ERR(tfm)) {
1582 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1583 "%ld\n", driver, PTR_ERR(tfm));
1584 return PTR_ERR(tfm);
1585 }
1586
1587 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1588 desc->suite.comp.decomp.vecs,
1589 desc->suite.comp.comp.count,
1590 desc->suite.comp.decomp.count);
1591
1592 crypto_free_comp(tfm);
1593 return err;
1594 }
1595
1596 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1597 u32 type, u32 mask)
1598 {
1599 struct crypto_ahash *tfm;
1600 int err;
1601
1602 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1603 if (IS_ERR(tfm)) {
1604 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1605 "%ld\n", driver, PTR_ERR(tfm));
1606 return PTR_ERR(tfm);
1607 }
1608
1609 err = test_hash(tfm, desc->suite.hash.vecs,
1610 desc->suite.hash.count, true);
1611 if (!err)
1612 err = test_hash(tfm, desc->suite.hash.vecs,
1613 desc->suite.hash.count, false);
1614
1615 crypto_free_ahash(tfm);
1616 return err;
1617 }
1618
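/*
 * crc32c gets the generic hash tests plus one extra sanity check: the
 * shash descriptor's 32-bit state is seeded with a known constant and
 * crypto_shash_final() is expected to return its bitwise complement,
 * matching crc32c's final inversion step.
 */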
1619 static int alg_test_crc32c(const struct alg_test_desc *desc,
1620 const char *driver, u32 type, u32 mask)
1621 {
1622 struct crypto_shash *tfm;
1623 u32 val;
1624 int err;
1625
1626 err = alg_test_hash(desc, driver, type, mask);
1627 if (err)
1628 goto out;
1629
1630 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1631 if (IS_ERR(tfm)) {
1632 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1633 "%ld\n", driver, PTR_ERR(tfm));
1634 err = PTR_ERR(tfm);
1635 goto out;
1636 }
1637
1638 do {
1639 SHASH_DESC_ON_STACK(shash, tfm);
1640 u32 *ctx = (u32 *)shash_desc_ctx(shash);
1641
1642 shash->tfm = tfm;
1643 shash->flags = 0;
1644
1645 *ctx = le32_to_cpu(420553207);
1646 err = crypto_shash_final(shash, (u8 *)&val);
1647 if (err) {
1648 printk(KERN_ERR "alg: crc32c: Operation failed for "
1649 "%s: %d\n", driver, err);
1650 break;
1651 }
1652
1653 if (val != ~420553207) {
1654 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1655 "%d\n", driver, val);
1656 err = -EINVAL;
1657 }
1658 } while (0);
1659
1660 crypto_free_shash(tfm);
1661
1662 out:
1663 return err;
1664 }
1665
1666 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1667 u32 type, u32 mask)
1668 {
1669 struct crypto_rng *rng;
1670 int err;
1671
1672 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1673 if (IS_ERR(rng)) {
1674 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1675 "%ld\n", driver, PTR_ERR(rng));
1676 return PTR_ERR(rng);
1677 }
1678
1679 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1680
1681 crypto_free_rng(rng);
1682
1683 return err;
1684 }
1685
1686
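/*
 * Run a single CAVS-style DRBG vector: instantiate with the test
 * entropy and personalisation string, generate twice with additional
 * input (pulling fresh prediction-resistance entropy before each call
 * when 'pr' is set; the caller derives 'pr' from the "drbg_pr_" driver
 * name prefix), and compare the second output with the expected data.
 */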
1687 static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1688 const char *driver, u32 type, u32 mask)
1689 {
1690 int ret = -EAGAIN;
1691 struct crypto_rng *drng;
1692 struct drbg_test_data test_data;
1693 struct drbg_string addtl, pers, testentropy;
1694 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1695
1696 if (!buf)
1697 return -ENOMEM;
1698
1699 drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1700 if (IS_ERR(drng)) {
1701 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1702 "%s\n", driver);
1703 kzfree(buf);
1704 return -ENOMEM;
1705 }
1706
1707 test_data.testentropy = &testentropy;
1708 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1709 drbg_string_fill(&pers, test->pers, test->perslen);
1710 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1711 if (ret) {
1712 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1713 goto outbuf;
1714 }
1715
1716 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1717 if (pr) {
1718 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1719 ret = crypto_drbg_get_bytes_addtl_test(drng,
1720 buf, test->expectedlen, &addtl, &test_data);
1721 } else {
1722 ret = crypto_drbg_get_bytes_addtl(drng,
1723 buf, test->expectedlen, &addtl);
1724 }
1725 if (ret < 0) {
1726 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1727 "driver %s\n", driver);
1728 goto outbuf;
1729 }
1730
1731 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1732 if (pr) {
1733 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1734 ret = crypto_drbg_get_bytes_addtl_test(drng,
1735 buf, test->expectedlen, &addtl, &test_data);
1736 } else {
1737 ret = crypto_drbg_get_bytes_addtl(drng,
1738 buf, test->expectedlen, &addtl);
1739 }
1740 if (ret < 0) {
1741 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1742 "driver %s\n", driver);
1743 goto outbuf;
1744 }
1745
1746 ret = memcmp(test->expected, buf, test->expectedlen);
1747
1748 outbuf:
1749 crypto_free_rng(drng);
1750 kzfree(buf);
1751 return ret;
1752 }
1753
1754
1755 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1756 u32 type, u32 mask)
1757 {
1758 int err = 0;
1759 int pr = 0;
1760 int i = 0;
1761 struct drbg_testvec *template = desc->suite.drbg.vecs;
1762 unsigned int tcount = desc->suite.drbg.count;
1763
1764 if (0 == memcmp(driver, "drbg_pr_", 8))
1765 pr = 1;
1766
1767 for (i = 0; i < tcount; i++) {
1768 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1769 if (err) {
1770 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1771 i, driver);
1772 err = -EINVAL;
1773 break;
1774 }
1775 }
1776 return err;
1777
1778 }
1779
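/*
 * Run one RSA vector: set the public or private key, encrypt the
 * message through a two-entry source scatterlist (split at 8 bytes,
 * presumably to exercise SG handling) and compare with the expected
 * ciphertext; for private-key vectors, also decrypt the ciphertext and
 * check that the original message comes back.
 */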
1780 static int do_test_rsa(struct crypto_akcipher *tfm,
1781 struct akcipher_testvec *vecs)
1782 {
1783 char *xbuf[XBUFSIZE];
1784 struct akcipher_request *req;
1785 void *outbuf_enc = NULL;
1786 void *outbuf_dec = NULL;
1787 struct tcrypt_result result;
1788 unsigned int out_len_max, out_len = 0;
1789 int err = -ENOMEM;
1790 struct scatterlist src, dst, src_tab[2];
1791
1792 if (testmgr_alloc_buf(xbuf))
1793 return err;
1794
1795 req = akcipher_request_alloc(tfm, GFP_KERNEL);
1796 if (!req)
1797 goto free_xbuf;
1798
1799 init_completion(&result.completion);
1800
1801 if (vecs->public_key_vec)
1802 err = crypto_akcipher_set_pub_key(tfm, vecs->key,
1803 vecs->key_len);
1804 else
1805 err = crypto_akcipher_set_priv_key(tfm, vecs->key,
1806 vecs->key_len);
1807 if (err)
1808 goto free_req;
1809
1810 out_len_max = crypto_akcipher_maxsize(tfm);
1811 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
1812 if (!outbuf_enc)
1813 goto free_req;
1814
1815 if (WARN_ON(vecs->m_size > PAGE_SIZE))
1816 goto free_all;
1817
1818 memcpy(xbuf[0], vecs->m, vecs->m_size);
1819
1820 sg_init_table(src_tab, 2);
1821 sg_set_buf(&src_tab[0], xbuf[0], 8);
1822 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
1823 sg_init_one(&dst, outbuf_enc, out_len_max);
1824 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
1825 out_len_max);
1826 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1827 tcrypt_complete, &result);
1828
1829 /* Run RSA encrypt - c = m^e mod n; */
1830 err = wait_async_op(&result, crypto_akcipher_encrypt(req));
1831 if (err) {
1832 pr_err("alg: rsa: encrypt test failed. err %d\n", err);
1833 goto free_all;
1834 }
1835 if (req->dst_len != vecs->c_size) {
1836 pr_err("alg: rsa: encrypt test failed. Invalid output len\n");
1837 err = -EINVAL;
1838 goto free_all;
1839 }
1840 /* verify that encrypted message is equal to expected */
1841 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
1842 pr_err("alg: rsa: encrypt test failed. Invalid output\n");
1843 err = -EINVAL;
1844 goto free_all;
1845 }
1846 /* Don't invoke decrypt for vectors with public key */
1847 if (vecs->public_key_vec) {
1848 err = 0;
1849 goto free_all;
1850 }
1851 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
1852 if (!outbuf_dec) {
1853 err = -ENOMEM;
1854 goto free_all;
1855 }
1856
1857 if (WARN_ON(vecs->c_size > PAGE_SIZE))
1858 goto free_all;
1859
1860 memcpy(xbuf[0], vecs->c, vecs->c_size);
1861
1862 sg_init_one(&src, xbuf[0], vecs->c_size);
1863 sg_init_one(&dst, outbuf_dec, out_len_max);
1864 init_completion(&result.completion);
1865 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
1866
1867 /* Run RSA decrypt - m = c^d mod n; */
1868 err = wait_async_op(&result, crypto_akcipher_decrypt(req));
1869 if (err) {
1870 pr_err("alg: rsa: decrypt test failed. err %d\n", err);
1871 goto free_all;
1872 }
1873 out_len = req->dst_len;
1874 if (out_len != vecs->m_size) {
1875 pr_err("alg: rsa: decrypt test failed. Invalid output len\n");
1876 err = -EINVAL;
1877 goto free_all;
1878 }
1879 /* verify that decrypted message is equal to the original msg */
1880 if (memcmp(vecs->m, outbuf_dec, vecs->m_size)) {
1881 pr_err("alg: rsa: decrypt test failed. Invalid output\n");
1882 err = -EINVAL;
1883 }
1884 free_all:
1885 kfree(outbuf_dec);
1886 kfree(outbuf_enc);
1887 free_req:
1888 akcipher_request_free(req);
1889 free_xbuf:
1890 testmgr_free_buf(xbuf);
1891 return err;
1892 }
1893
1894 static int test_rsa(struct crypto_akcipher *tfm, struct akcipher_testvec *vecs,
1895 unsigned int tcount)
1896 {
1897 int ret, i;
1898
1899 for (i = 0; i < tcount; i++) {
1900 ret = do_test_rsa(tfm, vecs++);
1901 if (ret) {
1902 pr_err("alg: rsa: test failed on vector %d, err=%d\n",
1903 i + 1, ret);
1904 return ret;
1905 }
1906 }
1907 return 0;
1908 }
1909
1910 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
1911 struct akcipher_testvec *vecs, unsigned int tcount)
1912 {
1913 if (strncmp(alg, "rsa", 3) == 0)
1914 return test_rsa(tfm, vecs, tcount);
1915
1916 return 0;
1917 }
1918
1919 static int alg_test_akcipher(const struct alg_test_desc *desc,
1920 const char *driver, u32 type, u32 mask)
1921 {
1922 struct crypto_akcipher *tfm;
1923 int err = 0;
1924
1925 tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1926 if (IS_ERR(tfm)) {
1927 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
1928 driver, PTR_ERR(tfm));
1929 return PTR_ERR(tfm);
1930 }
1931 if (desc->suite.akcipher.vecs)
1932 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
1933 desc->suite.akcipher.count);
1934
1935 crypto_free_akcipher(tfm);
1936 return err;
1937 }
1938
1939 static int alg_test_null(const struct alg_test_desc *desc,
1940 const char *driver, u32 type, u32 mask)
1941 {
1942 return 0;
1943 }
1944
1945 /* Please keep this list sorted by algorithm name. */
1946 static const struct alg_test_desc alg_test_descs[] = {
1947 {
1948 .alg = "__cbc-cast5-avx",
1949 .test = alg_test_null,
1950 }, {
1951 .alg = "__cbc-cast6-avx",
1952 .test = alg_test_null,
1953 }, {
1954 .alg = "__cbc-serpent-avx",
1955 .test = alg_test_null,
1956 }, {
1957 .alg = "__cbc-serpent-avx2",
1958 .test = alg_test_null,
1959 }, {
1960 .alg = "__cbc-serpent-sse2",
1961 .test = alg_test_null,
1962 }, {
1963 .alg = "__cbc-twofish-avx",
1964 .test = alg_test_null,
1965 }, {
1966 .alg = "__driver-cbc-aes-aesni",
1967 .test = alg_test_null,
1968 .fips_allowed = 1,
1969 }, {
1970 .alg = "__driver-cbc-camellia-aesni",
1971 .test = alg_test_null,
1972 }, {
1973 .alg = "__driver-cbc-camellia-aesni-avx2",
1974 .test = alg_test_null,
1975 }, {
1976 .alg = "__driver-cbc-cast5-avx",
1977 .test = alg_test_null,
1978 }, {
1979 .alg = "__driver-cbc-cast6-avx",
1980 .test = alg_test_null,
1981 }, {
1982 .alg = "__driver-cbc-serpent-avx",
1983 .test = alg_test_null,
1984 }, {
1985 .alg = "__driver-cbc-serpent-avx2",
1986 .test = alg_test_null,
1987 }, {
1988 .alg = "__driver-cbc-serpent-sse2",
1989 .test = alg_test_null,
1990 }, {
1991 .alg = "__driver-cbc-twofish-avx",
1992 .test = alg_test_null,
1993 }, {
1994 .alg = "__driver-ecb-aes-aesni",
1995 .test = alg_test_null,
1996 .fips_allowed = 1,
1997 }, {
1998 .alg = "__driver-ecb-camellia-aesni",
1999 .test = alg_test_null,
2000 }, {
2001 .alg = "__driver-ecb-camellia-aesni-avx2",
2002 .test = alg_test_null,
2003 }, {
2004 .alg = "__driver-ecb-cast5-avx",
2005 .test = alg_test_null,
2006 }, {
2007 .alg = "__driver-ecb-cast6-avx",
2008 .test = alg_test_null,
2009 }, {
2010 .alg = "__driver-ecb-serpent-avx",
2011 .test = alg_test_null,
2012 }, {
2013 .alg = "__driver-ecb-serpent-avx2",
2014 .test = alg_test_null,
2015 }, {
2016 .alg = "__driver-ecb-serpent-sse2",
2017 .test = alg_test_null,
2018 }, {
2019 .alg = "__driver-ecb-twofish-avx",
2020 .test = alg_test_null,
2021 }, {
2022 .alg = "__driver-gcm-aes-aesni",
2023 .test = alg_test_null,
2024 .fips_allowed = 1,
2025 }, {
2026 .alg = "__ghash-pclmulqdqni",
2027 .test = alg_test_null,
2028 .fips_allowed = 1,
2029 }, {
2030 .alg = "ansi_cprng",
2031 .test = alg_test_cprng,
2032 .suite = {
2033 .cprng = {
2034 .vecs = ansi_cprng_aes_tv_template,
2035 .count = ANSI_CPRNG_AES_TEST_VECTORS
2036 }
2037 }
2038 }, {
2039 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2040 .test = alg_test_aead,
2041 .suite = {
2042 .aead = {
2043 .enc = {
2044 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
2045 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
2046 },
2047 .dec = {
2048 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
2049 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
2050 }
2051 }
2052 }
2053 }, {
2054 .alg = "authenc(hmac(sha1),cbc(aes))",
2055 .test = alg_test_aead,
2056 .suite = {
2057 .aead = {
2058 .enc = {
2059 .vecs =
2060 hmac_sha1_aes_cbc_enc_tv_temp,
2061 .count =
2062 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
2063 }
2064 }
2065 }
2066 }, {
2067 .alg = "authenc(hmac(sha1),cbc(des))",
2068 .test = alg_test_aead,
2069 .suite = {
2070 .aead = {
2071 .enc = {
2072 .vecs =
2073 hmac_sha1_des_cbc_enc_tv_temp,
2074 .count =
2075 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2076 }
2077 }
2078 }
2079 }, {
2080 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2081 .test = alg_test_aead,
2082 .fips_allowed = 1,
2083 .suite = {
2084 .aead = {
2085 .enc = {
2086 .vecs =
2087 hmac_sha1_des3_ede_cbc_enc_tv_temp,
2088 .count =
2089 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
2090 }
2091 }
2092 }
2093 }, {
2094 .alg = "authenc(hmac(sha1),ctr(aes))",
2095 .test = alg_test_null,
2096 .fips_allowed = 1,
2097 }, {
2098 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2099 .test = alg_test_aead,
2100 .suite = {
2101 .aead = {
2102 .enc = {
2103 .vecs =
2104 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2105 .count =
2106 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
2107 },
2108 .dec = {
2109 .vecs =
2110 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2111 .count =
2112 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2113 }
2114 }
2115 }
2116 }, {
2117 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2118 .test = alg_test_null,
2119 .fips_allowed = 1,
2120 }, {
2121 .alg = "authenc(hmac(sha224),cbc(des))",
2122 .test = alg_test_aead,
2123 .suite = {
2124 .aead = {
2125 .enc = {
2126 .vecs =
2127 hmac_sha224_des_cbc_enc_tv_temp,
2128 .count =
2129 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2130 }
2131 }
2132 }
2133 }, {
2134 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2135 .test = alg_test_aead,
2136 .fips_allowed = 1,
2137 .suite = {
2138 .aead = {
2139 .enc = {
2140 .vecs =
2141 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2142 .count =
2143 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2144 }
2145 }
2146 }
2147 }, {
2148 .alg = "authenc(hmac(sha256),cbc(aes))",
2149 .test = alg_test_aead,
2150 .fips_allowed = 1,
2151 .suite = {
2152 .aead = {
2153 .enc = {
2154 .vecs =
2155 hmac_sha256_aes_cbc_enc_tv_temp,
2156 .count =
2157 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2158 }
2159 }
2160 }
2161 }, {
2162 .alg = "authenc(hmac(sha256),cbc(des))",
2163 .test = alg_test_aead,
2164 .suite = {
2165 .aead = {
2166 .enc = {
2167 .vecs =
2168 hmac_sha256_des_cbc_enc_tv_temp,
2169 .count =
2170 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2171 }
2172 }
2173 }
2174 }, {
2175 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2176 .test = alg_test_aead,
2177 .fips_allowed = 1,
2178 .suite = {
2179 .aead = {
2180 .enc = {
2181 .vecs =
2182 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2183 .count =
2184 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2185 }
2186 }
2187 }
2188 }, {
2189 .alg = "authenc(hmac(sha256),ctr(aes))",
2190 .test = alg_test_null,
2191 .fips_allowed = 1,
2192 }, {
2193 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2194 .test = alg_test_null,
2195 .fips_allowed = 1,
2196 }, {
2197 .alg = "authenc(hmac(sha384),cbc(des))",
2198 .test = alg_test_aead,
2199 .suite = {
2200 .aead = {
2201 .enc = {
2202 .vecs =
2203 hmac_sha384_des_cbc_enc_tv_temp,
2204 .count =
2205 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2206 }
2207 }
2208 }
2209 }, {
2210 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2211 .test = alg_test_aead,
2212 .fips_allowed = 1,
2213 .suite = {
2214 .aead = {
2215 .enc = {
2216 .vecs =
2217 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2218 .count =
2219 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2220 }
2221 }
2222 }
2223 }, {
2224 .alg = "authenc(hmac(sha384),ctr(aes))",
2225 .test = alg_test_null,
2226 .fips_allowed = 1,
2227 }, {
2228 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2229 .test = alg_test_null,
2230 .fips_allowed = 1,
2231 }, {
2232 .alg = "authenc(hmac(sha512),cbc(aes))",
2233 .fips_allowed = 1,
2234 .test = alg_test_aead,
2235 .suite = {
2236 .aead = {
2237 .enc = {
2238 .vecs =
2239 hmac_sha512_aes_cbc_enc_tv_temp,
2240 .count =
2241 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2242 }
2243 }
2244 }
2245 }, {
2246 .alg = "authenc(hmac(sha512),cbc(des))",
2247 .test = alg_test_aead,
2248 .suite = {
2249 .aead = {
2250 .enc = {
2251 .vecs =
2252 hmac_sha512_des_cbc_enc_tv_temp,
2253 .count =
2254 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2255 }
2256 }
2257 }
2258 }, {
2259 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2260 .test = alg_test_aead,
2261 .fips_allowed = 1,
2262 .suite = {
2263 .aead = {
2264 .enc = {
2265 .vecs =
2266 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2267 .count =
2268 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2269 }
2270 }
2271 }
2272 }, {
2273 .alg = "authenc(hmac(sha512),ctr(aes))",
2274 .test = alg_test_null,
2275 .fips_allowed = 1,
2276 }, {
2277 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2278 .test = alg_test_null,
2279 .fips_allowed = 1,
2280 }, {
2281 .alg = "cbc(aes)",
2282 .test = alg_test_skcipher,
2283 .fips_allowed = 1,
2284 .suite = {
2285 .cipher = {
2286 .enc = {
2287 .vecs = aes_cbc_enc_tv_template,
2288 .count = AES_CBC_ENC_TEST_VECTORS
2289 },
2290 .dec = {
2291 .vecs = aes_cbc_dec_tv_template,
2292 .count = AES_CBC_DEC_TEST_VECTORS
2293 }
2294 }
2295 }
2296 }, {
2297 .alg = "cbc(anubis)",
2298 .test = alg_test_skcipher,
2299 .suite = {
2300 .cipher = {
2301 .enc = {
2302 .vecs = anubis_cbc_enc_tv_template,
2303 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2304 },
2305 .dec = {
2306 .vecs = anubis_cbc_dec_tv_template,
2307 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2308 }
2309 }
2310 }
2311 }, {
2312 .alg = "cbc(blowfish)",
2313 .test = alg_test_skcipher,
2314 .suite = {
2315 .cipher = {
2316 .enc = {
2317 .vecs = bf_cbc_enc_tv_template,
2318 .count = BF_CBC_ENC_TEST_VECTORS
2319 },
2320 .dec = {
2321 .vecs = bf_cbc_dec_tv_template,
2322 .count = BF_CBC_DEC_TEST_VECTORS
2323 }
2324 }
2325 }
2326 }, {
2327 .alg = "cbc(camellia)",
2328 .test = alg_test_skcipher,
2329 .suite = {
2330 .cipher = {
2331 .enc = {
2332 .vecs = camellia_cbc_enc_tv_template,
2333 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2334 },
2335 .dec = {
2336 .vecs = camellia_cbc_dec_tv_template,
2337 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2338 }
2339 }
2340 }
2341 }, {
2342 .alg = "cbc(cast5)",
2343 .test = alg_test_skcipher,
2344 .suite = {
2345 .cipher = {
2346 .enc = {
2347 .vecs = cast5_cbc_enc_tv_template,
2348 .count = CAST5_CBC_ENC_TEST_VECTORS
2349 },
2350 .dec = {
2351 .vecs = cast5_cbc_dec_tv_template,
2352 .count = CAST5_CBC_DEC_TEST_VECTORS
2353 }
2354 }
2355 }
2356 }, {
2357 .alg = "cbc(cast6)",
2358 .test = alg_test_skcipher,
2359 .suite = {
2360 .cipher = {
2361 .enc = {
2362 .vecs = cast6_cbc_enc_tv_template,
2363 .count = CAST6_CBC_ENC_TEST_VECTORS
2364 },
2365 .dec = {
2366 .vecs = cast6_cbc_dec_tv_template,
2367 .count = CAST6_CBC_DEC_TEST_VECTORS
2368 }
2369 }
2370 }
2371 }, {
2372 .alg = "cbc(des)",
2373 .test = alg_test_skcipher,
2374 .suite = {
2375 .cipher = {
2376 .enc = {
2377 .vecs = des_cbc_enc_tv_template,
2378 .count = DES_CBC_ENC_TEST_VECTORS
2379 },
2380 .dec = {
2381 .vecs = des_cbc_dec_tv_template,
2382 .count = DES_CBC_DEC_TEST_VECTORS
2383 }
2384 }
2385 }
2386 }, {
2387 .alg = "cbc(des3_ede)",
2388 .test = alg_test_skcipher,
2389 .fips_allowed = 1,
2390 .suite = {
2391 .cipher = {
2392 .enc = {
2393 .vecs = des3_ede_cbc_enc_tv_template,
2394 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2395 },
2396 .dec = {
2397 .vecs = des3_ede_cbc_dec_tv_template,
2398 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2399 }
2400 }
2401 }
2402 }, {
2403 .alg = "cbc(serpent)",
2404 .test = alg_test_skcipher,
2405 .suite = {
2406 .cipher = {
2407 .enc = {
2408 .vecs = serpent_cbc_enc_tv_template,
2409 .count = SERPENT_CBC_ENC_TEST_VECTORS
2410 },
2411 .dec = {
2412 .vecs = serpent_cbc_dec_tv_template,
2413 .count = SERPENT_CBC_DEC_TEST_VECTORS
2414 }
2415 }
2416 }
2417 }, {
2418 .alg = "cbc(twofish)",
2419 .test = alg_test_skcipher,
2420 .suite = {
2421 .cipher = {
2422 .enc = {
2423 .vecs = tf_cbc_enc_tv_template,
2424 .count = TF_CBC_ENC_TEST_VECTORS
2425 },
2426 .dec = {
2427 .vecs = tf_cbc_dec_tv_template,
2428 .count = TF_CBC_DEC_TEST_VECTORS
2429 }
2430 }
2431 }
2432 }, {
2433 .alg = "ccm(aes)",
2434 .test = alg_test_aead,
2435 .fips_allowed = 1,
2436 .suite = {
2437 .aead = {
2438 .enc = {
2439 .vecs = aes_ccm_enc_tv_template,
2440 .count = AES_CCM_ENC_TEST_VECTORS
2441 },
2442 .dec = {
2443 .vecs = aes_ccm_dec_tv_template,
2444 .count = AES_CCM_DEC_TEST_VECTORS
2445 }
2446 }
2447 }
2448 }, {
2449 .alg = "chacha20",
2450 .test = alg_test_skcipher,
2451 .suite = {
2452 .cipher = {
2453 .enc = {
2454 .vecs = chacha20_enc_tv_template,
2455 .count = CHACHA20_ENC_TEST_VECTORS
2456 },
2457 .dec = {
2458 .vecs = chacha20_enc_tv_template,
2459 .count = CHACHA20_ENC_TEST_VECTORS
2460 },
2461 }
2462 }
2463 }, {
2464 .alg = "cmac(aes)",
2465 .fips_allowed = 1,
2466 .test = alg_test_hash,
2467 .suite = {
2468 .hash = {
2469 .vecs = aes_cmac128_tv_template,
2470 .count = CMAC_AES_TEST_VECTORS
2471 }
2472 }
2473 }, {
2474 .alg = "cmac(des3_ede)",
2475 .fips_allowed = 1,
2476 .test = alg_test_hash,
2477 .suite = {
2478 .hash = {
2479 .vecs = des3_ede_cmac64_tv_template,
2480 .count = CMAC_DES3_EDE_TEST_VECTORS
2481 }
2482 }
2483 }, {
2484 .alg = "compress_null",
2485 .test = alg_test_null,
2486 }, {
2487 .alg = "crc32",
2488 .test = alg_test_hash,
2489 .suite = {
2490 .hash = {
2491 .vecs = crc32_tv_template,
2492 .count = CRC32_TEST_VECTORS
2493 }
2494 }
2495 }, {
2496 .alg = "crc32c",
2497 .test = alg_test_crc32c,
2498 .fips_allowed = 1,
2499 .suite = {
2500 .hash = {
2501 .vecs = crc32c_tv_template,
2502 .count = CRC32C_TEST_VECTORS
2503 }
2504 }
2505 }, {
2506 .alg = "crct10dif",
2507 .test = alg_test_hash,
2508 .fips_allowed = 1,
2509 .suite = {
2510 .hash = {
2511 .vecs = crct10dif_tv_template,
2512 .count = CRCT10DIF_TEST_VECTORS
2513 }
2514 }
2515 }, {
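		/*
		 * Assumed rationale, not stated in this file: the cryptd(...)
		 * wrappers below are internal helper implementations with no
		 * vectors of their own; they are exercised indirectly through
		 * the top-level algorithms built on them, so only the null
		 * test is registered here.
		 */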
2516 .alg = "cryptd(__driver-cbc-aes-aesni)",
2517 .test = alg_test_null,
2518 .fips_allowed = 1,
2519 }, {
2520 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2521 .test = alg_test_null,
2522 }, {
2523 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2524 .test = alg_test_null,
2525 }, {
2526 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2527 .test = alg_test_null,
2528 }, {
2529 .alg = "cryptd(__driver-ecb-aes-aesni)",
2530 .test = alg_test_null,
2531 .fips_allowed = 1,
2532 }, {
2533 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2534 .test = alg_test_null,
2535 }, {
2536 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2537 .test = alg_test_null,
2538 }, {
2539 .alg = "cryptd(__driver-ecb-cast5-avx)",
2540 .test = alg_test_null,
2541 }, {
2542 .alg = "cryptd(__driver-ecb-cast6-avx)",
2543 .test = alg_test_null,
2544 }, {
2545 .alg = "cryptd(__driver-ecb-serpent-avx)",
2546 .test = alg_test_null,
2547 }, {
2548 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2549 .test = alg_test_null,
2550 }, {
2551 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2552 .test = alg_test_null,
2553 }, {
2554 .alg = "cryptd(__driver-ecb-twofish-avx)",
2555 .test = alg_test_null,
2556 }, {
2557 .alg = "cryptd(__driver-gcm-aes-aesni)",
2558 .test = alg_test_null,
2559 .fips_allowed = 1,
2560 }, {
2561 .alg = "cryptd(__ghash-pclmulqdqni)",
2562 .test = alg_test_null,
2563 .fips_allowed = 1,
2564 }, {
2565 .alg = "ctr(aes)",
2566 .test = alg_test_skcipher,
2567 .fips_allowed = 1,
2568 .suite = {
2569 .cipher = {
2570 .enc = {
2571 .vecs = aes_ctr_enc_tv_template,
2572 .count = AES_CTR_ENC_TEST_VECTORS
2573 },
2574 .dec = {
2575 .vecs = aes_ctr_dec_tv_template,
2576 .count = AES_CTR_DEC_TEST_VECTORS
2577 }
2578 }
2579 }
2580 }, {
2581 .alg = "ctr(blowfish)",
2582 .test = alg_test_skcipher,
2583 .suite = {
2584 .cipher = {
2585 .enc = {
2586 .vecs = bf_ctr_enc_tv_template,
2587 .count = BF_CTR_ENC_TEST_VECTORS
2588 },
2589 .dec = {
2590 .vecs = bf_ctr_dec_tv_template,
2591 .count = BF_CTR_DEC_TEST_VECTORS
2592 }
2593 }
2594 }
2595 }, {
2596 .alg = "ctr(camellia)",
2597 .test = alg_test_skcipher,
2598 .suite = {
2599 .cipher = {
2600 .enc = {
2601 .vecs = camellia_ctr_enc_tv_template,
2602 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2603 },
2604 .dec = {
2605 .vecs = camellia_ctr_dec_tv_template,
2606 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2607 }
2608 }
2609 }
2610 }, {
2611 .alg = "ctr(cast5)",
2612 .test = alg_test_skcipher,
2613 .suite = {
2614 .cipher = {
2615 .enc = {
2616 .vecs = cast5_ctr_enc_tv_template,
2617 .count = CAST5_CTR_ENC_TEST_VECTORS
2618 },
2619 .dec = {
2620 .vecs = cast5_ctr_dec_tv_template,
2621 .count = CAST5_CTR_DEC_TEST_VECTORS
2622 }
2623 }
2624 }
2625 }, {
2626 .alg = "ctr(cast6)",
2627 .test = alg_test_skcipher,
2628 .suite = {
2629 .cipher = {
2630 .enc = {
2631 .vecs = cast6_ctr_enc_tv_template,
2632 .count = CAST6_CTR_ENC_TEST_VECTORS
2633 },
2634 .dec = {
2635 .vecs = cast6_ctr_dec_tv_template,
2636 .count = CAST6_CTR_DEC_TEST_VECTORS
2637 }
2638 }
2639 }
2640 }, {
2641 .alg = "ctr(des)",
2642 .test = alg_test_skcipher,
2643 .suite = {
2644 .cipher = {
2645 .enc = {
2646 .vecs = des_ctr_enc_tv_template,
2647 .count = DES_CTR_ENC_TEST_VECTORS
2648 },
2649 .dec = {
2650 .vecs = des_ctr_dec_tv_template,
2651 .count = DES_CTR_DEC_TEST_VECTORS
2652 }
2653 }
2654 }
2655 }, {
2656 .alg = "ctr(des3_ede)",
2657 .test = alg_test_skcipher,
2658 .suite = {
2659 .cipher = {
2660 .enc = {
2661 .vecs = des3_ede_ctr_enc_tv_template,
2662 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2663 },
2664 .dec = {
2665 .vecs = des3_ede_ctr_dec_tv_template,
2666 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2667 }
2668 }
2669 }
2670 }, {
2671 .alg = "ctr(serpent)",
2672 .test = alg_test_skcipher,
2673 .suite = {
2674 .cipher = {
2675 .enc = {
2676 .vecs = serpent_ctr_enc_tv_template,
2677 .count = SERPENT_CTR_ENC_TEST_VECTORS
2678 },
2679 .dec = {
2680 .vecs = serpent_ctr_dec_tv_template,
2681 .count = SERPENT_CTR_DEC_TEST_VECTORS
2682 }
2683 }
2684 }
2685 }, {
2686 .alg = "ctr(twofish)",
2687 .test = alg_test_skcipher,
2688 .suite = {
2689 .cipher = {
2690 .enc = {
2691 .vecs = tf_ctr_enc_tv_template,
2692 .count = TF_CTR_ENC_TEST_VECTORS
2693 },
2694 .dec = {
2695 .vecs = tf_ctr_dec_tv_template,
2696 .count = TF_CTR_DEC_TEST_VECTORS
2697 }
2698 }
2699 }
2700 }, {
2701 .alg = "cts(cbc(aes))",
2702 .test = alg_test_skcipher,
2703 .suite = {
2704 .cipher = {
2705 .enc = {
2706 .vecs = cts_mode_enc_tv_template,
2707 .count = CTS_MODE_ENC_TEST_VECTORS
2708 },
2709 .dec = {
2710 .vecs = cts_mode_dec_tv_template,
2711 .count = CTS_MODE_DEC_TEST_VECTORS
2712 }
2713 }
2714 }
2715 }, {
2716 .alg = "deflate",
2717 .test = alg_test_comp,
2718 .fips_allowed = 1,
2719 .suite = {
2720 .comp = {
2721 .comp = {
2722 .vecs = deflate_comp_tv_template,
2723 .count = DEFLATE_COMP_TEST_VECTORS
2724 },
2725 .decomp = {
2726 .vecs = deflate_decomp_tv_template,
2727 .count = DEFLATE_DECOMP_TEST_VECTORS
2728 }
2729 }
2730 }
2731 }, {
2732 .alg = "digest_null",
2733 .test = alg_test_null,
2734 }, {
2735 .alg = "drbg_nopr_ctr_aes128",
2736 .test = alg_test_drbg,
2737 .fips_allowed = 1,
2738 .suite = {
2739 .drbg = {
2740 .vecs = drbg_nopr_ctr_aes128_tv_template,
2741 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2742 }
2743 }
2744 }, {
2745 .alg = "drbg_nopr_ctr_aes192",
2746 .test = alg_test_drbg,
2747 .fips_allowed = 1,
2748 .suite = {
2749 .drbg = {
2750 .vecs = drbg_nopr_ctr_aes192_tv_template,
2751 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2752 }
2753 }
2754 }, {
2755 .alg = "drbg_nopr_ctr_aes256",
2756 .test = alg_test_drbg,
2757 .fips_allowed = 1,
2758 .suite = {
2759 .drbg = {
2760 .vecs = drbg_nopr_ctr_aes256_tv_template,
2761 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2762 }
2763 }
2764 }, {
2765 /*
2766 * There is no need to specifically test the DRBG with every
2767 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2768 */
2769 .alg = "drbg_nopr_hmac_sha1",
2770 .fips_allowed = 1,
2771 .test = alg_test_null,
2772 }, {
2773 .alg = "drbg_nopr_hmac_sha256",
2774 .test = alg_test_drbg,
2775 .fips_allowed = 1,
2776 .suite = {
2777 .drbg = {
2778 .vecs = drbg_nopr_hmac_sha256_tv_template,
2779 .count =
2780 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2781 }
2782 }
2783 }, {
2784 /* covered by drbg_nopr_hmac_sha256 test */
2785 .alg = "drbg_nopr_hmac_sha384",
2786 .fips_allowed = 1,
2787 .test = alg_test_null,
2788 }, {
2789 .alg = "drbg_nopr_hmac_sha512",
2790 .test = alg_test_null,
2791 .fips_allowed = 1,
2792 }, {
2793 .alg = "drbg_nopr_sha1",
2794 .fips_allowed = 1,
2795 .test = alg_test_null,
2796 }, {
2797 .alg = "drbg_nopr_sha256",
2798 .test = alg_test_drbg,
2799 .fips_allowed = 1,
2800 .suite = {
2801 .drbg = {
2802 .vecs = drbg_nopr_sha256_tv_template,
2803 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2804 }
2805 }
2806 }, {
2807 /* covered by drbg_nopr_sha256 test */
2808 .alg = "drbg_nopr_sha384",
2809 .fips_allowed = 1,
2810 .test = alg_test_null,
2811 }, {
2812 .alg = "drbg_nopr_sha512",
2813 .fips_allowed = 1,
2814 .test = alg_test_null,
2815 }, {
2816 .alg = "drbg_pr_ctr_aes128",
2817 .test = alg_test_drbg,
2818 .fips_allowed = 1,
2819 .suite = {
2820 .drbg = {
2821 .vecs = drbg_pr_ctr_aes128_tv_template,
2822 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2823 }
2824 }
2825 }, {
2826 /* covered by drbg_pr_ctr_aes128 test */
2827 .alg = "drbg_pr_ctr_aes192",
2828 .fips_allowed = 1,
2829 .test = alg_test_null,
2830 }, {
2831 .alg = "drbg_pr_ctr_aes256",
2832 .fips_allowed = 1,
2833 .test = alg_test_null,
2834 }, {
2835 .alg = "drbg_pr_hmac_sha1",
2836 .fips_allowed = 1,
2837 .test = alg_test_null,
2838 }, {
2839 .alg = "drbg_pr_hmac_sha256",
2840 .test = alg_test_drbg,
2841 .fips_allowed = 1,
2842 .suite = {
2843 .drbg = {
2844 .vecs = drbg_pr_hmac_sha256_tv_template,
2845 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2846 }
2847 }
2848 }, {
2849 /* covered by drbg_pr_hmac_sha256 test */
2850 .alg = "drbg_pr_hmac_sha384",
2851 .fips_allowed = 1,
2852 .test = alg_test_null,
2853 }, {
2854 .alg = "drbg_pr_hmac_sha512",
2855 .test = alg_test_null,
2856 .fips_allowed = 1,
2857 }, {
2858 .alg = "drbg_pr_sha1",
2859 .fips_allowed = 1,
2860 .test = alg_test_null,
2861 }, {
2862 .alg = "drbg_pr_sha256",
2863 .test = alg_test_drbg,
2864 .fips_allowed = 1,
2865 .suite = {
2866 .drbg = {
2867 .vecs = drbg_pr_sha256_tv_template,
2868 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
2869 }
2870 }
2871 }, {
2872 /* covered by drbg_pr_sha256 test */
2873 .alg = "drbg_pr_sha384",
2874 .fips_allowed = 1,
2875 .test = alg_test_null,
2876 }, {
2877 .alg = "drbg_pr_sha512",
2878 .fips_allowed = 1,
2879 .test = alg_test_null,
2880 }, {
2881 .alg = "ecb(__aes-aesni)",
2882 .test = alg_test_null,
2883 .fips_allowed = 1,
2884 }, {
2885 .alg = "ecb(aes)",
2886 .test = alg_test_skcipher,
2887 .fips_allowed = 1,
2888 .suite = {
2889 .cipher = {
2890 .enc = {
2891 .vecs = aes_enc_tv_template,
2892 .count = AES_ENC_TEST_VECTORS
2893 },
2894 .dec = {
2895 .vecs = aes_dec_tv_template,
2896 .count = AES_DEC_TEST_VECTORS
2897 }
2898 }
2899 }
2900 }, {
2901 .alg = "ecb(anubis)",
2902 .test = alg_test_skcipher,
2903 .suite = {
2904 .cipher = {
2905 .enc = {
2906 .vecs = anubis_enc_tv_template,
2907 .count = ANUBIS_ENC_TEST_VECTORS
2908 },
2909 .dec = {
2910 .vecs = anubis_dec_tv_template,
2911 .count = ANUBIS_DEC_TEST_VECTORS
2912 }
2913 }
2914 }
2915 }, {
2916 .alg = "ecb(arc4)",
2917 .test = alg_test_skcipher,
2918 .suite = {
2919 .cipher = {
2920 .enc = {
2921 .vecs = arc4_enc_tv_template,
2922 .count = ARC4_ENC_TEST_VECTORS
2923 },
2924 .dec = {
2925 .vecs = arc4_dec_tv_template,
2926 .count = ARC4_DEC_TEST_VECTORS
2927 }
2928 }
2929 }
2930 }, {
2931 .alg = "ecb(blowfish)",
2932 .test = alg_test_skcipher,
2933 .suite = {
2934 .cipher = {
2935 .enc = {
2936 .vecs = bf_enc_tv_template,
2937 .count = BF_ENC_TEST_VECTORS
2938 },
2939 .dec = {
2940 .vecs = bf_dec_tv_template,
2941 .count = BF_DEC_TEST_VECTORS
2942 }
2943 }
2944 }
2945 }, {
2946 .alg = "ecb(camellia)",
2947 .test = alg_test_skcipher,
2948 .suite = {
2949 .cipher = {
2950 .enc = {
2951 .vecs = camellia_enc_tv_template,
2952 .count = CAMELLIA_ENC_TEST_VECTORS
2953 },
2954 .dec = {
2955 .vecs = camellia_dec_tv_template,
2956 .count = CAMELLIA_DEC_TEST_VECTORS
2957 }
2958 }
2959 }
2960 }, {
2961 .alg = "ecb(cast5)",
2962 .test = alg_test_skcipher,
2963 .suite = {
2964 .cipher = {
2965 .enc = {
2966 .vecs = cast5_enc_tv_template,
2967 .count = CAST5_ENC_TEST_VECTORS
2968 },
2969 .dec = {
2970 .vecs = cast5_dec_tv_template,
2971 .count = CAST5_DEC_TEST_VECTORS
2972 }
2973 }
2974 }
2975 }, {
2976 .alg = "ecb(cast6)",
2977 .test = alg_test_skcipher,
2978 .suite = {
2979 .cipher = {
2980 .enc = {
2981 .vecs = cast6_enc_tv_template,
2982 .count = CAST6_ENC_TEST_VECTORS
2983 },
2984 .dec = {
2985 .vecs = cast6_dec_tv_template,
2986 .count = CAST6_DEC_TEST_VECTORS
2987 }
2988 }
2989 }
2990 }, {
2991 .alg = "ecb(cipher_null)",
2992 .test = alg_test_null,
2993 }, {
2994 .alg = "ecb(des)",
2995 .test = alg_test_skcipher,
2996 .suite = {
2997 .cipher = {
2998 .enc = {
2999 .vecs = des_enc_tv_template,
3000 .count = DES_ENC_TEST_VECTORS
3001 },
3002 .dec = {
3003 .vecs = des_dec_tv_template,
3004 .count = DES_DEC_TEST_VECTORS
3005 }
3006 }
3007 }
3008 }, {
3009 .alg = "ecb(des3_ede)",
3010 .test = alg_test_skcipher,
3011 .fips_allowed = 1,
3012 .suite = {
3013 .cipher = {
3014 .enc = {
3015 .vecs = des3_ede_enc_tv_template,
3016 .count = DES3_EDE_ENC_TEST_VECTORS
3017 },
3018 .dec = {
3019 .vecs = des3_ede_dec_tv_template,
3020 .count = DES3_EDE_DEC_TEST_VECTORS
3021 }
3022 }
3023 }
3024 }, {
3025 .alg = "ecb(fcrypt)",
3026 .test = alg_test_skcipher,
3027 .suite = {
3028 .cipher = {
3029 .enc = {
3030 .vecs = fcrypt_pcbc_enc_tv_template,
3031 .count = 1
3032 },
3033 .dec = {
3034 .vecs = fcrypt_pcbc_dec_tv_template,
3035 .count = 1
3036 }
3037 }
3038 }
3039 }, {
3040 .alg = "ecb(khazad)",
3041 .test = alg_test_skcipher,
3042 .suite = {
3043 .cipher = {
3044 .enc = {
3045 .vecs = khazad_enc_tv_template,
3046 .count = KHAZAD_ENC_TEST_VECTORS
3047 },
3048 .dec = {
3049 .vecs = khazad_dec_tv_template,
3050 .count = KHAZAD_DEC_TEST_VECTORS
3051 }
3052 }
3053 }
3054 }, {
3055 .alg = "ecb(seed)",
3056 .test = alg_test_skcipher,
3057 .suite = {
3058 .cipher = {
3059 .enc = {
3060 .vecs = seed_enc_tv_template,
3061 .count = SEED_ENC_TEST_VECTORS
3062 },
3063 .dec = {
3064 .vecs = seed_dec_tv_template,
3065 .count = SEED_DEC_TEST_VECTORS
3066 }
3067 }
3068 }
3069 }, {
3070 .alg = "ecb(serpent)",
3071 .test = alg_test_skcipher,
3072 .suite = {
3073 .cipher = {
3074 .enc = {
3075 .vecs = serpent_enc_tv_template,
3076 .count = SERPENT_ENC_TEST_VECTORS
3077 },
3078 .dec = {
3079 .vecs = serpent_dec_tv_template,
3080 .count = SERPENT_DEC_TEST_VECTORS
3081 }
3082 }
3083 }
3084 }, {
3085 .alg = "ecb(tea)",
3086 .test = alg_test_skcipher,
3087 .suite = {
3088 .cipher = {
3089 .enc = {
3090 .vecs = tea_enc_tv_template,
3091 .count = TEA_ENC_TEST_VECTORS
3092 },
3093 .dec = {
3094 .vecs = tea_dec_tv_template,
3095 .count = TEA_DEC_TEST_VECTORS
3096 }
3097 }
3098 }
3099 }, {
3100 .alg = "ecb(tnepres)",
3101 .test = alg_test_skcipher,
3102 .suite = {
3103 .cipher = {
3104 .enc = {
3105 .vecs = tnepres_enc_tv_template,
3106 .count = TNEPRES_ENC_TEST_VECTORS
3107 },
3108 .dec = {
3109 .vecs = tnepres_dec_tv_template,
3110 .count = TNEPRES_DEC_TEST_VECTORS
3111 }
3112 }
3113 }
3114 }, {
3115 .alg = "ecb(twofish)",
3116 .test = alg_test_skcipher,
3117 .suite = {
3118 .cipher = {
3119 .enc = {
3120 .vecs = tf_enc_tv_template,
3121 .count = TF_ENC_TEST_VECTORS
3122 },
3123 .dec = {
3124 .vecs = tf_dec_tv_template,
3125 .count = TF_DEC_TEST_VECTORS
3126 }
3127 }
3128 }
3129 }, {
3130 .alg = "ecb(xeta)",
3131 .test = alg_test_skcipher,
3132 .suite = {
3133 .cipher = {
3134 .enc = {
3135 .vecs = xeta_enc_tv_template,
3136 .count = XETA_ENC_TEST_VECTORS
3137 },
3138 .dec = {
3139 .vecs = xeta_dec_tv_template,
3140 .count = XETA_DEC_TEST_VECTORS
3141 }
3142 }
3143 }
3144 }, {
3145 .alg = "ecb(xtea)",
3146 .test = alg_test_skcipher,
3147 .suite = {
3148 .cipher = {
3149 .enc = {
3150 .vecs = xtea_enc_tv_template,
3151 .count = XTEA_ENC_TEST_VECTORS
3152 },
3153 .dec = {
3154 .vecs = xtea_dec_tv_template,
3155 .count = XTEA_DEC_TEST_VECTORS
3156 }
3157 }
3158 }
3159 }, {
3160 .alg = "gcm(aes)",
3161 .test = alg_test_aead,
3162 .fips_allowed = 1,
3163 .suite = {
3164 .aead = {
3165 .enc = {
3166 .vecs = aes_gcm_enc_tv_template,
3167 .count = AES_GCM_ENC_TEST_VECTORS
3168 },
3169 .dec = {
3170 .vecs = aes_gcm_dec_tv_template,
3171 .count = AES_GCM_DEC_TEST_VECTORS
3172 }
3173 }
3174 }
3175 }, {
3176 .alg = "ghash",
3177 .test = alg_test_hash,
3178 .fips_allowed = 1,
3179 .suite = {
3180 .hash = {
3181 .vecs = ghash_tv_template,
3182 .count = GHASH_TEST_VECTORS
3183 }
3184 }
3185 }, {
3186 .alg = "hmac(crc32)",
3187 .test = alg_test_hash,
3188 .suite = {
3189 .hash = {
3190 .vecs = bfin_crc_tv_template,
3191 .count = BFIN_CRC_TEST_VECTORS
3192 }
3193 }
3194 }, {
3195 .alg = "hmac(md5)",
3196 .test = alg_test_hash,
3197 .suite = {
3198 .hash = {
3199 .vecs = hmac_md5_tv_template,
3200 .count = HMAC_MD5_TEST_VECTORS
3201 }
3202 }
3203 }, {
3204 .alg = "hmac(rmd128)",
3205 .test = alg_test_hash,
3206 .suite = {
3207 .hash = {
3208 .vecs = hmac_rmd128_tv_template,
3209 .count = HMAC_RMD128_TEST_VECTORS
3210 }
3211 }
3212 }, {
3213 .alg = "hmac(rmd160)",
3214 .test = alg_test_hash,
3215 .suite = {
3216 .hash = {
3217 .vecs = hmac_rmd160_tv_template,
3218 .count = HMAC_RMD160_TEST_VECTORS
3219 }
3220 }
3221 }, {
3222 .alg = "hmac(sha1)",
3223 .test = alg_test_hash,
3224 .fips_allowed = 1,
3225 .suite = {
3226 .hash = {
3227 .vecs = hmac_sha1_tv_template,
3228 .count = HMAC_SHA1_TEST_VECTORS
3229 }
3230 }
3231 }, {
3232 .alg = "hmac(sha224)",
3233 .test = alg_test_hash,
3234 .fips_allowed = 1,
3235 .suite = {
3236 .hash = {
3237 .vecs = hmac_sha224_tv_template,
3238 .count = HMAC_SHA224_TEST_VECTORS
3239 }
3240 }
3241 }, {
3242 .alg = "hmac(sha256)",
3243 .test = alg_test_hash,
3244 .fips_allowed = 1,
3245 .suite = {
3246 .hash = {
3247 .vecs = hmac_sha256_tv_template,
3248 .count = HMAC_SHA256_TEST_VECTORS
3249 }
3250 }
3251 }, {
3252 .alg = "hmac(sha384)",
3253 .test = alg_test_hash,
3254 .fips_allowed = 1,
3255 .suite = {
3256 .hash = {
3257 .vecs = hmac_sha384_tv_template,
3258 .count = HMAC_SHA384_TEST_VECTORS
3259 }
3260 }
3261 }, {
3262 .alg = "hmac(sha512)",
3263 .test = alg_test_hash,
3264 .fips_allowed = 1,
3265 .suite = {
3266 .hash = {
3267 .vecs = hmac_sha512_tv_template,
3268 .count = HMAC_SHA512_TEST_VECTORS
3269 }
3270 }
3271 }, {
3272 .alg = "jitterentropy_rng",
3273 .fips_allowed = 1,
3274 .test = alg_test_null,
3275 }, {
3276 .alg = "kw(aes)",
3277 .test = alg_test_skcipher,
3278 .fips_allowed = 1,
3279 .suite = {
3280 .cipher = {
3281 .enc = {
3282 .vecs = aes_kw_enc_tv_template,
3283 .count = ARRAY_SIZE(aes_kw_enc_tv_template)
3284 },
3285 .dec = {
3286 .vecs = aes_kw_dec_tv_template,
3287 .count = ARRAY_SIZE(aes_kw_dec_tv_template)
3288 }
3289 }
3290 }
3291 }, {
3292 .alg = "lrw(aes)",
3293 .test = alg_test_skcipher,
3294 .suite = {
3295 .cipher = {
3296 .enc = {
3297 .vecs = aes_lrw_enc_tv_template,
3298 .count = AES_LRW_ENC_TEST_VECTORS
3299 },
3300 .dec = {
3301 .vecs = aes_lrw_dec_tv_template,
3302 .count = AES_LRW_DEC_TEST_VECTORS
3303 }
3304 }
3305 }
3306 }, {
3307 .alg = "lrw(camellia)",
3308 .test = alg_test_skcipher,
3309 .suite = {
3310 .cipher = {
3311 .enc = {
3312 .vecs = camellia_lrw_enc_tv_template,
3313 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
3314 },
3315 .dec = {
3316 .vecs = camellia_lrw_dec_tv_template,
3317 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
3318 }
3319 }
3320 }
3321 }, {
3322 .alg = "lrw(cast6)",
3323 .test = alg_test_skcipher,
3324 .suite = {
3325 .cipher = {
3326 .enc = {
3327 .vecs = cast6_lrw_enc_tv_template,
3328 .count = CAST6_LRW_ENC_TEST_VECTORS
3329 },
3330 .dec = {
3331 .vecs = cast6_lrw_dec_tv_template,
3332 .count = CAST6_LRW_DEC_TEST_VECTORS
3333 }
3334 }
3335 }
3336 }, {
3337 .alg = "lrw(serpent)",
3338 .test = alg_test_skcipher,
3339 .suite = {
3340 .cipher = {
3341 .enc = {
3342 .vecs = serpent_lrw_enc_tv_template,
3343 .count = SERPENT_LRW_ENC_TEST_VECTORS
3344 },
3345 .dec = {
3346 .vecs = serpent_lrw_dec_tv_template,
3347 .count = SERPENT_LRW_DEC_TEST_VECTORS
3348 }
3349 }
3350 }
3351 }, {
3352 .alg = "lrw(twofish)",
3353 .test = alg_test_skcipher,
3354 .suite = {
3355 .cipher = {
3356 .enc = {
3357 .vecs = tf_lrw_enc_tv_template,
3358 .count = TF_LRW_ENC_TEST_VECTORS
3359 },
3360 .dec = {
3361 .vecs = tf_lrw_dec_tv_template,
3362 .count = TF_LRW_DEC_TEST_VECTORS
3363 }
3364 }
3365 }
3366 }, {
3367 .alg = "lz4",
3368 .test = alg_test_comp,
3369 .fips_allowed = 1,
3370 .suite = {
3371 .comp = {
3372 .comp = {
3373 .vecs = lz4_comp_tv_template,
3374 .count = LZ4_COMP_TEST_VECTORS
3375 },
3376 .decomp = {
3377 .vecs = lz4_decomp_tv_template,
3378 .count = LZ4_DECOMP_TEST_VECTORS
3379 }
3380 }
3381 }
3382 }, {
3383 .alg = "lz4hc",
3384 .test = alg_test_comp,
3385 .fips_allowed = 1,
3386 .suite = {
3387 .comp = {
3388 .comp = {
3389 .vecs = lz4hc_comp_tv_template,
3390 .count = LZ4HC_COMP_TEST_VECTORS
3391 },
3392 .decomp = {
3393 .vecs = lz4hc_decomp_tv_template,
3394 .count = LZ4HC_DECOMP_TEST_VECTORS
3395 }
3396 }
3397 }
3398 }, {
3399 .alg = "lzo",
3400 .test = alg_test_comp,
3401 .fips_allowed = 1,
3402 .suite = {
3403 .comp = {
3404 .comp = {
3405 .vecs = lzo_comp_tv_template,
3406 .count = LZO_COMP_TEST_VECTORS
3407 },
3408 .decomp = {
3409 .vecs = lzo_decomp_tv_template,
3410 .count = LZO_DECOMP_TEST_VECTORS
3411 }
3412 }
3413 }
3414 }, {
3415 .alg = "md4",
3416 .test = alg_test_hash,
3417 .suite = {
3418 .hash = {
3419 .vecs = md4_tv_template,
3420 .count = MD4_TEST_VECTORS
3421 }
3422 }
3423 }, {
3424 .alg = "md5",
3425 .test = alg_test_hash,
3426 .suite = {
3427 .hash = {
3428 .vecs = md5_tv_template,
3429 .count = MD5_TEST_VECTORS
3430 }
3431 }
3432 }, {
3433 .alg = "michael_mic",
3434 .test = alg_test_hash,
3435 .suite = {
3436 .hash = {
3437 .vecs = michael_mic_tv_template,
3438 .count = MICHAEL_MIC_TEST_VECTORS
3439 }
3440 }
3441 }, {
3442 .alg = "ofb(aes)",
3443 .test = alg_test_skcipher,
3444 .fips_allowed = 1,
3445 .suite = {
3446 .cipher = {
3447 .enc = {
3448 .vecs = aes_ofb_enc_tv_template,
3449 .count = AES_OFB_ENC_TEST_VECTORS
3450 },
3451 .dec = {
3452 .vecs = aes_ofb_dec_tv_template,
3453 .count = AES_OFB_DEC_TEST_VECTORS
3454 }
3455 }
3456 }
3457 }, {
3458 .alg = "pcbc(fcrypt)",
3459 .test = alg_test_skcipher,
3460 .suite = {
3461 .cipher = {
3462 .enc = {
3463 .vecs = fcrypt_pcbc_enc_tv_template,
3464 .count = FCRYPT_ENC_TEST_VECTORS
3465 },
3466 .dec = {
3467 .vecs = fcrypt_pcbc_dec_tv_template,
3468 .count = FCRYPT_DEC_TEST_VECTORS
3469 }
3470 }
3471 }
3472 }, {
3473 .alg = "poly1305",
3474 .test = alg_test_hash,
3475 .suite = {
3476 .hash = {
3477 .vecs = poly1305_tv_template,
3478 .count = POLY1305_TEST_VECTORS
3479 }
3480 }
3481 }, {
3482 .alg = "rfc3686(ctr(aes))",
3483 .test = alg_test_skcipher,
3484 .fips_allowed = 1,
3485 .suite = {
3486 .cipher = {
3487 .enc = {
3488 .vecs = aes_ctr_rfc3686_enc_tv_template,
3489 .count = AES_CTR_3686_ENC_TEST_VECTORS
3490 },
3491 .dec = {
3492 .vecs = aes_ctr_rfc3686_dec_tv_template,
3493 .count = AES_CTR_3686_DEC_TEST_VECTORS
3494 }
3495 }
3496 }
3497 }, {
3498 .alg = "rfc4106(gcm(aes))",
3499 .test = alg_test_aead,
3500 .fips_allowed = 1,
3501 .suite = {
3502 .aead = {
3503 .enc = {
3504 .vecs = aes_gcm_rfc4106_enc_tv_template,
3505 .count = AES_GCM_4106_ENC_TEST_VECTORS
3506 },
3507 .dec = {
3508 .vecs = aes_gcm_rfc4106_dec_tv_template,
3509 .count = AES_GCM_4106_DEC_TEST_VECTORS
3510 }
3511 }
3512 }
3513 }, {
3514 .alg = "rfc4309(ccm(aes))",
3515 .test = alg_test_aead,
3516 .fips_allowed = 1,
3517 .suite = {
3518 .aead = {
3519 .enc = {
3520 .vecs = aes_ccm_rfc4309_enc_tv_template,
3521 .count = AES_CCM_4309_ENC_TEST_VECTORS
3522 },
3523 .dec = {
3524 .vecs = aes_ccm_rfc4309_dec_tv_template,
3525 .count = AES_CCM_4309_DEC_TEST_VECTORS
3526 }
3527 }
3528 }
3529 }, {
3530 .alg = "rfc4543(gcm(aes))",
3531 .test = alg_test_aead,
3532 .suite = {
3533 .aead = {
3534 .enc = {
3535 .vecs = aes_gcm_rfc4543_enc_tv_template,
3536 .count = AES_GCM_4543_ENC_TEST_VECTORS
3537 },
3538 .dec = {
3539 .vecs = aes_gcm_rfc4543_dec_tv_template,
3540 .count = AES_GCM_4543_DEC_TEST_VECTORS
3541 },
3542 }
3543 }
3544 }, {
3545 .alg = "rfc7539(chacha20,poly1305)",
3546 .test = alg_test_aead,
3547 .suite = {
3548 .aead = {
3549 .enc = {
3550 .vecs = rfc7539_enc_tv_template,
3551 .count = RFC7539_ENC_TEST_VECTORS
3552 },
3553 .dec = {
3554 .vecs = rfc7539_dec_tv_template,
3555 .count = RFC7539_DEC_TEST_VECTORS
3556 },
3557 }
3558 }
3559 }, {
3560 .alg = "rfc7539esp(chacha20,poly1305)",
3561 .test = alg_test_aead,
3562 .suite = {
3563 .aead = {
3564 .enc = {
3565 .vecs = rfc7539esp_enc_tv_template,
3566 .count = RFC7539ESP_ENC_TEST_VECTORS
3567 },
3568 .dec = {
3569 .vecs = rfc7539esp_dec_tv_template,
3570 .count = RFC7539ESP_DEC_TEST_VECTORS
3571 },
3572 }
3573 }
3574 }, {
3575 .alg = "rmd128",
3576 .test = alg_test_hash,
3577 .suite = {
3578 .hash = {
3579 .vecs = rmd128_tv_template,
3580 .count = RMD128_TEST_VECTORS
3581 }
3582 }
3583 }, {
3584 .alg = "rmd160",
3585 .test = alg_test_hash,
3586 .suite = {
3587 .hash = {
3588 .vecs = rmd160_tv_template,
3589 .count = RMD160_TEST_VECTORS
3590 }
3591 }
3592 }, {
3593 .alg = "rmd256",
3594 .test = alg_test_hash,
3595 .suite = {
3596 .hash = {
3597 .vecs = rmd256_tv_template,
3598 .count = RMD256_TEST_VECTORS
3599 }
3600 }
3601 }, {
3602 .alg = "rmd320",
3603 .test = alg_test_hash,
3604 .suite = {
3605 .hash = {
3606 .vecs = rmd320_tv_template,
3607 .count = RMD320_TEST_VECTORS
3608 }
3609 }
3610 }, {
3611 .alg = "rsa",
3612 .test = alg_test_akcipher,
3613 .fips_allowed = 1,
3614 .suite = {
3615 .akcipher = {
3616 .vecs = rsa_tv_template,
3617 .count = RSA_TEST_VECTORS
3618 }
3619 }
3620 }, {
3621 .alg = "salsa20",
3622 .test = alg_test_skcipher,
3623 .suite = {
3624 .cipher = {
3625 .enc = {
3626 .vecs = salsa20_stream_enc_tv_template,
3627 .count = SALSA20_STREAM_ENC_TEST_VECTORS
3628 }
3629 }
3630 }
3631 }, {
3632 .alg = "sha1",
3633 .test = alg_test_hash,
3634 .fips_allowed = 1,
3635 .suite = {
3636 .hash = {
3637 .vecs = sha1_tv_template,
3638 .count = SHA1_TEST_VECTORS
3639 }
3640 }
3641 }, {
3642 .alg = "sha224",
3643 .test = alg_test_hash,
3644 .fips_allowed = 1,
3645 .suite = {
3646 .hash = {
3647 .vecs = sha224_tv_template,
3648 .count = SHA224_TEST_VECTORS
3649 }
3650 }
3651 }, {
3652 .alg = "sha256",
3653 .test = alg_test_hash,
3654 .fips_allowed = 1,
3655 .suite = {
3656 .hash = {
3657 .vecs = sha256_tv_template,
3658 .count = SHA256_TEST_VECTORS
3659 }
3660 }
3661 }, {
3662 .alg = "sha384",
3663 .test = alg_test_hash,
3664 .fips_allowed = 1,
3665 .suite = {
3666 .hash = {
3667 .vecs = sha384_tv_template,
3668 .count = SHA384_TEST_VECTORS
3669 }
3670 }
3671 }, {
3672 .alg = "sha512",
3673 .test = alg_test_hash,
3674 .fips_allowed = 1,
3675 .suite = {
3676 .hash = {
3677 .vecs = sha512_tv_template,
3678 .count = SHA512_TEST_VECTORS
3679 }
3680 }
3681 }, {
3682 .alg = "tgr128",
3683 .test = alg_test_hash,
3684 .suite = {
3685 .hash = {
3686 .vecs = tgr128_tv_template,
3687 .count = TGR128_TEST_VECTORS
3688 }
3689 }
3690 }, {
3691 .alg = "tgr160",
3692 .test = alg_test_hash,
3693 .suite = {
3694 .hash = {
3695 .vecs = tgr160_tv_template,
3696 .count = TGR160_TEST_VECTORS
3697 }
3698 }
3699 }, {
3700 .alg = "tgr192",
3701 .test = alg_test_hash,
3702 .suite = {
3703 .hash = {
3704 .vecs = tgr192_tv_template,
3705 .count = TGR192_TEST_VECTORS
3706 }
3707 }
3708 }, {
3709 .alg = "vmac(aes)",
3710 .test = alg_test_hash,
3711 .suite = {
3712 .hash = {
3713 .vecs = aes_vmac128_tv_template,
3714 .count = VMAC_AES_TEST_VECTORS
3715 }
3716 }
3717 }, {
3718 .alg = "wp256",
3719 .test = alg_test_hash,
3720 .suite = {
3721 .hash = {
3722 .vecs = wp256_tv_template,
3723 .count = WP256_TEST_VECTORS
3724 }
3725 }
3726 }, {
3727 .alg = "wp384",
3728 .test = alg_test_hash,
3729 .suite = {
3730 .hash = {
3731 .vecs = wp384_tv_template,
3732 .count = WP384_TEST_VECTORS
3733 }
3734 }
3735 }, {
3736 .alg = "wp512",
3737 .test = alg_test_hash,
3738 .suite = {
3739 .hash = {
3740 .vecs = wp512_tv_template,
3741 .count = WP512_TEST_VECTORS
3742 }
3743 }
3744 }, {
3745 .alg = "xcbc(aes)",
3746 .test = alg_test_hash,
3747 .suite = {
3748 .hash = {
3749 .vecs = aes_xcbc128_tv_template,
3750 .count = XCBC_AES_TEST_VECTORS
3751 }
3752 }
3753 }, {
3754 .alg = "xts(aes)",
3755 .test = alg_test_skcipher,
3756 .fips_allowed = 1,
3757 .suite = {
3758 .cipher = {
3759 .enc = {
3760 .vecs = aes_xts_enc_tv_template,
3761 .count = AES_XTS_ENC_TEST_VECTORS
3762 },
3763 .dec = {
3764 .vecs = aes_xts_dec_tv_template,
3765 .count = AES_XTS_DEC_TEST_VECTORS
3766 }
3767 }
3768 }
3769 }, {
3770 .alg = "xts(camellia)",
3771 .test = alg_test_skcipher,
3772 .suite = {
3773 .cipher = {
3774 .enc = {
3775 .vecs = camellia_xts_enc_tv_template,
3776 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
3777 },
3778 .dec = {
3779 .vecs = camellia_xts_dec_tv_template,
3780 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
3781 }
3782 }
3783 }
3784 }, {
3785 .alg = "xts(cast6)",
3786 .test = alg_test_skcipher,
3787 .suite = {
3788 .cipher = {
3789 .enc = {
3790 .vecs = cast6_xts_enc_tv_template,
3791 .count = CAST6_XTS_ENC_TEST_VECTORS
3792 },
3793 .dec = {
3794 .vecs = cast6_xts_dec_tv_template,
3795 .count = CAST6_XTS_DEC_TEST_VECTORS
3796 }
3797 }
3798 }
3799 }, {
3800 .alg = "xts(serpent)",
3801 .test = alg_test_skcipher,
3802 .suite = {
3803 .cipher = {
3804 .enc = {
3805 .vecs = serpent_xts_enc_tv_template,
3806 .count = SERPENT_XTS_ENC_TEST_VECTORS
3807 },
3808 .dec = {
3809 .vecs = serpent_xts_dec_tv_template,
3810 .count = SERPENT_XTS_DEC_TEST_VECTORS
3811 }
3812 }
3813 }
3814 }, {
3815 .alg = "xts(twofish)",
3816 .test = alg_test_skcipher,
3817 .suite = {
3818 .cipher = {
3819 .enc = {
3820 .vecs = tf_xts_enc_tv_template,
3821 .count = TF_XTS_ENC_TEST_VECTORS
3822 },
3823 .dec = {
3824 .vecs = tf_xts_dec_tv_template,
3825 .count = TF_XTS_DEC_TEST_VECTORS
3826 }
3827 }
3828 }
3829 }
3830 };
3831
3832 static bool alg_test_descs_checked;
3833
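/*
 * alg_find_test() does a binary search, so alg_test_descs[] must stay
 * sorted by algorithm name.  Warn once, on first use, if the table is
 * out of order or contains duplicate entries.
 */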
3834 static void alg_test_descs_check_order(void)
3835 {
3836 int i;
3837
3838 /* only check once */
3839 if (alg_test_descs_checked)
3840 return;
3841
3842 alg_test_descs_checked = true;
3843
3844 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3845 int diff = strcmp(alg_test_descs[i - 1].alg,
3846 alg_test_descs[i].alg);
3847
3848 if (WARN_ON(diff > 0)) {
3849 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3850 alg_test_descs[i - 1].alg,
3851 alg_test_descs[i].alg);
3852 }
3853
3854 if (WARN_ON(diff == 0)) {
3855 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3856 alg_test_descs[i].alg);
3857 }
3858 }
3859 }
3860
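/*
 * Binary search for @alg in the sorted alg_test_descs[] table.  Returns
 * the matching index, or -1 if no test description is registered.
 */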
3861 static int alg_find_test(const char *alg)
3862 {
3863 int start = 0;
3864 int end = ARRAY_SIZE(alg_test_descs);
3865
3866 while (start < end) {
3867 int i = (start + end) / 2;
3868 int diff = strcmp(alg_test_descs[i].alg, alg);
3869
3870 if (diff > 0) {
3871 end = i;
3872 continue;
3873 }
3874
3875 if (diff < 0) {
3876 start = i + 1;
3877 continue;
3878 }
3879
3880 return i;
3881 }
3882
3883 return -1;
3884 }
3885
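/*
 * Main self-test entry point.  Bare ciphers are looked up as "ecb(alg)"
 * since the table only lists full transforms.  Otherwise both the
 * generic algorithm name and the driver name are searched, so either
 * kind of entry can supply vectors.  Algorithms without an entry are
 * reported as untested and allowed; in FIPS mode a test failure panics
 * the kernel and non-approved algorithms are rejected with -EINVAL.
 *
 * Hedged usage sketch (the names below are illustrative only, not taken
 * from this file): the crypto manager probes a newly registered
 * instance roughly as
 *
 *	err = alg_test("cbc(aes-generic)", "cbc(aes)", type, mask);
 *
 * passing the driver name first and the generic algorithm name second,
 * with the type and mask of the algorithm being registered.
 */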
3886 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3887 {
3888 int i;
3889 int j;
3890 int rc;
3891
3892 if (!fips_enabled && notests) {
3893 printk_once(KERN_INFO "alg: self-tests disabled\n");
3894 return 0;
3895 }
3896
3897 alg_test_descs_check_order();
3898
3899 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3900 char nalg[CRYPTO_MAX_ALG_NAME];
3901
3902 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3903 sizeof(nalg))
3904 return -ENAMETOOLONG;
3905
3906 i = alg_find_test(nalg);
3907 if (i < 0)
3908 goto notest;
3909
3910 if (fips_enabled && !alg_test_descs[i].fips_allowed)
3911 goto non_fips_alg;
3912
3913 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3914 goto test_done;
3915 }
3916
3917 i = alg_find_test(alg);
3918 j = alg_find_test(driver);
3919 if (i < 0 && j < 0)
3920 goto notest;
3921
3922 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3923 (j >= 0 && !alg_test_descs[j].fips_allowed)))
3924 goto non_fips_alg;
3925
3926 rc = 0;
3927 if (i >= 0)
3928 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3929 type, mask);
3930 if (j >= 0 && j != i)
3931 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3932 type, mask);
3933
3934 test_done:
3935 if (fips_enabled && rc)
3936 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3937
3938 if (fips_enabled && !rc)
3939 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
3940
3941 return rc;
3942
3943 notest:
3944 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3945 return 0;
3946 non_fips_alg:
3947 return -EINVAL;
3948 }
3949
3950 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3951
3952 EXPORT_SYMBOL_GPL(alg_test);