path: root/crypto/testmgr.c
author	Herbert Xu <herbert@gondor.apana.org.au>	2008-07-31 05:08:25 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2008-08-29 01:49:55 -0400
commit	da7f033ddc9fdebb3223b0bf88a2a2ab5b797608 (patch)
tree	619aa89f8787abf07a66e3925bfc72c44a067b04 /crypto/testmgr.c
parent	01b323245e4f6d4a22ffd73754f145f45c85988c (diff)
crypto: cryptomgr - Add test infrastructure
This patch moves the newly created alg_test infrastructure into cryptomgr. This shall allow us to use it for testing at algorithm registrations.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
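For context, a minimal sketch of the intended registration-time use, under the assumption of a hypothetical cryptomgr-side handler: only the alg_test() signature comes from the file added below; the wrapper function, its name and its error reporting are illustrative, not part of this patch.

/*
 * Hypothetical caller (not in this patch): on a registration event, run
 * the self-test for the newly registered driver and report any failure.
 * Only alg_test() is taken from crypto/testmgr.c below.
 */
static int example_test_on_register(const char *driver, const char *alg,
				    u32 type, u32 mask)
{
	int err;

	err = alg_test(driver, alg, type, mask);
	if (err)
		printk(KERN_ERR "alg: self-test failed for %s (%s): %d\n",
		       alg, driver, err);

	return err;
}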
Diffstat (limited to 'crypto/testmgr.c')
-rw-r--r--	crypto/testmgr.c	1746
1 file changed, 1746 insertions, 0 deletions
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
new file mode 100644
index 000000000000..e8666b3ead67
--- /dev/null
+++ b/crypto/testmgr.c
@@ -0,0 +1,1746 @@
1/*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * This program is free software; you can redistribute it and/or modify it
10 * under the terms of the GNU General Public License as published by the Free
11 * Software Foundation; either version 2 of the License, or (at your option)
12 * any later version.
13 *
14 */
15
16#include <crypto/hash.h>
17#include <linux/err.h>
18#include <linux/module.h>
19#include <linux/scatterlist.h>
20#include <linux/slab.h>
21#include <linux/string.h>
22
23#include "internal.h"
24#include "testmgr.h"
25
26/*
27 * Need slab memory for testing (size in number of pages).
28 */
29#define XBUFSIZE 8
30
31/*
32 * Indexes into the xbuf to simulate cross-page access.
33 */
34#define IDX1 32
35#define IDX2 32400
36#define IDX3 1
37#define IDX4 8193
38#define IDX5 22222
39#define IDX6 17101
40#define IDX7 27333
41#define IDX8 3000
42
43/*
44 * Used by test_cipher()
45 */
46#define ENCRYPT 1
47#define DECRYPT 0
48
49struct tcrypt_result {
50 struct completion completion;
51 int err;
52};
53
54struct aead_test_suite {
55 struct {
56 struct aead_testvec *vecs;
57 unsigned int count;
58 } enc, dec;
59};
60
61struct cipher_test_suite {
62 struct {
63 struct cipher_testvec *vecs;
64 unsigned int count;
65 } enc, dec;
66};
67
68struct comp_test_suite {
69 struct {
70 struct comp_testvec *vecs;
71 unsigned int count;
72 } comp, decomp;
73};
74
75struct hash_test_suite {
76 struct hash_testvec *vecs;
77 unsigned int count;
78};
79
80struct alg_test_desc {
81 const char *alg;
82 int (*test)(const struct alg_test_desc *desc, const char *driver,
83 u32 type, u32 mask);
84
85 union {
86 struct aead_test_suite aead;
87 struct cipher_test_suite cipher;
88 struct comp_test_suite comp;
89 struct hash_test_suite hash;
90 } suite;
91};
92
93static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
94
95static char *xbuf[XBUFSIZE];
96static char *axbuf[XBUFSIZE];
97
98static void hexdump(unsigned char *buf, unsigned int len)
99{
100 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
101 16, 1,
102 buf, len, false);
103}
104
105static void tcrypt_complete(struct crypto_async_request *req, int err)
106{
107 struct tcrypt_result *res = req->data;
108
109 if (err == -EINPROGRESS)
110 return;
111
112 res->err = err;
113 complete(&res->completion);
114}
115
116static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
117 unsigned int tcount)
118{
119 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
120 unsigned int i, j, k, temp;
121 struct scatterlist sg[8];
122 char result[64];
123 struct ahash_request *req;
124 struct tcrypt_result tresult;
125 int ret;
126 void *hash_buff;
127
128 init_completion(&tresult.completion);
129
130 req = ahash_request_alloc(tfm, GFP_KERNEL);
131 if (!req) {
132 printk(KERN_ERR "alg: hash: Failed to allocate request for "
133 "%s\n", algo);
134 ret = -ENOMEM;
135 goto out_noreq;
136 }
137 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
138 tcrypt_complete, &tresult);
139
140 for (i = 0; i < tcount; i++) {
141 memset(result, 0, 64);
142
143 hash_buff = xbuf[0];
144
145 memcpy(hash_buff, template[i].plaintext, template[i].psize);
146 sg_init_one(&sg[0], hash_buff, template[i].psize);
147
148 if (template[i].ksize) {
149 crypto_ahash_clear_flags(tfm, ~0);
150 ret = crypto_ahash_setkey(tfm, template[i].key,
151 template[i].ksize);
152 if (ret) {
153 printk(KERN_ERR "alg: hash: setkey failed on "
154 "test %d for %s: ret=%d\n", i + 1, algo,
155 -ret);
156 goto out;
157 }
158 }
159
160 ahash_request_set_crypt(req, sg, result, template[i].psize);
161 ret = crypto_ahash_digest(req);
162 switch (ret) {
163 case 0:
164 break;
165 case -EINPROGRESS:
166 case -EBUSY:
167 ret = wait_for_completion_interruptible(
168 &tresult.completion);
169 if (!ret && !(ret = tresult.err)) {
170 INIT_COMPLETION(tresult.completion);
171 break;
172 }
173 /* fall through */
174 default:
175 printk(KERN_ERR "alg: hash: digest failed on test %d "
176 "for %s: ret=%d\n", i + 1, algo, -ret);
177 goto out;
178 }
179
180 if (memcmp(result, template[i].digest,
181 crypto_ahash_digestsize(tfm))) {
182 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
183 i + 1, algo);
184 hexdump(result, crypto_ahash_digestsize(tfm));
185 ret = -EINVAL;
186 goto out;
187 }
188 }
189
190 j = 0;
191 for (i = 0; i < tcount; i++) {
192 if (template[i].np) {
193 j++;
194 memset(result, 0, 64);
195
196 temp = 0;
197 sg_init_table(sg, template[i].np);
198 for (k = 0; k < template[i].np; k++) {
199 sg_set_buf(&sg[k],
200 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
201 offset_in_page(IDX[k]),
202 template[i].plaintext + temp,
203 template[i].tap[k]),
204 template[i].tap[k]);
205 temp += template[i].tap[k];
206 }
207
208 if (template[i].ksize) {
209 crypto_ahash_clear_flags(tfm, ~0);
210 ret = crypto_ahash_setkey(tfm, template[i].key,
211 template[i].ksize);
212
213 if (ret) {
214 printk(KERN_ERR "alg: hash: setkey "
215 "failed on chunking test %d "
216 "for %s: ret=%d\n", j, algo,
217 -ret);
218 goto out;
219 }
220 }
221
222 ahash_request_set_crypt(req, sg, result,
223 template[i].psize);
224 ret = crypto_ahash_digest(req);
225 switch (ret) {
226 case 0:
227 break;
228 case -EINPROGRESS:
229 case -EBUSY:
230 ret = wait_for_completion_interruptible(
231 &tresult.completion);
232 if (!ret && !(ret = tresult.err)) {
233 INIT_COMPLETION(tresult.completion);
234 break;
235 }
236 /* fall through */
237 default:
238 printk(KERN_ERR "alg: hash: digest failed "
239 "on chunking test %d for %s: "
240 "ret=%d\n", j, algo, -ret);
241 goto out;
242 }
243
244 if (memcmp(result, template[i].digest,
245 crypto_ahash_digestsize(tfm))) {
246 printk(KERN_ERR "alg: hash: Chunking test %d "
247 "failed for %s\n", j, algo);
248 hexdump(result, crypto_ahash_digestsize(tfm));
249 ret = -EINVAL;
250 goto out;
251 }
252 }
253 }
254
255 ret = 0;
256
257out:
258 ahash_request_free(req);
259out_noreq:
260 return ret;
261}
262
263static int test_aead(struct crypto_aead *tfm, int enc,
264 struct aead_testvec *template, unsigned int tcount)
265{
266 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
267 unsigned int i, j, k, n, temp;
268 int ret = 0;
269 char *q;
270 char *key;
271 struct aead_request *req;
272 struct scatterlist sg[8];
273 struct scatterlist asg[8];
274 const char *e;
275 struct tcrypt_result result;
276 unsigned int authsize;
277 void *input;
278 void *assoc;
279 char iv[MAX_IVLEN];
280
281 if (enc == ENCRYPT)
282 e = "encryption";
283 else
284 e = "decryption";
285
286 init_completion(&result.completion);
287
288 req = aead_request_alloc(tfm, GFP_KERNEL);
289 if (!req) {
290 printk(KERN_ERR "alg: aead: Failed to allocate request for "
291 "%s\n", algo);
292 ret = -ENOMEM;
293 goto out;
294 }
295
296 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
297 tcrypt_complete, &result);
298
299 for (i = 0, j = 0; i < tcount; i++) {
300 if (!template[i].np) {
301 j++;
302
303 /* some templates have no input data but they will
304 * touch input
305 */
306 input = xbuf[0];
307 assoc = axbuf[0];
308
309 memcpy(input, template[i].input, template[i].ilen);
310 memcpy(assoc, template[i].assoc, template[i].alen);
311 if (template[i].iv)
312 memcpy(iv, template[i].iv, MAX_IVLEN);
313 else
314 memset(iv, 0, MAX_IVLEN);
315
316 crypto_aead_clear_flags(tfm, ~0);
317 if (template[i].wk)
318 crypto_aead_set_flags(
319 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
320
321 key = template[i].key;
322
323 ret = crypto_aead_setkey(tfm, key,
324 template[i].klen);
325 if (!ret == template[i].fail) {
326 printk(KERN_ERR "alg: aead: setkey failed on "
327 "test %d for %s: flags=%x\n", j, algo,
328 crypto_aead_get_flags(tfm));
329 goto out;
330 } else if (ret)
331 continue;
332
333 authsize = abs(template[i].rlen - template[i].ilen);
334 ret = crypto_aead_setauthsize(tfm, authsize);
335 if (ret) {
336 printk(KERN_ERR "alg: aead: Failed to set "
337 "authsize to %u on test %d for %s\n",
338 authsize, j, algo);
339 goto out;
340 }
341
342 sg_init_one(&sg[0], input,
343 template[i].ilen + (enc ? authsize : 0));
344
345 sg_init_one(&asg[0], assoc, template[i].alen);
346
347 aead_request_set_crypt(req, sg, sg,
348 template[i].ilen, iv);
349
350 aead_request_set_assoc(req, asg, template[i].alen);
351
352 ret = enc ?
353 crypto_aead_encrypt(req) :
354 crypto_aead_decrypt(req);
355
356 switch (ret) {
357 case 0:
358 break;
359 case -EINPROGRESS:
360 case -EBUSY:
361 ret = wait_for_completion_interruptible(
362 &result.completion);
363 if (!ret && !(ret = result.err)) {
364 INIT_COMPLETION(result.completion);
365 break;
366 }
367 /* fall through */
368 default:
369 printk(KERN_ERR "alg: aead: %s failed on test "
370 "%d for %s: ret=%d\n", e, j, algo, -ret);
371 goto out;
372 }
373
374 q = input;
375 if (memcmp(q, template[i].result, template[i].rlen)) {
376 printk(KERN_ERR "alg: aead: Test %d failed on "
377 "%s for %s\n", j, e, algo);
378 hexdump(q, template[i].rlen);
379 ret = -EINVAL;
380 goto out;
381 }
382 }
383 }
384
385 for (i = 0, j = 0; i < tcount; i++) {
386 if (template[i].np) {
387 j++;
388
389 if (template[i].iv)
390 memcpy(iv, template[i].iv, MAX_IVLEN);
391 else
392 memset(iv, 0, MAX_IVLEN);
393
394 crypto_aead_clear_flags(tfm, ~0);
395 if (template[i].wk)
396 crypto_aead_set_flags(
397 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
398 key = template[i].key;
399
400 ret = crypto_aead_setkey(tfm, key, template[i].klen);
401 if (!ret == template[i].fail) {
402 printk(KERN_ERR "alg: aead: setkey failed on "
403 "chunk test %d for %s: flags=%x\n", j,
404 algo, crypto_aead_get_flags(tfm));
405 goto out;
406 } else if (ret)
407 continue;
408
409 authsize = abs(template[i].rlen - template[i].ilen);
410
411 ret = -EINVAL;
412 sg_init_table(sg, template[i].np);
413 for (k = 0, temp = 0; k < template[i].np; k++) {
414 if (WARN_ON(offset_in_page(IDX[k]) +
415 template[i].tap[k] > PAGE_SIZE))
416 goto out;
417
418 q = xbuf[IDX[k] >> PAGE_SHIFT] +
419 offset_in_page(IDX[k]);
420
421 memcpy(q, template[i].input + temp,
422 template[i].tap[k]);
423
424 n = template[i].tap[k];
425 if (k == template[i].np - 1 && enc)
426 n += authsize;
427 if (offset_in_page(q) + n < PAGE_SIZE)
428 q[n] = 0;
429
430 sg_set_buf(&sg[k], q, template[i].tap[k]);
431 temp += template[i].tap[k];
432 }
433
434 ret = crypto_aead_setauthsize(tfm, authsize);
435 if (ret) {
436 printk(KERN_ERR "alg: aead: Failed to set "
437 "authsize to %u on chunk test %d for "
438 "%s\n", authsize, j, algo);
439 goto out;
440 }
441
442 if (enc) {
443 if (WARN_ON(sg[k - 1].offset +
444 sg[k - 1].length + authsize >
445 PAGE_SIZE)) {
446 ret = -EINVAL;
447 goto out;
448 }
449
450 sg[k - 1].length += authsize;
451 }
452
453 sg_init_table(asg, template[i].anp);
454 for (k = 0, temp = 0; k < template[i].anp; k++) {
455 sg_set_buf(&asg[k],
456 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
457 offset_in_page(IDX[k]),
458 template[i].assoc + temp,
459 template[i].atap[k]),
460 template[i].atap[k]);
461 temp += template[i].atap[k];
462 }
463
464 aead_request_set_crypt(req, sg, sg,
465 template[i].ilen,
466 iv);
467
468 aead_request_set_assoc(req, asg, template[i].alen);
469
470 ret = enc ?
471 crypto_aead_encrypt(req) :
472 crypto_aead_decrypt(req);
473
474 switch (ret) {
475 case 0:
476 break;
477 case -EINPROGRESS:
478 case -EBUSY:
479 ret = wait_for_completion_interruptible(
480 &result.completion);
481 if (!ret && !(ret = result.err)) {
482 INIT_COMPLETION(result.completion);
483 break;
484 }
485 /* fall through */
486 default:
487 printk(KERN_ERR "alg: aead: %s failed on "
488 "chunk test %d for %s: ret=%d\n", e, j,
489 algo, -ret);
490 goto out;
491 }
492
493 ret = -EINVAL;
494 for (k = 0, temp = 0; k < template[i].np; k++) {
495 q = xbuf[IDX[k] >> PAGE_SHIFT] +
496 offset_in_page(IDX[k]);
497
498 n = template[i].tap[k];
499 if (k == template[i].np - 1)
500 n += enc ? authsize : -authsize;
501
502 if (memcmp(q, template[i].result + temp, n)) {
503 printk(KERN_ERR "alg: aead: Chunk "
504 "test %d failed on %s at page "
505 "%u for %s\n", j, e, k, algo);
506 hexdump(q, n);
507 goto out;
508 }
509
510 q += n;
511 if (k == template[i].np - 1 && !enc) {
512 if (memcmp(q, template[i].input +
513 temp + n, authsize))
514 n = authsize;
515 else
516 n = 0;
517 } else {
518 for (n = 0; offset_in_page(q + n) &&
519 q[n]; n++)
520 ;
521 }
522 if (n) {
523 printk(KERN_ERR "alg: aead: Result "
524 "buffer corruption in chunk "
525 "test %d on %s at page %u for "
526 "%s: %u bytes:\n", j, e, k,
527 algo, n);
528 hexdump(q, n);
529 goto out;
530 }
531
532 temp += template[i].tap[k];
533 }
534 }
535 }
536
537 ret = 0;
538
539out:
540 aead_request_free(req);
541 return ret;
542}
543
544static int test_cipher(struct crypto_ablkcipher *tfm, int enc,
545 struct cipher_testvec *template, unsigned int tcount)
546{
547 const char *algo =
548 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
549 unsigned int i, j, k, n, temp;
550 int ret;
551 char *q;
552 struct ablkcipher_request *req;
553 struct scatterlist sg[8];
554 const char *e;
555 struct tcrypt_result result;
556 void *data;
557 char iv[MAX_IVLEN];
558
559 if (enc == ENCRYPT)
560 e = "encryption";
561 else
562 e = "decryption";
563
564 init_completion(&result.completion);
565
566 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
567 if (!req) {
568 printk(KERN_ERR "alg: cipher: Failed to allocate request for "
569 "%s\n", algo);
570 ret = -ENOMEM;
571 goto out;
572 }
573
574 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
575 tcrypt_complete, &result);
576
577 j = 0;
578 for (i = 0; i < tcount; i++) {
579 if (template[i].iv)
580 memcpy(iv, template[i].iv, MAX_IVLEN);
581 else
582 memset(iv, 0, MAX_IVLEN);
583
584 if (!(template[i].np)) {
585 j++;
586
587 data = xbuf[0];
588 memcpy(data, template[i].input, template[i].ilen);
589
590 crypto_ablkcipher_clear_flags(tfm, ~0);
591 if (template[i].wk)
592 crypto_ablkcipher_set_flags(
593 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
594
595 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
596 template[i].klen);
597 if (!ret == template[i].fail) {
598 printk(KERN_ERR "alg: cipher: setkey failed "
599 "on test %d for %s: flags=%x\n", j,
600 algo, crypto_ablkcipher_get_flags(tfm));
601 goto out;
602 } else if (ret)
603 continue;
604
605 sg_init_one(&sg[0], data, template[i].ilen);
606
607 ablkcipher_request_set_crypt(req, sg, sg,
608 template[i].ilen, iv);
609 ret = enc ?
610 crypto_ablkcipher_encrypt(req) :
611 crypto_ablkcipher_decrypt(req);
612
613 switch (ret) {
614 case 0:
615 break;
616 case -EINPROGRESS:
617 case -EBUSY:
618 ret = wait_for_completion_interruptible(
619 &result.completion);
620 if (!ret && !((ret = result.err))) {
621 INIT_COMPLETION(result.completion);
622 break;
623 }
624 /* fall through */
625 default:
626 printk(KERN_ERR "alg: cipher: %s failed on "
627 "test %d for %s: ret=%d\n", e, j, algo,
628 -ret);
629 goto out;
630 }
631
632 q = data;
633 if (memcmp(q, template[i].result, template[i].rlen)) {
634 printk(KERN_ERR "alg: cipher: Test %d failed "
635 "on %s for %s\n", j, e, algo);
636 hexdump(q, template[i].rlen);
637 ret = -EINVAL;
638 goto out;
639 }
640 }
641 }
642
643 j = 0;
644 for (i = 0; i < tcount; i++) {
645
646 if (template[i].iv)
647 memcpy(iv, template[i].iv, MAX_IVLEN);
648 else
649 memset(iv, 0, MAX_IVLEN);
650
651 if (template[i].np) {
652 j++;
653
654 crypto_ablkcipher_clear_flags(tfm, ~0);
655 if (template[i].wk)
656 crypto_ablkcipher_set_flags(
657 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
658
659 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
660 template[i].klen);
661 if (!ret == template[i].fail) {
662 printk(KERN_ERR "alg: cipher: setkey failed "
663 "on chunk test %d for %s: flags=%x\n",
664 j, algo,
665 crypto_ablkcipher_get_flags(tfm));
666 goto out;
667 } else if (ret)
668 continue;
669
670 temp = 0;
671 ret = -EINVAL;
672 sg_init_table(sg, template[i].np);
673 for (k = 0; k < template[i].np; k++) {
674 if (WARN_ON(offset_in_page(IDX[k]) +
675 template[i].tap[k] > PAGE_SIZE))
676 goto out;
677
678 q = xbuf[IDX[k] >> PAGE_SHIFT] +
679 offset_in_page(IDX[k]);
680
681 memcpy(q, template[i].input + temp,
682 template[i].tap[k]);
683
684 if (offset_in_page(q) + template[i].tap[k] <
685 PAGE_SIZE)
686 q[template[i].tap[k]] = 0;
687
688 sg_set_buf(&sg[k], q, template[i].tap[k]);
689
690 temp += template[i].tap[k];
691 }
692
693 ablkcipher_request_set_crypt(req, sg, sg,
694 template[i].ilen, iv);
695
696 ret = enc ?
697 crypto_ablkcipher_encrypt(req) :
698 crypto_ablkcipher_decrypt(req);
699
700 switch (ret) {
701 case 0:
702 break;
703 case -EINPROGRESS:
704 case -EBUSY:
705 ret = wait_for_completion_interruptible(
706 &result.completion);
707 if (!ret && !((ret = result.err))) {
708 INIT_COMPLETION(result.completion);
709 break;
710 }
711 /* fall through */
712 default:
713 printk(KERN_ERR "alg: cipher: %s failed on "
714 "chunk test %d for %s: ret=%d\n", e, j,
715 algo, -ret);
716 goto out;
717 }
718
719 temp = 0;
720 ret = -EINVAL;
721 for (k = 0; k < template[i].np; k++) {
722 q = xbuf[IDX[k] >> PAGE_SHIFT] +
723 offset_in_page(IDX[k]);
724
725 if (memcmp(q, template[i].result + temp,
726 template[i].tap[k])) {
727 printk(KERN_ERR "alg: cipher: Chunk "
728 "test %d failed on %s at page "
729 "%u for %s\n", j, e, k, algo);
730 hexdump(q, template[i].tap[k]);
731 goto out;
732 }
733
734 q += template[i].tap[k];
735 for (n = 0; offset_in_page(q + n) && q[n]; n++)
736 ;
737 if (n) {
738 printk(KERN_ERR "alg: cipher: "
739 "Result buffer corruption in "
740 "chunk test %d on %s at page "
741 "%u for %s: %u bytes:\n", j, e,
742 k, algo, n);
743 hexdump(q, n);
744 goto out;
745 }
746 temp += template[i].tap[k];
747 }
748 }
749 }
750
751 ret = 0;
752
753out:
754 ablkcipher_request_free(req);
755 return ret;
756}
757
758static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
759 struct comp_testvec *dtemplate, int ctcount, int dtcount)
760{
761 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
762 unsigned int i;
763 char result[COMP_BUF_SIZE];
764 int ret;
765
766 for (i = 0; i < ctcount; i++) {
767 int ilen, dlen = COMP_BUF_SIZE;
768
769 memset(result, 0, sizeof (result));
770
771 ilen = ctemplate[i].inlen;
772 ret = crypto_comp_compress(tfm, ctemplate[i].input,
773 ilen, result, &dlen);
774 if (ret) {
775 printk(KERN_ERR "alg: comp: compression failed "
776 "on test %d for %s: ret=%d\n", i + 1, algo,
777 -ret);
778 goto out;
779 }
780
781 if (memcmp(result, ctemplate[i].output, dlen)) {
782 printk(KERN_ERR "alg: comp: Compression test %d "
783 "failed for %s\n", i + 1, algo);
784 hexdump(result, dlen);
785 ret = -EINVAL;
786 goto out;
787 }
788 }
789
790 for (i = 0; i < dtcount; i++) {
791 int ilen, ret, dlen = COMP_BUF_SIZE;
792
793 memset(result, 0, sizeof (result));
794
795 ilen = dtemplate[i].inlen;
796 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
797 ilen, result, &dlen);
798 if (ret) {
799 printk(KERN_ERR "alg: comp: decompression failed "
800 "on test %d for %s: ret=%d\n", i + 1, algo,
801 -ret);
802 goto out;
803 }
804
805 if (memcmp(result, dtemplate[i].output, dlen)) {
806 printk(KERN_ERR "alg: comp: Decompression test %d "
807 "failed for %s\n", i + 1, algo);
808 hexdump(result, dlen);
809 ret = -EINVAL;
810 goto out;
811 }
812 }
813
814 ret = 0;
815
816out:
817 return ret;
818}
819
820static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
821 u32 type, u32 mask)
822{
823 struct crypto_aead *tfm;
824 int err = 0;
825
826 tfm = crypto_alloc_aead(driver, type, mask);
827 if (IS_ERR(tfm)) {
828 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
829 "%ld\n", driver, PTR_ERR(tfm));
830 return PTR_ERR(tfm);
831 }
832
833 if (desc->suite.aead.enc.vecs) {
834 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
835 desc->suite.aead.enc.count);
836 if (err)
837 goto out;
838 }
839
840 if (!err && desc->suite.aead.dec.vecs)
841 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
842 desc->suite.aead.dec.count);
843
844out:
845 crypto_free_aead(tfm);
846 return err;
847}
848
849static int alg_test_cipher(const struct alg_test_desc *desc,
850 const char *driver, u32 type, u32 mask)
851{
852 struct crypto_ablkcipher *tfm;
853 int err = 0;
854
855 tfm = crypto_alloc_ablkcipher(driver, type, mask);
856 if (IS_ERR(tfm)) {
857 printk(KERN_ERR "alg: cipher: Failed to load transform for "
858 "%s: %ld\n", driver, PTR_ERR(tfm));
859 return PTR_ERR(tfm);
860 }
861
862 if (desc->suite.cipher.enc.vecs) {
863 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
864 desc->suite.cipher.enc.count);
865 if (err)
866 goto out;
867 }
868
869 if (desc->suite.cipher.dec.vecs)
870 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
871 desc->suite.cipher.dec.count);
872
873out:
874 crypto_free_ablkcipher(tfm);
875 return err;
876}
877
878static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
879 u32 type, u32 mask)
880{
881 struct crypto_comp *tfm;
882 int err;
883
884 tfm = crypto_alloc_comp(driver, type, mask);
885 if (IS_ERR(tfm)) {
886 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
887 "%ld\n", driver, PTR_ERR(tfm));
888 return PTR_ERR(tfm);
889 }
890
891 err = test_comp(tfm, desc->suite.comp.comp.vecs,
892 desc->suite.comp.decomp.vecs,
893 desc->suite.comp.comp.count,
894 desc->suite.comp.decomp.count);
895
896 crypto_free_comp(tfm);
897 return err;
898}
899
900static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
901 u32 type, u32 mask)
902{
903 struct crypto_ahash *tfm;
904 int err;
905
906 tfm = crypto_alloc_ahash(driver, type, mask);
907 if (IS_ERR(tfm)) {
908 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
909 "%ld\n", driver, PTR_ERR(tfm));
910 return PTR_ERR(tfm);
911 }
912
913 err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count);
914
915 crypto_free_ahash(tfm);
916 return err;
917}
918
919/* Please keep this list sorted by algorithm name. */
920static const struct alg_test_desc alg_test_descs[] = {
921 {
922 .alg = "cbc(aes)",
923 .test = alg_test_cipher,
924 .suite = {
925 .cipher = {
926 .enc = {
927 .vecs = aes_cbc_enc_tv_template,
928 .count = AES_CBC_ENC_TEST_VECTORS
929 },
930 .dec = {
931 .vecs = aes_cbc_dec_tv_template,
932 .count = AES_CBC_DEC_TEST_VECTORS
933 }
934 }
935 }
936 }, {
937 .alg = "cbc(anubis)",
938 .test = alg_test_cipher,
939 .suite = {
940 .cipher = {
941 .enc = {
942 .vecs = anubis_cbc_enc_tv_template,
943 .count = ANUBIS_CBC_ENC_TEST_VECTORS
944 },
945 .dec = {
946 .vecs = anubis_cbc_dec_tv_template,
947 .count = ANUBIS_CBC_DEC_TEST_VECTORS
948 }
949 }
950 }
951 }, {
952 .alg = "cbc(blowfish)",
953 .test = alg_test_cipher,
954 .suite = {
955 .cipher = {
956 .enc = {
957 .vecs = bf_cbc_enc_tv_template,
958 .count = BF_CBC_ENC_TEST_VECTORS
959 },
960 .dec = {
961 .vecs = bf_cbc_dec_tv_template,
962 .count = BF_CBC_DEC_TEST_VECTORS
963 }
964 }
965 }
966 }, {
967 .alg = "cbc(camellia)",
968 .test = alg_test_cipher,
969 .suite = {
970 .cipher = {
971 .enc = {
972 .vecs = camellia_cbc_enc_tv_template,
973 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
974 },
975 .dec = {
976 .vecs = camellia_cbc_dec_tv_template,
977 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
978 }
979 }
980 }
981 }, {
982 .alg = "cbc(des)",
983 .test = alg_test_cipher,
984 .suite = {
985 .cipher = {
986 .enc = {
987 .vecs = des_cbc_enc_tv_template,
988 .count = DES_CBC_ENC_TEST_VECTORS
989 },
990 .dec = {
991 .vecs = des_cbc_dec_tv_template,
992 .count = DES_CBC_DEC_TEST_VECTORS
993 }
994 }
995 }
996 }, {
997 .alg = "cbc(des3_ede)",
998 .test = alg_test_cipher,
999 .suite = {
1000 .cipher = {
1001 .enc = {
1002 .vecs = des3_ede_cbc_enc_tv_template,
1003 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
1004 },
1005 .dec = {
1006 .vecs = des3_ede_cbc_dec_tv_template,
1007 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
1008 }
1009 }
1010 }
1011 }, {
1012 .alg = "cbc(twofish)",
1013 .test = alg_test_cipher,
1014 .suite = {
1015 .cipher = {
1016 .enc = {
1017 .vecs = tf_cbc_enc_tv_template,
1018 .count = TF_CBC_ENC_TEST_VECTORS
1019 },
1020 .dec = {
1021 .vecs = tf_cbc_dec_tv_template,
1022 .count = TF_CBC_DEC_TEST_VECTORS
1023 }
1024 }
1025 }
1026 }, {
1027 .alg = "ccm(aes)",
1028 .test = alg_test_aead,
1029 .suite = {
1030 .aead = {
1031 .enc = {
1032 .vecs = aes_ccm_enc_tv_template,
1033 .count = AES_CCM_ENC_TEST_VECTORS
1034 },
1035 .dec = {
1036 .vecs = aes_ccm_dec_tv_template,
1037 .count = AES_CCM_DEC_TEST_VECTORS
1038 }
1039 }
1040 }
1041 }, {
1042 .alg = "crc32c",
1043 .test = alg_test_hash,
1044 .suite = {
1045 .hash = {
1046 .vecs = crc32c_tv_template,
1047 .count = CRC32C_TEST_VECTORS
1048 }
1049 }
1050 }, {
1051 .alg = "cts(cbc(aes))",
1052 .test = alg_test_cipher,
1053 .suite = {
1054 .cipher = {
1055 .enc = {
1056 .vecs = cts_mode_enc_tv_template,
1057 .count = CTS_MODE_ENC_TEST_VECTORS
1058 },
1059 .dec = {
1060 .vecs = cts_mode_dec_tv_template,
1061 .count = CTS_MODE_DEC_TEST_VECTORS
1062 }
1063 }
1064 }
1065 }, {
1066 .alg = "deflate",
1067 .test = alg_test_comp,
1068 .suite = {
1069 .comp = {
1070 .comp = {
1071 .vecs = deflate_comp_tv_template,
1072 .count = DEFLATE_COMP_TEST_VECTORS
1073 },
1074 .decomp = {
1075 .vecs = deflate_decomp_tv_template,
1076 .count = DEFLATE_DECOMP_TEST_VECTORS
1077 }
1078 }
1079 }
1080 }, {
1081 .alg = "ecb(aes)",
1082 .test = alg_test_cipher,
1083 .suite = {
1084 .cipher = {
1085 .enc = {
1086 .vecs = aes_enc_tv_template,
1087 .count = AES_ENC_TEST_VECTORS
1088 },
1089 .dec = {
1090 .vecs = aes_dec_tv_template,
1091 .count = AES_DEC_TEST_VECTORS
1092 }
1093 }
1094 }
1095 }, {
1096 .alg = "ecb(anubis)",
1097 .test = alg_test_cipher,
1098 .suite = {
1099 .cipher = {
1100 .enc = {
1101 .vecs = anubis_enc_tv_template,
1102 .count = ANUBIS_ENC_TEST_VECTORS
1103 },
1104 .dec = {
1105 .vecs = anubis_dec_tv_template,
1106 .count = ANUBIS_DEC_TEST_VECTORS
1107 }
1108 }
1109 }
1110 }, {
1111 .alg = "ecb(arc4)",
1112 .test = alg_test_cipher,
1113 .suite = {
1114 .cipher = {
1115 .enc = {
1116 .vecs = arc4_enc_tv_template,
1117 .count = ARC4_ENC_TEST_VECTORS
1118 },
1119 .dec = {
1120 .vecs = arc4_dec_tv_template,
1121 .count = ARC4_DEC_TEST_VECTORS
1122 }
1123 }
1124 }
1125 }, {
1126 .alg = "ecb(blowfish)",
1127 .test = alg_test_cipher,
1128 .suite = {
1129 .cipher = {
1130 .enc = {
1131 .vecs = bf_enc_tv_template,
1132 .count = BF_ENC_TEST_VECTORS
1133 },
1134 .dec = {
1135 .vecs = bf_dec_tv_template,
1136 .count = BF_DEC_TEST_VECTORS
1137 }
1138 }
1139 }
1140 }, {
1141 .alg = "ecb(camellia)",
1142 .test = alg_test_cipher,
1143 .suite = {
1144 .cipher = {
1145 .enc = {
1146 .vecs = camellia_enc_tv_template,
1147 .count = CAMELLIA_ENC_TEST_VECTORS
1148 },
1149 .dec = {
1150 .vecs = camellia_dec_tv_template,
1151 .count = CAMELLIA_DEC_TEST_VECTORS
1152 }
1153 }
1154 }
1155 }, {
1156 .alg = "ecb(cast5)",
1157 .test = alg_test_cipher,
1158 .suite = {
1159 .cipher = {
1160 .enc = {
1161 .vecs = cast5_enc_tv_template,
1162 .count = CAST5_ENC_TEST_VECTORS
1163 },
1164 .dec = {
1165 .vecs = cast5_dec_tv_template,
1166 .count = CAST5_DEC_TEST_VECTORS
1167 }
1168 }
1169 }
1170 }, {
1171 .alg = "ecb(cast6)",
1172 .test = alg_test_cipher,
1173 .suite = {
1174 .cipher = {
1175 .enc = {
1176 .vecs = cast6_enc_tv_template,
1177 .count = CAST6_ENC_TEST_VECTORS
1178 },
1179 .dec = {
1180 .vecs = cast6_dec_tv_template,
1181 .count = CAST6_DEC_TEST_VECTORS
1182 }
1183 }
1184 }
1185 }, {
1186 .alg = "ecb(des)",
1187 .test = alg_test_cipher,
1188 .suite = {
1189 .cipher = {
1190 .enc = {
1191 .vecs = des_enc_tv_template,
1192 .count = DES_ENC_TEST_VECTORS
1193 },
1194 .dec = {
1195 .vecs = des_dec_tv_template,
1196 .count = DES_DEC_TEST_VECTORS
1197 }
1198 }
1199 }
1200 }, {
1201 .alg = "ecb(des3_ede)",
1202 .test = alg_test_cipher,
1203 .suite = {
1204 .cipher = {
1205 .enc = {
1206 .vecs = des3_ede_enc_tv_template,
1207 .count = DES3_EDE_ENC_TEST_VECTORS
1208 },
1209 .dec = {
1210 .vecs = des3_ede_dec_tv_template,
1211 .count = DES3_EDE_DEC_TEST_VECTORS
1212 }
1213 }
1214 }
1215 }, {
1216 .alg = "ecb(khazad)",
1217 .test = alg_test_cipher,
1218 .suite = {
1219 .cipher = {
1220 .enc = {
1221 .vecs = khazad_enc_tv_template,
1222 .count = KHAZAD_ENC_TEST_VECTORS
1223 },
1224 .dec = {
1225 .vecs = khazad_dec_tv_template,
1226 .count = KHAZAD_DEC_TEST_VECTORS
1227 }
1228 }
1229 }
1230 }, {
1231 .alg = "ecb(seed)",
1232 .test = alg_test_cipher,
1233 .suite = {
1234 .cipher = {
1235 .enc = {
1236 .vecs = seed_enc_tv_template,
1237 .count = SEED_ENC_TEST_VECTORS
1238 },
1239 .dec = {
1240 .vecs = seed_dec_tv_template,
1241 .count = SEED_DEC_TEST_VECTORS
1242 }
1243 }
1244 }
1245 }, {
1246 .alg = "ecb(serpent)",
1247 .test = alg_test_cipher,
1248 .suite = {
1249 .cipher = {
1250 .enc = {
1251 .vecs = serpent_enc_tv_template,
1252 .count = SERPENT_ENC_TEST_VECTORS
1253 },
1254 .dec = {
1255 .vecs = serpent_dec_tv_template,
1256 .count = SERPENT_DEC_TEST_VECTORS
1257 }
1258 }
1259 }
1260 }, {
1261 .alg = "ecb(tea)",
1262 .test = alg_test_cipher,
1263 .suite = {
1264 .cipher = {
1265 .enc = {
1266 .vecs = tea_enc_tv_template,
1267 .count = TEA_ENC_TEST_VECTORS
1268 },
1269 .dec = {
1270 .vecs = tea_dec_tv_template,
1271 .count = TEA_DEC_TEST_VECTORS
1272 }
1273 }
1274 }
1275 }, {
1276 .alg = "ecb(tnepres)",
1277 .test = alg_test_cipher,
1278 .suite = {
1279 .cipher = {
1280 .enc = {
1281 .vecs = tnepres_enc_tv_template,
1282 .count = TNEPRES_ENC_TEST_VECTORS
1283 },
1284 .dec = {
1285 .vecs = tnepres_dec_tv_template,
1286 .count = TNEPRES_DEC_TEST_VECTORS
1287 }
1288 }
1289 }
1290 }, {
1291 .alg = "ecb(twofish)",
1292 .test = alg_test_cipher,
1293 .suite = {
1294 .cipher = {
1295 .enc = {
1296 .vecs = tf_enc_tv_template,
1297 .count = TF_ENC_TEST_VECTORS
1298 },
1299 .dec = {
1300 .vecs = tf_dec_tv_template,
1301 .count = TF_DEC_TEST_VECTORS
1302 }
1303 }
1304 }
1305 }, {
1306 .alg = "ecb(xeta)",
1307 .test = alg_test_cipher,
1308 .suite = {
1309 .cipher = {
1310 .enc = {
1311 .vecs = xeta_enc_tv_template,
1312 .count = XETA_ENC_TEST_VECTORS
1313 },
1314 .dec = {
1315 .vecs = xeta_dec_tv_template,
1316 .count = XETA_DEC_TEST_VECTORS
1317 }
1318 }
1319 }
1320 }, {
1321 .alg = "ecb(xtea)",
1322 .test = alg_test_cipher,
1323 .suite = {
1324 .cipher = {
1325 .enc = {
1326 .vecs = xtea_enc_tv_template,
1327 .count = XTEA_ENC_TEST_VECTORS
1328 },
1329 .dec = {
1330 .vecs = xtea_dec_tv_template,
1331 .count = XTEA_DEC_TEST_VECTORS
1332 }
1333 }
1334 }
1335 }, {
1336 .alg = "gcm(aes)",
1337 .test = alg_test_aead,
1338 .suite = {
1339 .aead = {
1340 .enc = {
1341 .vecs = aes_gcm_enc_tv_template,
1342 .count = AES_GCM_ENC_TEST_VECTORS
1343 },
1344 .dec = {
1345 .vecs = aes_gcm_dec_tv_template,
1346 .count = AES_GCM_DEC_TEST_VECTORS
1347 }
1348 }
1349 }
1350 }, {
1351 .alg = "hmac(md5)",
1352 .test = alg_test_hash,
1353 .suite = {
1354 .hash = {
1355 .vecs = hmac_md5_tv_template,
1356 .count = HMAC_MD5_TEST_VECTORS
1357 }
1358 }
1359 }, {
1360 .alg = "hmac(rmd128)",
1361 .test = alg_test_hash,
1362 .suite = {
1363 .hash = {
1364 .vecs = hmac_rmd128_tv_template,
1365 .count = HMAC_RMD128_TEST_VECTORS
1366 }
1367 }
1368 }, {
1369 .alg = "hmac(rmd160)",
1370 .test = alg_test_hash,
1371 .suite = {
1372 .hash = {
1373 .vecs = hmac_rmd160_tv_template,
1374 .count = HMAC_RMD160_TEST_VECTORS
1375 }
1376 }
1377 }, {
1378 .alg = "hmac(sha1)",
1379 .test = alg_test_hash,
1380 .suite = {
1381 .hash = {
1382 .vecs = hmac_sha1_tv_template,
1383 .count = HMAC_SHA1_TEST_VECTORS
1384 }
1385 }
1386 }, {
1387 .alg = "hmac(sha224)",
1388 .test = alg_test_hash,
1389 .suite = {
1390 .hash = {
1391 .vecs = hmac_sha224_tv_template,
1392 .count = HMAC_SHA224_TEST_VECTORS
1393 }
1394 }
1395 }, {
1396 .alg = "hmac(sha256)",
1397 .test = alg_test_hash,
1398 .suite = {
1399 .hash = {
1400 .vecs = hmac_sha256_tv_template,
1401 .count = HMAC_SHA256_TEST_VECTORS
1402 }
1403 }
1404 }, {
1405 .alg = "hmac(sha384)",
1406 .test = alg_test_hash,
1407 .suite = {
1408 .hash = {
1409 .vecs = hmac_sha384_tv_template,
1410 .count = HMAC_SHA384_TEST_VECTORS
1411 }
1412 }
1413 }, {
1414 .alg = "hmac(sha512)",
1415 .test = alg_test_hash,
1416 .suite = {
1417 .hash = {
1418 .vecs = hmac_sha512_tv_template,
1419 .count = HMAC_SHA512_TEST_VECTORS
1420 }
1421 }
1422 }, {
1423 .alg = "lrw(aes)",
1424 .test = alg_test_cipher,
1425 .suite = {
1426 .cipher = {
1427 .enc = {
1428 .vecs = aes_lrw_enc_tv_template,
1429 .count = AES_LRW_ENC_TEST_VECTORS
1430 },
1431 .dec = {
1432 .vecs = aes_lrw_dec_tv_template,
1433 .count = AES_LRW_DEC_TEST_VECTORS
1434 }
1435 }
1436 }
1437 }, {
1438 .alg = "lzo",
1439 .test = alg_test_comp,
1440 .suite = {
1441 .comp = {
1442 .comp = {
1443 .vecs = lzo_comp_tv_template,
1444 .count = LZO_COMP_TEST_VECTORS
1445 },
1446 .decomp = {
1447 .vecs = lzo_decomp_tv_template,
1448 .count = LZO_DECOMP_TEST_VECTORS
1449 }
1450 }
1451 }
1452 }, {
1453 .alg = "md4",
1454 .test = alg_test_hash,
1455 .suite = {
1456 .hash = {
1457 .vecs = md4_tv_template,
1458 .count = MD4_TEST_VECTORS
1459 }
1460 }
1461 }, {
1462 .alg = "md5",
1463 .test = alg_test_hash,
1464 .suite = {
1465 .hash = {
1466 .vecs = md5_tv_template,
1467 .count = MD5_TEST_VECTORS
1468 }
1469 }
1470 }, {
1471 .alg = "michael_mic",
1472 .test = alg_test_hash,
1473 .suite = {
1474 .hash = {
1475 .vecs = michael_mic_tv_template,
1476 .count = MICHAEL_MIC_TEST_VECTORS
1477 }
1478 }
1479 }, {
1480 .alg = "pcbc(fcrypt)",
1481 .test = alg_test_cipher,
1482 .suite = {
1483 .cipher = {
1484 .enc = {
1485 .vecs = fcrypt_pcbc_enc_tv_template,
1486 .count = FCRYPT_ENC_TEST_VECTORS
1487 },
1488 .dec = {
1489 .vecs = fcrypt_pcbc_dec_tv_template,
1490 .count = FCRYPT_DEC_TEST_VECTORS
1491 }
1492 }
1493 }
1494 }, {
1495 .alg = "rfc3686(ctr(aes))",
1496 .test = alg_test_cipher,
1497 .suite = {
1498 .cipher = {
1499 .enc = {
1500 .vecs = aes_ctr_enc_tv_template,
1501 .count = AES_CTR_ENC_TEST_VECTORS
1502 },
1503 .dec = {
1504 .vecs = aes_ctr_dec_tv_template,
1505 .count = AES_CTR_DEC_TEST_VECTORS
1506 }
1507 }
1508 }
1509 }, {
1510 .alg = "rmd128",
1511 .test = alg_test_hash,
1512 .suite = {
1513 .hash = {
1514 .vecs = rmd128_tv_template,
1515 .count = RMD128_TEST_VECTORS
1516 }
1517 }
1518 }, {
1519 .alg = "rmd160",
1520 .test = alg_test_hash,
1521 .suite = {
1522 .hash = {
1523 .vecs = rmd160_tv_template,
1524 .count = RMD160_TEST_VECTORS
1525 }
1526 }
1527 }, {
1528 .alg = "rmd256",
1529 .test = alg_test_hash,
1530 .suite = {
1531 .hash = {
1532 .vecs = rmd256_tv_template,
1533 .count = RMD256_TEST_VECTORS
1534 }
1535 }
1536 }, {
1537 .alg = "rmd320",
1538 .test = alg_test_hash,
1539 .suite = {
1540 .hash = {
1541 .vecs = rmd320_tv_template,
1542 .count = RMD320_TEST_VECTORS
1543 }
1544 }
1545 }, {
1546 .alg = "salsa20",
1547 .test = alg_test_cipher,
1548 .suite = {
1549 .cipher = {
1550 .enc = {
1551 .vecs = salsa20_stream_enc_tv_template,
1552 .count = SALSA20_STREAM_ENC_TEST_VECTORS
1553 }
1554 }
1555 }
1556 }, {
1557 .alg = "sha1",
1558 .test = alg_test_hash,
1559 .suite = {
1560 .hash = {
1561 .vecs = sha1_tv_template,
1562 .count = SHA1_TEST_VECTORS
1563 }
1564 }
1565 }, {
1566 .alg = "sha224",
1567 .test = alg_test_hash,
1568 .suite = {
1569 .hash = {
1570 .vecs = sha224_tv_template,
1571 .count = SHA224_TEST_VECTORS
1572 }
1573 }
1574 }, {
1575 .alg = "sha256",
1576 .test = alg_test_hash,
1577 .suite = {
1578 .hash = {
1579 .vecs = sha256_tv_template,
1580 .count = SHA256_TEST_VECTORS
1581 }
1582 }
1583 }, {
1584 .alg = "sha384",
1585 .test = alg_test_hash,
1586 .suite = {
1587 .hash = {
1588 .vecs = sha384_tv_template,
1589 .count = SHA384_TEST_VECTORS
1590 }
1591 }
1592 }, {
1593 .alg = "sha512",
1594 .test = alg_test_hash,
1595 .suite = {
1596 .hash = {
1597 .vecs = sha512_tv_template,
1598 .count = SHA512_TEST_VECTORS
1599 }
1600 }
1601 }, {
1602 .alg = "tgr128",
1603 .test = alg_test_hash,
1604 .suite = {
1605 .hash = {
1606 .vecs = tgr128_tv_template,
1607 .count = TGR128_TEST_VECTORS
1608 }
1609 }
1610 }, {
1611 .alg = "tgr160",
1612 .test = alg_test_hash,
1613 .suite = {
1614 .hash = {
1615 .vecs = tgr160_tv_template,
1616 .count = TGR160_TEST_VECTORS
1617 }
1618 }
1619 }, {
1620 .alg = "tgr192",
1621 .test = alg_test_hash,
1622 .suite = {
1623 .hash = {
1624 .vecs = tgr192_tv_template,
1625 .count = TGR192_TEST_VECTORS
1626 }
1627 }
1628 }, {
1629 .alg = "wp256",
1630 .test = alg_test_hash,
1631 .suite = {
1632 .hash = {
1633 .vecs = wp256_tv_template,
1634 .count = WP256_TEST_VECTORS
1635 }
1636 }
1637 }, {
1638 .alg = "wp384",
1639 .test = alg_test_hash,
1640 .suite = {
1641 .hash = {
1642 .vecs = wp384_tv_template,
1643 .count = WP384_TEST_VECTORS
1644 }
1645 }
1646 }, {
1647 .alg = "wp512",
1648 .test = alg_test_hash,
1649 .suite = {
1650 .hash = {
1651 .vecs = wp512_tv_template,
1652 .count = WP512_TEST_VECTORS
1653 }
1654 }
1655 }, {
1656 .alg = "xcbc(aes)",
1657 .test = alg_test_hash,
1658 .suite = {
1659 .hash = {
1660 .vecs = aes_xcbc128_tv_template,
1661 .count = XCBC_AES_TEST_VECTORS
1662 }
1663 }
1664 }, {
1665 .alg = "xts(aes)",
1666 .test = alg_test_cipher,
1667 .suite = {
1668 .cipher = {
1669 .enc = {
1670 .vecs = aes_xts_enc_tv_template,
1671 .count = AES_XTS_ENC_TEST_VECTORS
1672 },
1673 .dec = {
1674 .vecs = aes_xts_dec_tv_template,
1675 .count = AES_XTS_DEC_TEST_VECTORS
1676 }
1677 }
1678 }
1679 }
1680};
1681
1682int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
1683{
1684 int start = 0;
1685 int end = ARRAY_SIZE(alg_test_descs);
1686
1687 while (start < end) {
1688 int i = (start + end) / 2;
1689 int diff = strcmp(alg_test_descs[i].alg, alg);
1690
1691 if (diff > 0) {
1692 end = i;
1693 continue;
1694 }
1695
1696 if (diff < 0) {
1697 start = i + 1;
1698 continue;
1699 }
1700
1701 return alg_test_descs[i].test(alg_test_descs + i, driver,
1702 type, mask);
1703 }
1704
1705 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
1706 return 0;
1707}
1708EXPORT_SYMBOL_GPL(alg_test);
1709
1710int __init testmgr_init(void)
1711{
1712 int i;
1713
1714 for (i = 0; i < XBUFSIZE; i++) {
1715 xbuf[i] = (void *)__get_free_page(GFP_KERNEL);
1716 if (!xbuf[i])
1717 goto err_free_xbuf;
1718 }
1719
1720 for (i = 0; i < XBUFSIZE; i++) {
1721 axbuf[i] = (void *)__get_free_page(GFP_KERNEL);
1722 if (!axbuf[i])
1723 goto err_free_axbuf;
1724 }
1725
1726 return 0;
1727
1728err_free_axbuf:
1729 for (i = 0; i < XBUFSIZE && axbuf[i]; i++)
1730 free_page((unsigned long)axbuf[i]);
1731err_free_xbuf:
1732 for (i = 0; i < XBUFSIZE && xbuf[i]; i++)
1733 free_page((unsigned long)xbuf[i]);
1734
1735 return -ENOMEM;
1736}
1737
1738void testmgr_exit(void)
1739{
1740 int i;
1741
1742 for (i = 0; i < XBUFSIZE; i++)
1743 free_page((unsigned long)axbuf[i]);
1744 for (i = 0; i < XBUFSIZE; i++)
1745 free_page((unsigned long)xbuf[i]);
1746}
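For illustration, a hedged sketch of the exported lifecycle, assuming a throwaway caller: the driver and algorithm names are placeholders, and only testmgr_init(), alg_test() and testmgr_exit() come from the code above. In a real module this would be wired up via module_init()/module_exit().

/*
 * Hypothetical usage (not part of this patch): allocate the cross-page
 * test buffers, run one self-test lookup, then free the buffers again.
 */
static int __init example_selftest(void)
{
	int err;

	err = testmgr_init();	/* allocate the xbuf[] and axbuf[] pages */
	if (err)
		return err;

	/* Placeholder names: test driver "cbc(aes-generic)" as "cbc(aes)". */
	err = alg_test("cbc(aes-generic)", "cbc(aes)", 0, 0);

	testmgr_exit();		/* free the test buffer pages */
	return err;
}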