Diffstat (limited to 'crypto/tcrypt.c')
-rw-r--r--  crypto/tcrypt.c | 304
1 file changed, 151 insertions, 153 deletions
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index 66368022e0bf..b6d4b5ce00a3 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -31,10 +31,10 @@
 #include "tcrypt.h"
 
 /*
- * Need to kmalloc() memory for testing.
+ * Need slab memory for testing (size in number of pages).
  */
-#define TVMEMSIZE 16384
-#define XBUFSIZE 32768
+#define TVMEMSIZE 4
+#define XBUFSIZE 8
 
 /*
  * Indexes into the xbuf to simulate cross-page access.
@@ -67,9 +67,9 @@ static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
 static unsigned int sec;
 
 static int mode;
-static char *xbuf;
-static char *axbuf;
-static char *tvmem;
+static char *xbuf[XBUFSIZE];
+static char *axbuf[XBUFSIZE];
+static char *tvmem[TVMEMSIZE];
 
 static char *check[] = {
         "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
@@ -133,9 +133,7 @@ static void test_hash(char *algo, struct hash_testvec *template,
                 printk("test %u:\n", i + 1);
                 memset(result, 0, 64);
 
-                hash_buff = kzalloc(template[i].psize, GFP_KERNEL);
-                if (!hash_buff)
-                        continue;
+                hash_buff = xbuf[0];
 
                 memcpy(hash_buff, template[i].plaintext, template[i].psize);
                 sg_init_one(&sg[0], hash_buff, template[i].psize);
@@ -146,7 +144,6 @@ static void test_hash(char *algo, struct hash_testvec *template,
                                         template[i].ksize);
                         if (ret) {
                                 printk("setkey() failed ret=%d\n", ret);
-                                kfree(hash_buff);
                                 goto out;
                         }
                 }
@@ -167,7 +164,6 @@ static void test_hash(char *algo, struct hash_testvec *template,
                         /* fall through */
                 default:
                         printk("digest () failed ret=%d\n", ret);
-                        kfree(hash_buff);
                         goto out;
                 }
 
@@ -176,14 +172,10 @@ static void test_hash(char *algo, struct hash_testvec *template,
                         memcmp(result, template[i].digest,
                                 crypto_ahash_digestsize(tfm)) ?
                         "fail" : "pass");
-                kfree(hash_buff);
         }
 
         printk("testing %s across pages\n", algo);
 
-        /* setup the dummy buffer first */
-        memset(xbuf, 0, XBUFSIZE);
-
         j = 0;
         for (i = 0; i < tcount; i++) {
                 if (template[i].np) {
@@ -194,12 +186,13 @@ static void test_hash(char *algo, struct hash_testvec *template,
                         temp = 0;
                         sg_init_table(sg, template[i].np);
                         for (k = 0; k < template[i].np; k++) {
-                                memcpy(&xbuf[IDX[k]],
-                                        template[i].plaintext + temp,
-                                        template[i].tap[k]);
+                                sg_set_buf(&sg[k],
+                                        memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
+                                                offset_in_page(IDX[k]),
+                                                template[i].plaintext + temp,
+                                                template[i].tap[k]),
+                                        template[i].tap[k]);
                                 temp += template[i].tap[k];
-                                sg_set_buf(&sg[k], &xbuf[IDX[k]],
-                                        template[i].tap[k]);
                         }
 
                         if (template[i].ksize) {
@@ -298,15 +291,8 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
                 /* some tepmplates have no input data but they will
                  * touch input
                  */
-                input = kzalloc(template[i].ilen + template[i].rlen, GFP_KERNEL);
-                if (!input)
-                        continue;
-
-                assoc = kzalloc(template[i].alen, GFP_KERNEL);
-                if (!assoc) {
-                        kfree(input);
-                        continue;
-                }
+                input = xbuf[0];
+                assoc = axbuf[0];
 
                 memcpy(input, template[i].input, template[i].ilen);
                 memcpy(assoc, template[i].assoc, template[i].alen);
@@ -320,10 +306,7 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
                                 crypto_aead_set_flags(
                                         tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 
-                        if (template[i].key)
-                                key = template[i].key;
-                        else
-                                key = kzalloc(template[i].klen, GFP_KERNEL);
+                        key = template[i].key;
 
                         ret = crypto_aead_setkey(tfm, key,
                                         template[i].klen);
@@ -332,7 +315,7 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
                                         crypto_aead_get_flags(tfm));
 
                                 if (!template[i].fail)
-                                        goto next_one;
+                                        continue;
                         }
 
                         authsize = abs(template[i].rlen - template[i].ilen);
@@ -341,7 +324,7 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
                                 printk(KERN_INFO
                                         "failed to set authsize = %u\n",
                                         authsize);
-                                goto next_one;
+                                continue;
                         }
 
                         sg_init_one(&sg[0], input,
@@ -373,7 +356,7 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
                         default:
                                 printk(KERN_INFO "%s () failed err=%d\n",
                                         e, -ret);
-                                goto next_one;
+                                continue;
                         }
 
                         q = input;
@@ -382,16 +365,10 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
                         printk(KERN_INFO "enc/dec: %s\n",
                                 memcmp(q, template[i].result,
                                         template[i].rlen) ? "fail" : "pass");
-next_one:
-                        if (!template[i].key)
-                                kfree(key);
-                        kfree(assoc);
-                        kfree(input);
                 }
         }
 
         printk(KERN_INFO "\ntesting %s %s across pages (chunking)\n", algo, e);
-        memset(axbuf, 0, XBUFSIZE);
 
         for (i = 0, j = 0; i < tcount; i++) {
                 if (template[i].np) {
@@ -418,18 +395,30 @@ next_one:
                                 goto out;
                         }
 
-                        memset(xbuf, 0, XBUFSIZE);
+                        authsize = abs(template[i].rlen - template[i].ilen);
+
                         sg_init_table(sg, template[i].np);
                         for (k = 0, temp = 0; k < template[i].np; k++) {
-                                memcpy(&xbuf[IDX[k]],
-                                        template[i].input + temp,
+                                if (WARN_ON(offset_in_page(IDX[k]) +
+                                        template[i].tap[k] > PAGE_SIZE))
+                                        goto out;
+
+                                q = xbuf[IDX[k] >> PAGE_SHIFT] +
+                                        offset_in_page(IDX[k]);
+
+                                memcpy(q, template[i].input + temp,
                                         template[i].tap[k]);
+
+                                n = template[i].tap[k];
+                                if (k == template[i].np - 1 && enc)
+                                        n += authsize;
+                                if (offset_in_page(q) + n < PAGE_SIZE)
+                                        q[n] = 0;
+
+                                sg_set_buf(&sg[k], q, template[i].tap[k]);
                                 temp += template[i].tap[k];
-                                sg_set_buf(&sg[k], &xbuf[IDX[k]],
-                                        template[i].tap[k]);
                         }
 
-                        authsize = abs(template[i].rlen - template[i].ilen);
                         ret = crypto_aead_setauthsize(tfm, authsize);
                         if (ret) {
                                 printk(KERN_INFO
@@ -438,17 +427,24 @@ next_one:
                                 goto out;
                         }
 
-                        if (enc)
+                        if (enc) {
+                                if (WARN_ON(sg[k - 1].offset +
+                                        sg[k - 1].length + authsize >
+                                        PAGE_SIZE))
+                                        goto out;
+
                                 sg[k - 1].length += authsize;
+                        }
 
                         sg_init_table(asg, template[i].anp);
                         for (k = 0, temp = 0; k < template[i].anp; k++) {
-                                memcpy(&axbuf[IDX[k]],
-                                        template[i].assoc + temp,
-                                        template[i].atap[k]);
-                                temp += template[i].atap[k];
-                                sg_set_buf(&asg[k], &axbuf[IDX[k]],
-                                        template[i].atap[k]);
+                                sg_set_buf(&asg[k],
+                                        memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
+                                                offset_in_page(IDX[k]),
+                                                template[i].assoc + temp,
+                                                template[i].atap[k]),
+                                        template[i].atap[k]);
+                                temp += template[i].atap[k];
                         }
 
                         aead_request_set_crypt(req, sg, sg,
@@ -481,7 +477,8 @@ next_one:
 
                         for (k = 0, temp = 0; k < template[i].np; k++) {
                                 printk(KERN_INFO "page %u\n", k);
-                                q = &xbuf[IDX[k]];
+                                q = xbuf[IDX[k] >> PAGE_SHIFT] +
+                                        offset_in_page(IDX[k]);
 
                                 n = template[i].tap[k];
                                 if (k == template[i].np - 1)
@@ -499,7 +496,8 @@ next_one:
                                         else
                                                 n = 0;
                                 } else {
-                                        for (n = 0; q[n]; n++)
+                                        for (n = 0; offset_in_page(q + n) &&
+                                                q[n]; n++)
                                                 ;
                                 }
                                 if (n) {
@@ -558,12 +556,6 @@ static void test_cipher(char *algo, int enc,
 
         j = 0;
         for (i = 0; i < tcount; i++) {
-
-                data = kzalloc(template[i].ilen, GFP_KERNEL);
-                if (!data)
-                        continue;
-
-                memcpy(data, template[i].input, template[i].ilen);
                 if (template[i].iv)
                         memcpy(iv, template[i].iv, MAX_IVLEN);
                 else
@@ -574,6 +566,9 @@ static void test_cipher(char *algo, int enc,
                         printk("test %u (%d bit key):\n",
                                 j, template[i].klen * 8);
 
+                        data = xbuf[0];
+                        memcpy(data, template[i].input, template[i].ilen);
+
                         crypto_ablkcipher_clear_flags(tfm, ~0);
                         if (template[i].wk)
                                 crypto_ablkcipher_set_flags(
@@ -585,10 +580,8 @@ static void test_cipher(char *algo, int enc,
                                 printk("setkey() failed flags=%x\n",
                                         crypto_ablkcipher_get_flags(tfm));
 
-                                if (!template[i].fail) {
-                                        kfree(data);
+                                if (!template[i].fail)
                                         goto out;
-                                }
                         }
 
                         sg_init_one(&sg[0], data, template[i].ilen);
@@ -613,7 +606,6 @@ static void test_cipher(char *algo, int enc,
                                 /* fall through */
                         default:
                                 printk("%s () failed err=%d\n", e, -ret);
-                                kfree(data);
                                 goto out;
                         }
 
@@ -624,7 +616,6 @@ static void test_cipher(char *algo, int enc,
                                 memcmp(q, template[i].result,
                                         template[i].rlen) ? "fail" : "pass");
                 }
-                kfree(data);
         }
 
         printk("\ntesting %s %s across pages (chunking)\n", algo, e);
@@ -642,7 +633,6 @@ static void test_cipher(char *algo, int enc,
                         printk("test %u (%d bit key):\n",
                                 j, template[i].klen * 8);
 
-                        memset(xbuf, 0, XBUFSIZE);
                         crypto_ablkcipher_clear_flags(tfm, ~0);
                         if (template[i].wk)
                                 crypto_ablkcipher_set_flags(
@@ -661,12 +651,23 @@ static void test_cipher(char *algo, int enc,
                         temp = 0;
                         sg_init_table(sg, template[i].np);
                         for (k = 0; k < template[i].np; k++) {
-                                memcpy(&xbuf[IDX[k]],
-                                        template[i].input + temp,
-                                        template[i].tap[k]);
+                                if (WARN_ON(offset_in_page(IDX[k]) +
+                                        template[i].tap[k] > PAGE_SIZE))
+                                        goto out;
+
+                                q = xbuf[IDX[k] >> PAGE_SHIFT] +
+                                        offset_in_page(IDX[k]);
+
+                                memcpy(q, template[i].input + temp,
+                                        template[i].tap[k]);
+
+                                if (offset_in_page(q) + template[i].tap[k] <
+                                        PAGE_SIZE)
+                                        q[template[i].tap[k]] = 0;
+
+                                sg_set_buf(&sg[k], q, template[i].tap[k]);
+
                                 temp += template[i].tap[k];
-                                sg_set_buf(&sg[k], &xbuf[IDX[k]],
-                                        template[i].tap[k]);
                         }
 
                         ablkcipher_request_set_crypt(req, sg, sg,
@@ -696,19 +697,21 @@ static void test_cipher(char *algo, int enc,
                         temp = 0;
                         for (k = 0; k < template[i].np; k++) {
                                 printk("page %u\n", k);
-                                q = &xbuf[IDX[k]];
+                                q = xbuf[IDX[k] >> PAGE_SHIFT] +
+                                        offset_in_page(IDX[k]);
                                 hexdump(q, template[i].tap[k]);
                                 printk("%s\n",
                                         memcmp(q, template[i].result + temp,
                                                 template[i].tap[k]) ? "fail" :
                                         "pass");
 
-                                for (n = 0; q[template[i].tap[k] + n]; n++)
+                                q += template[i].tap[k];
+                                for (n = 0; offset_in_page(q + n) && q[n]; n++)
                                         ;
                                 if (n) {
                                         printk("Result buffer corruption %u "
                                                 "bytes:\n", n);
-                                        hexdump(&q[template[i].tap[k]], n);
+                                        hexdump(q, n);
                                 }
                                 temp += template[i].tap[k];
                         }
@@ -719,16 +722,13 @@ out:
         ablkcipher_request_free(req);
 }
 
-static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc, char *p,
-                                int blen, int sec)
+static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
+                                struct scatterlist *sg, int blen, int sec)
 {
-        struct scatterlist sg[1];
         unsigned long start, end;
         int bcount;
         int ret;
 
-        sg_init_one(sg, p, blen);
-
         for (start = jiffies, end = start + sec * HZ, bcount = 0;
                 time_before(jiffies, end); bcount++) {
                 if (enc)
@@ -745,16 +745,13 @@ static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc, char *p,
         return 0;
 }
 
-static int test_cipher_cycles(struct blkcipher_desc *desc, int enc, char *p,
-                                int blen)
+static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
+                                struct scatterlist *sg, int blen)
 {
-        struct scatterlist sg[1];
         unsigned long cycles = 0;
         int ret = 0;
         int i;
 
-        sg_init_one(sg, p, blen);
-
         local_bh_disable();
         local_irq_disable();
 
@@ -804,7 +801,7 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec,
                                 unsigned int tcount, u8 *keysize)
 {
         unsigned int ret, i, j, iv_len;
-        unsigned char *key, *p, iv[128];
+        unsigned char *key, iv[128];
         struct crypto_blkcipher *tfm;
         struct blkcipher_desc desc;
         const char *e;
@@ -832,27 +829,28 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec,
 
         b_size = block_sizes;
         do {
+                struct scatterlist sg[TVMEMSIZE];
 
-                if ((*keysize + *b_size) > TVMEMSIZE) {
-                        printk("template (%u) too big for tvmem (%u)\n",
-                                *keysize + *b_size, TVMEMSIZE);
+                if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
+                        printk("template (%u) too big for "
+                                "tvmem (%lu)\n", *keysize + *b_size,
+                                TVMEMSIZE * PAGE_SIZE);
                         goto out;
                 }
 
                 printk("test %u (%d bit key, %d byte blocks): ", i,
                         *keysize * 8, *b_size);
 
-                memset(tvmem, 0xff, *keysize + *b_size);
+                memset(tvmem[0], 0xff, PAGE_SIZE);
 
                 /* set key, plain text and IV */
-                key = (unsigned char *)tvmem;
+                key = (unsigned char *)tvmem[0];
                 for (j = 0; j < tcount; j++) {
                         if (template[j].klen == *keysize) {
                                 key = template[j].key;
                                 break;
                         }
                 }
-                p = (unsigned char *)tvmem + *keysize;
 
                 ret = crypto_blkcipher_setkey(tfm, key, *keysize);
                 if (ret) {
@@ -861,6 +859,14 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec,
                         goto out;
                 }
 
+                sg_init_table(sg, TVMEMSIZE);
+                sg_set_buf(sg, tvmem[0] + *keysize,
+                        PAGE_SIZE - *keysize);
+                for (j = 1; j < TVMEMSIZE; j++) {
+                        sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
+                        memset (tvmem[j], 0xff, PAGE_SIZE);
+                }
+
                 iv_len = crypto_blkcipher_ivsize(tfm);
                 if (iv_len) {
                         memset(&iv, 0xff, iv_len);
@@ -868,9 +874,11 @@ static void test_cipher_speed(char *algo, int enc, unsigned int sec,
                 }
 
                 if (sec)
-                        ret = test_cipher_jiffies(&desc, enc, p, *b_size, sec);
+                        ret = test_cipher_jiffies(&desc, enc, sg,
+                                *b_size, sec);
                 else
-                        ret = test_cipher_cycles(&desc, enc, p, *b_size);
+                        ret = test_cipher_cycles(&desc, enc, sg,
+                                *b_size);
 
                 if (ret) {
                         printk("%s() failed flags=%x\n", e, desc.flags);
@@ -886,19 +894,16 @@ out:
         crypto_free_blkcipher(tfm);
 }
 
-static int test_hash_jiffies_digest(struct hash_desc *desc, char *p, int blen,
+static int test_hash_jiffies_digest(struct hash_desc *desc,
+                                struct scatterlist *sg, int blen,
                                 char *out, int sec)
 {
-        struct scatterlist sg[1];
         unsigned long start, end;
         int bcount;
         int ret;
 
-        sg_init_table(sg, 1);
-
         for (start = jiffies, end = start + sec * HZ, bcount = 0;
                 time_before(jiffies, end); bcount++) {
-                sg_set_buf(sg, p, blen);
                 ret = crypto_hash_digest(desc, sg, blen, out);
                 if (ret)
                         return ret;
@@ -910,18 +915,15 @@ static int test_hash_jiffies_digest(struct hash_desc *desc, char *p, int blen,
         return 0;
 }
 
-static int test_hash_jiffies(struct hash_desc *desc, char *p, int blen,
-                                int plen, char *out, int sec)
+static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
+                                int blen, int plen, char *out, int sec)
 {
-        struct scatterlist sg[1];
         unsigned long start, end;
         int bcount, pcount;
         int ret;
 
         if (plen == blen)
-                return test_hash_jiffies_digest(desc, p, blen, out, sec);
-
-        sg_init_table(sg, 1);
+                return test_hash_jiffies_digest(desc, sg, blen, out, sec);
 
         for (start = jiffies, end = start + sec * HZ, bcount = 0;
                 time_before(jiffies, end); bcount++) {
@@ -929,7 +931,6 @@ static int test_hash_jiffies(struct hash_desc *desc, char *p, int blen,
                 if (ret)
                         return ret;
                 for (pcount = 0; pcount < blen; pcount += plen) {
-                        sg_set_buf(sg, p + pcount, plen);
                         ret = crypto_hash_update(desc, sg, plen);
                         if (ret)
                                 return ret;
@@ -946,22 +947,18 @@ static int test_hash_jiffies(struct hash_desc *desc, char *p, int blen,
         return 0;
 }
 
-static int test_hash_cycles_digest(struct hash_desc *desc, char *p, int blen,
-                                char *out)
+static int test_hash_cycles_digest(struct hash_desc *desc,
+                                struct scatterlist *sg, int blen, char *out)
 {
-        struct scatterlist sg[1];
         unsigned long cycles = 0;
         int i;
         int ret;
 
-        sg_init_table(sg, 1);
-
         local_bh_disable();
         local_irq_disable();
 
         /* Warm-up run. */
         for (i = 0; i < 4; i++) {
-                sg_set_buf(sg, p, blen);
                 ret = crypto_hash_digest(desc, sg, blen, out);
                 if (ret)
                         goto out;
@@ -973,7 +970,6 @@ static int test_hash_cycles_digest(struct hash_desc *desc, char *p, int blen,
 
                 start = get_cycles();
 
-                sg_set_buf(sg, p, blen);
                 ret = crypto_hash_digest(desc, sg, blen, out);
                 if (ret)
                         goto out;
@@ -996,18 +992,15 @@ out:
         return 0;
 }
 
-static int test_hash_cycles(struct hash_desc *desc, char *p, int blen,
-                                int plen, char *out)
+static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
+                                int blen, int plen, char *out)
 {
-        struct scatterlist sg[1];
         unsigned long cycles = 0;
         int i, pcount;
         int ret;
 
         if (plen == blen)
-                return test_hash_cycles_digest(desc, p, blen, out);
-
-        sg_init_table(sg, 1);
+                return test_hash_cycles_digest(desc, sg, blen, out);
 
         local_bh_disable();
         local_irq_disable();
@@ -1018,7 +1011,6 @@ static int test_hash_cycles(struct hash_desc *desc, char *p, int blen,
                 if (ret)
                         goto out;
                 for (pcount = 0; pcount < blen; pcount += plen) {
-                        sg_set_buf(sg, p + pcount, plen);
                         ret = crypto_hash_update(desc, sg, plen);
                         if (ret)
                                 goto out;
@@ -1038,7 +1030,6 @@ static int test_hash_cycles(struct hash_desc *desc, char *p, int blen,
                 if (ret)
                         goto out;
                 for (pcount = 0; pcount < blen; pcount += plen) {
-                        sg_set_buf(sg, p + pcount, plen);
                         ret = crypto_hash_update(desc, sg, plen);
                         if (ret)
                                 goto out;
@@ -1068,6 +1059,7 @@ out:
 static void test_hash_speed(char *algo, unsigned int sec,
                                 struct hash_speed *speed)
 {
+        struct scatterlist sg[TVMEMSIZE];
         struct crypto_hash *tfm;
         struct hash_desc desc;
         char output[1024];
@@ -1093,23 +1085,27 @@ static void test_hash_speed(char *algo, unsigned int sec,
                 goto out;
         }
 
+        sg_init_table(sg, TVMEMSIZE);
+        for (i = 0; i < TVMEMSIZE; i++) {
+                sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
+                memset(tvmem[i], 0xff, PAGE_SIZE);
+        }
+
         for (i = 0; speed[i].blen != 0; i++) {
-                if (speed[i].blen > TVMEMSIZE) {
-                        printk("template (%u) too big for tvmem (%u)\n",
-                                speed[i].blen, TVMEMSIZE);
+                if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
+                        printk("template (%u) too big for tvmem (%lu)\n",
+                                speed[i].blen, TVMEMSIZE * PAGE_SIZE);
                         goto out;
                 }
 
                 printk("test%3u (%5u byte blocks,%5u bytes per update,%4u updates): ",
                         i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
 
-                memset(tvmem, 0xff, speed[i].blen);
-
                 if (sec)
-                        ret = test_hash_jiffies(&desc, tvmem, speed[i].blen,
+                        ret = test_hash_jiffies(&desc, sg, speed[i].blen,
                                 speed[i].plen, output, sec);
                 else
-                        ret = test_hash_cycles(&desc, tvmem, speed[i].blen,
+                        ret = test_hash_cycles(&desc, sg, speed[i].blen,
                                 speed[i].plen, output);
 
                 if (ret) {
@@ -1128,7 +1124,6 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
         unsigned int i;
         char result[COMP_BUF_SIZE];
         struct crypto_comp *tfm;
-        unsigned int tsize;
 
         printk("\ntesting %s compression\n", algo);
 
@@ -1159,14 +1154,6 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
 
         printk("\ntesting %s decompression\n", algo);
 
-        tsize = sizeof(struct comp_testvec);
-        tsize *= dtcount;
-        if (tsize > TVMEMSIZE) {
-                printk("template (%u) too big for tvmem (%u)\n", tsize,
-                        TVMEMSIZE);
-                goto out;
-        }
-
         for (i = 0; i < dtcount; i++) {
                 int ilen, ret, dlen = COMP_BUF_SIZE;
 
@@ -1185,7 +1172,7 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
                         memcmp(result, dtemplate[i].output, dlen) ? "fail" : "pass",
                         ilen, dlen);
         }
-out:
+
         crypto_free_comp(tfm);
 }
 
@@ -1917,18 +1904,25 @@ static void do_test(void)
 static int __init tcrypt_mod_init(void)
 {
         int err = -ENOMEM;
+        int i;
 
-        tvmem = kmalloc(TVMEMSIZE, GFP_KERNEL);
-        if (tvmem == NULL)
-                return err;
+        for (i = 0; i < TVMEMSIZE; i++) {
+                tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
+                if (!tvmem[i])
+                        goto err_free_tv;
+        }
 
-        xbuf = kmalloc(XBUFSIZE, GFP_KERNEL);
-        if (xbuf == NULL)
-                goto err_free_tv;
+        for (i = 0; i < XBUFSIZE; i++) {
+                xbuf[i] = (void *)__get_free_page(GFP_KERNEL);
+                if (!xbuf[i])
+                        goto err_free_xbuf;
+        }
 
-        axbuf = kmalloc(XBUFSIZE, GFP_KERNEL);
-        if (axbuf == NULL)
-                goto err_free_xbuf;
+        for (i = 0; i < XBUFSIZE; i++) {
+                axbuf[i] = (void *)__get_free_page(GFP_KERNEL);
+                if (!axbuf[i])
+                        goto err_free_axbuf;
+        }
 
         do_test();
 
@@ -1940,11 +1934,15 @@ static int __init tcrypt_mod_init(void)
          */
         err = -EAGAIN;
 
-        kfree(axbuf);
-err_free_xbuf:
-        kfree(xbuf);
-err_free_tv:
-        kfree(tvmem);
+err_free_axbuf:
+        for (i = 0; i < XBUFSIZE && axbuf[i]; i++)
+                free_page((unsigned long)axbuf[i]);
+err_free_xbuf:
+        for (i = 0; i < XBUFSIZE && xbuf[i]; i++)
+                free_page((unsigned long)xbuf[i]);
+err_free_tv:
+        for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
+                free_page((unsigned long)tvmem[i]);
 
         return err;
 }
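
The expression xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]) that recurs throughout the patch replaces indexing into one large contiguous buffer with selecting a single page from the new xbuf[] array plus an offset within it. A minimal userspace sketch of that addressing follows; PAGE_SHIFT, PAGE_SIZE, XBUFSIZE, the pg[] pool, the xbuf_ptr() helper and the sample offsets are illustrative stand-ins rather than the kernel definitions or the driver's actual IDX values.

/*
 * Sketch (userspace, assumptions noted above): map a flat byte index onto
 * an array of page-sized buffers the way the patched tcrypt.c does.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define XBUFSIZE   8

static char *pg[XBUFSIZE];      /* plays the role of xbuf[] */

/* mirrors xbuf[idx >> PAGE_SHIFT] + offset_in_page(idx) */
static char *xbuf_ptr(unsigned long idx)
{
        return pg[idx >> PAGE_SHIFT] + (idx & (PAGE_SIZE - 1));
}

int main(void)
{
        unsigned long idx[2] = { 32, 32400 };   /* sample cross-page offsets */
        unsigned int i;

        for (i = 0; i < XBUFSIZE; i++) {
                pg[i] = calloc(1, PAGE_SIZE);
                if (!pg[i])
                        return 1;
        }

        for (i = 0; i < 2; i++) {
                char *q = xbuf_ptr(idx[i]);

                memcpy(q, "data", 4);   /* lands in page idx >> PAGE_SHIFT */
                printf("idx %lu -> page %lu, offset %lu\n",
                        idx[i], idx[i] >> PAGE_SHIFT, idx[i] & (PAGE_SIZE - 1));
        }

        for (i = 0; i < XBUFSIZE; i++)
                free(pg[i]);
        return 0;
}

Because each scatterlist entry built this way stays inside one page, the patch can drop the old oversized contiguous kmalloc() buffers (TVMEMSIZE/XBUFSIZE shrink to a page count) and allocate per-page buffers in tcrypt_mod_init() instead.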