path: root/crypto/tcrypt.c
author    Sebastian Siewior <sebastian@breakpoint.cc>    2008-03-13 08:20:28 -0400
committer Herbert Xu <herbert@gondor.apana.org.au>       2008-04-20 22:19:22 -0400
commit    562954d5e01d08154cf15c7e12e6e9ec803f50f7 (patch)
tree      24e129c431f498f773537cd0963c3946ece350e2 /crypto/tcrypt.c
parent    48c8949ea8460216783dd33640121187b9531b60 (diff)
[CRYPTO] tcrypt: Change the usage of the test vectors
The test routines (test_{cipher,hash,aead}) make a copy of the test template and perform the encryption in place on that copy. This patch changes how the copy is created so that it also works when the input data is referenced through a pointer rather than stored in an array inside the template.

Signed-off-by: Sebastian Siewior <sebastian@breakpoint.cc>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'crypto/tcrypt.c')
-rw-r--r--   crypto/tcrypt.c   346
1 file changed, 181 insertions(+), 165 deletions(-)
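
In short, the old code bulk-copied the entire test-vector array into the static tvmem buffer and then worked on that copy, which only works while every payload lives in an array embedded in the struct. The new code instead copies each vector's payload into its own freshly allocated buffer. A condensed before/after sketch of the hash case, taken from the hunks below (error handling and the rest of the loop trimmed):

    /* before: copy the whole template array into tvmem and index the copy */
    tsize = sizeof(struct hash_testvec) * tcount;
    if (tsize > TVMEMSIZE)
            return;                         /* the template had to fit in tvmem */
    memcpy(tvmem, template, tsize);
    hash_tv = (void *)tvmem;

    /* after: a private buffer per test vector, freed once the digest is checked */
    hash_buff = kzalloc(template[i].psize, GFP_KERNEL);
    if (!hash_buff)
            continue;
    memcpy(hash_buff, template[i].plaintext, template[i].psize);
    sg_init_one(&sg[0], hash_buff, template[i].psize);
    /* ... setkey + crypto_hash_digest() as before ... */
    kfree(hash_buff);

Because template[] is now only read, the plaintext (and later the cipher/AEAD input) no longer has to be an array inside the testvec struct; a pointer to data stored elsewhere works just as well.
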
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index 6b8315b6f2a5..30e75d49f35a 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -113,23 +113,11 @@ static void test_hash(char *algo, struct hash_testvec *template,
113 char result[64]; 113 char result[64];
114 struct crypto_hash *tfm; 114 struct crypto_hash *tfm;
115 struct hash_desc desc; 115 struct hash_desc desc;
116 struct hash_testvec *hash_tv;
117 unsigned int tsize;
118 int ret; 116 int ret;
117 void *hash_buff;
119 118
120 printk("\ntesting %s\n", algo); 119 printk("\ntesting %s\n", algo);
121 120
122 tsize = sizeof(struct hash_testvec);
123 tsize *= tcount;
124
125 if (tsize > TVMEMSIZE) {
126 printk("template (%u) too big for tvmem (%u)\n", tsize, TVMEMSIZE);
127 return;
128 }
129
130 memcpy(tvmem, template, tsize);
131 hash_tv = (void *)tvmem;
132
133 tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC); 121 tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);
134 if (IS_ERR(tfm)) { 122 if (IS_ERR(tfm)) {
135 printk("failed to load transform for %s: %ld\n", algo, 123 printk("failed to load transform for %s: %ld\n", algo,
@@ -144,28 +132,36 @@ static void test_hash(char *algo, struct hash_testvec *template,
144 printk("test %u:\n", i + 1); 132 printk("test %u:\n", i + 1);
145 memset(result, 0, 64); 133 memset(result, 0, 64);
146 134
147 sg_init_one(&sg[0], hash_tv[i].plaintext, hash_tv[i].psize); 135 hash_buff = kzalloc(template[i].psize, GFP_KERNEL);
136 if (!hash_buff)
137 continue;
148 138
149 if (hash_tv[i].ksize) { 139 memcpy(hash_buff, template[i].plaintext, template[i].psize);
150 ret = crypto_hash_setkey(tfm, hash_tv[i].key, 140 sg_init_one(&sg[0], hash_buff, template[i].psize);
151 hash_tv[i].ksize); 141
142 if (template[i].ksize) {
143 ret = crypto_hash_setkey(tfm, template[i].key,
144 template[i].ksize);
152 if (ret) { 145 if (ret) {
153 printk("setkey() failed ret=%d\n", ret); 146 printk("setkey() failed ret=%d\n", ret);
147 kfree(hash_buff);
154 goto out; 148 goto out;
155 } 149 }
156 } 150 }
157 151
158 ret = crypto_hash_digest(&desc, sg, hash_tv[i].psize, result); 152 ret = crypto_hash_digest(&desc, sg, template[i].psize, result);
159 if (ret) { 153 if (ret) {
160 printk("digest () failed ret=%d\n", ret); 154 printk("digest () failed ret=%d\n", ret);
155 kfree(hash_buff);
161 goto out; 156 goto out;
162 } 157 }
163 158
164 hexdump(result, crypto_hash_digestsize(tfm)); 159 hexdump(result, crypto_hash_digestsize(tfm));
165 printk("%s\n", 160 printk("%s\n",
166 memcmp(result, hash_tv[i].digest, 161 memcmp(result, template[i].digest,
167 crypto_hash_digestsize(tfm)) ? 162 crypto_hash_digestsize(tfm)) ?
168 "fail" : "pass"); 163 "fail" : "pass");
164 kfree(hash_buff);
169 } 165 }
170 166
171 printk("testing %s across pages\n", algo); 167 printk("testing %s across pages\n", algo);
@@ -175,25 +171,25 @@ static void test_hash(char *algo, struct hash_testvec *template,
175 171
176 j = 0; 172 j = 0;
177 for (i = 0; i < tcount; i++) { 173 for (i = 0; i < tcount; i++) {
178 if (hash_tv[i].np) { 174 if (template[i].np) {
179 j++; 175 j++;
180 printk("test %u:\n", j); 176 printk("test %u:\n", j);
181 memset(result, 0, 64); 177 memset(result, 0, 64);
182 178
183 temp = 0; 179 temp = 0;
184 sg_init_table(sg, hash_tv[i].np); 180 sg_init_table(sg, template[i].np);
185 for (k = 0; k < hash_tv[i].np; k++) { 181 for (k = 0; k < template[i].np; k++) {
186 memcpy(&xbuf[IDX[k]], 182 memcpy(&xbuf[IDX[k]],
187 hash_tv[i].plaintext + temp, 183 template[i].plaintext + temp,
188 hash_tv[i].tap[k]); 184 template[i].tap[k]);
189 temp += hash_tv[i].tap[k]; 185 temp += template[i].tap[k];
190 sg_set_buf(&sg[k], &xbuf[IDX[k]], 186 sg_set_buf(&sg[k], &xbuf[IDX[k]],
191 hash_tv[i].tap[k]); 187 template[i].tap[k]);
192 } 188 }
193 189
194 if (hash_tv[i].ksize) { 190 if (template[i].ksize) {
195 ret = crypto_hash_setkey(tfm, hash_tv[i].key, 191 ret = crypto_hash_setkey(tfm, template[i].key,
196 hash_tv[i].ksize); 192 template[i].ksize);
197 193
198 if (ret) { 194 if (ret) {
199 printk("setkey() failed ret=%d\n", ret); 195 printk("setkey() failed ret=%d\n", ret);
@@ -201,7 +197,7 @@ static void test_hash(char *algo, struct hash_testvec *template,
201 } 197 }
202 } 198 }
203 199
204 ret = crypto_hash_digest(&desc, sg, hash_tv[i].psize, 200 ret = crypto_hash_digest(&desc, sg, template[i].psize,
205 result); 201 result);
206 if (ret) { 202 if (ret) {
207 printk("digest () failed ret=%d\n", ret); 203 printk("digest () failed ret=%d\n", ret);
@@ -210,7 +206,7 @@ static void test_hash(char *algo, struct hash_testvec *template,
210 206
211 hexdump(result, crypto_hash_digestsize(tfm)); 207 hexdump(result, crypto_hash_digestsize(tfm));
212 printk("%s\n", 208 printk("%s\n",
213 memcmp(result, hash_tv[i].digest, 209 memcmp(result, template[i].digest,
214 crypto_hash_digestsize(tfm)) ? 210 crypto_hash_digestsize(tfm)) ?
215 "fail" : "pass"); 211 "fail" : "pass");
216 } 212 }
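
For orientation, these are the hash_testvec members the loops above touch, as inferred from the usage; the authoritative definition lives in crypto/tcrypt.h, and the exact types and array bounds shown here are assumptions:

    struct hash_testvec {
            char *key;               /* consulted only when ksize != 0 */
            char *plaintext;         /* the data this patch allows to live outside the struct */
            char *digest;            /* expected digest to memcmp() against */
            unsigned char tap[8];    /* per-chunk sizes for the cross-page test (bound assumed) */
            unsigned char psize;     /* plaintext length */
            unsigned char np;        /* number of chunks; 0 selects the single-buffer path */
            unsigned char ksize;     /* key length */
    };
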
@@ -224,17 +220,18 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
224 unsigned int tcount) 220 unsigned int tcount)
225{ 221{
226 unsigned int ret, i, j, k, temp; 222 unsigned int ret, i, j, k, temp;
227 unsigned int tsize;
228 char *q; 223 char *q;
229 struct crypto_aead *tfm; 224 struct crypto_aead *tfm;
230 char *key; 225 char *key;
231 struct aead_testvec *aead_tv;
232 struct aead_request *req; 226 struct aead_request *req;
233 struct scatterlist sg[8]; 227 struct scatterlist sg[8];
234 struct scatterlist asg[8]; 228 struct scatterlist asg[8];
235 const char *e; 229 const char *e;
236 struct tcrypt_result result; 230 struct tcrypt_result result;
237 unsigned int authsize; 231 unsigned int authsize;
232 void *input;
233 void *assoc;
234 char iv[MAX_IVLEN];
238 235
239 if (enc == ENCRYPT) 236 if (enc == ENCRYPT)
240 e = "encryption"; 237 e = "encryption";
@@ -243,18 +240,6 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
243 240
244 printk(KERN_INFO "\ntesting %s %s\n", algo, e); 241 printk(KERN_INFO "\ntesting %s %s\n", algo, e);
245 242
246 tsize = sizeof(struct aead_testvec);
247 tsize *= tcount;
248
249 if (tsize > TVMEMSIZE) {
250 printk(KERN_INFO "template (%u) too big for tvmem (%u)\n",
251 tsize, TVMEMSIZE);
252 return;
253 }
254
255 memcpy(tvmem, template, tsize);
256 aead_tv = (void *)tvmem;
257
258 init_completion(&result.completion); 243 init_completion(&result.completion);
259 244
260 tfm = crypto_alloc_aead(algo, 0, 0); 245 tfm = crypto_alloc_aead(algo, 0, 0);
@@ -275,46 +260,68 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
275 tcrypt_complete, &result); 260 tcrypt_complete, &result);
276 261
277 for (i = 0, j = 0; i < tcount; i++) { 262 for (i = 0, j = 0; i < tcount; i++) {
278 if (!aead_tv[i].np) { 263 if (!template[i].np) {
279 printk(KERN_INFO "test %u (%d bit key):\n", 264 printk(KERN_INFO "test %u (%d bit key):\n",
280 ++j, aead_tv[i].klen * 8); 265 ++j, template[i].klen * 8);
266
267 /* some templates have no input data but they will
268 * touch input
269 */
270 input = kzalloc(template[i].ilen + template[i].rlen, GFP_KERNEL);
271 if (!input)
272 continue;
273
274 assoc = kzalloc(template[i].alen, GFP_KERNEL);
275 if (!assoc) {
276 kfree(input);
277 continue;
278 }
279
280 memcpy(input, template[i].input, template[i].ilen);
281 memcpy(assoc, template[i].assoc, template[i].alen);
282 if (template[i].iv)
283 memcpy(iv, template[i].iv, MAX_IVLEN);
284 else
285 memset(iv, 0, MAX_IVLEN);
281 286
282 crypto_aead_clear_flags(tfm, ~0); 287 crypto_aead_clear_flags(tfm, ~0);
283 if (aead_tv[i].wk) 288 if (template[i].wk)
284 crypto_aead_set_flags( 289 crypto_aead_set_flags(
285 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 290 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
286 key = aead_tv[i].key; 291
292 if (template[i].key)
293 key = template[i].key;
294 else
295 key = kzalloc(template[i].klen, GFP_KERNEL);
287 296
288 ret = crypto_aead_setkey(tfm, key, 297 ret = crypto_aead_setkey(tfm, key,
289 aead_tv[i].klen); 298 template[i].klen);
290 if (ret) { 299 if (ret) {
291 printk(KERN_INFO "setkey() failed flags=%x\n", 300 printk(KERN_INFO "setkey() failed flags=%x\n",
292 crypto_aead_get_flags(tfm)); 301 crypto_aead_get_flags(tfm));
293 302
294 if (!aead_tv[i].fail) 303 if (!template[i].fail)
295 goto out; 304 goto next_one;
296 } 305 }
297 306
298 authsize = abs(aead_tv[i].rlen - aead_tv[i].ilen); 307 authsize = abs(template[i].rlen - template[i].ilen);
299 ret = crypto_aead_setauthsize(tfm, authsize); 308 ret = crypto_aead_setauthsize(tfm, authsize);
300 if (ret) { 309 if (ret) {
301 printk(KERN_INFO 310 printk(KERN_INFO
302 "failed to set authsize = %u\n", 311 "failed to set authsize = %u\n",
303 authsize); 312 authsize);
304 goto out; 313 goto next_one;
305 } 314 }
306 315
307 sg_init_one(&sg[0], aead_tv[i].input, 316 sg_init_one(&sg[0], input,
308 aead_tv[i].ilen + (enc ? authsize : 0)); 317 template[i].ilen + (enc ? authsize : 0));
309 318
310 sg_init_one(&asg[0], aead_tv[i].assoc, 319 sg_init_one(&asg[0], assoc, template[i].alen);
311 aead_tv[i].alen);
312 320
313 aead_request_set_crypt(req, sg, sg, 321 aead_request_set_crypt(req, sg, sg,
314 aead_tv[i].ilen, 322 template[i].ilen, iv);
315 aead_tv[i].iv);
316 323
317 aead_request_set_assoc(req, asg, aead_tv[i].alen); 324 aead_request_set_assoc(req, asg, template[i].alen);
318 325
319 ret = enc ? 326 ret = enc ?
320 crypto_aead_encrypt(req) : 327 crypto_aead_encrypt(req) :
@@ -335,15 +342,21 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
335 default: 342 default:
336 printk(KERN_INFO "%s () failed err=%d\n", 343 printk(KERN_INFO "%s () failed err=%d\n",
337 e, -ret); 344 e, -ret);
338 goto out; 345 goto next_one;
339 } 346 }
340 347
341 q = kmap(sg_page(&sg[0])) + sg[0].offset; 348 q = kmap(sg_page(&sg[0])) + sg[0].offset;
342 hexdump(q, aead_tv[i].rlen); 349 hexdump(q, template[i].rlen);
343 350
344 printk(KERN_INFO "enc/dec: %s\n", 351 printk(KERN_INFO "enc/dec: %s\n",
345 memcmp(q, aead_tv[i].result, 352 memcmp(q, template[i].result,
346 aead_tv[i].rlen) ? "fail" : "pass"); 353 template[i].rlen) ? "fail" : "pass");
354 kunmap(sg_page(&sg[0]));
355next_one:
356 if (!template[i].key)
357 kfree(key);
358 kfree(assoc);
359 kfree(input);
347 } 360 }
348 } 361 }
349 362
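
Condensed, the per-vector lifecycle of the first (single-scatterlist) AEAD loop now looks like this (taken from the hunks above, error paths trimmed). The input buffer is sized ilen + rlen, presumably so the in-place encryption has room for the authentication tag; a missing IV is replaced by a zeroed local copy; and an all-zero key is allocated on the fly when the template provides none:

    input = kzalloc(template[i].ilen + template[i].rlen, GFP_KERNEL);
    assoc = kzalloc(template[i].alen, GFP_KERNEL);
    memcpy(input, template[i].input, template[i].ilen);
    memcpy(assoc, template[i].assoc, template[i].alen);

    if (template[i].iv)
            memcpy(iv, template[i].iv, MAX_IVLEN);
    else
            memset(iv, 0, MAX_IVLEN);

    key = template[i].key ? template[i].key
                          : kzalloc(template[i].klen, GFP_KERNEL);
    /* ... setkey, setauthsize, sg_init_one() on input/assoc, encrypt or decrypt ... */

    next_one:
            if (!template[i].key)
                    kfree(key);              /* free only the key allocated here */
            kfree(assoc);
            kfree(input);

Note that setup failures for a single vector now jump to next_one and continue with the next vector instead of aborting the whole run via out.
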
@@ -352,36 +365,41 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
352 memset(axbuf, 0, XBUFSIZE); 365 memset(axbuf, 0, XBUFSIZE);
353 366
354 for (i = 0, j = 0; i < tcount; i++) { 367 for (i = 0, j = 0; i < tcount; i++) {
355 if (aead_tv[i].np) { 368 if (template[i].np) {
356 printk(KERN_INFO "test %u (%d bit key):\n", 369 printk(KERN_INFO "test %u (%d bit key):\n",
357 ++j, aead_tv[i].klen * 8); 370 ++j, template[i].klen * 8);
371
372 if (template[i].iv)
373 memcpy(iv, template[i].iv, MAX_IVLEN);
374 else
375 memset(iv, 0, MAX_IVLEN);
358 376
359 crypto_aead_clear_flags(tfm, ~0); 377 crypto_aead_clear_flags(tfm, ~0);
360 if (aead_tv[i].wk) 378 if (template[i].wk)
361 crypto_aead_set_flags( 379 crypto_aead_set_flags(
362 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 380 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
363 key = aead_tv[i].key; 381 key = template[i].key;
364 382
365 ret = crypto_aead_setkey(tfm, key, aead_tv[i].klen); 383 ret = crypto_aead_setkey(tfm, key, template[i].klen);
366 if (ret) { 384 if (ret) {
367 printk(KERN_INFO "setkey() failed flags=%x\n", 385 printk(KERN_INFO "setkey() failed flags=%x\n",
368 crypto_aead_get_flags(tfm)); 386 crypto_aead_get_flags(tfm));
369 387
370 if (!aead_tv[i].fail) 388 if (!template[i].fail)
371 goto out; 389 goto out;
372 } 390 }
373 391
374 sg_init_table(sg, aead_tv[i].np); 392 sg_init_table(sg, template[i].np);
375 for (k = 0, temp = 0; k < aead_tv[i].np; k++) { 393 for (k = 0, temp = 0; k < template[i].np; k++) {
376 memcpy(&xbuf[IDX[k]], 394 memcpy(&xbuf[IDX[k]],
377 aead_tv[i].input + temp, 395 template[i].input + temp,
378 aead_tv[i].tap[k]); 396 template[i].tap[k]);
379 temp += aead_tv[i].tap[k]; 397 temp += template[i].tap[k];
380 sg_set_buf(&sg[k], &xbuf[IDX[k]], 398 sg_set_buf(&sg[k], &xbuf[IDX[k]],
381 aead_tv[i].tap[k]); 399 template[i].tap[k]);
382 } 400 }
383 401
384 authsize = abs(aead_tv[i].rlen - aead_tv[i].ilen); 402 authsize = abs(template[i].rlen - template[i].ilen);
385 ret = crypto_aead_setauthsize(tfm, authsize); 403 ret = crypto_aead_setauthsize(tfm, authsize);
386 if (ret) { 404 if (ret) {
387 printk(KERN_INFO 405 printk(KERN_INFO
@@ -393,21 +411,21 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
393 if (enc) 411 if (enc)
394 sg[k - 1].length += authsize; 412 sg[k - 1].length += authsize;
395 413
396 sg_init_table(asg, aead_tv[i].anp); 414 sg_init_table(asg, template[i].anp);
397 for (k = 0, temp = 0; k < aead_tv[i].anp; k++) { 415 for (k = 0, temp = 0; k < template[i].anp; k++) {
398 memcpy(&axbuf[IDX[k]], 416 memcpy(&axbuf[IDX[k]],
399 aead_tv[i].assoc + temp, 417 template[i].assoc + temp,
400 aead_tv[i].atap[k]); 418 template[i].atap[k]);
401 temp += aead_tv[i].atap[k]; 419 temp += template[i].atap[k];
402 sg_set_buf(&asg[k], &axbuf[IDX[k]], 420 sg_set_buf(&asg[k], &axbuf[IDX[k]],
403 aead_tv[i].atap[k]); 421 template[i].atap[k]);
404 } 422 }
405 423
406 aead_request_set_crypt(req, sg, sg, 424 aead_request_set_crypt(req, sg, sg,
407 aead_tv[i].ilen, 425 template[i].ilen,
408 aead_tv[i].iv); 426 iv);
409 427
410 aead_request_set_assoc(req, asg, aead_tv[i].alen); 428 aead_request_set_assoc(req, asg, template[i].alen);
411 429
412 ret = enc ? 430 ret = enc ?
413 crypto_aead_encrypt(req) : 431 crypto_aead_encrypt(req) :
@@ -431,18 +449,19 @@ static void test_aead(char *algo, int enc, struct aead_testvec *template,
431 goto out; 449 goto out;
432 } 450 }
433 451
434 for (k = 0, temp = 0; k < aead_tv[i].np; k++) { 452 for (k = 0, temp = 0; k < template[i].np; k++) {
435 printk(KERN_INFO "page %u\n", k); 453 printk(KERN_INFO "page %u\n", k);
436 q = kmap(sg_page(&sg[k])) + sg[k].offset; 454 q = kmap(sg_page(&sg[k])) + sg[k].offset;
437 hexdump(q, aead_tv[i].tap[k]); 455 hexdump(q, template[i].tap[k]);
438 printk(KERN_INFO "%s\n", 456 printk(KERN_INFO "%s\n",
439 memcmp(q, aead_tv[i].result + temp, 457 memcmp(q, template[i].result + temp,
440 aead_tv[i].tap[k] - 458 template[i].tap[k] -
441 (k < aead_tv[i].np - 1 || enc ? 459 (k < template[i].np - 1 || enc ?
442 0 : authsize)) ? 460 0 : authsize)) ?
443 "fail" : "pass"); 461 "fail" : "pass");
444 462
445 temp += aead_tv[i].tap[k]; 463 temp += template[i].tap[k];
464 kunmap(sg_page(&sg[k]));
446 } 465 }
447 } 466 }
448 } 467 }
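
A small fix rides along in these hunks: every kmap() of a scatterlist page is now paired with a kunmap() once the chunk has been checked; as far as the old code shown here goes, those mappings were never released. The pattern, reduced to its core:

    for (k = 0, temp = 0; k < template[i].np; k++) {
            q = kmap(sg_page(&sg[k])) + sg[k].offset;
            hexdump(q, template[i].tap[k]);
            /* memcmp() against template[i].result + temp decides pass/fail */
            temp += template[i].tap[k];
            kunmap(sg_page(&sg[k]));   /* release the mapping before the next chunk */
    }
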
@@ -456,15 +475,14 @@ static void test_cipher(char *algo, int enc,
456 struct cipher_testvec *template, unsigned int tcount) 475 struct cipher_testvec *template, unsigned int tcount)
457{ 476{
458 unsigned int ret, i, j, k, temp; 477 unsigned int ret, i, j, k, temp;
459 unsigned int tsize;
460 char *q; 478 char *q;
461 struct crypto_ablkcipher *tfm; 479 struct crypto_ablkcipher *tfm;
462 char *key;
463 struct cipher_testvec *cipher_tv;
464 struct ablkcipher_request *req; 480 struct ablkcipher_request *req;
465 struct scatterlist sg[8]; 481 struct scatterlist sg[8];
466 const char *e; 482 const char *e;
467 struct tcrypt_result result; 483 struct tcrypt_result result;
484 void *data;
485 char iv[MAX_IVLEN];
468 486
469 if (enc == ENCRYPT) 487 if (enc == ENCRYPT)
470 e = "encryption"; 488 e = "encryption";
@@ -473,16 +491,7 @@ static void test_cipher(char *algo, int enc,
473 491
474 printk("\ntesting %s %s\n", algo, e); 492 printk("\ntesting %s %s\n", algo, e);
475 493
476 tsize = sizeof (struct cipher_testvec);
477 if (tsize > TVMEMSIZE) {
478 printk("template (%u) too big for tvmem (%u)\n", tsize,
479 TVMEMSIZE);
480 return;
481 }
482 cipher_tv = (void *)tvmem;
483
484 init_completion(&result.completion); 494 init_completion(&result.completion);
485
486 tfm = crypto_alloc_ablkcipher(algo, 0, 0); 495 tfm = crypto_alloc_ablkcipher(algo, 0, 0);
487 496
488 if (IS_ERR(tfm)) { 497 if (IS_ERR(tfm)) {
@@ -502,35 +511,43 @@ static void test_cipher(char *algo, int enc,
502 511
503 j = 0; 512 j = 0;
504 for (i = 0; i < tcount; i++) { 513 for (i = 0; i < tcount; i++) {
505 memcpy(cipher_tv, &template[i], tsize); 514
506 if (!(cipher_tv->np)) { 515 data = kzalloc(template[i].ilen, GFP_KERNEL);
516 if (!data)
517 continue;
518
519 memcpy(data, template[i].input, template[i].ilen);
520 if (template[i].iv)
521 memcpy(iv, template[i].iv, MAX_IVLEN);
522 else
523 memset(iv, 0, MAX_IVLEN);
524
525 if (!(template[i].np)) {
507 j++; 526 j++;
508 printk("test %u (%d bit key):\n", 527 printk("test %u (%d bit key):\n",
509 j, cipher_tv->klen * 8); 528 j, template[i].klen * 8);
510 529
511 crypto_ablkcipher_clear_flags(tfm, ~0); 530 crypto_ablkcipher_clear_flags(tfm, ~0);
512 if (cipher_tv->wk) 531 if (template[i].wk)
513 crypto_ablkcipher_set_flags( 532 crypto_ablkcipher_set_flags(
514 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 533 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
515 key = cipher_tv->key;
516 534
517 ret = crypto_ablkcipher_setkey(tfm, key, 535 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
518 cipher_tv->klen); 536 template[i].klen);
519 if (ret) { 537 if (ret) {
520 printk("setkey() failed flags=%x\n", 538 printk("setkey() failed flags=%x\n",
521 crypto_ablkcipher_get_flags(tfm)); 539 crypto_ablkcipher_get_flags(tfm));
522 540
523 if (!cipher_tv->fail) 541 if (!template[i].fail) {
542 kfree(data);
524 goto out; 543 goto out;
544 }
525 } 545 }
526 546
527 sg_init_one(&sg[0], cipher_tv->input, 547 sg_init_one(&sg[0], data, template[i].ilen);
528 cipher_tv->ilen);
529 548
530 ablkcipher_request_set_crypt(req, sg, sg, 549 ablkcipher_request_set_crypt(req, sg, sg,
531 cipher_tv->ilen, 550 template[i].ilen, iv);
532 cipher_tv->iv);
533
534 ret = enc ? 551 ret = enc ?
535 crypto_ablkcipher_encrypt(req) : 552 crypto_ablkcipher_encrypt(req) :
536 crypto_ablkcipher_decrypt(req); 553 crypto_ablkcipher_decrypt(req);
@@ -549,16 +566,19 @@ static void test_cipher(char *algo, int enc,
549 /* fall through */ 566 /* fall through */
550 default: 567 default:
551 printk("%s () failed err=%d\n", e, -ret); 568 printk("%s () failed err=%d\n", e, -ret);
569 kfree(data);
552 goto out; 570 goto out;
553 } 571 }
554 572
555 q = kmap(sg_page(&sg[0])) + sg[0].offset; 573 q = kmap(sg_page(&sg[0])) + sg[0].offset;
556 hexdump(q, cipher_tv->rlen); 574 hexdump(q, template[i].rlen);
557 575
558 printk("%s\n", 576 printk("%s\n",
559 memcmp(q, cipher_tv->result, 577 memcmp(q, template[i].result,
560 cipher_tv->rlen) ? "fail" : "pass"); 578 template[i].rlen) ? "fail" : "pass");
579 kunmap(sg_page(&sg[0]));
561 } 580 }
581 kfree(data);
562 } 582 }
563 583
564 printk("\ntesting %s %s across pages (chunking)\n", algo, e); 584 printk("\ntesting %s %s across pages (chunking)\n", algo, e);
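
The ablkcipher path follows the same shape as the hash and AEAD paths: one private data buffer per vector, freed at the end of each iteration, and a local iv[MAX_IVLEN] copy (zero-filled when the template carries no IV) handed to the request instead of a pointer into the template, since the request API takes a writable IV. Condensed from the hunks above:

    data = kzalloc(template[i].ilen, GFP_KERNEL);
    if (!data)
            continue;
    memcpy(data, template[i].input, template[i].ilen);

    if (template[i].iv)
            memcpy(iv, template[i].iv, MAX_IVLEN);
    else
            memset(iv, 0, MAX_IVLEN);

    sg_init_one(&sg[0], data, template[i].ilen);
    ablkcipher_request_set_crypt(req, sg, sg, template[i].ilen, iv);
    /* ... encrypt or decrypt, compare against template[i].result ... */
    kfree(data);
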
@@ -566,42 +586,53 @@ static void test_cipher(char *algo, int enc,
566 586
567 j = 0; 587 j = 0;
568 for (i = 0; i < tcount; i++) { 588 for (i = 0; i < tcount; i++) {
569 memcpy(cipher_tv, &template[i], tsize); 589
570 if (cipher_tv->np) { 590 data = kzalloc(template[i].ilen, GFP_KERNEL);
591 if (!data)
592 continue;
593
594 memcpy(data, template[i].input, template[i].ilen);
595
596 if (template[i].iv)
597 memcpy(iv, template[i].iv, MAX_IVLEN);
598 else
599 memset(iv, 0, MAX_IVLEN);
600
601 if (template[i].np) {
571 j++; 602 j++;
572 printk("test %u (%d bit key):\n", 603 printk("test %u (%d bit key):\n",
573 j, cipher_tv->klen * 8); 604 j, template[i].klen * 8);
574 605
575 crypto_ablkcipher_clear_flags(tfm, ~0); 606 crypto_ablkcipher_clear_flags(tfm, ~0);
576 if (cipher_tv->wk) 607 if (template[i].wk)
577 crypto_ablkcipher_set_flags( 608 crypto_ablkcipher_set_flags(
578 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 609 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
579 key = cipher_tv->key;
580 610
581 ret = crypto_ablkcipher_setkey(tfm, key, 611 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
582 cipher_tv->klen); 612 template[i].klen);
583 if (ret) { 613 if (ret) {
584 printk("setkey() failed flags=%x\n", 614 printk("setkey() failed flags=%x\n",
585 crypto_ablkcipher_get_flags(tfm)); 615 crypto_ablkcipher_get_flags(tfm));
586 616
587 if (!cipher_tv->fail) 617 if (!template[i].fail) {
618 kfree(data);
588 goto out; 619 goto out;
620 }
589 } 621 }
590 622
591 temp = 0; 623 temp = 0;
592 sg_init_table(sg, cipher_tv->np); 624 sg_init_table(sg, template[i].np);
593 for (k = 0; k < cipher_tv->np; k++) { 625 for (k = 0; k < template[i].np; k++) {
594 memcpy(&xbuf[IDX[k]], 626 memcpy(&xbuf[IDX[k]],
595 cipher_tv->input + temp, 627 template[i].input + temp,
596 cipher_tv->tap[k]); 628 template[i].tap[k]);
597 temp += cipher_tv->tap[k]; 629 temp += template[i].tap[k];
598 sg_set_buf(&sg[k], &xbuf[IDX[k]], 630 sg_set_buf(&sg[k], &xbuf[IDX[k]],
599 cipher_tv->tap[k]); 631 template[i].tap[k]);
600 } 632 }
601 633
602 ablkcipher_request_set_crypt(req, sg, sg, 634 ablkcipher_request_set_crypt(req, sg, sg,
603 cipher_tv->ilen, 635 template[i].ilen, iv);
604 cipher_tv->iv);
605 636
606 ret = enc ? 637 ret = enc ?
607 crypto_ablkcipher_encrypt(req) : 638 crypto_ablkcipher_encrypt(req) :
@@ -625,19 +656,19 @@ static void test_cipher(char *algo, int enc,
625 } 656 }
626 657
627 temp = 0; 658 temp = 0;
628 for (k = 0; k < cipher_tv->np; k++) { 659 for (k = 0; k < template[i].np; k++) {
629 printk("page %u\n", k); 660 printk("page %u\n", k);
630 q = kmap(sg_page(&sg[k])) + sg[k].offset; 661 q = kmap(sg_page(&sg[k])) + sg[k].offset;
631 hexdump(q, cipher_tv->tap[k]); 662 hexdump(q, template[i].tap[k]);
632 printk("%s\n", 663 printk("%s\n",
633 memcmp(q, cipher_tv->result + temp, 664 memcmp(q, template[i].result + temp,
634 cipher_tv->tap[k]) ? "fail" : 665 template[i].tap[k]) ? "fail" :
635 "pass"); 666 "pass");
636 temp += cipher_tv->tap[k]; 667 temp += template[i].tap[k];
668 kunmap(sg_page(&sg[k]));
637 } 669 }
638 } 670 }
639 } 671 }
640
641out: 672out:
642 crypto_free_ablkcipher(tfm); 673 crypto_free_ablkcipher(tfm);
643 ablkcipher_request_free(req); 674 ablkcipher_request_free(req);
@@ -1052,22 +1083,10 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
1052 unsigned int i; 1083 unsigned int i;
1053 char result[COMP_BUF_SIZE]; 1084 char result[COMP_BUF_SIZE];
1054 struct crypto_comp *tfm; 1085 struct crypto_comp *tfm;
1055 struct comp_testvec *tv;
1056 unsigned int tsize; 1086 unsigned int tsize;
1057 1087
1058 printk("\ntesting %s compression\n", algo); 1088 printk("\ntesting %s compression\n", algo);
1059 1089
1060 tsize = sizeof(struct comp_testvec);
1061 tsize *= ctcount;
1062 if (tsize > TVMEMSIZE) {
1063 printk("template (%u) too big for tvmem (%u)\n", tsize,
1064 TVMEMSIZE);
1065 return;
1066 }
1067
1068 memcpy(tvmem, ctemplate, tsize);
1069 tv = (void *)tvmem;
1070
1071 tfm = crypto_alloc_comp(algo, 0, CRYPTO_ALG_ASYNC); 1090 tfm = crypto_alloc_comp(algo, 0, CRYPTO_ALG_ASYNC);
1072 if (IS_ERR(tfm)) { 1091 if (IS_ERR(tfm)) {
1073 printk("failed to load transform for %s\n", algo); 1092 printk("failed to load transform for %s\n", algo);
@@ -1080,8 +1099,8 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
1080 printk("test %u:\n", i + 1); 1099 printk("test %u:\n", i + 1);
1081 memset(result, 0, sizeof (result)); 1100 memset(result, 0, sizeof (result));
1082 1101
1083 ilen = tv[i].inlen; 1102 ilen = ctemplate[i].inlen;
1084 ret = crypto_comp_compress(tfm, tv[i].input, 1103 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1085 ilen, result, &dlen); 1104 ilen, result, &dlen);
1086 if (ret) { 1105 if (ret) {
1087 printk("fail: ret=%d\n", ret); 1106 printk("fail: ret=%d\n", ret);
@@ -1089,7 +1108,7 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
1089 } 1108 }
1090 hexdump(result, dlen); 1109 hexdump(result, dlen);
1091 printk("%s (ratio %d:%d)\n", 1110 printk("%s (ratio %d:%d)\n",
1092 memcmp(result, tv[i].output, dlen) ? "fail" : "pass", 1111 memcmp(result, ctemplate[i].output, dlen) ? "fail" : "pass",
1093 ilen, dlen); 1112 ilen, dlen);
1094 } 1113 }
1095 1114
@@ -1103,17 +1122,14 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
1103 goto out; 1122 goto out;
1104 } 1123 }
1105 1124
1106 memcpy(tvmem, dtemplate, tsize);
1107 tv = (void *)tvmem;
1108
1109 for (i = 0; i < dtcount; i++) { 1125 for (i = 0; i < dtcount; i++) {
1110 int ilen, ret, dlen = COMP_BUF_SIZE; 1126 int ilen, ret, dlen = COMP_BUF_SIZE;
1111 1127
1112 printk("test %u:\n", i + 1); 1128 printk("test %u:\n", i + 1);
1113 memset(result, 0, sizeof (result)); 1129 memset(result, 0, sizeof (result));
1114 1130
1115 ilen = tv[i].inlen; 1131 ilen = dtemplate[i].inlen;
1116 ret = crypto_comp_decompress(tfm, tv[i].input, 1132 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1117 ilen, result, &dlen); 1133 ilen, result, &dlen);
1118 if (ret) { 1134 if (ret) {
1119 printk("fail: ret=%d\n", ret); 1135 printk("fail: ret=%d\n", ret);
@@ -1121,7 +1137,7 @@ static void test_comp(char *algo, struct comp_testvec *ctemplate,
1121 } 1137 }
1122 hexdump(result, dlen); 1138 hexdump(result, dlen);
1123 printk("%s (ratio %d:%d)\n", 1139 printk("%s (ratio %d:%d)\n",
1124 memcmp(result, tv[i].output, dlen) ? "fail" : "pass", 1140 memcmp(result, dtemplate[i].output, dlen) ? "fail" : "pass",
1125 ilen, dlen); 1141 ilen, dlen);
1126 } 1142 }
1127out: 1143out: