path: root/crypto/testmgr.c
author	Gilad Ben-Yossef <gilad@benyossef.com>	2017-10-18 03:00:43 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2017-11-03 10:11:19 -0400
commit	7f39713684acb2745506be195d31f73ce410fb24 (patch)
tree	ddca78c8aa22c8c35741220a777fb2ea548bab00 /crypto/testmgr.c
parent	76c6739477fa9e16a75633d1f57c62a8a57388ad (diff)
crypto: testmgr - move to generic async completion
testmgr is starting async. crypto ops and waiting for them to complete.
Move it over to generic code doing the same.

This also provides a test of the generic crypto async. wait code.

Signed-off-by: Gilad Ben-Yossef <gilad@benyossef.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'crypto/testmgr.c')
-rw-r--r--	crypto/testmgr.c	204
1 file changed, 66 insertions(+), 138 deletions(-)
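For reference, the generic API this patch switches to lives in include/linux/crypto.h: the caller keeps a struct crypto_wait, registers crypto_req_done() as the request callback, and feeds the operation's return code through crypto_wait_req(), which returns immediately for synchronous drivers and sleeps on the wait for -EINPROGRESS/-EBUSY. Below is a minimal sketch of that pattern; the encrypt_one_block() helper and its arguments are illustrative only and not part of this patch.

/*
 * Sketch of the generic async completion pattern (kernel context assumed,
 * error handling trimmed). encrypt_one_block() is a hypothetical helper.
 */
#include <linux/crypto.h>
#include <crypto/skcipher.h>

static int encrypt_one_block(struct crypto_skcipher *tfm,
			     struct scatterlist *sg, unsigned int len, u8 *iv)
{
	/* The patch uses crypto_init_wait(&wait); the on-stack initializer
	 * macro from the same header is equivalent. */
	DECLARE_CRYPTO_WAIT(wait);
	struct skcipher_request *req;
	int ret;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	/* crypto_req_done() is the generic callback; it records the error
	 * and completes &wait when the driver finishes asynchronously. */
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, sg, sg, len, iv);

	/* crypto_wait_req() passes a synchronous result straight through and
	 * waits on &wait for -EINPROGRESS/-EBUSY, replacing the private
	 * tcrypt_result/wait_async_op boilerplate removed below. */
	ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
	return ret;
}

The diff performs exactly this substitution at each call site, deleting testmgr's private tcrypt_result, tcrypt_complete() and wait_async_op() in favour of the shared helpers.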
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
index baf96cecaf49..29d7020b8826 100644
--- a/crypto/testmgr.c
+++ b/crypto/testmgr.c
@@ -76,11 +76,6 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
 #define ENCRYPT 1
 #define DECRYPT 0
 
-struct tcrypt_result {
-	struct completion completion;
-	int err;
-};
-
 struct aead_test_suite {
 	struct {
 		const struct aead_testvec *vecs;
@@ -155,17 +150,6 @@ static void hexdump(unsigned char *buf, unsigned int len)
 			buf, len, false);
 }
 
-static void tcrypt_complete(struct crypto_async_request *req, int err)
-{
-	struct tcrypt_result *res = req->data;
-
-	if (err == -EINPROGRESS)
-		return;
-
-	res->err = err;
-	complete(&res->completion);
-}
-
 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
 {
 	int i;
@@ -193,20 +177,10 @@ static void testmgr_free_buf(char *buf[XBUFSIZE])
 		free_page((unsigned long)buf[i]);
 }
 
-static int wait_async_op(struct tcrypt_result *tr, int ret)
-{
-	if (ret == -EINPROGRESS || ret == -EBUSY) {
-		wait_for_completion(&tr->completion);
-		reinit_completion(&tr->completion);
-		ret = tr->err;
-	}
-	return ret;
-}
-
 static int ahash_partial_update(struct ahash_request **preq,
 	struct crypto_ahash *tfm, const struct hash_testvec *template,
 	void *hash_buff, int k, int temp, struct scatterlist *sg,
-	const char *algo, char *result, struct tcrypt_result *tresult)
+	const char *algo, char *result, struct crypto_wait *wait)
 {
 	char *state;
 	struct ahash_request *req;
@@ -236,7 +210,7 @@ static int ahash_partial_update(struct ahash_request **preq,
 	}
 	ahash_request_set_callback(req,
 		CRYPTO_TFM_REQ_MAY_BACKLOG,
-		tcrypt_complete, tresult);
+		crypto_req_done, wait);
 
 	memcpy(hash_buff, template->plaintext + temp,
 		template->tap[k]);
@@ -247,7 +221,7 @@ static int ahash_partial_update(struct ahash_request **preq,
 		pr_err("alg: hash: Failed to import() for %s\n", algo);
 		goto out;
 	}
-	ret = wait_async_op(tresult, crypto_ahash_update(req));
+	ret = crypto_wait_req(crypto_ahash_update(req), wait);
 	if (ret)
 		goto out;
 	*preq = req;
@@ -272,7 +246,7 @@ static int __test_hash(struct crypto_ahash *tfm,
 	char *result;
 	char *key;
 	struct ahash_request *req;
-	struct tcrypt_result tresult;
+	struct crypto_wait wait;
 	void *hash_buff;
 	char *xbuf[XBUFSIZE];
 	int ret = -ENOMEM;
@@ -286,7 +260,7 @@ static int __test_hash(struct crypto_ahash *tfm,
 	if (testmgr_alloc_buf(xbuf))
 		goto out_nobuf;
 
-	init_completion(&tresult.completion);
+	crypto_init_wait(&wait);
 
 	req = ahash_request_alloc(tfm, GFP_KERNEL);
 	if (!req) {
@@ -295,7 +269,7 @@ static int __test_hash(struct crypto_ahash *tfm,
 		goto out_noreq;
 	}
 	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
-				   tcrypt_complete, &tresult);
+				   crypto_req_done, &wait);
 
 	j = 0;
 	for (i = 0; i < tcount; i++) {
@@ -335,26 +309,26 @@ static int __test_hash(struct crypto_ahash *tfm,
 
 		ahash_request_set_crypt(req, sg, result, template[i].psize);
 		if (use_digest) {
-			ret = wait_async_op(&tresult, crypto_ahash_digest(req));
+			ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
 			if (ret) {
 				pr_err("alg: hash: digest failed on test %d "
 				       "for %s: ret=%d\n", j, algo, -ret);
 				goto out;
 			}
 		} else {
-			ret = wait_async_op(&tresult, crypto_ahash_init(req));
+			ret = crypto_wait_req(crypto_ahash_init(req), &wait);
 			if (ret) {
 				pr_err("alg: hash: init failed on test %d "
 				       "for %s: ret=%d\n", j, algo, -ret);
 				goto out;
 			}
-			ret = wait_async_op(&tresult, crypto_ahash_update(req));
+			ret = crypto_wait_req(crypto_ahash_update(req), &wait);
 			if (ret) {
 				pr_err("alg: hash: update failed on test %d "
 				       "for %s: ret=%d\n", j, algo, -ret);
 				goto out;
 			}
-			ret = wait_async_op(&tresult, crypto_ahash_final(req));
+			ret = crypto_wait_req(crypto_ahash_final(req), &wait);
 			if (ret) {
 				pr_err("alg: hash: final failed on test %d "
 				       "for %s: ret=%d\n", j, algo, -ret);
@@ -420,22 +394,10 @@ static int __test_hash(struct crypto_ahash *tfm,
 		}
 
 		ahash_request_set_crypt(req, sg, result, template[i].psize);
-		ret = crypto_ahash_digest(req);
-		switch (ret) {
-		case 0:
-			break;
-		case -EINPROGRESS:
-		case -EBUSY:
-			wait_for_completion(&tresult.completion);
-			reinit_completion(&tresult.completion);
-			ret = tresult.err;
-			if (!ret)
-				break;
-			/* fall through */
-		default:
-			printk(KERN_ERR "alg: hash: digest failed "
-			       "on chunking test %d for %s: "
-			       "ret=%d\n", j, algo, -ret);
+		ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
+		if (ret) {
+			pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n",
+			       j, algo, -ret);
 			goto out;
 		}
 
@@ -486,13 +448,13 @@ static int __test_hash(struct crypto_ahash *tfm,
 		}
 
 		ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
-		ret = wait_async_op(&tresult, crypto_ahash_init(req));
+		ret = crypto_wait_req(crypto_ahash_init(req), &wait);
 		if (ret) {
 			pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
 			       j, algo, -ret);
 			goto out;
 		}
-		ret = wait_async_op(&tresult, crypto_ahash_update(req));
+		ret = crypto_wait_req(crypto_ahash_update(req), &wait);
 		if (ret) {
 			pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
 			       j, algo, -ret);
@@ -503,7 +465,7 @@ static int __test_hash(struct crypto_ahash *tfm,
 		for (k = 1; k < template[i].np; k++) {
 			ret = ahash_partial_update(&req, tfm, &template[i],
 				hash_buff, k, temp, &sg[0], algo, result,
-				&tresult);
+				&wait);
 			if (ret) {
 				pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n",
 				       j, algo, -ret);
@@ -511,7 +473,7 @@ static int __test_hash(struct crypto_ahash *tfm,
 			}
 			temp += template[i].tap[k];
 		}
-		ret = wait_async_op(&tresult, crypto_ahash_final(req));
+		ret = crypto_wait_req(crypto_ahash_final(req), &wait);
 		if (ret) {
 			pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
 			       j, algo, -ret);
@@ -580,7 +542,7 @@ static int __test_aead(struct crypto_aead *tfm, int enc,
 	struct scatterlist *sg;
 	struct scatterlist *sgout;
 	const char *e, *d;
-	struct tcrypt_result result;
+	struct crypto_wait wait;
 	unsigned int authsize, iv_len;
 	void *input;
 	void *output;
@@ -619,7 +581,7 @@ static int __test_aead(struct crypto_aead *tfm, int enc,
 	else
 		e = "decryption";
 
-	init_completion(&result.completion);
+	crypto_init_wait(&wait);
 
 	req = aead_request_alloc(tfm, GFP_KERNEL);
 	if (!req) {
@@ -629,7 +591,7 @@ static int __test_aead(struct crypto_aead *tfm, int enc,
 	}
 
 	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
-				  tcrypt_complete, &result);
+				  crypto_req_done, &wait);
 
 	iv_len = crypto_aead_ivsize(tfm);
 
@@ -709,7 +671,8 @@ static int __test_aead(struct crypto_aead *tfm, int enc,
 
 		aead_request_set_ad(req, template[i].alen);
 
-		ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
+		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
+				      : crypto_aead_decrypt(req), &wait);
 
 		switch (ret) {
 		case 0:
@@ -722,13 +685,6 @@ static int __test_aead(struct crypto_aead *tfm, int enc,
 				goto out;
 			}
 			break;
-		case -EINPROGRESS:
-		case -EBUSY:
-			wait_for_completion(&result.completion);
-			reinit_completion(&result.completion);
-			ret = result.err;
-			if (!ret)
-				break;
 		case -EBADMSG:
 			if (template[i].novrfy)
 				/* verification failure was expected */
@@ -866,7 +822,8 @@ static int __test_aead(struct crypto_aead *tfm, int enc,
 
 		aead_request_set_ad(req, template[i].alen);
 
-		ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
+		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
+				      : crypto_aead_decrypt(req), &wait);
 
 		switch (ret) {
 		case 0:
@@ -879,13 +836,6 @@ static int __test_aead(struct crypto_aead *tfm, int enc,
 				goto out;
 			}
 			break;
-		case -EINPROGRESS:
-		case -EBUSY:
-			wait_for_completion(&result.completion);
-			reinit_completion(&result.completion);
-			ret = result.err;
-			if (!ret)
-				break;
 		case -EBADMSG:
 			if (template[i].novrfy)
 				/* verification failure was expected */
@@ -1083,7 +1033,7 @@ static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
 	struct scatterlist sg[8];
 	struct scatterlist sgout[8];
 	const char *e, *d;
-	struct tcrypt_result result;
+	struct crypto_wait wait;
 	void *data;
 	char iv[MAX_IVLEN];
 	char *xbuf[XBUFSIZE];
@@ -1107,7 +1057,7 @@ static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
 	else
 		e = "decryption";
 
-	init_completion(&result.completion);
+	crypto_init_wait(&wait);
 
 	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 	if (!req) {
@@ -1117,7 +1067,7 @@ static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
 	}
 
 	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
-				      tcrypt_complete, &result);
+				      crypto_req_done, &wait);
 
 	j = 0;
 	for (i = 0; i < tcount; i++) {
@@ -1164,21 +1114,10 @@ static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
 
 		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
 					   template[i].ilen, iv);
-		ret = enc ? crypto_skcipher_encrypt(req) :
-			    crypto_skcipher_decrypt(req);
+		ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
+				      crypto_skcipher_decrypt(req), &wait);
 
-		switch (ret) {
-		case 0:
-			break;
-		case -EINPROGRESS:
-		case -EBUSY:
-			wait_for_completion(&result.completion);
-			reinit_completion(&result.completion);
-			ret = result.err;
-			if (!ret)
-				break;
-			/* fall through */
-		default:
+		if (ret) {
 			pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
 			       d, e, j, algo, -ret);
 			goto out;
@@ -1272,21 +1211,10 @@ static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
 		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
 					   template[i].ilen, iv);
 
-		ret = enc ? crypto_skcipher_encrypt(req) :
-			    crypto_skcipher_decrypt(req);
+		ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
+				      crypto_skcipher_decrypt(req), &wait);
 
-		switch (ret) {
-		case 0:
-			break;
-		case -EINPROGRESS:
-		case -EBUSY:
-			wait_for_completion(&result.completion);
-			reinit_completion(&result.completion);
-			ret = result.err;
-			if (!ret)
-				break;
-			/* fall through */
-		default:
+		if (ret) {
 			pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
 			       d, e, j, algo, -ret);
 			goto out;
@@ -1462,7 +1390,7 @@ static int test_acomp(struct crypto_acomp *tfm,
 	int ret;
 	struct scatterlist src, dst;
 	struct acomp_req *req;
-	struct tcrypt_result result;
+	struct crypto_wait wait;
 
 	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
 	if (!output)
@@ -1486,7 +1414,7 @@ static int test_acomp(struct crypto_acomp *tfm,
 		}
 
 		memset(output, 0, dlen);
-		init_completion(&result.completion);
+		crypto_init_wait(&wait);
 		sg_init_one(&src, input_vec, ilen);
 		sg_init_one(&dst, output, dlen);
 
@@ -1501,9 +1429,9 @@ static int test_acomp(struct crypto_acomp *tfm,
 
 		acomp_request_set_params(req, &src, &dst, ilen, dlen);
 		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
-					   tcrypt_complete, &result);
+					   crypto_req_done, &wait);
 
-		ret = wait_async_op(&result, crypto_acomp_compress(req));
+		ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
 		if (ret) {
 			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
 			       i + 1, algo, -ret);
@@ -1516,10 +1444,10 @@ static int test_acomp(struct crypto_acomp *tfm,
 		dlen = COMP_BUF_SIZE;
 		sg_init_one(&src, output, ilen);
 		sg_init_one(&dst, decomp_out, dlen);
-		init_completion(&result.completion);
+		crypto_init_wait(&wait);
 		acomp_request_set_params(req, &src, &dst, ilen, dlen);
 
-		ret = wait_async_op(&result, crypto_acomp_decompress(req));
+		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
 		if (ret) {
 			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
 			       i + 1, algo, -ret);
@@ -1563,7 +1491,7 @@ static int test_acomp(struct crypto_acomp *tfm,
 		}
 
 		memset(output, 0, dlen);
-		init_completion(&result.completion);
+		crypto_init_wait(&wait);
 		sg_init_one(&src, input_vec, ilen);
 		sg_init_one(&dst, output, dlen);
 
@@ -1578,9 +1506,9 @@ static int test_acomp(struct crypto_acomp *tfm,
 
 		acomp_request_set_params(req, &src, &dst, ilen, dlen);
 		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
-					   tcrypt_complete, &result);
+					   crypto_req_done, &wait);
 
-		ret = wait_async_op(&result, crypto_acomp_decompress(req));
+		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
 		if (ret) {
 			pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
 			       i + 1, algo, -ret);
@@ -2000,7 +1928,7 @@ static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
 	void *a_public = NULL;
 	void *a_ss = NULL;
 	void *shared_secret = NULL;
-	struct tcrypt_result result;
+	struct crypto_wait wait;
 	unsigned int out_len_max;
 	int err = -ENOMEM;
 	struct scatterlist src, dst;
@@ -2009,7 +1937,7 @@ static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
 	if (!req)
 		return err;
 
-	init_completion(&result.completion);
+	crypto_init_wait(&wait);
 
 	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
 	if (err < 0)
@@ -2027,10 +1955,10 @@ static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
 	sg_init_one(&dst, output_buf, out_len_max);
 	kpp_request_set_output(req, &dst, out_len_max);
 	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
-				 tcrypt_complete, &result);
+				 crypto_req_done, &wait);
 
 	/* Compute party A's public key */
-	err = wait_async_op(&result, crypto_kpp_generate_public_key(req));
+	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
 	if (err) {
 		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
 			alg, err);
@@ -2069,8 +1997,8 @@ static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
 	kpp_request_set_input(req, &src, vec->b_public_size);
 	kpp_request_set_output(req, &dst, out_len_max);
 	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
-				 tcrypt_complete, &result);
-	err = wait_async_op(&result, crypto_kpp_compute_shared_secret(req));
+				 crypto_req_done, &wait);
+	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
 	if (err) {
 		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
 			alg, err);
@@ -2100,9 +2028,9 @@ static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
 	kpp_request_set_input(req, &src, vec->expected_a_public_size);
 	kpp_request_set_output(req, &dst, out_len_max);
 	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
-				 tcrypt_complete, &result);
-	err = wait_async_op(&result,
-			    crypto_kpp_compute_shared_secret(req));
+				 crypto_req_done, &wait);
+	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
+			      &wait);
 	if (err) {
 		pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
 			alg, err);
@@ -2179,7 +2107,7 @@ static int test_akcipher_one(struct crypto_akcipher *tfm,
 	struct akcipher_request *req;
 	void *outbuf_enc = NULL;
 	void *outbuf_dec = NULL;
-	struct tcrypt_result result;
+	struct crypto_wait wait;
 	unsigned int out_len_max, out_len = 0;
 	int err = -ENOMEM;
 	struct scatterlist src, dst, src_tab[2];
@@ -2191,7 +2119,7 @@ static int test_akcipher_one(struct crypto_akcipher *tfm,
 	if (!req)
 		goto free_xbuf;
 
-	init_completion(&result.completion);
+	crypto_init_wait(&wait);
 
 	if (vecs->public_key_vec)
 		err = crypto_akcipher_set_pub_key(tfm, vecs->key,
@@ -2220,13 +2148,13 @@ static int test_akcipher_one(struct crypto_akcipher *tfm,
 	akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
 				   out_len_max);
 	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
-				      tcrypt_complete, &result);
+				      crypto_req_done, &wait);
 
-	err = wait_async_op(&result, vecs->siggen_sigver_test ?
+	err = crypto_wait_req(vecs->siggen_sigver_test ?
 			      /* Run asymmetric signature generation */
 			      crypto_akcipher_sign(req) :
 			      /* Run asymmetric encrypt */
-			      crypto_akcipher_encrypt(req));
+			      crypto_akcipher_encrypt(req), &wait);
 	if (err) {
 		pr_err("alg: akcipher: encrypt test failed. err %d\n", err);
 		goto free_all;
@@ -2261,14 +2189,14 @@ static int test_akcipher_one(struct crypto_akcipher *tfm,
 
 	sg_init_one(&src, xbuf[0], vecs->c_size);
 	sg_init_one(&dst, outbuf_dec, out_len_max);
-	init_completion(&result.completion);
+	crypto_init_wait(&wait);
 	akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
 
-	err = wait_async_op(&result, vecs->siggen_sigver_test ?
+	err = crypto_wait_req(vecs->siggen_sigver_test ?
 			      /* Run asymmetric signature verification */
 			      crypto_akcipher_verify(req) :
 			      /* Run asymmetric decrypt */
-			      crypto_akcipher_decrypt(req));
+			      crypto_akcipher_decrypt(req), &wait);
 	if (err) {
 		pr_err("alg: akcipher: decrypt test failed. err %d\n", err);
 		goto free_all;