summary | refs | log | tree | commit | diff | stats
path: root/crypto/testmgr.c
diff options
context:
space:
mode:
author: Eric Biggers <ebiggers@google.com> 2019-02-01 02:51:47 -0500
committer: Herbert Xu <herbert@gondor.apana.org.au> 2019-02-08 02:30:09 -0500
commit: ed96804ff1a5f94bdf4cda73ee81ba4545a076e5 (patch)
tree: b4833b14763c9899f9b75ce06c1f2d5f17ecf6f4 /crypto/testmgr.c
parent: 4e7babba30d820c4195b1d58cf51dce3c22ecf2b (diff)
crypto: testmgr - convert aead testing to use testvec_configs
Convert alg_test_aead() to use the new test framework, using the same list of testvec_configs that skcipher testing uses. This significantly improves AEAD test coverage mainly because previously there was only very limited test coverage of the possible data layouts. Now the data layouts to test are listed in one place for all algorithms and optionally are also randomly generated. In fact, only one AEAD algorithm (AES-GCM) even had a chunked test case before. This already found bugs in all the AEGIS and MORUS implementations, the x86 AES-GCM implementation, and the arm64 AES-CCM implementation. I removed the AEAD chunked test vectors that were the same as non-chunked ones, but left the ones that were unique. Note: the rewritten test code allocates an aead_request just once per algorithm rather than once per encryption/decryption, but some AEAD algorithms incorrectly change the tfm pointer in the request. It's nontrivial to fix these, so to move forward I'm temporarily working around it by resetting the tfm pointer. But they'll need to be fixed. Signed-off-by: Eric Biggers <ebiggers@google.com> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'crypto/testmgr.c')
-rw-r--r-- crypto/testmgr.c | 613
1 files changed, 185 insertions, 428 deletions
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
index a275c7c2c371..6a870e21b0cf 100644
--- a/crypto/testmgr.c
+++ b/crypto/testmgr.c
@@ -1208,443 +1208,222 @@ static int test_hash(struct crypto_ahash *tfm,
1208 return 0; 1208 return 0;
1209} 1209}
1210 1210
1211static int __test_aead(struct crypto_aead *tfm, int enc, 1211static int test_aead_vec_cfg(const char *driver, int enc,
1212 const struct aead_testvec *template, unsigned int tcount, 1212 const struct aead_testvec *vec,
1213 const bool diff_dst, const int align_offset) 1213 unsigned int vec_num,
1214 const struct testvec_config *cfg,
1215 struct aead_request *req,
1216 struct cipher_test_sglists *tsgls)
1214{ 1217{
1215 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); 1218 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1216 unsigned int i, j, k, n, temp; 1219 const unsigned int alignmask = crypto_aead_alignmask(tfm);
1217 int ret = -ENOMEM; 1220 const unsigned int ivsize = crypto_aead_ivsize(tfm);
1218 char *q; 1221 const unsigned int authsize = vec->clen - vec->plen;
1219 char *key; 1222 const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
1220 struct aead_request *req; 1223 const char *op = enc ? "encryption" : "decryption";
1221 struct scatterlist *sg; 1224 DECLARE_CRYPTO_WAIT(wait);
1222 struct scatterlist *sgout; 1225 u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
1223 const char *e, *d; 1226 u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
1224 struct crypto_wait wait; 1227 cfg->iv_offset +
1225 unsigned int authsize, iv_len; 1228 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
1226 char *iv; 1229 struct kvec input[2];
1227 char *xbuf[XBUFSIZE]; 1230 int err;
1228 char *xoutbuf[XBUFSIZE];
1229 char *axbuf[XBUFSIZE];
1230
1231 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
1232 if (!iv)
1233 return ret;
1234 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
1235 if (!key)
1236 goto out_noxbuf;
1237 if (testmgr_alloc_buf(xbuf))
1238 goto out_noxbuf;
1239 if (testmgr_alloc_buf(axbuf))
1240 goto out_noaxbuf;
1241 if (diff_dst && testmgr_alloc_buf(xoutbuf))
1242 goto out_nooutbuf;
1243
1244 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
1245 sg = kmalloc(array3_size(sizeof(*sg), 8, (diff_dst ? 4 : 2)),
1246 GFP_KERNEL);
1247 if (!sg)
1248 goto out_nosg;
1249 sgout = &sg[16];
1250
1251 if (diff_dst)
1252 d = "-ddst";
1253 else
1254 d = "";
1255 1231
1256 if (enc == ENCRYPT) 1232 /* Set the key */
1257 e = "encryption"; 1233 if (vec->wk)
1234 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
1258 else 1235 else
1259 e = "decryption"; 1236 crypto_aead_clear_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
1260 1237 err = crypto_aead_setkey(tfm, vec->key, vec->klen);
1261 crypto_init_wait(&wait); 1238 if (err) {
1262 1239 if (vec->fail) /* expectedly failed to set key? */
1263 req = aead_request_alloc(tfm, GFP_KERNEL); 1240 return 0;
1264 if (!req) { 1241 pr_err("alg: aead: %s setkey failed with err %d on test vector %u; flags=%#x\n",
1265 pr_err("alg: aead%s: Failed to allocate request for %s\n", 1242 driver, err, vec_num, crypto_aead_get_flags(tfm));
1266 d, algo); 1243 return err;
1267 goto out;
1268 } 1244 }
1269 1245 if (vec->fail) {
1270 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1246 pr_err("alg: aead: %s setkey unexpectedly succeeded on test vector %u\n",
1271 crypto_req_done, &wait); 1247 driver, vec_num);
1272 1248 return -EINVAL;
1273 iv_len = crypto_aead_ivsize(tfm);
1274
1275 for (i = 0, j = 0; i < tcount; i++) {
1276 const char *input, *expected_output;
1277 unsigned int inlen, outlen;
1278 char *inbuf, *outbuf, *assocbuf;
1279
1280 if (template[i].np)
1281 continue;
1282 if (enc) {
1283 if (template[i].novrfy)
1284 continue;
1285 input = template[i].ptext;
1286 inlen = template[i].plen;
1287 expected_output = template[i].ctext;
1288 outlen = template[i].clen;
1289 } else {
1290 input = template[i].ctext;
1291 inlen = template[i].clen;
1292 expected_output = template[i].ptext;
1293 outlen = template[i].plen;
1294 }
1295
1296 j++;
1297
1298 /* some templates have no input data but they will
1299 * touch input
1300 */
1301 inbuf = xbuf[0] + align_offset;
1302 assocbuf = axbuf[0];
1303
1304 ret = -EINVAL;
1305 if (WARN_ON(align_offset + template[i].clen > PAGE_SIZE ||
1306 template[i].alen > PAGE_SIZE))
1307 goto out;
1308
1309 memcpy(inbuf, input, inlen);
1310 memcpy(assocbuf, template[i].assoc, template[i].alen);
1311 if (template[i].iv)
1312 memcpy(iv, template[i].iv, iv_len);
1313 else
1314 memset(iv, 0, iv_len);
1315
1316 crypto_aead_clear_flags(tfm, ~0);
1317 if (template[i].wk)
1318 crypto_aead_set_flags(tfm,
1319 CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
1320
1321 if (template[i].klen > MAX_KEYLEN) {
1322 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
1323 d, j, algo, template[i].klen,
1324 MAX_KEYLEN);
1325 ret = -EINVAL;
1326 goto out;
1327 }
1328 memcpy(key, template[i].key, template[i].klen);
1329
1330 ret = crypto_aead_setkey(tfm, key, template[i].klen);
1331 if (template[i].fail == !ret) {
1332 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
1333 d, j, algo, crypto_aead_get_flags(tfm));
1334 goto out;
1335 } else if (ret)
1336 continue;
1337
1338 authsize = template[i].clen - template[i].plen;
1339 ret = crypto_aead_setauthsize(tfm, authsize);
1340 if (ret) {
1341 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
1342 d, authsize, j, algo);
1343 goto out;
1344 }
1345
1346 k = !!template[i].alen;
1347 sg_init_table(sg, k + 1);
1348 sg_set_buf(&sg[0], assocbuf, template[i].alen);
1349 sg_set_buf(&sg[k], inbuf, template[i].clen);
1350 outbuf = inbuf;
1351
1352 if (diff_dst) {
1353 sg_init_table(sgout, k + 1);
1354 sg_set_buf(&sgout[0], assocbuf, template[i].alen);
1355
1356 outbuf = xoutbuf[0] + align_offset;
1357 sg_set_buf(&sgout[k], outbuf, template[i].clen);
1358 }
1359
1360 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, inlen,
1361 iv);
1362
1363 aead_request_set_ad(req, template[i].alen);
1364
1365 ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
1366 : crypto_aead_decrypt(req), &wait);
1367
1368 switch (ret) {
1369 case 0:
1370 if (template[i].novrfy) {
1371 /* verification was supposed to fail */
1372 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
1373 d, e, j, algo);
1374 /* so really, we got a bad message */
1375 ret = -EBADMSG;
1376 goto out;
1377 }
1378 break;
1379 case -EBADMSG:
1380 if (template[i].novrfy)
1381 /* verification failure was expected */
1382 continue;
1383 /* fall through */
1384 default:
1385 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
1386 d, e, j, algo, -ret);
1387 goto out;
1388 }
1389
1390 if (memcmp(outbuf, expected_output, outlen)) {
1391 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
1392 d, j, e, algo);
1393 hexdump(outbuf, outlen);
1394 ret = -EINVAL;
1395 goto out;
1396 }
1397 } 1249 }
1398 1250
1399 for (i = 0, j = 0; i < tcount; i++) { 1251 /* Set the authentication tag size */
1400 const char *input, *expected_output; 1252 err = crypto_aead_setauthsize(tfm, authsize);
1401 unsigned int inlen, outlen; 1253 if (err) {
1402 1254 pr_err("alg: aead: %s setauthsize failed with err %d on test vector %u\n",
1403 /* alignment tests are only done with continuous buffers */ 1255 driver, err, vec_num);
1404 if (align_offset != 0) 1256 return err;
1405 break; 1257 }
1406
1407 if (!template[i].np)
1408 continue;
1409
1410 if (enc) {
1411 if (template[i].novrfy)
1412 continue;
1413 input = template[i].ptext;
1414 inlen = template[i].plen;
1415 expected_output = template[i].ctext;
1416 outlen = template[i].clen;
1417 } else {
1418 input = template[i].ctext;
1419 inlen = template[i].clen;
1420 expected_output = template[i].ptext;
1421 outlen = template[i].plen;
1422 }
1423
1424 j++;
1425
1426 if (template[i].iv)
1427 memcpy(iv, template[i].iv, iv_len);
1428 else
1429 memset(iv, 0, MAX_IVLEN);
1430
1431 crypto_aead_clear_flags(tfm, ~0);
1432 if (template[i].wk)
1433 crypto_aead_set_flags(tfm,
1434 CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
1435 if (template[i].klen > MAX_KEYLEN) {
1436 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
1437 d, j, algo, template[i].klen, MAX_KEYLEN);
1438 ret = -EINVAL;
1439 goto out;
1440 }
1441 memcpy(key, template[i].key, template[i].klen);
1442
1443 ret = crypto_aead_setkey(tfm, key, template[i].klen);
1444 if (template[i].fail == !ret) {
1445 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
1446 d, j, algo, crypto_aead_get_flags(tfm));
1447 goto out;
1448 } else if (ret)
1449 continue;
1450
1451 authsize = template[i].clen - template[i].plen;
1452
1453 ret = -EINVAL;
1454 sg_init_table(sg, template[i].anp + template[i].np);
1455 if (diff_dst)
1456 sg_init_table(sgout, template[i].anp + template[i].np);
1457
1458 ret = -EINVAL;
1459 for (k = 0, temp = 0; k < template[i].anp; k++) {
1460 if (WARN_ON(offset_in_page(IDX[k]) +
1461 template[i].atap[k] > PAGE_SIZE))
1462 goto out;
1463 sg_set_buf(&sg[k],
1464 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
1465 offset_in_page(IDX[k]),
1466 template[i].assoc + temp,
1467 template[i].atap[k]),
1468 template[i].atap[k]);
1469 if (diff_dst)
1470 sg_set_buf(&sgout[k],
1471 axbuf[IDX[k] >> PAGE_SHIFT] +
1472 offset_in_page(IDX[k]),
1473 template[i].atap[k]);
1474 temp += template[i].atap[k];
1475 }
1476
1477 for (k = 0, temp = 0; k < template[i].np; k++) {
1478 n = template[i].tap[k];
1479 if (k == template[i].np - 1 && !enc)
1480 n += authsize;
1481
1482 if (WARN_ON(offset_in_page(IDX[k]) + n > PAGE_SIZE))
1483 goto out;
1484
1485 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1486 memcpy(q, input + temp, n);
1487 sg_set_buf(&sg[template[i].anp + k], q, n);
1488
1489 if (diff_dst) {
1490 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1491 offset_in_page(IDX[k]);
1492
1493 memset(q, 0, n);
1494
1495 sg_set_buf(&sgout[template[i].anp + k], q, n);
1496 }
1497
1498 if (k == template[i].np - 1 && enc)
1499 n += authsize;
1500 if (offset_in_page(q) + n < PAGE_SIZE)
1501 q[n] = 0;
1502
1503 temp += n;
1504 }
1505 1258
1506 ret = crypto_aead_setauthsize(tfm, authsize); 1259 /* The IV must be copied to a buffer, as the algorithm may modify it */
1507 if (ret) { 1260 if (WARN_ON(ivsize > MAX_IVLEN))
1508 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n", 1261 return -EINVAL;
1509 d, authsize, j, algo); 1262 if (vec->iv)
1510 goto out; 1263 memcpy(iv, vec->iv, ivsize);
1511 } 1264 else
1265 memset(iv, 0, ivsize);
1512 1266
1513 if (enc) { 1267 /* Build the src/dst scatterlists */
1514 if (WARN_ON(sg[template[i].anp + k - 1].offset + 1268 input[0].iov_base = (void *)vec->assoc;
1515 sg[template[i].anp + k - 1].length + 1269 input[0].iov_len = vec->alen;
1516 authsize > PAGE_SIZE)) { 1270 input[1].iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
1517 ret = -EINVAL; 1271 input[1].iov_len = enc ? vec->plen : vec->clen;
1518 goto out; 1272 err = build_cipher_test_sglists(tsgls, cfg, alignmask,
1519 } 1273 vec->alen + (enc ? vec->plen :
1274 vec->clen),
1275 vec->alen + (enc ? vec->clen :
1276 vec->plen),
1277 input, 2);
1278 if (err) {
1279 pr_err("alg: aead: %s %s: error preparing scatterlists for test vector %u, cfg=\"%s\"\n",
1280 driver, op, vec_num, cfg->name);
1281 return err;
1282 }
1520 1283
1521 if (diff_dst) 1284 /* Do the actual encryption or decryption */
1522 sgout[template[i].anp + k - 1].length += 1285 testmgr_poison(req->__ctx, crypto_aead_reqsize(tfm));
1523 authsize; 1286 aead_request_set_callback(req, req_flags, crypto_req_done, &wait);
1524 sg[template[i].anp + k - 1].length += authsize; 1287 aead_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
1525 } 1288 enc ? vec->plen : vec->clen, iv);
1289 aead_request_set_ad(req, vec->alen);
1290 err = crypto_wait_req(enc ? crypto_aead_encrypt(req) :
1291 crypto_aead_decrypt(req), &wait);
1526 1292
1527 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1293 aead_request_set_tfm(req, tfm); /* TODO: get rid of this */
1528 inlen, iv);
1529 1294
1530 aead_request_set_ad(req, template[i].alen); 1295 if (err) {
1296 if (err == -EBADMSG && vec->novrfy)
1297 return 0;
1298 pr_err("alg: aead: %s %s failed with err %d on test vector %u, cfg=\"%s\"\n",
1299 driver, op, err, vec_num, cfg->name);
1300 return err;
1301 }
1302 if (vec->novrfy) {
1303 pr_err("alg: aead: %s %s unexpectedly succeeded on test vector %u, cfg=\"%s\"\n",
1304 driver, op, vec_num, cfg->name);
1305 return -EINVAL;
1306 }
1531 1307
1532 ret = crypto_wait_req(enc ? crypto_aead_encrypt(req) 1308 /* Check for the correct output (ciphertext or plaintext) */
1533 : crypto_aead_decrypt(req), &wait); 1309 err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
1310 enc ? vec->clen : vec->plen,
1311 vec->alen, enc || !cfg->inplace);
1312 if (err == -EOVERFLOW) {
1313 pr_err("alg: aead: %s %s overran dst buffer on test vector %u, cfg=\"%s\"\n",
1314 driver, op, vec_num, cfg->name);
1315 return err;
1316 }
1317 if (err) {
1318 pr_err("alg: aead: %s %s test failed (wrong result) on test vector %u, cfg=\"%s\"\n",
1319 driver, op, vec_num, cfg->name);
1320 return err;
1321 }
1534 1322
1535 switch (ret) { 1323 return 0;
1536 case 0: 1324}
1537 if (template[i].novrfy) {
1538 /* verification was supposed to fail */
1539 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
1540 d, e, j, algo);
1541 /* so really, we got a bad message */
1542 ret = -EBADMSG;
1543 goto out;
1544 }
1545 break;
1546 case -EBADMSG:
1547 if (template[i].novrfy)
1548 /* verification failure was expected */
1549 continue;
1550 /* fall through */
1551 default:
1552 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
1553 d, e, j, algo, -ret);
1554 goto out;
1555 }
1556 1325
1557 ret = -EINVAL; 1326static int test_aead_vec(const char *driver, int enc,
1558 for (k = 0, temp = 0; k < template[i].np; k++) { 1327 const struct aead_testvec *vec, unsigned int vec_num,
1559 if (diff_dst) 1328 struct aead_request *req,
1560 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1329 struct cipher_test_sglists *tsgls)
1561 offset_in_page(IDX[k]); 1330{
1562 else 1331 unsigned int i;
1563 q = xbuf[IDX[k] >> PAGE_SHIFT] + 1332 int err;
1564 offset_in_page(IDX[k]);
1565 1333
1566 n = template[i].tap[k]; 1334 if (enc && vec->novrfy)
1567 if (k == template[i].np - 1 && enc) 1335 return 0;
1568 n += authsize;
1569 1336
1570 if (memcmp(q, expected_output + temp, n)) { 1337 for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
1571 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n", 1338 err = test_aead_vec_cfg(driver, enc, vec, vec_num,
1572 d, j, e, k, algo); 1339 &default_cipher_testvec_configs[i],
1573 hexdump(q, n); 1340 req, tsgls);
1574 goto out; 1341 if (err)
1575 } 1342 return err;
1343 }
1576 1344
1577 q += n; 1345#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
1578 if (k == template[i].np - 1 && !enc) { 1346 if (!noextratests) {
1579 if (!diff_dst && memcmp(q, input + temp + n, 1347 struct testvec_config cfg;
1580 authsize)) 1348 char cfgname[TESTVEC_CONFIG_NAMELEN];
1581 n = authsize;
1582 else
1583 n = 0;
1584 } else {
1585 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1586 ;
1587 }
1588 if (n) {
1589 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1590 d, j, e, k, algo, n);
1591 hexdump(q, n);
1592 goto out;
1593 }
1594 1349
1595 temp += template[i].tap[k]; 1350 for (i = 0; i < fuzz_iterations; i++) {
1351 generate_random_testvec_config(&cfg, cfgname,
1352 sizeof(cfgname));
1353 err = test_aead_vec_cfg(driver, enc, vec, vec_num,
1354 &cfg, req, tsgls);
1355 if (err)
1356 return err;
1596 } 1357 }
1597 } 1358 }
1359#endif
1360 return 0;
1361}
1598 1362
1599 ret = 0; 1363static int test_aead(const char *driver, int enc,
1364 const struct aead_test_suite *suite,
1365 struct aead_request *req,
1366 struct cipher_test_sglists *tsgls)
1367{
1368 unsigned int i;
1369 int err;
1600 1370
1601out: 1371 for (i = 0; i < suite->count; i++) {
1602 aead_request_free(req); 1372 err = test_aead_vec(driver, enc, &suite->vecs[i], i, req,
1603 kfree(sg); 1373 tsgls);
1604out_nosg: 1374 if (err)
1605 if (diff_dst) 1375 return err;
1606 testmgr_free_buf(xoutbuf); 1376 }
1607out_nooutbuf: 1377 return 0;
1608 testmgr_free_buf(axbuf);
1609out_noaxbuf:
1610 testmgr_free_buf(xbuf);
1611out_noxbuf:
1612 kfree(key);
1613 kfree(iv);
1614 return ret;
1615} 1378}
1616 1379
1617static int test_aead(struct crypto_aead *tfm, int enc, 1380static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1618 const struct aead_testvec *template, unsigned int tcount) 1381 u32 type, u32 mask)
1619{ 1382{
1620 unsigned int alignmask; 1383 const struct aead_test_suite *suite = &desc->suite.aead;
1621 int ret; 1384 struct crypto_aead *tfm;
1385 struct aead_request *req = NULL;
1386 struct cipher_test_sglists *tsgls = NULL;
1387 int err;
1622 1388
1623 /* test 'dst == src' case */ 1389 if (suite->count <= 0) {
1624 ret = __test_aead(tfm, enc, template, tcount, false, 0); 1390 pr_err("alg: aead: empty test suite for %s\n", driver);
1625 if (ret) 1391 return -EINVAL;
1626 return ret; 1392 }
1627 1393
1628 /* test 'dst != src' case */ 1394 tfm = crypto_alloc_aead(driver, type, mask);
1629 ret = __test_aead(tfm, enc, template, tcount, true, 0); 1395 if (IS_ERR(tfm)) {
1630 if (ret) 1396 pr_err("alg: aead: failed to allocate transform for %s: %ld\n",
1631 return ret; 1397 driver, PTR_ERR(tfm));
1398 return PTR_ERR(tfm);
1399 }
1632 1400
1633 /* test unaligned buffers, check with one byte offset */ 1401 req = aead_request_alloc(tfm, GFP_KERNEL);
1634 ret = __test_aead(tfm, enc, template, tcount, true, 1); 1402 if (!req) {
1635 if (ret) 1403 pr_err("alg: aead: failed to allocate request for %s\n",
1636 return ret; 1404 driver);
1405 err = -ENOMEM;
1406 goto out;
1407 }
1637 1408
1638 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 1409 tsgls = alloc_cipher_test_sglists();
1639 if (alignmask) { 1410 if (!tsgls) {
1640 /* Check if alignment mask for tfm is correctly set. */ 1411 pr_err("alg: aead: failed to allocate test buffers for %s\n",
1641 ret = __test_aead(tfm, enc, template, tcount, true, 1412 driver);
1642 alignmask + 1); 1413 err = -ENOMEM;
1643 if (ret) 1414 goto out;
1644 return ret;
1645 } 1415 }
1646 1416
1647 return 0; 1417 err = test_aead(driver, ENCRYPT, suite, req, tsgls);
1418 if (err)
1419 goto out;
1420
1421 err = test_aead(driver, DECRYPT, suite, req, tsgls);
1422out:
1423 free_cipher_test_sglists(tsgls);
1424 aead_request_free(req);
1425 crypto_free_aead(tfm);
1426 return err;
1648} 1427}
1649 1428
1650static int test_cipher(struct crypto_cipher *tfm, int enc, 1429static int test_cipher(struct crypto_cipher *tfm, int enc,
@@ -2274,28 +2053,6 @@ out:
2274 return err; 2053 return err;
2275} 2054}
2276 2055
2277static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
2278 u32 type, u32 mask)
2279{
2280 const struct aead_test_suite *suite = &desc->suite.aead;
2281 struct crypto_aead *tfm;
2282 int err;
2283
2284 tfm = crypto_alloc_aead(driver, type, mask);
2285 if (IS_ERR(tfm)) {
2286 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
2287 "%ld\n", driver, PTR_ERR(tfm));
2288 return PTR_ERR(tfm);
2289 }
2290
2291 err = test_aead(tfm, ENCRYPT, suite->vecs, suite->count);
2292 if (!err)
2293 err = test_aead(tfm, DECRYPT, suite->vecs, suite->count);
2294
2295 crypto_free_aead(tfm);
2296 return err;
2297}
2298
2299static int alg_test_cipher(const struct alg_test_desc *desc, 2056static int alg_test_cipher(const struct alg_test_desc *desc,
2300 const char *driver, u32 type, u32 mask) 2057 const char *driver, u32 type, u32 mask)
2301{ 2058{