aboutsummaryrefslogtreecommitdiffstats
path: root/crypto/api.c
diff options
context:
space:
mode:
Diffstat (limited to 'crypto/api.c')
-rw-r--r--crypto/api.c121
1 files changed, 114 insertions, 7 deletions
diff --git a/crypto/api.c b/crypto/api.c
index 0444d242e985..9975a7bd246c 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -300,8 +300,8 @@ static void crypto_exit_ops(struct crypto_tfm *tfm)
 	const struct crypto_type *type = tfm->__crt_alg->cra_type;
 
 	if (type) {
-		if (type->exit)
-			type->exit(tfm);
+		if (tfm->exit)
+			tfm->exit(tfm);
 		return;
 	}
 
@@ -379,17 +379,16 @@ struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
 	if (err)
 		goto out_free_tfm;
 
-	if (alg->cra_init && (err = alg->cra_init(tfm))) {
-		if (err == -EAGAIN)
-			crypto_shoot_alg(alg);
+	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
 		goto cra_init_failed;
-	}
 
 	goto out;
 
 cra_init_failed:
 	crypto_exit_ops(tfm);
 out_free_tfm:
+	if (err == -EAGAIN)
+		crypto_shoot_alg(alg);
 	kfree(tfm);
 out_err:
 	tfm = ERR_PTR(err);
@@ -404,6 +403,9 @@ EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
  * @type: Type of algorithm
  * @mask: Mask for type comparison
  *
+ * This function should not be used by new algorithm types.
+ * Please use crypto_alloc_tfm instead.
+ *
  * crypto_alloc_base() will first attempt to locate an already loaded
  * algorithm. If that fails and the kernel supports dynamically loadable
  * modules, it will then attempt to load a module of the same name or
@@ -450,6 +452,111 @@ err:
 	return ERR_PTR(err);
 }
 EXPORT_SYMBOL_GPL(crypto_alloc_base);
+
+struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
+				     const struct crypto_type *frontend)
+{
+	char *mem;
+	struct crypto_tfm *tfm = NULL;
+	unsigned int tfmsize;
+	unsigned int total;
+	int err = -ENOMEM;
+
+	tfmsize = frontend->tfmsize;
+	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend);
+
+	mem = kzalloc(total, GFP_KERNEL);
+	if (mem == NULL)
+		goto out_err;
+
+	tfm = (struct crypto_tfm *)(mem + tfmsize);
+	tfm->__crt_alg = alg;
+
+	err = frontend->init_tfm(tfm, frontend);
+	if (err)
+		goto out_free_tfm;
+
+	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
+		goto cra_init_failed;
+
+	goto out;
+
+cra_init_failed:
+	crypto_exit_ops(tfm);
+out_free_tfm:
+	if (err == -EAGAIN)
+		crypto_shoot_alg(alg);
+	kfree(mem);
+out_err:
+	tfm = ERR_PTR(err);
+out:
+	return tfm;
+}
+EXPORT_SYMBOL_GPL(crypto_create_tfm);
+
+/*
+ * crypto_alloc_tfm - Locate algorithm and allocate transform
+ * @alg_name: Name of algorithm
+ * @frontend: Frontend algorithm type
+ * @type: Type of algorithm
+ * @mask: Mask for type comparison
+ *
+ * crypto_alloc_tfm() will first attempt to locate an already loaded
+ * algorithm. If that fails and the kernel supports dynamically loadable
+ * modules, it will then attempt to load a module of the same name or
+ * alias. If that fails it will send a query to any loaded crypto manager
+ * to construct an algorithm on the fly. A refcount is grabbed on the
+ * algorithm which is then associated with the new transform.
+ *
+ * The returned transform is of a non-determinate type. Most people
+ * should use one of the more specific allocation functions such as
+ * crypto_alloc_blkcipher.
+ *
+ * In case of error the return value is an error pointer.
+ */
+struct crypto_tfm *crypto_alloc_tfm(const char *alg_name,
+				    const struct crypto_type *frontend,
+				    u32 type, u32 mask)
+{
+	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
+	struct crypto_tfm *tfm;
+	int err;
+
+	type &= frontend->maskclear;
+	mask &= frontend->maskclear;
+	type |= frontend->type;
+	mask |= frontend->maskset;
+
+	lookup = frontend->lookup ?: crypto_alg_mod_lookup;
+
+	for (;;) {
+		struct crypto_alg *alg;
+
+		alg = lookup(alg_name, type, mask);
+		if (IS_ERR(alg)) {
+			err = PTR_ERR(alg);
+			goto err;
+		}
+
+		tfm = crypto_create_tfm(alg, frontend);
+		if (!IS_ERR(tfm))
+			return tfm;
+
+		crypto_mod_put(alg);
+		err = PTR_ERR(tfm);
+
+err:
+		if (err != -EAGAIN)
+			break;
+		if (signal_pending(current)) {
+			err = -EINTR;
+			break;
+		}
+	}
+
+	return ERR_PTR(err);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
 
 /*
  * crypto_free_tfm - Free crypto transform
@@ -469,7 +576,7 @@ void crypto_free_tfm(struct crypto_tfm *tfm)
 	alg = tfm->__crt_alg;
 	size = sizeof(*tfm) + alg->cra_ctxsize;
 
-	if (alg->cra_exit)
+	if (!tfm->exit && alg->cra_exit)
 		alg->cra_exit(tfm);
 	crypto_exit_ops(tfm);
 	crypto_mod_put(alg);