author    Herbert Xu <herbert@gondor.apana.org.au>    2008-09-20 17:52:53 -0400
committer Herbert Xu <herbert@gondor.apana.org.au>    2008-12-24 19:01:24 -0500
commit    7b0bac64cd5b74d6f1147524c26216de13a501fd (patch)
tree      e9163f47d583f88d35fb8e5c9ca86ed2581c6efd /crypto
parent    4a7794860ba2b56693b1d89fd485fd08cdc763e3 (diff)
crypto: api - Rebirth of crypto_alloc_tfm
This patch reintroduces a completely revamped crypto_alloc_tfm. The biggest change is that we now take two crypto_type objects when allocating a tfm, a frontend and a backend. In fact this simply formalises what we've been doing behind the API's back. For example, as it stands crypto_alloc_ahash may use an actual ahash algorithm or a crypto_hash algorithm. Putting this in the API allows us to do this much more cleanly.

The existing types will be converted across gradually.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
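To illustrate the frontend/backend split the message describes, here is a minimal, hypothetical sketch of a new transform type layered on the reborn crypto_alloc_tfm(). It assumes only the crypto_type hooks this patch dereferences (tfmsize, extsize, init_tfm, type, maskclear, maskset); the crypto_foo names and the CRYPTO_ALG_TYPE_FOO bit are invented for illustration and do not exist in the tree.

#include <crypto/algapi.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/kernel.h>

/* Placeholder type bit for this sketch; not a real CRYPTO_ALG_TYPE_* value. */
#define CRYPTO_ALG_TYPE_FOO	0x0000000e

/* Hypothetical frontend handle; crypto_create_tfm() puts the tfm at its tail. */
struct crypto_foo {
	struct crypto_tfm base;
};

/* How many backend context bytes to append after the crypto_tfm. */
static unsigned int crypto_foo_extsize(struct crypto_alg *alg,
				       const struct crypto_type *frontend)
{
	return alg->cra_ctxsize;
}

/* Frontend-specific per-tfm setup; nothing to do in this sketch. */
static int crypto_foo_init_tfm(struct crypto_tfm *tfm,
			       const struct crypto_type *frontend)
{
	return 0;
}

static const struct crypto_type crypto_foo_type = {
	.extsize	= crypto_foo_extsize,
	.init_tfm	= crypto_foo_init_tfm,
	.maskclear	= ~CRYPTO_ALG_TYPE_MASK,
	.maskset	= CRYPTO_ALG_TYPE_MASK,
	.type		= CRYPTO_ALG_TYPE_FOO,
	.tfmsize	= offsetof(struct crypto_foo, base),
};

/*
 * Type-specific allocator layered on crypto_alloc_tfm(); its declaration
 * is assumed to come from the companion header changes in this series.
 */
static struct crypto_foo *crypto_alloc_foo(const char *alg_name,
					   u32 type, u32 mask)
{
	struct crypto_tfm *tfm;

	tfm = crypto_alloc_tfm(alg_name, &crypto_foo_type, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	return container_of(tfm, struct crypto_foo, base);
}

Converting crypto_alloc_ahash and friends over to this pattern is the gradual migration the message refers to.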
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/api.c       108
-rw-r--r--  crypto/internal.h    2
2 files changed, 110 insertions, 0 deletions
diff --git a/crypto/api.c b/crypto/api.c
index cbaaf346ad13..9975a7bd246c 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -403,6 +403,9 @@ EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
  * @type: Type of algorithm
  * @mask: Mask for type comparison
  *
+ * This function should not be used by new algorithm types.
+ * Please use crypto_alloc_tfm instead.
+ *
  * crypto_alloc_base() will first attempt to locate an already loaded
  * algorithm. If that fails and the kernel supports dynamically loadable
  * modules, it will then attempt to load a module of the same name or
@@ -449,6 +452,111 @@ err:
 	return ERR_PTR(err);
 }
 EXPORT_SYMBOL_GPL(crypto_alloc_base);
+
+struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
+				     const struct crypto_type *frontend)
+{
+	char *mem;
+	struct crypto_tfm *tfm = NULL;
+	unsigned int tfmsize;
+	unsigned int total;
+	int err = -ENOMEM;
+
+	tfmsize = frontend->tfmsize;
+	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend);
+
+	mem = kzalloc(total, GFP_KERNEL);
+	if (mem == NULL)
+		goto out_err;
+
+	tfm = (struct crypto_tfm *)(mem + tfmsize);
+	tfm->__crt_alg = alg;
+
+	err = frontend->init_tfm(tfm, frontend);
+	if (err)
+		goto out_free_tfm;
+
+	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
+		goto cra_init_failed;
+
+	goto out;
+
+cra_init_failed:
+	crypto_exit_ops(tfm);
+out_free_tfm:
+	if (err == -EAGAIN)
+		crypto_shoot_alg(alg);
+	kfree(mem);
+out_err:
+	tfm = ERR_PTR(err);
+out:
+	return tfm;
+}
+EXPORT_SYMBOL_GPL(crypto_create_tfm);
+
+/*
+ * crypto_alloc_tfm - Locate algorithm and allocate transform
+ * @alg_name: Name of algorithm
+ * @frontend: Frontend algorithm type
+ * @type: Type of algorithm
+ * @mask: Mask for type comparison
+ *
+ * crypto_alloc_tfm() will first attempt to locate an already loaded
+ * algorithm. If that fails and the kernel supports dynamically loadable
+ * modules, it will then attempt to load a module of the same name or
+ * alias. If that fails it will send a query to any loaded crypto manager
+ * to construct an algorithm on the fly. A refcount is grabbed on the
+ * algorithm which is then associated with the new transform.
+ *
+ * The returned transform is of a non-determinate type. Most people
+ * should use one of the more specific allocation functions such as
+ * crypto_alloc_blkcipher.
+ *
+ * In case of error the return value is an error pointer.
+ */
+struct crypto_tfm *crypto_alloc_tfm(const char *alg_name,
+				    const struct crypto_type *frontend,
+				    u32 type, u32 mask)
+{
+	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
+	struct crypto_tfm *tfm;
+	int err;
+
+	type &= frontend->maskclear;
+	mask &= frontend->maskclear;
+	type |= frontend->type;
+	mask |= frontend->maskset;
+
+	lookup = frontend->lookup ?: crypto_alg_mod_lookup;
+
+	for (;;) {
+		struct crypto_alg *alg;
+
+		alg = lookup(alg_name, type, mask);
+		if (IS_ERR(alg)) {
+			err = PTR_ERR(alg);
+			goto err;
+		}
+
+		tfm = crypto_create_tfm(alg, frontend);
+		if (!IS_ERR(tfm))
+			return tfm;
+
+		crypto_mod_put(alg);
+		err = PTR_ERR(tfm);
+
+err:
+		if (err != -EAGAIN)
+			break;
+		if (signal_pending(current)) {
+			err = -EINTR;
+			break;
+		}
+	}
+
+	return ERR_PTR(err);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
 
 /*
  * crypto_free_tfm - Free crypto transform
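The kzalloc() in crypto_create_tfm() above packs three regions into a single allocation: the frontend object (tfmsize bytes), the struct crypto_tfm itself, and the backend context (extsize bytes) that the existing crypto_tfm_ctx() helper points at. A rough sketch, reusing the hypothetical struct crypto_foo from the note after the commit message, of how a frontend recovers each piece:

static inline struct crypto_foo *__crypto_foo_cast(struct crypto_tfm *tfm)
{
	/* crypto_create_tfm() placed the tfm at mem + tfmsize, i.e. at foo->base. */
	return container_of(tfm, struct crypto_foo, base);
}

static inline void *crypto_foo_ctx(struct crypto_foo *foo)
{
	/* Backend context: the extsize bytes immediately after the crypto_tfm. */
	return crypto_tfm_ctx(&foo->base);
}

Keeping struct crypto_tfm as the last member of the frontend object is what lets .tfmsize = offsetof(struct crypto_foo, base) line up with the mem + tfmsize arithmetic above.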
diff --git a/crypto/internal.h b/crypto/internal.h
index 8ef72d76092e..3c19a27a7563 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -109,6 +109,8 @@ void crypto_alg_tested(const char *name, int err);
 void crypto_shoot_alg(struct crypto_alg *alg);
 struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
 				       u32 mask);
+struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
+				     const struct crypto_type *frontend);
 
 int crypto_register_instance(struct crypto_template *tmpl,
 			     struct crypto_instance *inst);