aboutsummaryrefslogtreecommitdiffstats
path: root/crypto/blkcipher.c
diff options
context:
space:
mode:
Diffstat (limited to 'crypto/blkcipher.c')
-rw-r--r--crypto/blkcipher.c202
1 file changed, 194 insertions(+), 8 deletions(-)
diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c
index f6c67f9d4e5c..4a7e65c4df4d 100644
--- a/crypto/blkcipher.c
+++ b/crypto/blkcipher.c
@@ -14,7 +14,8 @@
14 * 14 *
15 */ 15 */
16 16
17#include <linux/crypto.h> 17#include <crypto/internal/skcipher.h>
18#include <crypto/scatterwalk.h>
18#include <linux/errno.h> 19#include <linux/errno.h>
19#include <linux/hardirq.h> 20#include <linux/hardirq.h>
20#include <linux/kernel.h> 21#include <linux/kernel.h>
@@ -25,7 +26,6 @@
25#include <linux/string.h> 26#include <linux/string.h>
26 27
27#include "internal.h" 28#include "internal.h"
28#include "scatterwalk.h"
29 29
30enum { 30enum {
31 BLKCIPHER_WALK_PHYS = 1 << 0, 31 BLKCIPHER_WALK_PHYS = 1 << 0,
@@ -433,9 +433,8 @@ static unsigned int crypto_blkcipher_ctxsize(struct crypto_alg *alg, u32 type,
433 struct blkcipher_alg *cipher = &alg->cra_blkcipher; 433 struct blkcipher_alg *cipher = &alg->cra_blkcipher;
434 unsigned int len = alg->cra_ctxsize; 434 unsigned int len = alg->cra_ctxsize;
435 435
436 type ^= CRYPTO_ALG_ASYNC; 436 if ((mask & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_MASK &&
437 mask &= CRYPTO_ALG_ASYNC; 437 cipher->ivsize) {
438 if ((type & mask) && cipher->ivsize) {
439 len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1); 438 len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1);
440 len += cipher->ivsize; 439 len += cipher->ivsize;
441 } 440 }
@@ -451,6 +450,11 @@ static int crypto_init_blkcipher_ops_async(struct crypto_tfm *tfm)
451 crt->setkey = async_setkey; 450 crt->setkey = async_setkey;
452 crt->encrypt = async_encrypt; 451 crt->encrypt = async_encrypt;
453 crt->decrypt = async_decrypt; 452 crt->decrypt = async_decrypt;
453 if (!alg->ivsize) {
454 crt->givencrypt = skcipher_null_givencrypt;
455 crt->givdecrypt = skcipher_null_givdecrypt;
456 }
457 crt->base = __crypto_ablkcipher_cast(tfm);
454 crt->ivsize = alg->ivsize; 458 crt->ivsize = alg->ivsize;
455 459
456 return 0; 460 return 0;
@@ -482,9 +486,7 @@ static int crypto_init_blkcipher_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
482 if (alg->ivsize > PAGE_SIZE / 8) 486 if (alg->ivsize > PAGE_SIZE / 8)
483 return -EINVAL; 487 return -EINVAL;
484 488
485 type ^= CRYPTO_ALG_ASYNC; 489 if ((mask & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_MASK)
486 mask &= CRYPTO_ALG_ASYNC;
487 if (type & mask)
488 return crypto_init_blkcipher_ops_sync(tfm); 490 return crypto_init_blkcipher_ops_sync(tfm);
489 else 491 else
490 return crypto_init_blkcipher_ops_async(tfm); 492 return crypto_init_blkcipher_ops_async(tfm);
@@ -499,6 +501,8 @@ static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg)
499 seq_printf(m, "min keysize : %u\n", alg->cra_blkcipher.min_keysize); 501 seq_printf(m, "min keysize : %u\n", alg->cra_blkcipher.min_keysize);
500 seq_printf(m, "max keysize : %u\n", alg->cra_blkcipher.max_keysize); 502 seq_printf(m, "max keysize : %u\n", alg->cra_blkcipher.max_keysize);
501 seq_printf(m, "ivsize : %u\n", alg->cra_blkcipher.ivsize); 503 seq_printf(m, "ivsize : %u\n", alg->cra_blkcipher.ivsize);
504 seq_printf(m, "geniv : %s\n", alg->cra_blkcipher.geniv ?:
505 "<default>");
502} 506}
503 507
504const struct crypto_type crypto_blkcipher_type = { 508const struct crypto_type crypto_blkcipher_type = {
@@ -510,5 +514,187 @@ const struct crypto_type crypto_blkcipher_type = {
510}; 514};
511EXPORT_SYMBOL_GPL(crypto_blkcipher_type); 515EXPORT_SYMBOL_GPL(crypto_blkcipher_type);
512 516
517static int crypto_grab_nivcipher(struct crypto_skcipher_spawn *spawn,
518 const char *name, u32 type, u32 mask)
519{
520 struct crypto_alg *alg;
521 int err;
522
523 type = crypto_skcipher_type(type);
524 mask = crypto_skcipher_mask(mask) | CRYPTO_ALG_GENIV;
525
526 alg = crypto_alg_mod_lookup(name, type, mask);
527 if (IS_ERR(alg))
528 return PTR_ERR(alg);
529
530 err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask);
531 crypto_mod_put(alg);
532 return err;
533}
534
/*
 * Build a template instance that wraps an underlying (a)blkcipher with an
 * IV generator.  On success the caller owns the returned instance; on
 * failure an ERR_PTR is returned and nothing is left allocated.
 */
struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl,
					     struct rtattr **tb, u32 type,
					     u32 mask)
{
	/*
	 * Normalised view of the underlying cipher's ops and parameters so
	 * the blkcipher and ablkcipher branches below share one code path.
	 */
	struct {
		int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
			      unsigned int keylen);
		int (*encrypt)(struct ablkcipher_request *req);
		int (*decrypt)(struct ablkcipher_request *req);

		unsigned int min_keysize;
		unsigned int max_keysize;
		unsigned int ivsize;

		const char *geniv;
	} balg;
	const char *name;
	struct crypto_skcipher_spawn *spawn;
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	algt = crypto_get_attr_type(tb);
	err = PTR_ERR(algt);
	if (IS_ERR(algt))
		return ERR_PTR(err);

	/* The request must be compatible with a GIVCIPHER + GENIV type. */
	if ((algt->type ^ (CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV)) &
	    algt->mask)
		return ERR_PTR(-EINVAL);

	name = crypto_attr_alg_name(tb[1]);
	err = PTR_ERR(name);
	if (IS_ERR(name))
		return ERR_PTR(err);

	/* The spawn lives in the instance context and is freed with it. */
	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = crypto_instance_ctx(inst);

	/* Ignore async algorithms if necessary. */
	mask |= crypto_requires_sync(algt->type, algt->mask);

	crypto_set_skcipher_spawn(spawn, inst);
	err = crypto_grab_nivcipher(spawn, name, type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_skcipher_spawn_alg(spawn);

	if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER) {
		/* Synchronous blkcipher: route its ops through async shims. */
		balg.ivsize = alg->cra_blkcipher.ivsize;
		balg.min_keysize = alg->cra_blkcipher.min_keysize;
		balg.max_keysize = alg->cra_blkcipher.max_keysize;

		balg.setkey = async_setkey;
		balg.encrypt = async_encrypt;
		balg.decrypt = async_decrypt;

		balg.geniv = alg->cra_blkcipher.geniv;
	} else {
		/* Already an ablkcipher: use its ops directly. */
		balg.ivsize = alg->cra_ablkcipher.ivsize;
		balg.min_keysize = alg->cra_ablkcipher.min_keysize;
		balg.max_keysize = alg->cra_ablkcipher.max_keysize;

		balg.setkey = alg->cra_ablkcipher.setkey;
		balg.encrypt = alg->cra_ablkcipher.encrypt;
		balg.decrypt = alg->cra_ablkcipher.decrypt;

		balg.geniv = alg->cra_ablkcipher.geniv;
	}

	/* An IV generator over a cipher with no IV makes no sense. */
	err = -EINVAL;
	if (!balg.ivsize)
		goto err_drop_alg;

	/*
	 * This is only true if we're constructing an algorithm with its
	 * default IV generator.  For the default generator we elide the
	 * template name and double-check the IV generator.
	 */
	if (algt->mask & CRYPTO_ALG_GENIV) {
		if (!balg.geniv)
			balg.geniv = crypto_default_geniv(alg);
		/* -EAGAIN: not the right template, let lookup retry others. */
		err = -EAGAIN;
		if (strcmp(tmpl->name, balg.geniv))
			goto err_drop_alg;

		/* Default generator: instance keeps the cipher's own names. */
		memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
		memcpy(inst->alg.cra_driver_name, alg->cra_driver_name,
		       CRYPTO_MAX_ALG_NAME);
	} else {
		/* Explicit template: name is "template(cipher)". */
		err = -ENAMETOOLONG;
		if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
			     "%s(%s)", tmpl->name, alg->cra_name) >=
		    CRYPTO_MAX_ALG_NAME)
			goto err_drop_alg;
		if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
			     "%s(%s)", tmpl->name, alg->cra_driver_name) >=
		    CRYPTO_MAX_ALG_NAME)
			goto err_drop_alg;
	}

	/* Inherit the underlying cipher's properties; only ASYNC carries over. */
	inst->alg.cra_flags = CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV;
	inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_givcipher_type;

	inst->alg.cra_ablkcipher.ivsize = balg.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = balg.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = balg.max_keysize;
	inst->alg.cra_ablkcipher.geniv = balg.geniv;

	inst->alg.cra_ablkcipher.setkey = balg.setkey;
	inst->alg.cra_ablkcipher.encrypt = balg.encrypt;
	inst->alg.cra_ablkcipher.decrypt = balg.decrypt;

out:
	return inst;

err_drop_alg:
	crypto_drop_skcipher(spawn);
err_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}
EXPORT_SYMBOL_GPL(skcipher_geniv_alloc);
669
/* Release the spawn held by a geniv instance and free the instance. */
void skcipher_geniv_free(struct crypto_instance *inst)
{
	struct crypto_skcipher_spawn *spawn = crypto_instance_ctx(inst);

	crypto_drop_skcipher(spawn);
	kfree(inst);
}
EXPORT_SYMBOL_GPL(skcipher_geniv_free);
676
677int skcipher_geniv_init(struct crypto_tfm *tfm)
678{
679 struct crypto_instance *inst = (void *)tfm->__crt_alg;
680 struct crypto_ablkcipher *cipher;
681
682 cipher = crypto_spawn_skcipher(crypto_instance_ctx(inst));
683 if (IS_ERR(cipher))
684 return PTR_ERR(cipher);
685
686 tfm->crt_ablkcipher.base = cipher;
687 tfm->crt_ablkcipher.reqsize += crypto_ablkcipher_reqsize(cipher);
688
689 return 0;
690}
691EXPORT_SYMBOL_GPL(skcipher_geniv_init);
692
693void skcipher_geniv_exit(struct crypto_tfm *tfm)
694{
695 crypto_free_ablkcipher(tfm->crt_ablkcipher.base);
696}
697EXPORT_SYMBOL_GPL(skcipher_geniv_exit);
698
513MODULE_LICENSE("GPL"); 699MODULE_LICENSE("GPL");
514MODULE_DESCRIPTION("Generic block chaining cipher type"); 700MODULE_DESCRIPTION("Generic block chaining cipher type");