path: root/arch/s390
author		Linus Torvalds <torvalds@linux-foundation.org>	2011-05-20 20:24:14 -0400
committer	Linus Torvalds <torvalds@linux-foundation.org>	2011-05-20 20:24:14 -0400
commit		052497553e5dedc04c43800820c1d5788201cc71 (patch)
tree		952466fc0f0df2b44b8df6b087c944ddf890f709 /arch/s390
parent		cae13fe4cc3f24820ffb990c09110626837e85d4 (diff)
parent		4427b1b4ec111622071ec872c94594e05635c6e9 (diff)
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (45 commits)
  crypto: caam - add support for sha512 variants of existing AEAD algorithms
  crypto: caam - remove unused authkeylen from caam_ctx
  crypto: caam - fix decryption shared vs. non-shared key setting
  crypto: caam - platform_bus_type migration
  crypto: aesni-intel - fix aesni build on i386
  crypto: aesni-intel - Merge with fpu.ko
  crypto: mv_cesa - make count_sgs() null-pointer proof
  crypto: mv_cesa - copy remaining bytes to SRAM only when needed
  crypto: mv_cesa - move digest state initialisation to a better place
  crypto: mv_cesa - fill inner/outer IV fields only in HMAC case
  crypto: mv_cesa - refactor copy_src_to_buf()
  crypto: mv_cesa - no need to save digest state after the last chunk
  crypto: mv_cesa - print a warning when registration of AES algos fail
  crypto: mv_cesa - drop this call to mv_hash_final from mv_hash_finup
  crypto: mv_cesa - the descriptor pointer register needs to be set just once
  crypto: mv_cesa - use ablkcipher_request_cast instead of the manual container_of
  crypto: caam - fix printk recursion for long error texts
  crypto: caam - remove unused keylen from session context
  hwrng: amd - enable AMD hw rnd driver for Maple PPC boards
  hwrng: amd - manage resource allocation
  ...
Diffstat (limited to 'arch/s390')
-rw-r--r--	arch/s390/crypto/Makefile	|   1
-rw-r--r--	arch/s390/crypto/aes_s390.c	| 383
-rw-r--r--	arch/s390/crypto/crypt_s390.h	| 112
-rw-r--r--	arch/s390/crypto/des_check_key.c	| 132
-rw-r--r--	arch/s390/crypto/des_s390.c	| 370
-rw-r--r--	arch/s390/crypto/ghash_s390.c	| 162
-rw-r--r--	arch/s390/crypto/prng.c	|   2
-rw-r--r--	arch/s390/crypto/sha1_s390.c	|   2
-rw-r--r--	arch/s390/crypto/sha256_s390.c	|   2
-rw-r--r--	arch/s390/crypto/sha512_s390.c	|   2
10 files changed, 919 insertions, 249 deletions
diff --git a/arch/s390/crypto/Makefile b/arch/s390/crypto/Makefile
index 1cf81d77c5a..7f0b7cda625 100644
--- a/arch/s390/crypto/Makefile
+++ b/arch/s390/crypto/Makefile
@@ -8,3 +8,4 @@ obj-$(CONFIG_CRYPTO_SHA512_S390) += sha512_s390.o sha_common.o
 obj-$(CONFIG_CRYPTO_DES_S390) += des_s390.o
 obj-$(CONFIG_CRYPTO_AES_S390) += aes_s390.o
 obj-$(CONFIG_S390_PRNG) += prng.o
+obj-$(CONFIG_CRYPTO_GHASH_S390) += ghash_s390.o
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index 58f46734465..a9ce135893f 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -31,7 +31,8 @@
 #define AES_KEYLEN_192	2
 #define AES_KEYLEN_256	4
 
-static char keylen_flag = 0;
+static u8 *ctrblk;
+static char keylen_flag;
 
 struct s390_aes_ctx {
 	u8 iv[AES_BLOCK_SIZE];
@@ -45,6 +46,24 @@ struct s390_aes_ctx {
 	} fallback;
 };
 
+struct pcc_param {
+	u8 key[32];
+	u8 tweak[16];
+	u8 block[16];
+	u8 bit[16];
+	u8 xts[16];
+};
+
+struct s390_xts_ctx {
+	u8 key[32];
+	u8 xts_param[16];
+	struct pcc_param pcc;
+	long enc;
+	long dec;
+	int key_len;
+	struct crypto_blkcipher *fallback;
+};
+
 /*
  * Check if the key_len is supported by the HW.
  * Returns 0 if it is, a positive number if it is not and software fallback is
@@ -504,15 +523,337 @@ static struct crypto_alg cbc_aes_alg = {
 	}
 };
 
+static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
+			       unsigned int len)
+{
+	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
+	unsigned int ret;
+
+	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
+					      CRYPTO_TFM_REQ_MASK);
+
+	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
+	if (ret) {
+		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+		tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
+				   CRYPTO_TFM_RES_MASK);
+	}
+	return ret;
+}
+
+static int xts_fallback_decrypt(struct blkcipher_desc *desc,
+		struct scatterlist *dst, struct scatterlist *src,
+		unsigned int nbytes)
+{
+	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_blkcipher *tfm;
+	unsigned int ret;
+
+	tfm = desc->tfm;
+	desc->tfm = xts_ctx->fallback;
+
+	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
+
+	desc->tfm = tfm;
+	return ret;
+}
+
+static int xts_fallback_encrypt(struct blkcipher_desc *desc,
+		struct scatterlist *dst, struct scatterlist *src,
+		unsigned int nbytes)
+{
+	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_blkcipher *tfm;
+	unsigned int ret;
+
+	tfm = desc->tfm;
+	desc->tfm = xts_ctx->fallback;
+
+	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
+
+	desc->tfm = tfm;
+	return ret;
+}
+
+static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+			   unsigned int key_len)
+{
+	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
+	u32 *flags = &tfm->crt_flags;
+
+	switch (key_len) {
+	case 32:
+		xts_ctx->enc = KM_XTS_128_ENCRYPT;
+		xts_ctx->dec = KM_XTS_128_DECRYPT;
+		memcpy(xts_ctx->key + 16, in_key, 16);
+		memcpy(xts_ctx->pcc.key + 16, in_key + 16, 16);
+		break;
+	case 48:
+		xts_ctx->enc = 0;
+		xts_ctx->dec = 0;
+		xts_fallback_setkey(tfm, in_key, key_len);
+		break;
+	case 64:
+		xts_ctx->enc = KM_XTS_256_ENCRYPT;
+		xts_ctx->dec = KM_XTS_256_DECRYPT;
+		memcpy(xts_ctx->key, in_key, 32);
+		memcpy(xts_ctx->pcc.key, in_key + 32, 32);
+		break;
+	default:
+		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+		return -EINVAL;
+	}
+	xts_ctx->key_len = key_len;
+	return 0;
+}
+
+static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
+			 struct s390_xts_ctx *xts_ctx,
+			 struct blkcipher_walk *walk)
+{
+	unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
+	int ret = blkcipher_walk_virt(desc, walk);
+	unsigned int nbytes = walk->nbytes;
+	unsigned int n;
+	u8 *in, *out;
+	void *param;
+
+	if (!nbytes)
+		goto out;
+
+	memset(xts_ctx->pcc.block, 0, sizeof(xts_ctx->pcc.block));
+	memset(xts_ctx->pcc.bit, 0, sizeof(xts_ctx->pcc.bit));
+	memset(xts_ctx->pcc.xts, 0, sizeof(xts_ctx->pcc.xts));
+	memcpy(xts_ctx->pcc.tweak, walk->iv, sizeof(xts_ctx->pcc.tweak));
+	param = xts_ctx->pcc.key + offset;
+	ret = crypt_s390_pcc(func, param);
+	BUG_ON(ret < 0);
+
+	memcpy(xts_ctx->xts_param, xts_ctx->pcc.xts, 16);
+	param = xts_ctx->key + offset;
+	do {
+		/* only use complete blocks */
+		n = nbytes & ~(AES_BLOCK_SIZE - 1);
+		out = walk->dst.virt.addr;
+		in = walk->src.virt.addr;
+
+		ret = crypt_s390_km(func, param, out, in, n);
+		BUG_ON(ret < 0 || ret != n);
+
+		nbytes &= AES_BLOCK_SIZE - 1;
+		ret = blkcipher_walk_done(desc, walk, nbytes);
+	} while ((nbytes = walk->nbytes));
+out:
+	return ret;
+}
+
+static int xts_aes_encrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	if (unlikely(xts_ctx->key_len == 48))
+		return xts_fallback_encrypt(desc, dst, src, nbytes);
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
+}
+
+static int xts_aes_decrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	if (unlikely(xts_ctx->key_len == 48))
+		return xts_fallback_decrypt(desc, dst, src, nbytes);
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
+}
+
+static int xts_fallback_init(struct crypto_tfm *tfm)
+{
+	const char *name = tfm->__crt_alg->cra_name;
+	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
+
+	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
+			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
+	if (IS_ERR(xts_ctx->fallback)) {
+		pr_err("Allocating XTS fallback algorithm %s failed\n",
+		       name);
+		return PTR_ERR(xts_ctx->fallback);
+	}
+	return 0;
+}
+
+static void xts_fallback_exit(struct crypto_tfm *tfm)
+{
+	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_blkcipher(xts_ctx->fallback);
+	xts_ctx->fallback = NULL;
+}
+
+static struct crypto_alg xts_aes_alg = {
+	.cra_name = "xts(aes)",
+	.cra_driver_name = "xts-aes-s390",
+	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
+		     CRYPTO_ALG_NEED_FALLBACK,
+	.cra_blocksize = AES_BLOCK_SIZE,
+	.cra_ctxsize = sizeof(struct s390_xts_ctx),
+	.cra_type = &crypto_blkcipher_type,
+	.cra_module = THIS_MODULE,
+	.cra_list = LIST_HEAD_INIT(xts_aes_alg.cra_list),
+	.cra_init = xts_fallback_init,
+	.cra_exit = xts_fallback_exit,
+	.cra_u = {
+		.blkcipher = {
+			.min_keysize = 2 * AES_MIN_KEY_SIZE,
+			.max_keysize = 2 * AES_MAX_KEY_SIZE,
+			.ivsize = AES_BLOCK_SIZE,
+			.setkey = xts_aes_set_key,
+			.encrypt = xts_aes_encrypt,
+			.decrypt = xts_aes_decrypt,
+		}
+	}
+};
+
+static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+			   unsigned int key_len)
+{
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+	switch (key_len) {
+	case 16:
+		sctx->enc = KMCTR_AES_128_ENCRYPT;
+		sctx->dec = KMCTR_AES_128_DECRYPT;
+		break;
+	case 24:
+		sctx->enc = KMCTR_AES_192_ENCRYPT;
+		sctx->dec = KMCTR_AES_192_DECRYPT;
+		break;
+	case 32:
+		sctx->enc = KMCTR_AES_256_ENCRYPT;
+		sctx->dec = KMCTR_AES_256_DECRYPT;
+		break;
+	}
+
+	return aes_set_key(tfm, in_key, key_len);
+}
+
+static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
+			 struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
+{
+	int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
+	unsigned int i, n, nbytes;
+	u8 buf[AES_BLOCK_SIZE];
+	u8 *out, *in;
+
+	if (!walk->nbytes)
+		return ret;
+
+	memcpy(ctrblk, walk->iv, AES_BLOCK_SIZE);
+	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
+		out = walk->dst.virt.addr;
+		in = walk->src.virt.addr;
+		while (nbytes >= AES_BLOCK_SIZE) {
+			/* only use complete blocks, max. PAGE_SIZE */
+			n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
+				nbytes & ~(AES_BLOCK_SIZE - 1);
+			for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
+				memcpy(ctrblk + i, ctrblk + i - AES_BLOCK_SIZE,
+				       AES_BLOCK_SIZE);
+				crypto_inc(ctrblk + i, AES_BLOCK_SIZE);
+			}
+			ret = crypt_s390_kmctr(func, sctx->key, out, in, n, ctrblk);
+			BUG_ON(ret < 0 || ret != n);
+			if (n > AES_BLOCK_SIZE)
+				memcpy(ctrblk, ctrblk + n - AES_BLOCK_SIZE,
+				       AES_BLOCK_SIZE);
+			crypto_inc(ctrblk, AES_BLOCK_SIZE);
+			out += n;
+			in += n;
+			nbytes -= n;
+		}
+		ret = blkcipher_walk_done(desc, walk, nbytes);
+	}
+	/*
+	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
+	 */
+	if (nbytes) {
+		out = walk->dst.virt.addr;
+		in = walk->src.virt.addr;
+		ret = crypt_s390_kmctr(func, sctx->key, buf, in,
+				       AES_BLOCK_SIZE, ctrblk);
+		BUG_ON(ret < 0 || ret != AES_BLOCK_SIZE);
+		memcpy(out, buf, nbytes);
+		crypto_inc(ctrblk, AES_BLOCK_SIZE);
+		ret = blkcipher_walk_done(desc, walk, 0);
+	}
+	memcpy(walk->iv, ctrblk, AES_BLOCK_SIZE);
+	return ret;
+}
+
+static int ctr_aes_encrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
+}
+
+static int ctr_aes_decrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
+}
+
+static struct crypto_alg ctr_aes_alg = {
+	.cra_name = "ctr(aes)",
+	.cra_driver_name = "ctr-aes-s390",
+	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize = 1,
+	.cra_ctxsize = sizeof(struct s390_aes_ctx),
+	.cra_type = &crypto_blkcipher_type,
+	.cra_module = THIS_MODULE,
+	.cra_list = LIST_HEAD_INIT(ctr_aes_alg.cra_list),
+	.cra_u = {
+		.blkcipher = {
+			.min_keysize = AES_MIN_KEY_SIZE,
+			.max_keysize = AES_MAX_KEY_SIZE,
+			.ivsize = AES_BLOCK_SIZE,
+			.setkey = ctr_aes_set_key,
+			.encrypt = ctr_aes_encrypt,
+			.decrypt = ctr_aes_decrypt,
+		}
+	}
+};
+
 static int __init aes_s390_init(void)
 {
 	int ret;
 
-	if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
+	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
 		keylen_flag |= AES_KEYLEN_128;
-	if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
+	if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
 		keylen_flag |= AES_KEYLEN_192;
-	if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
+	if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
 		keylen_flag |= AES_KEYLEN_256;
 
 	if (!keylen_flag)
@@ -535,9 +876,40 @@ static int __init aes_s390_init(void)
 	if (ret)
 		goto cbc_aes_err;
 
+	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
+			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
+	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
+			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
+		ret = crypto_register_alg(&xts_aes_alg);
+		if (ret)
+			goto xts_aes_err;
+	}
+
+	if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
+			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
+	    crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
+			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
+	    crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
+			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
+		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
+		if (!ctrblk) {
+			ret = -ENOMEM;
+			goto ctr_aes_err;
+		}
+		ret = crypto_register_alg(&ctr_aes_alg);
+		if (ret) {
+			free_page((unsigned long) ctrblk);
+			goto ctr_aes_err;
+		}
+	}
+
 out:
 	return ret;
 
+ctr_aes_err:
+	crypto_unregister_alg(&xts_aes_alg);
+xts_aes_err:
+	crypto_unregister_alg(&cbc_aes_alg);
 cbc_aes_err:
 	crypto_unregister_alg(&ecb_aes_alg);
 ecb_aes_err:
@@ -548,6 +920,9 @@ aes_err:
 
 static void __exit aes_s390_fini(void)
 {
+	crypto_unregister_alg(&ctr_aes_alg);
+	free_page((unsigned long) ctrblk);
+	crypto_unregister_alg(&xts_aes_alg);
 	crypto_unregister_alg(&cbc_aes_alg);
 	crypto_unregister_alg(&ecb_aes_alg);
 	crypto_unregister_alg(&aes_alg);
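
Note on the CTR code added above: ctr_aes_crypt() expands the IV into up to a page of consecutive counter blocks so that one KMCTR invocation can process many AES blocks per call. A minimal user-space sketch of that expansion step, assuming only standard C (crypto_inc() is reimplemented here for illustration; in the kernel it comes from the crypto API):

	#include <string.h>

	#define AES_BLOCK_SIZE 16

	/* big-endian increment with carry, as kernel crypto_inc() does */
	static void crypto_inc(unsigned char *a, unsigned int size)
	{
		for (; size; size--)
			if (++a[size - 1])
				break;
	}

	/* fill ctrblk[0..n) with consecutive counter values; the first
	 * block must already hold the current counter (from walk->iv) */
	static void expand_ctrblk(unsigned char *ctrblk, unsigned int n)
	{
		unsigned int i;

		for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
			memcpy(ctrblk + i, ctrblk + i - AES_BLOCK_SIZE,
			       AES_BLOCK_SIZE);
			crypto_inc(ctrblk + i, AES_BLOCK_SIZE);
		}
	}

After the expansion, the last block of ctrblk holds the last counter used, which is why ctr_aes_crypt() copies it back to the front and increments once more before the next chunk.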
diff --git a/arch/s390/crypto/crypt_s390.h b/arch/s390/crypto/crypt_s390.h
index 7ee9a1b4ad9..49676771bd6 100644
--- a/arch/s390/crypto/crypt_s390.h
+++ b/arch/s390/crypto/crypt_s390.h
@@ -24,13 +24,18 @@
 #define CRYPT_S390_PRIORITY 300
 #define CRYPT_S390_COMPOSITE_PRIORITY 400
 
+#define CRYPT_S390_MSA	0x1
+#define CRYPT_S390_MSA3	0x2
+#define CRYPT_S390_MSA4	0x4
+
 /* s390 cryptographic operations */
 enum crypt_s390_operations {
 	CRYPT_S390_KM   = 0x0100,
 	CRYPT_S390_KMC  = 0x0200,
 	CRYPT_S390_KIMD = 0x0300,
 	CRYPT_S390_KLMD = 0x0400,
-	CRYPT_S390_KMAC = 0x0500
+	CRYPT_S390_KMAC = 0x0500,
+	CRYPT_S390_KMCTR = 0x0600
 };
 
 /*
@@ -51,6 +56,10 @@ enum crypt_s390_km_func {
 	KM_AES_192_DECRYPT = CRYPT_S390_KM | 0x13 | 0x80,
 	KM_AES_256_ENCRYPT = CRYPT_S390_KM | 0x14,
 	KM_AES_256_DECRYPT = CRYPT_S390_KM | 0x14 | 0x80,
+	KM_XTS_128_ENCRYPT = CRYPT_S390_KM | 0x32,
+	KM_XTS_128_DECRYPT = CRYPT_S390_KM | 0x32 | 0x80,
+	KM_XTS_256_ENCRYPT = CRYPT_S390_KM | 0x34,
+	KM_XTS_256_DECRYPT = CRYPT_S390_KM | 0x34 | 0x80,
 };
 
 /*
@@ -75,6 +84,26 @@ enum crypt_s390_kmc_func {
 };
 
 /*
+ * function codes for KMCTR (CIPHER MESSAGE WITH COUNTER)
+ * instruction
+ */
+enum crypt_s390_kmctr_func {
+	KMCTR_QUERY = CRYPT_S390_KMCTR | 0x0,
+	KMCTR_DEA_ENCRYPT = CRYPT_S390_KMCTR | 0x1,
+	KMCTR_DEA_DECRYPT = CRYPT_S390_KMCTR | 0x1 | 0x80,
+	KMCTR_TDEA_128_ENCRYPT = CRYPT_S390_KMCTR | 0x2,
+	KMCTR_TDEA_128_DECRYPT = CRYPT_S390_KMCTR | 0x2 | 0x80,
+	KMCTR_TDEA_192_ENCRYPT = CRYPT_S390_KMCTR | 0x3,
+	KMCTR_TDEA_192_DECRYPT = CRYPT_S390_KMCTR | 0x3 | 0x80,
+	KMCTR_AES_128_ENCRYPT = CRYPT_S390_KMCTR | 0x12,
+	KMCTR_AES_128_DECRYPT = CRYPT_S390_KMCTR | 0x12 | 0x80,
+	KMCTR_AES_192_ENCRYPT = CRYPT_S390_KMCTR | 0x13,
+	KMCTR_AES_192_DECRYPT = CRYPT_S390_KMCTR | 0x13 | 0x80,
+	KMCTR_AES_256_ENCRYPT = CRYPT_S390_KMCTR | 0x14,
+	KMCTR_AES_256_DECRYPT = CRYPT_S390_KMCTR | 0x14 | 0x80,
+};
+
+/*
  * function codes for KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST)
  * instruction
  */
@@ -83,6 +112,7 @@ enum crypt_s390_kimd_func {
 	KIMD_SHA_1 = CRYPT_S390_KIMD | 1,
 	KIMD_SHA_256 = CRYPT_S390_KIMD | 2,
 	KIMD_SHA_512 = CRYPT_S390_KIMD | 3,
+	KIMD_GHASH = CRYPT_S390_KIMD | 65,
 };
 
 /*
@@ -284,6 +314,45 @@ static inline int crypt_s390_kmac(long func, void *param,
 }
 
 /**
+ * crypt_s390_kmctr:
+ * @func: the function code passed to KMCTR; see crypt_s390_kmctr_func
+ * @param: address of parameter block; see POP for details on each func
+ * @dest: address of destination memory area
+ * @src: address of source memory area
+ * @src_len: length of src operand in bytes
+ * @counter: address of counter value
+ *
+ * Executes the KMCTR (CIPHER MESSAGE WITH COUNTER) operation of the CPU.
+ *
+ * Returns -1 for failure, 0 for the query func, number of processed
+ * bytes for encryption/decryption funcs
+ */
+static inline int crypt_s390_kmctr(long func, void *param, u8 *dest,
+				   const u8 *src, long src_len, u8 *counter)
+{
+	register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
+	register void *__param asm("1") = param;
+	register const u8 *__src asm("2") = src;
+	register long __src_len asm("3") = src_len;
+	register u8 *__dest asm("4") = dest;
+	register u8 *__ctr asm("6") = counter;
+	int ret = -1;
+
+	asm volatile(
+		"0:	.insn	rrf,0xb92d0000,%3,%1,%4,0 \n" /* KMCTR opcode */
+		"1:	brc	1,0b \n" /* handle partial completion */
+		"	la	%0,0\n"
+		"2:\n"
+		EX_TABLE(0b,2b) EX_TABLE(1b,2b)
+		: "+d" (ret), "+a" (__src), "+d" (__src_len), "+a" (__dest),
+		  "+a" (__ctr)
+		: "d" (__func), "a" (__param) : "cc", "memory");
+	if (ret < 0)
+		return ret;
+	return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len;
+}
+
+/**
  * crypt_s390_func_available:
  * @func: the function code of the specific function; 0 if op in general
  *
@@ -291,13 +360,17 @@ static inline int crypt_s390_kmac(long func, void *param,
  *
  * Returns 1 if func available; 0 if func or op in general not available
  */
-static inline int crypt_s390_func_available(int func)
+static inline int crypt_s390_func_available(int func,
+					    unsigned int facility_mask)
 {
 	unsigned char status[16];
 	int ret;
 
-	/* check if CPACF facility (bit 17) is available */
-	if (!test_facility(17))
+	if (facility_mask & CRYPT_S390_MSA && !test_facility(17))
+		return 0;
+	if (facility_mask & CRYPT_S390_MSA3 && !test_facility(76))
+		return 0;
+	if (facility_mask & CRYPT_S390_MSA4 && !test_facility(77))
 		return 0;
 
 	switch (func & CRYPT_S390_OP_MASK) {
@@ -316,6 +389,10 @@ static inline int crypt_s390_func_available(int func)
 	case CRYPT_S390_KMAC:
 		ret = crypt_s390_kmac(KMAC_QUERY, &status, NULL, 0);
 		break;
+	case CRYPT_S390_KMCTR:
+		ret = crypt_s390_kmctr(KMCTR_QUERY, &status, NULL, NULL, 0,
+				       NULL);
+		break;
 	default:
 		return 0;
 	}
@@ -326,4 +403,31 @@ static inline int crypt_s390_func_available(int func)
 	return (status[func >> 3] & (0x80 >> (func & 7))) != 0;
 }
 
+/**
+ * crypt_s390_pcc:
+ * @func: the function code passed to KM; see crypt_s390_km_func
+ * @param: address of parameter block; see POP for details on each func
+ *
+ * Executes the PCC (PERFORM CRYPTOGRAPHIC COMPUTATION) operation of the CPU.
+ *
+ * Returns -1 for failure, 0 for success.
+ */
+static inline int crypt_s390_pcc(long func, void *param)
+{
+	register long __func asm("0") = func & 0x7f; /* encrypt or decrypt */
+	register void *__param asm("1") = param;
+	int ret = -1;
+
+	asm volatile(
+		"0:	.insn	rre,0xb92c0000,0,0 \n" /* PCC opcode */
+		"1:	brc	1,0b \n" /* handle partial completion */
+		"	la	%0,0\n"
+		"2:\n"
+		EX_TABLE(0b,2b) EX_TABLE(1b,2b)
+		: "+d" (ret)
+		: "d" (__func), "a" (__param) : "cc", "memory");
+	return ret;
+}
+
+
 #endif /* _CRYPTO_ARCH_S390_CRYPT_S390_H */
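
The reworked crypt_s390_func_available() above takes a facility mask in addition to the function code: test_facility() bits 17 (MSA), 76 (MSA3) and 77 (MSA4) gate whether the query instruction may be issued at all, and the query's status block then reports the specific function. A sketch of the resulting call pattern, mirroring how the aes_s390.c hunk uses it (the surrounding init function and ret variable are assumed context):

	/* xts(aes) needs both the base MSA facility and MSA4 */
	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4))
		ret = crypto_register_alg(&xts_aes_alg);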
diff --git a/arch/s390/crypto/des_check_key.c b/arch/s390/crypto/des_check_key.c
deleted file mode 100644
index 5706af26644..00000000000
--- a/arch/s390/crypto/des_check_key.c
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Cryptographic API.
- *
- * Function for checking keys for the DES and Tripple DES Encryption
- * algorithms.
- *
- * Originally released as descore by Dana L. How <how@isl.stanford.edu>.
- * Modified by Raimar Falke <rf13@inf.tu-dresden.de> for the Linux-Kernel.
- * Derived from Cryptoapi and Nettle implementations, adapted for in-place
- * scatterlist interface. Changed LGPL to GPL per section 3 of the LGPL.
- *
- * s390 Version:
- *   Copyright IBM Corp. 2003
- *   Author(s): Thomas Spatzier
- *		Jan Glauber (jan.glauber@de.ibm.com)
- *
- * Derived from "crypto/des.c"
- *   Copyright (c) 1992 Dana L. How.
- *   Copyright (c) Raimar Falke <rf13@inf.tu-dresden.de>
- *   Copyright (c) Gisle Sflensminde <gisle@ii.uib.no>
- *   Copyright (C) 2001 Niels Mvller.
- *   Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- */
-#include <linux/init.h>
-#include <linux/module.h>
-#include <linux/errno.h>
-#include <linux/crypto.h>
-#include "crypto_des.h"
-
-#define ROR(d,c,o)	((d) = (d) >> (c) | (d) << (o))
-
-static const u8 parity[] = {
-	8,1,0,8,0,8,8,0,0,8,8,0,8,0,2,8,0,8,8,0,8,0,0,8,8,0,0,8,0,8,8,3,
-	0,8,8,0,8,0,0,8,8,0,0,8,0,8,8,0,8,0,0,8,0,8,8,0,0,8,8,0,8,0,0,8,
-	0,8,8,0,8,0,0,8,8,0,0,8,0,8,8,0,8,0,0,8,0,8,8,0,0,8,8,0,8,0,0,8,
-	8,0,0,8,0,8,8,0,0,8,8,0,8,0,0,8,0,8,8,0,8,0,0,8,8,0,0,8,0,8,8,0,
-	0,8,8,0,8,0,0,8,8,0,0,8,0,8,8,0,8,0,0,8,0,8,8,0,0,8,8,0,8,0,0,8,
-	8,0,0,8,0,8,8,0,0,8,8,0,8,0,0,8,0,8,8,0,8,0,0,8,8,0,0,8,0,8,8,0,
-	8,0,0,8,0,8,8,0,0,8,8,0,8,0,0,8,0,8,8,0,8,0,0,8,8,0,0,8,0,8,8,0,
-	4,8,8,0,8,0,0,8,8,0,0,8,0,8,8,0,8,5,0,8,0,8,8,0,0,8,8,0,8,0,6,8,
-};
-
-/*
- * RFC2451: Weak key checks SHOULD be performed.
- */
-int
-crypto_des_check_key(const u8 *key, unsigned int keylen, u32 *flags)
-{
-	u32 n, w;
-
-	n  = parity[key[0]]; n <<= 4;
-	n |= parity[key[1]]; n <<= 4;
-	n |= parity[key[2]]; n <<= 4;
-	n |= parity[key[3]]; n <<= 4;
-	n |= parity[key[4]]; n <<= 4;
-	n |= parity[key[5]]; n <<= 4;
-	n |= parity[key[6]]; n <<= 4;
-	n |= parity[key[7]];
-	w = 0x88888888L;
-
-	if ((*flags & CRYPTO_TFM_REQ_WEAK_KEY)
-	    && !((n - (w >> 3)) & w)) {	/* 1 in 10^10 keys passes this test */
-		if (n < 0x41415151) {
-			if (n < 0x31312121) {
-				if (n < 0x14141515) {
-					/* 01 01 01 01 01 01 01 01 */
-					if (n == 0x11111111) goto weak;
-					/* 01 1F 01 1F 01 0E 01 0E */
-					if (n == 0x13131212) goto weak;
-				} else {
-					/* 01 E0 01 E0 01 F1 01 F1 */
-					if (n == 0x14141515) goto weak;
-					/* 01 FE 01 FE 01 FE 01 FE */
-					if (n == 0x16161616) goto weak;
-				}
-			} else {
-				if (n < 0x34342525) {
-					/* 1F 01 1F 01 0E 01 0E 01 */
-					if (n == 0x31312121) goto weak;
-					/* 1F 1F 1F 1F 0E 0E 0E 0E (?) */
-					if (n == 0x33332222) goto weak;
-				} else {
-					/* 1F E0 1F E0 0E F1 0E F1 */
-					if (n == 0x34342525) goto weak;
-					/* 1F FE 1F FE 0E FE 0E FE */
-					if (n == 0x36362626) goto weak;
-				}
-			}
-		} else {
-			if (n < 0x61616161) {
-				if (n < 0x44445555) {
-					/* E0 01 E0 01 F1 01 F1 01 */
-					if (n == 0x41415151) goto weak;
-					/* E0 1F E0 1F F1 0E F1 0E */
-					if (n == 0x43435252) goto weak;
-				} else {
-					/* E0 E0 E0 E0 F1 F1 F1 F1 (?) */
-					if (n == 0x44445555) goto weak;
-					/* E0 FE E0 FE F1 FE F1 FE */
-					if (n == 0x46465656) goto weak;
-				}
-			} else {
-				if (n < 0x64646565) {
-					/* FE 01 FE 01 FE 01 FE 01 */
-					if (n == 0x61616161) goto weak;
-					/* FE 1F FE 1F FE 0E FE 0E */
-					if (n == 0x63636262) goto weak;
-				} else {
-					/* FE E0 FE E0 FE F1 FE F1 */
-					if (n == 0x64646565) goto weak;
-					/* FE FE FE FE FE FE FE FE */
-					if (n == 0x66666666) goto weak;
-				}
-			}
-		}
-	}
-	return 0;
-weak:
-	*flags |= CRYPTO_TFM_RES_WEAK_KEY;
-	return -EINVAL;
-}
-
-EXPORT_SYMBOL(crypto_des_check_key);
-
-MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("Key Check function for DES & DES3 Cipher Algorithms");
diff --git a/arch/s390/crypto/des_s390.c b/arch/s390/crypto/des_s390.c
index cc542011839..a52bfd124d8 100644
--- a/arch/s390/crypto/des_s390.c
+++ b/arch/s390/crypto/des_s390.c
@@ -3,7 +3,7 @@
  *
  * s390 implementation of the DES Cipher Algorithm.
  *
- * Copyright IBM Corp. 2003,2007
+ * Copyright IBM Corp. 2003,2011
  * Author(s): Thomas Spatzier
  *	      Jan Glauber (jan.glauber@de.ibm.com)
  *
@@ -22,22 +22,19 @@
 
 #include "crypt_s390.h"
 
-#define DES3_192_KEY_SIZE	(3 * DES_KEY_SIZE)
+#define DES3_KEY_SIZE	(3 * DES_KEY_SIZE)
 
-struct crypt_s390_des_ctx {
-	u8 iv[DES_BLOCK_SIZE];
-	u8 key[DES_KEY_SIZE];
-};
+static u8 *ctrblk;
 
-struct crypt_s390_des3_192_ctx {
+struct s390_des_ctx {
 	u8 iv[DES_BLOCK_SIZE];
-	u8 key[DES3_192_KEY_SIZE];
+	u8 key[DES3_KEY_SIZE];
 };
 
 static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
-		      unsigned int keylen)
+		      unsigned int key_len)
 {
-	struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);
+	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
 	u32 *flags = &tfm->crt_flags;
 	u32 tmp[DES_EXPKEY_WORDS];
 
@@ -47,22 +44,22 @@ static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
 		return -EINVAL;
 	}
 
-	memcpy(dctx->key, key, keylen);
+	memcpy(ctx->key, key, key_len);
 	return 0;
 }
 
 static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-	struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);
+	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	crypt_s390_km(KM_DEA_ENCRYPT, dctx->key, out, in, DES_BLOCK_SIZE);
+	crypt_s390_km(KM_DEA_ENCRYPT, ctx->key, out, in, DES_BLOCK_SIZE);
 }
 
 static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-	struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);
+	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	crypt_s390_km(KM_DEA_DECRYPT, dctx->key, out, in, DES_BLOCK_SIZE);
+	crypt_s390_km(KM_DEA_DECRYPT, ctx->key, out, in, DES_BLOCK_SIZE);
 }
 
 static struct crypto_alg des_alg = {
@@ -71,7 +68,7 @@ static struct crypto_alg des_alg = {
 	.cra_priority = CRYPT_S390_PRIORITY,
 	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
 	.cra_blocksize = DES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct crypt_s390_des_ctx),
+	.cra_ctxsize = sizeof(struct s390_des_ctx),
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(des_alg.cra_list),
 	.cra_u = {
@@ -86,7 +83,7 @@ static struct crypto_alg des_alg = {
 };
 
 static int ecb_desall_crypt(struct blkcipher_desc *desc, long func,
-			    void *param, struct blkcipher_walk *walk)
+			    u8 *key, struct blkcipher_walk *walk)
 {
 	int ret = blkcipher_walk_virt(desc, walk);
 	unsigned int nbytes;
@@ -97,7 +94,7 @@ static int ecb_desall_crypt(struct blkcipher_desc *desc, long func,
 		u8 *out = walk->dst.virt.addr;
 		u8 *in = walk->src.virt.addr;
 
-		ret = crypt_s390_km(func, param, out, in, n);
+		ret = crypt_s390_km(func, key, out, in, n);
 		BUG_ON((ret < 0) || (ret != n));
 
 		nbytes &= DES_BLOCK_SIZE - 1;
@@ -108,7 +105,7 @@ static int ecb_desall_crypt(struct blkcipher_desc *desc, long func,
 }
 
 static int cbc_desall_crypt(struct blkcipher_desc *desc, long func,
-			    void *param, struct blkcipher_walk *walk)
+			    u8 *iv, struct blkcipher_walk *walk)
 {
 	int ret = blkcipher_walk_virt(desc, walk);
 	unsigned int nbytes = walk->nbytes;
@@ -116,20 +113,20 @@ static int cbc_desall_crypt(struct blkcipher_desc *desc, long func,
 	if (!nbytes)
 		goto out;
 
-	memcpy(param, walk->iv, DES_BLOCK_SIZE);
+	memcpy(iv, walk->iv, DES_BLOCK_SIZE);
 	do {
 		/* only use complete blocks */
 		unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
 		u8 *out = walk->dst.virt.addr;
 		u8 *in = walk->src.virt.addr;
 
-		ret = crypt_s390_kmc(func, param, out, in, n);
+		ret = crypt_s390_kmc(func, iv, out, in, n);
 		BUG_ON((ret < 0) || (ret != n));
 
 		nbytes &= DES_BLOCK_SIZE - 1;
 		ret = blkcipher_walk_done(desc, walk, nbytes);
 	} while ((nbytes = walk->nbytes));
-	memcpy(walk->iv, param, DES_BLOCK_SIZE);
+	memcpy(walk->iv, iv, DES_BLOCK_SIZE);
 
 out:
 	return ret;
@@ -139,22 +136,22 @@ static int ecb_des_encrypt(struct blkcipher_desc *desc,
 			   struct scatterlist *dst, struct scatterlist *src,
 			   unsigned int nbytes)
 {
-	struct crypt_s390_des_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return ecb_desall_crypt(desc, KM_DEA_ENCRYPT, sctx->key, &walk);
+	return ecb_desall_crypt(desc, KM_DEA_ENCRYPT, ctx->key, &walk);
 }
 
 static int ecb_des_decrypt(struct blkcipher_desc *desc,
 			   struct scatterlist *dst, struct scatterlist *src,
 			   unsigned int nbytes)
 {
-	struct crypt_s390_des_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return ecb_desall_crypt(desc, KM_DEA_DECRYPT, sctx->key, &walk);
+	return ecb_desall_crypt(desc, KM_DEA_DECRYPT, ctx->key, &walk);
 }
 
 static struct crypto_alg ecb_des_alg = {
@@ -163,7 +160,7 @@ static struct crypto_alg ecb_des_alg = {
 	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
 	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
 	.cra_blocksize = DES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct crypt_s390_des_ctx),
+	.cra_ctxsize = sizeof(struct s390_des_ctx),
 	.cra_type = &crypto_blkcipher_type,
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(ecb_des_alg.cra_list),
@@ -182,22 +179,22 @@ static int cbc_des_encrypt(struct blkcipher_desc *desc,
 			   struct scatterlist *dst, struct scatterlist *src,
 			   unsigned int nbytes)
 {
-	struct crypt_s390_des_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return cbc_desall_crypt(desc, KMC_DEA_ENCRYPT, sctx->iv, &walk);
+	return cbc_desall_crypt(desc, KMC_DEA_ENCRYPT, ctx->iv, &walk);
 }
 
 static int cbc_des_decrypt(struct blkcipher_desc *desc,
 			   struct scatterlist *dst, struct scatterlist *src,
 			   unsigned int nbytes)
 {
-	struct crypt_s390_des_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return cbc_desall_crypt(desc, KMC_DEA_DECRYPT, sctx->iv, &walk);
+	return cbc_desall_crypt(desc, KMC_DEA_DECRYPT, ctx->iv, &walk);
 }
 
 static struct crypto_alg cbc_des_alg = {
@@ -206,7 +203,7 @@ static struct crypto_alg cbc_des_alg = {
 	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
 	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
 	.cra_blocksize = DES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct crypt_s390_des_ctx),
+	.cra_ctxsize = sizeof(struct s390_des_ctx),
 	.cra_type = &crypto_blkcipher_type,
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(cbc_des_alg.cra_list),
@@ -235,10 +232,10 @@ static struct crypto_alg cbc_des_alg = {
  * property.
  *
  */
-static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key,
-			   unsigned int keylen)
+static int des3_setkey(struct crypto_tfm *tfm, const u8 *key,
+		       unsigned int key_len)
 {
-	struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);
+	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
 	u32 *flags = &tfm->crt_flags;
 
 	if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
@@ -248,141 +245,276 @@ static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key,
 		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
 		return -EINVAL;
 	}
-	memcpy(dctx->key, key, keylen);
+	memcpy(ctx->key, key, key_len);
 	return 0;
 }
 
-static void des3_192_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+static void des3_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-	struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);
+	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	crypt_s390_km(KM_TDEA_192_ENCRYPT, dctx->key, dst, (void*)src,
-		      DES_BLOCK_SIZE);
+	crypt_s390_km(KM_TDEA_192_ENCRYPT, ctx->key, dst, src, DES_BLOCK_SIZE);
 }
 
-static void des3_192_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+static void des3_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-	struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);
+	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	crypt_s390_km(KM_TDEA_192_DECRYPT, dctx->key, dst, (void*)src,
-		      DES_BLOCK_SIZE);
+	crypt_s390_km(KM_TDEA_192_DECRYPT, ctx->key, dst, src, DES_BLOCK_SIZE);
 }
 
-static struct crypto_alg des3_192_alg = {
+static struct crypto_alg des3_alg = {
 	.cra_name = "des3_ede",
 	.cra_driver_name = "des3_ede-s390",
 	.cra_priority = CRYPT_S390_PRIORITY,
 	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
 	.cra_blocksize = DES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct crypt_s390_des3_192_ctx),
+	.cra_ctxsize = sizeof(struct s390_des_ctx),
 	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(des3_192_alg.cra_list),
+	.cra_list = LIST_HEAD_INIT(des3_alg.cra_list),
 	.cra_u = {
 		.cipher = {
-			.cia_min_keysize = DES3_192_KEY_SIZE,
-			.cia_max_keysize = DES3_192_KEY_SIZE,
-			.cia_setkey = des3_192_setkey,
-			.cia_encrypt = des3_192_encrypt,
-			.cia_decrypt = des3_192_decrypt,
+			.cia_min_keysize = DES3_KEY_SIZE,
+			.cia_max_keysize = DES3_KEY_SIZE,
+			.cia_setkey = des3_setkey,
+			.cia_encrypt = des3_encrypt,
+			.cia_decrypt = des3_decrypt,
 		}
 	}
 };
 
-static int ecb_des3_192_encrypt(struct blkcipher_desc *desc,
-				struct scatterlist *dst,
-				struct scatterlist *src, unsigned int nbytes)
+static int ecb_des3_encrypt(struct blkcipher_desc *desc,
+			    struct scatterlist *dst, struct scatterlist *src,
+			    unsigned int nbytes)
 {
-	struct crypt_s390_des3_192_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return ecb_desall_crypt(desc, KM_TDEA_192_ENCRYPT, sctx->key, &walk);
+	return ecb_desall_crypt(desc, KM_TDEA_192_ENCRYPT, ctx->key, &walk);
 }
 
-static int ecb_des3_192_decrypt(struct blkcipher_desc *desc,
-				struct scatterlist *dst,
-				struct scatterlist *src, unsigned int nbytes)
+static int ecb_des3_decrypt(struct blkcipher_desc *desc,
+			    struct scatterlist *dst, struct scatterlist *src,
+			    unsigned int nbytes)
 {
-	struct crypt_s390_des3_192_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return ecb_desall_crypt(desc, KM_TDEA_192_DECRYPT, sctx->key, &walk);
+	return ecb_desall_crypt(desc, KM_TDEA_192_DECRYPT, ctx->key, &walk);
 }
 
-static struct crypto_alg ecb_des3_192_alg = {
+static struct crypto_alg ecb_des3_alg = {
 	.cra_name = "ecb(des3_ede)",
 	.cra_driver_name = "ecb-des3_ede-s390",
 	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
 	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
 	.cra_blocksize = DES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct crypt_s390_des3_192_ctx),
+	.cra_ctxsize = sizeof(struct s390_des_ctx),
 	.cra_type = &crypto_blkcipher_type,
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(
-					ecb_des3_192_alg.cra_list),
+					ecb_des3_alg.cra_list),
 	.cra_u = {
 		.blkcipher = {
-			.min_keysize = DES3_192_KEY_SIZE,
-			.max_keysize = DES3_192_KEY_SIZE,
-			.setkey = des3_192_setkey,
-			.encrypt = ecb_des3_192_encrypt,
-			.decrypt = ecb_des3_192_decrypt,
+			.min_keysize = DES3_KEY_SIZE,
+			.max_keysize = DES3_KEY_SIZE,
+			.setkey = des3_setkey,
+			.encrypt = ecb_des3_encrypt,
+			.decrypt = ecb_des3_decrypt,
 		}
 	}
 };
 
-static int cbc_des3_192_encrypt(struct blkcipher_desc *desc,
-				struct scatterlist *dst,
-				struct scatterlist *src, unsigned int nbytes)
+static int cbc_des3_encrypt(struct blkcipher_desc *desc,
+			    struct scatterlist *dst, struct scatterlist *src,
+			    unsigned int nbytes)
 {
-	struct crypt_s390_des3_192_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return cbc_desall_crypt(desc, KMC_TDEA_192_ENCRYPT, sctx->iv, &walk);
+	return cbc_desall_crypt(desc, KMC_TDEA_192_ENCRYPT, ctx->iv, &walk);
 }
 
-static int cbc_des3_192_decrypt(struct blkcipher_desc *desc,
-				struct scatterlist *dst,
-				struct scatterlist *src, unsigned int nbytes)
+static int cbc_des3_decrypt(struct blkcipher_desc *desc,
+			    struct scatterlist *dst, struct scatterlist *src,
+			    unsigned int nbytes)
 {
-	struct crypt_s390_des3_192_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
-	return cbc_desall_crypt(desc, KMC_TDEA_192_DECRYPT, sctx->iv, &walk);
+	return cbc_desall_crypt(desc, KMC_TDEA_192_DECRYPT, ctx->iv, &walk);
 }
 
-static struct crypto_alg cbc_des3_192_alg = {
+static struct crypto_alg cbc_des3_alg = {
 	.cra_name = "cbc(des3_ede)",
 	.cra_driver_name = "cbc-des3_ede-s390",
 	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
 	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
 	.cra_blocksize = DES_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct crypt_s390_des3_192_ctx),
+	.cra_ctxsize = sizeof(struct s390_des_ctx),
 	.cra_type = &crypto_blkcipher_type,
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(
-					cbc_des3_192_alg.cra_list),
+					cbc_des3_alg.cra_list),
 	.cra_u = {
 		.blkcipher = {
-			.min_keysize = DES3_192_KEY_SIZE,
-			.max_keysize = DES3_192_KEY_SIZE,
+			.min_keysize = DES3_KEY_SIZE,
+			.max_keysize = DES3_KEY_SIZE,
 			.ivsize = DES_BLOCK_SIZE,
-			.setkey = des3_192_setkey,
-			.encrypt = cbc_des3_192_encrypt,
-			.decrypt = cbc_des3_192_decrypt,
+			.setkey = des3_setkey,
+			.encrypt = cbc_des3_encrypt,
+			.decrypt = cbc_des3_decrypt,
 		}
 	}
 };
 
-static int des_s390_init(void)
+static int ctr_desall_crypt(struct blkcipher_desc *desc, long func,
+			    struct s390_des_ctx *ctx, struct blkcipher_walk *walk)
+{
+	int ret = blkcipher_walk_virt_block(desc, walk, DES_BLOCK_SIZE);
+	unsigned int i, n, nbytes;
+	u8 buf[DES_BLOCK_SIZE];
+	u8 *out, *in;
+
+	memcpy(ctrblk, walk->iv, DES_BLOCK_SIZE);
+	while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
+		out = walk->dst.virt.addr;
+		in = walk->src.virt.addr;
+		while (nbytes >= DES_BLOCK_SIZE) {
+			/* align to block size, max. PAGE_SIZE */
+			n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
+				nbytes & ~(DES_BLOCK_SIZE - 1);
+			for (i = DES_BLOCK_SIZE; i < n; i += DES_BLOCK_SIZE) {
+				memcpy(ctrblk + i, ctrblk + i - DES_BLOCK_SIZE,
+				       DES_BLOCK_SIZE);
+				crypto_inc(ctrblk + i, DES_BLOCK_SIZE);
+			}
+			ret = crypt_s390_kmctr(func, ctx->key, out, in, n, ctrblk);
+			BUG_ON((ret < 0) || (ret != n));
+			if (n > DES_BLOCK_SIZE)
+				memcpy(ctrblk, ctrblk + n - DES_BLOCK_SIZE,
+				       DES_BLOCK_SIZE);
+			crypto_inc(ctrblk, DES_BLOCK_SIZE);
+			out += n;
+			in += n;
+			nbytes -= n;
+		}
+		ret = blkcipher_walk_done(desc, walk, nbytes);
+	}
+
+	/* final block may be < DES_BLOCK_SIZE, copy only nbytes */
+	if (nbytes) {
+		out = walk->dst.virt.addr;
+		in = walk->src.virt.addr;
+		ret = crypt_s390_kmctr(func, ctx->key, buf, in,
+				       DES_BLOCK_SIZE, ctrblk);
+		BUG_ON(ret < 0 || ret != DES_BLOCK_SIZE);
+		memcpy(out, buf, nbytes);
+		crypto_inc(ctrblk, DES_BLOCK_SIZE);
+		ret = blkcipher_walk_done(desc, walk, 0);
+	}
+	memcpy(walk->iv, ctrblk, DES_BLOCK_SIZE);
+	return ret;
+}
+
+static int ctr_des_encrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ctr_desall_crypt(desc, KMCTR_DEA_ENCRYPT, ctx, &walk);
+}
+
+static int ctr_des_decrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ctr_desall_crypt(desc, KMCTR_DEA_DECRYPT, ctx, &walk);
+}
+
+static struct crypto_alg ctr_des_alg = {
+	.cra_name = "ctr(des)",
+	.cra_driver_name = "ctr-des-s390",
+	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize = 1,
+	.cra_ctxsize = sizeof(struct s390_des_ctx),
+	.cra_type = &crypto_blkcipher_type,
+	.cra_module = THIS_MODULE,
+	.cra_list = LIST_HEAD_INIT(ctr_des_alg.cra_list),
+	.cra_u = {
+		.blkcipher = {
+			.min_keysize = DES_KEY_SIZE,
+			.max_keysize = DES_KEY_SIZE,
+			.ivsize = DES_BLOCK_SIZE,
+			.setkey = des_setkey,
+			.encrypt = ctr_des_encrypt,
+			.decrypt = ctr_des_decrypt,
+		}
+	}
+};
+
+static int ctr_des3_encrypt(struct blkcipher_desc *desc,
+			    struct scatterlist *dst, struct scatterlist *src,
+			    unsigned int nbytes)
+{
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ctr_desall_crypt(desc, KMCTR_TDEA_192_ENCRYPT, ctx, &walk);
+}
+
+static int ctr_des3_decrypt(struct blkcipher_desc *desc,
+			    struct scatterlist *dst, struct scatterlist *src,
+			    unsigned int nbytes)
+{
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ctr_desall_crypt(desc, KMCTR_TDEA_192_DECRYPT, ctx, &walk);
+}
+
+static struct crypto_alg ctr_des3_alg = {
+	.cra_name = "ctr(des3_ede)",
+	.cra_driver_name = "ctr-des3_ede-s390",
+	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize = 1,
+	.cra_ctxsize = sizeof(struct s390_des_ctx),
+	.cra_type = &crypto_blkcipher_type,
+	.cra_module = THIS_MODULE,
+	.cra_list = LIST_HEAD_INIT(ctr_des3_alg.cra_list),
+	.cra_u = {
+		.blkcipher = {
+			.min_keysize = DES3_KEY_SIZE,
+			.max_keysize = DES3_KEY_SIZE,
+			.ivsize = DES_BLOCK_SIZE,
+			.setkey = des3_setkey,
+			.encrypt = ctr_des3_encrypt,
+			.decrypt = ctr_des3_decrypt,
+		}
+	}
+};
+
+static int __init des_s390_init(void)
 {
 	int ret;
 
-	if (!crypt_s390_func_available(KM_DEA_ENCRYPT) ||
-	    !crypt_s390_func_available(KM_TDEA_192_ENCRYPT))
+	if (!crypt_s390_func_available(KM_DEA_ENCRYPT, CRYPT_S390_MSA) ||
+	    !crypt_s390_func_available(KM_TDEA_192_ENCRYPT, CRYPT_S390_MSA))
 		return -EOPNOTSUPP;
 
 	ret = crypto_register_alg(&des_alg);
@@ -394,23 +526,46 @@ static int des_s390_init(void)
 	ret = crypto_register_alg(&cbc_des_alg);
 	if (ret)
 		goto cbc_des_err;
-	ret = crypto_register_alg(&des3_192_alg);
+	ret = crypto_register_alg(&des3_alg);
 	if (ret)
-		goto des3_192_err;
-	ret = crypto_register_alg(&ecb_des3_192_alg);
+		goto des3_err;
+	ret = crypto_register_alg(&ecb_des3_alg);
 	if (ret)
-		goto ecb_des3_192_err;
-	ret = crypto_register_alg(&cbc_des3_192_alg);
+		goto ecb_des3_err;
+	ret = crypto_register_alg(&cbc_des3_alg);
 	if (ret)
-		goto cbc_des3_192_err;
+		goto cbc_des3_err;
+
+	if (crypt_s390_func_available(KMCTR_DEA_ENCRYPT,
+			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
+	    crypt_s390_func_available(KMCTR_TDEA_192_ENCRYPT,
+			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
+		ret = crypto_register_alg(&ctr_des_alg);
+		if (ret)
+			goto ctr_des_err;
+		ret = crypto_register_alg(&ctr_des3_alg);
+		if (ret)
+			goto ctr_des3_err;
+		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
+		if (!ctrblk) {
+			ret = -ENOMEM;
+			goto ctr_mem_err;
+		}
+	}
 out:
 	return ret;
 
-cbc_des3_192_err:
-	crypto_unregister_alg(&ecb_des3_192_alg);
-ecb_des3_192_err:
-	crypto_unregister_alg(&des3_192_alg);
-des3_192_err:
+ctr_mem_err:
+	crypto_unregister_alg(&ctr_des3_alg);
+ctr_des3_err:
+	crypto_unregister_alg(&ctr_des_alg);
+ctr_des_err:
+	crypto_unregister_alg(&cbc_des3_alg);
+cbc_des3_err:
+	crypto_unregister_alg(&ecb_des3_alg);
+ecb_des3_err:
+	crypto_unregister_alg(&des3_alg);
+des3_err:
 	crypto_unregister_alg(&cbc_des_alg);
 cbc_des_err:
 	crypto_unregister_alg(&ecb_des_alg);
@@ -422,9 +577,14 @@ des_err:
 
 static void __exit des_s390_exit(void)
 {
-	crypto_unregister_alg(&cbc_des3_192_alg);
-	crypto_unregister_alg(&ecb_des3_192_alg);
-	crypto_unregister_alg(&des3_192_alg);
+	if (ctrblk) {
+		crypto_unregister_alg(&ctr_des_alg);
+		crypto_unregister_alg(&ctr_des3_alg);
+		free_page((unsigned long) ctrblk);
+	}
+	crypto_unregister_alg(&cbc_des3_alg);
+	crypto_unregister_alg(&ecb_des3_alg);
+	crypto_unregister_alg(&des3_alg);
 	crypto_unregister_alg(&cbc_des_alg);
 	crypto_unregister_alg(&ecb_des_alg);
 	crypto_unregister_alg(&des_alg);
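
des_s390_init() above follows the usual kernel goto-unwind idiom, now extended for the CTR algorithms: each failing registration jumps to a label that unregisters everything registered before it, in reverse order. A minimal sketch of the idiom with hypothetical two-step names:

	/* hypothetical two-step init showing the reverse-order unwind */
	static int example_init(void)
	{
		int ret;

		ret = crypto_register_alg(&alg_a);
		if (ret)
			goto a_err;
		ret = crypto_register_alg(&alg_b);
		if (ret)
			goto b_err;
		return 0;

	b_err:
		crypto_unregister_alg(&alg_a);	/* undo step A when B fails */
	a_err:
		return ret;
	}

Note that the ctrblk page is allocated only after both CTR algorithms are registered, so ctr_mem_err unwinds both registrations, and the exit path frees the page only when it was actually allocated.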
diff --git a/arch/s390/crypto/ghash_s390.c b/arch/s390/crypto/ghash_s390.c
new file mode 100644
index 00000000000..b1bd170f24b
--- /dev/null
+++ b/arch/s390/crypto/ghash_s390.c
@@ -0,0 +1,162 @@
+/*
+ * Cryptographic API.
+ *
+ * s390 implementation of the GHASH algorithm for GCM (Galois/Counter Mode).
+ *
+ * Copyright IBM Corp. 2011
+ * Author(s): Gerald Schaefer <gerald.schaefer@de.ibm.com>
+ */
+
+#include <crypto/internal/hash.h>
+#include <linux/module.h>
+
+#include "crypt_s390.h"
+
+#define GHASH_BLOCK_SIZE	16
+#define GHASH_DIGEST_SIZE	16
+
+struct ghash_ctx {
+	u8 icv[16];
+	u8 key[16];
+};
+
+struct ghash_desc_ctx {
+	u8 buffer[GHASH_BLOCK_SIZE];
+	u32 bytes;
+};
+
+static int ghash_init(struct shash_desc *desc)
+{
+	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
+
+	memset(dctx, 0, sizeof(*dctx));
+
+	return 0;
+}
+
+static int ghash_setkey(struct crypto_shash *tfm,
+			const u8 *key, unsigned int keylen)
+{
+	struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
+
+	if (keylen != GHASH_BLOCK_SIZE) {
+		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		return -EINVAL;
+	}
+
+	memcpy(ctx->key, key, GHASH_BLOCK_SIZE);
+	memset(ctx->icv, 0, GHASH_BLOCK_SIZE);
+
+	return 0;
+}
+
+static int ghash_update(struct shash_desc *desc,
+			const u8 *src, unsigned int srclen)
+{
+	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
+	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
+	unsigned int n;
+	u8 *buf = dctx->buffer;
+	int ret;
+
+	if (dctx->bytes) {
+		u8 *pos = buf + (GHASH_BLOCK_SIZE - dctx->bytes);
+
+		n = min(srclen, dctx->bytes);
+		dctx->bytes -= n;
+		srclen -= n;
+
+		memcpy(pos, src, n);
+		src += n;
+
+		if (!dctx->bytes) {
+			ret = crypt_s390_kimd(KIMD_GHASH, ctx, buf,
+					      GHASH_BLOCK_SIZE);
+			BUG_ON(ret != GHASH_BLOCK_SIZE);
+		}
+	}
+
+	n = srclen & ~(GHASH_BLOCK_SIZE - 1);
+	if (n) {
+		ret = crypt_s390_kimd(KIMD_GHASH, ctx, src, n);
+		BUG_ON(ret != n);
+		src += n;
+		srclen -= n;
+	}
+
+	if (srclen) {
+		dctx->bytes = GHASH_BLOCK_SIZE - srclen;
+		memcpy(buf, src, srclen);
+	}
+
+	return 0;
+}
+
+static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
+{
+	u8 *buf = dctx->buffer;
+	int ret;
+
+	if (dctx->bytes) {
+		u8 *pos = buf + (GHASH_BLOCK_SIZE - dctx->bytes);
+
+		memset(pos, 0, dctx->bytes);
+
+		ret = crypt_s390_kimd(KIMD_GHASH, ctx, buf, GHASH_BLOCK_SIZE);
+		BUG_ON(ret != GHASH_BLOCK_SIZE);
+	}
+
+	dctx->bytes = 0;
+}
+
+static int ghash_final(struct shash_desc *desc, u8 *dst)
+{
+	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
+	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
+
+	ghash_flush(ctx, dctx);
+	memcpy(dst, ctx->icv, GHASH_BLOCK_SIZE);
+
+	return 0;
+}
+
+static struct shash_alg ghash_alg = {
+	.digestsize = GHASH_DIGEST_SIZE,
+	.init = ghash_init,
+	.update = ghash_update,
+	.final = ghash_final,
+	.setkey = ghash_setkey,
+	.descsize = sizeof(struct ghash_desc_ctx),
+	.base = {
+		.cra_name = "ghash",
+		.cra_driver_name = "ghash-s390",
+		.cra_priority = CRYPT_S390_PRIORITY,
+		.cra_flags = CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize = GHASH_BLOCK_SIZE,
+		.cra_ctxsize = sizeof(struct ghash_ctx),
+		.cra_module = THIS_MODULE,
+		.cra_list = LIST_HEAD_INIT(ghash_alg.base.cra_list),
+	},
+};
+
+static int __init ghash_mod_init(void)
+{
+	if (!crypt_s390_func_available(KIMD_GHASH,
+				       CRYPT_S390_MSA | CRYPT_S390_MSA4))
+		return -EOPNOTSUPP;
+
+	return crypto_register_shash(&ghash_alg);
+}
+
+static void __exit ghash_mod_exit(void)
+{
+	crypto_unregister_shash(&ghash_alg);
+}
+
+module_init(ghash_mod_init);
+module_exit(ghash_mod_exit);
+
+MODULE_ALIAS("ghash");
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("GHASH Message Digest Algorithm, s390 implementation");
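
In ghash_update() above, dctx->bytes counts the free space left in the 16-byte buffer rather than the bytes stored, which is why the fill position is buf + (GHASH_BLOCK_SIZE - dctx->bytes). A worked example over two updates: update(5 bytes) hashes nothing and leaves dctx->bytes == 11; a following update(20 bytes) first completes the buffered block with 11 bytes and hashes it, then has 9 bytes left (below the block size), so it stores that tail and leaves dctx->bytes == 7. A user-space sketch of the same arithmetic (process_block() is a hypothetical stand-in for the crypt_s390_kimd(KIMD_GHASH, ...) call):

	if (bytes) {				/* free space in buffer */
		unsigned int n = srclen < bytes ? srclen : bytes;

		memcpy(buffer + (16 - bytes), src, n);	/* append at fill point */
		bytes -= n;
		src += n;
		srclen -= n;
		if (!bytes)
			process_block(buffer, 16);	/* block now complete */
	}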
diff --git a/arch/s390/crypto/prng.c b/arch/s390/crypto/prng.c
index 8b16c479585..0808fbf0f7d 100644
--- a/arch/s390/crypto/prng.c
+++ b/arch/s390/crypto/prng.c
@@ -166,7 +166,7 @@ static int __init prng_init(void)
 	int ret;
 
 	/* check if the CPU has a PRNG */
-	if (!crypt_s390_func_available(KMC_PRNG))
+	if (!crypt_s390_func_available(KMC_PRNG, CRYPT_S390_MSA))
 		return -EOPNOTSUPP;
 
 	if (prng_chunk_size < 8)
diff --git a/arch/s390/crypto/sha1_s390.c b/arch/s390/crypto/sha1_s390.c
index f6de7826c97..e9868c6e0a0 100644
--- a/arch/s390/crypto/sha1_s390.c
+++ b/arch/s390/crypto/sha1_s390.c
@@ -90,7 +90,7 @@ static struct shash_alg alg = {
 
 static int __init sha1_s390_init(void)
 {
-	if (!crypt_s390_func_available(KIMD_SHA_1))
+	if (!crypt_s390_func_available(KIMD_SHA_1, CRYPT_S390_MSA))
 		return -EOPNOTSUPP;
 	return crypto_register_shash(&alg);
 }
diff --git a/arch/s390/crypto/sha256_s390.c b/arch/s390/crypto/sha256_s390.c
index 61a7db37212..5ed8d64fc2e 100644
--- a/arch/s390/crypto/sha256_s390.c
+++ b/arch/s390/crypto/sha256_s390.c
@@ -86,7 +86,7 @@ static struct shash_alg alg = {
 
 static int sha256_s390_init(void)
 {
-	if (!crypt_s390_func_available(KIMD_SHA_256))
+	if (!crypt_s390_func_available(KIMD_SHA_256, CRYPT_S390_MSA))
 		return -EOPNOTSUPP;
 
 	return crypto_register_shash(&alg);
diff --git a/arch/s390/crypto/sha512_s390.c b/arch/s390/crypto/sha512_s390.c
index 4bf73d0dc52..32a81383b69 100644
--- a/arch/s390/crypto/sha512_s390.c
+++ b/arch/s390/crypto/sha512_s390.c
@@ -132,7 +132,7 @@ static int __init init(void)
 {
 	int ret;
 
-	if (!crypt_s390_func_available(KIMD_SHA_512))
+	if (!crypt_s390_func_available(KIMD_SHA_512, CRYPT_S390_MSA))
 		return -EOPNOTSUPP;
 	if ((ret = crypto_register_shash(&sha512_alg)) < 0)
 		goto out;