author	Gerald Schaefer <gerald.schaefer@de.ibm.com>	2011-05-04 01:09:44 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2011-05-04 01:09:44 -0400
commit	0200f3ecc19660bebeabbcbaf212957fcf1dbf8f (patch)
tree	584b5b936a58f3d9db09fca73ed10bdf20f942f5 /arch/s390/crypto
parent	9996e3421cae20a17c99881b2ac0f7562f760e04 (diff)
crypto: s390 - add System z hardware support for CTR mode
This patch adds System z hardware acceleration support for AES, DES and
3DES in CTR mode. The hardware support is available starting with System
z196.

Signed-off-by: Gerald Schaefer <gerald.schaefer@de.ibm.com>
Signed-off-by: Jan Glauber <jang@linux.vnet.ibm.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'arch/s390/crypto')
-rw-r--r--	arch/s390/crypto/aes_s390.c	146
-rw-r--r--	arch/s390/crypto/crypt_s390.h	66
-rw-r--r--	arch/s390/crypto/des_s390.c	169
3 files changed, 377 insertions(+), 4 deletions(-)
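Note on usage: once this patch is applied, the generic "ctr(aes)" name resolves to the hardware-backed "ctr-aes-s390" driver whenever KMCTR is available, with no change needed in callers. Below is a minimal sketch of such a caller through the blkcipher interface this patch implements; the function name, key, and buffer are illustrative, not part of the patch:

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <crypto/aes.h>

static int ctr_aes_demo(void)
{
        struct crypto_blkcipher *tfm;
        struct blkcipher_desc desc;
        struct scatterlist sg;
        u8 key[16] = { 0 };             /* illustrative all-zero AES-128 key */
        u8 iv[AES_BLOCK_SIZE] = { 0 };  /* initial counter block */
        u8 buf[64] = { 0 };             /* encrypted in place below */
        int ret;

        /* resolves to ctr-aes-s390 on z196+, else a software ctr template */
        tfm = crypto_alloc_blkcipher("ctr(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        ret = crypto_blkcipher_setkey(tfm, key, sizeof(key));
        if (ret)
                goto out;

        crypto_blkcipher_set_iv(tfm, iv, sizeof(iv));
        desc.tfm = tfm;
        desc.flags = 0;

        sg_init_one(&sg, buf, sizeof(buf));
        ret = crypto_blkcipher_encrypt(&desc, &sg, &sg, sizeof(buf));
out:
        crypto_free_blkcipher(tfm);
        return ret;
}

Since CTR mode turns the block cipher into a stream cipher (note cra_blocksize = 1 in the algorithm definitions below), running the same call over the ciphertext with the same key and starting counter recovers the plaintext.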
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index 8230e8605deb..a9ce135893f8 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -31,7 +31,8 @@
 #define AES_KEYLEN_192	2
 #define AES_KEYLEN_256	4
 
-static char keylen_flag = 0;
+static u8 *ctrblk;
+static char keylen_flag;
 
 struct s390_aes_ctx {
 	u8 iv[AES_BLOCK_SIZE];
@@ -724,9 +725,128 @@ static struct crypto_alg xts_aes_alg = {
 	}
 };
 
+static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+			   unsigned int key_len)
+{
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+	switch (key_len) {
+	case 16:
+		sctx->enc = KMCTR_AES_128_ENCRYPT;
+		sctx->dec = KMCTR_AES_128_DECRYPT;
+		break;
+	case 24:
+		sctx->enc = KMCTR_AES_192_ENCRYPT;
+		sctx->dec = KMCTR_AES_192_DECRYPT;
+		break;
+	case 32:
+		sctx->enc = KMCTR_AES_256_ENCRYPT;
+		sctx->dec = KMCTR_AES_256_DECRYPT;
+		break;
+	}
+
+	return aes_set_key(tfm, in_key, key_len);
+}
+
+static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
+			 struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
+{
+	int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
+	unsigned int i, n, nbytes;
+	u8 buf[AES_BLOCK_SIZE];
+	u8 *out, *in;
+
+	if (!walk->nbytes)
+		return ret;
+
+	memcpy(ctrblk, walk->iv, AES_BLOCK_SIZE);
+	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
+		out = walk->dst.virt.addr;
+		in = walk->src.virt.addr;
+		while (nbytes >= AES_BLOCK_SIZE) {
+			/* only use complete blocks, max. PAGE_SIZE */
+			n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
+				nbytes & ~(AES_BLOCK_SIZE - 1);
+			for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
+				memcpy(ctrblk + i, ctrblk + i - AES_BLOCK_SIZE,
+				       AES_BLOCK_SIZE);
+				crypto_inc(ctrblk + i, AES_BLOCK_SIZE);
+			}
+			ret = crypt_s390_kmctr(func, sctx->key, out, in, n, ctrblk);
+			BUG_ON(ret < 0 || ret != n);
+			if (n > AES_BLOCK_SIZE)
+				memcpy(ctrblk, ctrblk + n - AES_BLOCK_SIZE,
+				       AES_BLOCK_SIZE);
+			crypto_inc(ctrblk, AES_BLOCK_SIZE);
+			out += n;
+			in += n;
+			nbytes -= n;
+		}
+		ret = blkcipher_walk_done(desc, walk, nbytes);
+	}
+	/*
+	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
+	 */
+	if (nbytes) {
+		out = walk->dst.virt.addr;
+		in = walk->src.virt.addr;
+		ret = crypt_s390_kmctr(func, sctx->key, buf, in,
+				       AES_BLOCK_SIZE, ctrblk);
+		BUG_ON(ret < 0 || ret != AES_BLOCK_SIZE);
+		memcpy(out, buf, nbytes);
+		crypto_inc(ctrblk, AES_BLOCK_SIZE);
+		ret = blkcipher_walk_done(desc, walk, 0);
+	}
+	memcpy(walk->iv, ctrblk, AES_BLOCK_SIZE);
+	return ret;
+}
+
+static int ctr_aes_encrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
+}
+
+static int ctr_aes_decrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
+}
+
+static struct crypto_alg ctr_aes_alg = {
+	.cra_name		= "ctr(aes)",
+	.cra_driver_name	= "ctr-aes-s390",
+	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize		= 1,
+	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
+	.cra_type		= &crypto_blkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(ctr_aes_alg.cra_list),
+	.cra_u			= {
+		.blkcipher = {
+			.min_keysize	= AES_MIN_KEY_SIZE,
+			.max_keysize	= AES_MAX_KEY_SIZE,
+			.ivsize		= AES_BLOCK_SIZE,
+			.setkey		= ctr_aes_set_key,
+			.encrypt	= ctr_aes_encrypt,
+			.decrypt	= ctr_aes_decrypt,
+		}
+	}
+};
+
 static int __init aes_s390_init(void)
 {
-	unsigned long long facility_bits[2];
 	int ret;
 
 	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
@@ -765,9 +885,29 @@ static int __init aes_s390_init(void)
 		goto xts_aes_err;
 	}
 
+	if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
+				      CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
+	    crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
+				      CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
+	    crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
+				      CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
+		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
+		if (!ctrblk) {
+			ret = -ENOMEM;
+			goto ctr_aes_err;
+		}
+		ret = crypto_register_alg(&ctr_aes_alg);
+		if (ret) {
+			free_page((unsigned long) ctrblk);
+			goto ctr_aes_err;
+		}
+	}
+
 out:
 	return ret;
 
+ctr_aes_err:
+	crypto_unregister_alg(&xts_aes_alg);
 xts_aes_err:
 	crypto_unregister_alg(&cbc_aes_alg);
 cbc_aes_err:
@@ -780,6 +920,8 @@ aes_err:
 
 static void __exit aes_s390_fini(void)
 {
+	crypto_unregister_alg(&ctr_aes_alg);
+	free_page((unsigned long) ctrblk);
 	crypto_unregister_alg(&xts_aes_alg);
 	crypto_unregister_alg(&cbc_aes_alg);
 	crypto_unregister_alg(&ecb_aes_alg);
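A note on the batching technique in ctr_aes_crypt() above: rather than issue one KMCTR operation per 16-byte block, the driver pre-expands consecutive counter values into the shared ctrblk page, so a single instruction can process up to PAGE_SIZE bytes. Below is a standalone sketch of that expansion, with a byte-wise big-endian increment standing in for the kernel's crypto_inc(); the names ctr_inc and fill_ctrblk are illustrative, not from the patch:

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE	16

/* byte-wise big-endian increment with carry, mirroring crypto_inc() */
static void ctr_inc(uint8_t *counter, size_t size)
{
        while (size-- > 0)
                if (++counter[size] != 0)  /* stop once a byte does not wrap */
                        break;
}

/*
 * Expand the first counter block into n / AES_BLOCK_SIZE consecutive
 * blocks, as the inner loop of ctr_aes_crypt() does: block i is a copy
 * of block i - 1, incremented by one.
 */
static void fill_ctrblk(uint8_t *ctrblk, size_t n)
{
        size_t i;

        for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
                memcpy(ctrblk + i, ctrblk + i - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
                ctr_inc(ctrblk + i, AES_BLOCK_SIZE);
        }
}

After the KMCTR call, the last counter block in the page is copied back to the front and incremented once more to become the next starting counter; that is exactly the memcpy()/crypto_inc() pair following crypt_s390_kmctr() in the loop above.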
diff --git a/arch/s390/crypto/crypt_s390.h b/arch/s390/crypto/crypt_s390.h
index eb2e39186b6b..49676771bd66 100644
--- a/arch/s390/crypto/crypt_s390.h
+++ b/arch/s390/crypto/crypt_s390.h
@@ -34,7 +34,8 @@ enum crypt_s390_operations {
 	CRYPT_S390_KMC = 0x0200,
 	CRYPT_S390_KIMD = 0x0300,
 	CRYPT_S390_KLMD = 0x0400,
-	CRYPT_S390_KMAC = 0x0500
+	CRYPT_S390_KMAC = 0x0500,
+	CRYPT_S390_KMCTR = 0x0600
 };
 
 /*
@@ -83,6 +84,26 @@ enum crypt_s390_kmc_func {
 };
 
 /*
+ * function codes for KMCTR (CIPHER MESSAGE WITH COUNTER)
+ * instruction
+ */
+enum crypt_s390_kmctr_func {
+	KMCTR_QUERY            = CRYPT_S390_KMCTR | 0x0,
+	KMCTR_DEA_ENCRYPT      = CRYPT_S390_KMCTR | 0x1,
+	KMCTR_DEA_DECRYPT      = CRYPT_S390_KMCTR | 0x1 | 0x80,
+	KMCTR_TDEA_128_ENCRYPT = CRYPT_S390_KMCTR | 0x2,
+	KMCTR_TDEA_128_DECRYPT = CRYPT_S390_KMCTR | 0x2 | 0x80,
+	KMCTR_TDEA_192_ENCRYPT = CRYPT_S390_KMCTR | 0x3,
+	KMCTR_TDEA_192_DECRYPT = CRYPT_S390_KMCTR | 0x3 | 0x80,
+	KMCTR_AES_128_ENCRYPT  = CRYPT_S390_KMCTR | 0x12,
+	KMCTR_AES_128_DECRYPT  = CRYPT_S390_KMCTR | 0x12 | 0x80,
+	KMCTR_AES_192_ENCRYPT  = CRYPT_S390_KMCTR | 0x13,
+	KMCTR_AES_192_DECRYPT  = CRYPT_S390_KMCTR | 0x13 | 0x80,
+	KMCTR_AES_256_ENCRYPT  = CRYPT_S390_KMCTR | 0x14,
+	KMCTR_AES_256_DECRYPT  = CRYPT_S390_KMCTR | 0x14 | 0x80,
+};
+
+/*
  * function codes for KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST)
  * instruction
  */
@@ -293,6 +314,45 @@ static inline int crypt_s390_kmac(long func, void *param,
 }
 
 /**
+ * crypt_s390_kmctr:
+ * @func: the function code passed to KMCTR; see crypt_s390_kmctr_func
+ * @param: address of parameter block; see POP for details on each func
+ * @dest: address of destination memory area
+ * @src: address of source memory area
+ * @src_len: length of src operand in bytes
+ * @counter: address of counter value
+ *
+ * Executes the KMCTR (CIPHER MESSAGE WITH COUNTER) operation of the CPU.
+ *
+ * Returns -1 for failure, 0 for the query func, number of processed
+ * bytes for encryption/decryption funcs
+ */
+static inline int crypt_s390_kmctr(long func, void *param, u8 *dest,
+				   const u8 *src, long src_len, u8 *counter)
+{
+	register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
+	register void *__param asm("1") = param;
+	register const u8 *__src asm("2") = src;
+	register long __src_len asm("3") = src_len;
+	register u8 *__dest asm("4") = dest;
+	register u8 *__ctr asm("6") = counter;
+	int ret = -1;
+
+	asm volatile(
+		"0:	.insn	rrf,0xb92d0000,%3,%1,%4,0 \n" /* KMCTR opcode */
+		"1:	brc	1,0b \n" /* handle partial completion */
+		"	la	%0,0\n"
+		"2:\n"
+		EX_TABLE(0b,2b) EX_TABLE(1b,2b)
+		: "+d" (ret), "+a" (__src), "+d" (__src_len), "+a" (__dest),
+		  "+a" (__ctr)
+		: "d" (__func), "a" (__param) : "cc", "memory");
+	if (ret < 0)
+		return ret;
+	return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len;
+}
+
+/**
  * crypt_s390_func_available:
  * @func: the function code of the specific function; 0 if op in general
  *
@@ -329,6 +389,10 @@ static inline int crypt_s390_func_available(int func,
 	case CRYPT_S390_KMAC:
 		ret = crypt_s390_kmac(KMAC_QUERY, &status, NULL, 0);
 		break;
+	case CRYPT_S390_KMCTR:
+		ret = crypt_s390_kmctr(KMCTR_QUERY, &status, NULL, NULL, 0,
+				       NULL);
+		break;
 	default:
 		return 0;
 	}
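How the new KMCTR_QUERY case ties in: the query function stores a status block in which each function code owns one bit, and crypt_s390_func_available() tests that bit to decide whether the CTR algorithms may be registered. Below is a sketch of such a bit test, assuming the 16-byte, MSB-first status layout used by the other query functions in this header; the helper name is hypothetical:

/*
 * Sketch: test one function code's bit in the 16-byte query status
 * block. The 0x80 decrypt modifier is masked off because an
 * encrypt/decrypt pair shares a single query bit.
 */
static inline int kmctr_func_supported(const unsigned char status[16],
                                       int func)
{
        func &= 0x7f;   /* drop operation and decrypt-modifier bits */
        return (status[func >> 3] & (0x80 >> (func & 7))) != 0;
}

This sharing of one query bit per encrypt/decrypt pair is also why the init routines in aes_s390.c and des_s390.c only probe the *_ENCRYPT function codes.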
diff --git a/arch/s390/crypto/des_s390.c b/arch/s390/crypto/des_s390.c
index c36a01d70e04..a52bfd124d86 100644
--- a/arch/s390/crypto/des_s390.c
+++ b/arch/s390/crypto/des_s390.c
@@ -3,7 +3,7 @@
  *
  * s390 implementation of the DES Cipher Algorithm.
  *
- * Copyright IBM Corp. 2003,2007
+ * Copyright IBM Corp. 2003,2011
  * Author(s): Thomas Spatzier
  *	      Jan Glauber (jan.glauber@de.ibm.com)
  *
@@ -24,6 +24,8 @@
 
 #define DES3_KEY_SIZE	(3 * DES_KEY_SIZE)
 
+static u8 *ctrblk;
+
 struct s390_des_ctx {
 	u8 iv[DES_BLOCK_SIZE];
 	u8 key[DES3_KEY_SIZE];
@@ -370,6 +372,143 @@ static struct crypto_alg cbc_des3_alg = {
 	}
 };
 
+static int ctr_desall_crypt(struct blkcipher_desc *desc, long func,
+			    struct s390_des_ctx *ctx, struct blkcipher_walk *walk)
+{
+	int ret = blkcipher_walk_virt_block(desc, walk, DES_BLOCK_SIZE);
+	unsigned int i, n, nbytes;
+	u8 buf[DES_BLOCK_SIZE];
+	u8 *out, *in;
+
+	memcpy(ctrblk, walk->iv, DES_BLOCK_SIZE);
+	while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
+		out = walk->dst.virt.addr;
+		in = walk->src.virt.addr;
+		while (nbytes >= DES_BLOCK_SIZE) {
+			/* align to block size, max. PAGE_SIZE */
+			n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
+				nbytes & ~(DES_BLOCK_SIZE - 1);
+			for (i = DES_BLOCK_SIZE; i < n; i += DES_BLOCK_SIZE) {
+				memcpy(ctrblk + i, ctrblk + i - DES_BLOCK_SIZE,
+				       DES_BLOCK_SIZE);
+				crypto_inc(ctrblk + i, DES_BLOCK_SIZE);
+			}
+			ret = crypt_s390_kmctr(func, ctx->key, out, in, n, ctrblk);
+			BUG_ON((ret < 0) || (ret != n));
+			if (n > DES_BLOCK_SIZE)
+				memcpy(ctrblk, ctrblk + n - DES_BLOCK_SIZE,
+				       DES_BLOCK_SIZE);
+			crypto_inc(ctrblk, DES_BLOCK_SIZE);
+			out += n;
+			in += n;
+			nbytes -= n;
+		}
+		ret = blkcipher_walk_done(desc, walk, nbytes);
+	}
+
+	/* final block may be < DES_BLOCK_SIZE, copy only nbytes */
+	if (nbytes) {
+		out = walk->dst.virt.addr;
+		in = walk->src.virt.addr;
+		ret = crypt_s390_kmctr(func, ctx->key, buf, in,
+				       DES_BLOCK_SIZE, ctrblk);
+		BUG_ON(ret < 0 || ret != DES_BLOCK_SIZE);
+		memcpy(out, buf, nbytes);
+		crypto_inc(ctrblk, DES_BLOCK_SIZE);
+		ret = blkcipher_walk_done(desc, walk, 0);
+	}
+	memcpy(walk->iv, ctrblk, DES_BLOCK_SIZE);
+	return ret;
+}
+
+static int ctr_des_encrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ctr_desall_crypt(desc, KMCTR_DEA_ENCRYPT, ctx, &walk);
+}
+
+static int ctr_des_decrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ctr_desall_crypt(desc, KMCTR_DEA_DECRYPT, ctx, &walk);
+}
+
+static struct crypto_alg ctr_des_alg = {
+	.cra_name		= "ctr(des)",
+	.cra_driver_name	= "ctr-des-s390",
+	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize		= 1,
+	.cra_ctxsize		= sizeof(struct s390_des_ctx),
+	.cra_type		= &crypto_blkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(ctr_des_alg.cra_list),
+	.cra_u			= {
+		.blkcipher = {
+			.min_keysize	= DES_KEY_SIZE,
+			.max_keysize	= DES_KEY_SIZE,
+			.ivsize		= DES_BLOCK_SIZE,
+			.setkey		= des_setkey,
+			.encrypt	= ctr_des_encrypt,
+			.decrypt	= ctr_des_decrypt,
+		}
+	}
+};
+
+static int ctr_des3_encrypt(struct blkcipher_desc *desc,
+			    struct scatterlist *dst, struct scatterlist *src,
+			    unsigned int nbytes)
+{
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ctr_desall_crypt(desc, KMCTR_TDEA_192_ENCRYPT, ctx, &walk);
+}
+
+static int ctr_des3_decrypt(struct blkcipher_desc *desc,
+			    struct scatterlist *dst, struct scatterlist *src,
+			    unsigned int nbytes)
+{
+	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ctr_desall_crypt(desc, KMCTR_TDEA_192_DECRYPT, ctx, &walk);
+}
+
+static struct crypto_alg ctr_des3_alg = {
+	.cra_name		= "ctr(des3_ede)",
+	.cra_driver_name	= "ctr-des3_ede-s390",
+	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize		= 1,
+	.cra_ctxsize		= sizeof(struct s390_des_ctx),
+	.cra_type		= &crypto_blkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(ctr_des3_alg.cra_list),
+	.cra_u			= {
+		.blkcipher = {
+			.min_keysize	= DES3_KEY_SIZE,
+			.max_keysize	= DES3_KEY_SIZE,
+			.ivsize		= DES_BLOCK_SIZE,
+			.setkey		= des3_setkey,
+			.encrypt	= ctr_des3_encrypt,
+			.decrypt	= ctr_des3_decrypt,
+		}
+	}
+};
+
 static int __init des_s390_init(void)
 {
 	int ret;
@@ -396,9 +535,32 @@ static int __init des_s390_init(void)
 	ret = crypto_register_alg(&cbc_des3_alg);
 	if (ret)
 		goto cbc_des3_err;
+
+	if (crypt_s390_func_available(KMCTR_DEA_ENCRYPT,
+				      CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
+	    crypt_s390_func_available(KMCTR_TDEA_192_ENCRYPT,
+				      CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
+		ret = crypto_register_alg(&ctr_des_alg);
+		if (ret)
+			goto ctr_des_err;
+		ret = crypto_register_alg(&ctr_des3_alg);
+		if (ret)
+			goto ctr_des3_err;
+		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
+		if (!ctrblk) {
+			ret = -ENOMEM;
+			goto ctr_mem_err;
+		}
+	}
 out:
 	return ret;
 
+ctr_mem_err:
+	crypto_unregister_alg(&ctr_des3_alg);
+ctr_des3_err:
+	crypto_unregister_alg(&ctr_des_alg);
+ctr_des_err:
+	crypto_unregister_alg(&cbc_des3_alg);
 cbc_des3_err:
 	crypto_unregister_alg(&ecb_des3_alg);
 ecb_des3_err:
@@ -415,6 +577,11 @@ des_err:
 
 static void __exit des_s390_exit(void)
 {
+	if (ctrblk) {
+		crypto_unregister_alg(&ctr_des_alg);
+		crypto_unregister_alg(&ctr_des3_alg);
+		free_page((unsigned long) ctrblk);
+	}
 	crypto_unregister_alg(&cbc_des3_alg);
 	crypto_unregister_alg(&ecb_des3_alg);
 	crypto_unregister_alg(&des3_alg);