author		Gerald Schaefer <gerald.schaefer@de.ibm.com>	2011-04-26 02:12:42 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2011-05-04 01:06:30 -0400
commit		99d97222150a24e6096805530e141af94183b9a1 (patch)
tree		efe6ac0e8fee6230b0dce3846ab0c0e93e1c5c8e /arch/s390/crypto
parent		98971f8439b1bb9a61682fe24a865ddd25167a6b (diff)
crypto: s390 - add System z hardware support for XTS mode
This patch adds System z hardware acceleration support for the AES XTS mode.
The hardware support is available beginning with System z196.
Signed-off-by: Jan Glauber <jang@linux.vnet.ibm.com>
Signed-off-by: Gerald Schaefer <gerald.schaefer@de.ibm.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
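
For reference, the XTS construction (IEEE P1619) implemented by the new KM-XTS functions: the supplied key is the concatenation of two equal-length AES keys K1 and K2, and block j of the data unit with tweak value i is encrypted as

    T_j = E_{K_2}(i) \otimes \alpha^j, \qquad
    C_j = E_{K_1}(P_j \oplus T_j) \oplus T_j

where \otimes is multiplication in GF(2^128) and \alpha is its primitive element. This is why xts_aes_set_key below accepts 32- and 64-byte keys (two AES-128 or two AES-256 keys) and sends 48-byte keys, for which CPACF provides no XTS function, to the software fallback.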
Diffstat (limited to 'arch/s390/crypto')
-rw-r--r--	arch/s390/crypto/aes_s390.c	233
-rw-r--r--	arch/s390/crypto/crypt_s390.h	31
2 files changed, 264 insertions, 0 deletions
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index fc97b949254f..8230e8605deb 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -45,6 +45,24 @@ struct s390_aes_ctx {
 	} fallback;
 };
 
+struct pcc_param {
+	u8 key[32];
+	u8 tweak[16];
+	u8 block[16];
+	u8 bit[16];
+	u8 xts[16];
+};
+
+struct s390_xts_ctx {
+	u8 key[32];
+	u8 xts_param[16];
+	struct pcc_param pcc;
+	long enc;
+	long dec;
+	int key_len;
+	struct crypto_blkcipher *fallback;
+};
+
 /*
  * Check if the key_len is supported by the HW.
  * Returns 0 if it is, a positive number if it is not and software fallback is
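
The pcc_param structure added above mirrors the parameter block of the PCC compute-XTS-parameter functions (key, tweak, block-sequence number, intermediate bit index, and the resulting XTS parameter), while s390_xts_ctx keeps the two XTS half-keys separately: key feeds KM and pcc.key feeds PCC. Each 32-byte key field serves both layouts: XTS-256 keys fill it from offset 0, XTS-128 keys occupy only the upper 16 bytes, so the fields following the key line up immediately behind it either way. A standalone sketch, not part of the patch, of the offset arithmetic that xts_aes_crypt later uses to pick the right slice:

	#include <stdio.h>

	int main(void)
	{
		unsigned int key_len;

		/* same expression as in xts_aes_crypt below */
		for (key_len = 32; key_len <= 64; key_len += 32) {
			unsigned int offset = (key_len >> 1) & 0x10;

			/* 32 (2 x AES-128) -> offset 16: key in bytes 16..31
			 * 64 (2 x AES-256) -> offset 0:  key in bytes  0..31 */
			printf("xts key_len %2u -> param offset %u\n",
			       key_len, offset);
		}
		return 0;
	}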
@@ -504,8 +522,211 @@ static struct crypto_alg cbc_aes_alg = {
 	}
 };
 
+static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
+			       unsigned int len)
+{
+	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
+	unsigned int ret;
+
+	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
+			CRYPTO_TFM_REQ_MASK);
+
+	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
+	if (ret) {
+		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+		tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
+				CRYPTO_TFM_RES_MASK);
+	}
+	return ret;
+}
+
+static int xts_fallback_decrypt(struct blkcipher_desc *desc,
+		struct scatterlist *dst, struct scatterlist *src,
+		unsigned int nbytes)
+{
+	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_blkcipher *tfm;
+	unsigned int ret;
+
+	tfm = desc->tfm;
+	desc->tfm = xts_ctx->fallback;
+
+	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
+
+	desc->tfm = tfm;
+	return ret;
+}
+
+static int xts_fallback_encrypt(struct blkcipher_desc *desc,
+		struct scatterlist *dst, struct scatterlist *src,
+		unsigned int nbytes)
+{
+	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_blkcipher *tfm;
+	unsigned int ret;
+
+	tfm = desc->tfm;
+	desc->tfm = xts_ctx->fallback;
+
+	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
+
+	desc->tfm = tfm;
+	return ret;
+}
+
+static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+			   unsigned int key_len)
+{
+	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
+	u32 *flags = &tfm->crt_flags;
+
+	switch (key_len) {
+	case 32:
+		xts_ctx->enc = KM_XTS_128_ENCRYPT;
+		xts_ctx->dec = KM_XTS_128_DECRYPT;
+		memcpy(xts_ctx->key + 16, in_key, 16);
+		memcpy(xts_ctx->pcc.key + 16, in_key + 16, 16);
+		break;
+	case 48:
+		xts_ctx->enc = 0;
+		xts_ctx->dec = 0;
+		xts_fallback_setkey(tfm, in_key, key_len);
+		break;
+	case 64:
+		xts_ctx->enc = KM_XTS_256_ENCRYPT;
+		xts_ctx->dec = KM_XTS_256_DECRYPT;
+		memcpy(xts_ctx->key, in_key, 32);
+		memcpy(xts_ctx->pcc.key, in_key + 32, 32);
+		break;
+	default:
+		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+		return -EINVAL;
+	}
+	xts_ctx->key_len = key_len;
+	return 0;
+}
+
+static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
+			 struct s390_xts_ctx *xts_ctx,
+			 struct blkcipher_walk *walk)
+{
+	unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
+	int ret = blkcipher_walk_virt(desc, walk);
+	unsigned int nbytes = walk->nbytes;
+	unsigned int n;
+	u8 *in, *out;
+	void *param;
+
+	if (!nbytes)
+		goto out;
+
+	memset(xts_ctx->pcc.block, 0, sizeof(xts_ctx->pcc.block));
+	memset(xts_ctx->pcc.bit, 0, sizeof(xts_ctx->pcc.bit));
+	memset(xts_ctx->pcc.xts, 0, sizeof(xts_ctx->pcc.xts));
+	memcpy(xts_ctx->pcc.tweak, walk->iv, sizeof(xts_ctx->pcc.tweak));
+	param = xts_ctx->pcc.key + offset;
+	ret = crypt_s390_pcc(func, param);
+	BUG_ON(ret < 0);
+
+	memcpy(xts_ctx->xts_param, xts_ctx->pcc.xts, 16);
+	param = xts_ctx->key + offset;
+	do {
+		/* only use complete blocks */
+		n = nbytes & ~(AES_BLOCK_SIZE - 1);
+		out = walk->dst.virt.addr;
+		in = walk->src.virt.addr;
+
+		ret = crypt_s390_km(func, param, out, in, n);
+		BUG_ON(ret < 0 || ret != n);
+
+		nbytes &= AES_BLOCK_SIZE - 1;
+		ret = blkcipher_walk_done(desc, walk, nbytes);
+	} while ((nbytes = walk->nbytes));
+out:
+	return ret;
+}
+
+static int xts_aes_encrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	if (unlikely(xts_ctx->key_len == 48))
+		return xts_fallback_encrypt(desc, dst, src, nbytes);
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
+}
+
+static int xts_aes_decrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	if (unlikely(xts_ctx->key_len == 48))
+		return xts_fallback_decrypt(desc, dst, src, nbytes);
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
+}
+
+static int xts_fallback_init(struct crypto_tfm *tfm)
+{
+	const char *name = tfm->__crt_alg->cra_name;
+	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
+
+	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
+			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
+	if (IS_ERR(xts_ctx->fallback)) {
+		pr_err("Allocating XTS fallback algorithm %s failed\n",
+		       name);
+		return PTR_ERR(xts_ctx->fallback);
+	}
+	return 0;
+}
+
+static void xts_fallback_exit(struct crypto_tfm *tfm)
+{
+	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_blkcipher(xts_ctx->fallback);
+	xts_ctx->fallback = NULL;
+}
+
+static struct crypto_alg xts_aes_alg = {
+	.cra_name		=	"xts(aes)",
+	.cra_driver_name	=	"xts-aes-s390",
+	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
+					CRYPTO_ALG_NEED_FALLBACK,
+	.cra_blocksize		=	AES_BLOCK_SIZE,
+	.cra_ctxsize		=	sizeof(struct s390_xts_ctx),
+	.cra_type		=	&crypto_blkcipher_type,
+	.cra_module		=	THIS_MODULE,
+	.cra_list		=	LIST_HEAD_INIT(xts_aes_alg.cra_list),
+	.cra_init		=	xts_fallback_init,
+	.cra_exit		=	xts_fallback_exit,
+	.cra_u			=	{
+		.blkcipher = {
+			.min_keysize		=	2 * AES_MIN_KEY_SIZE,
+			.max_keysize		=	2 * AES_MAX_KEY_SIZE,
+			.ivsize			=	AES_BLOCK_SIZE,
+			.setkey			=	xts_aes_set_key,
+			.encrypt		=	xts_aes_encrypt,
+			.decrypt		=	xts_aes_decrypt,
+		}
+	}
+};
+
 static int __init aes_s390_init(void)
 {
+	unsigned long long facility_bits[2];
 	int ret;
 
 	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
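
Note how little tweak handling xts_aes_crypt does itself: PCC derives the initial XTS parameter from the IV and the second half-key, and KM then advances the tweak internally as it consumes blocks, which is why the same param block can simply be handed back on every walk iteration. For comparison, the per-block update a software XTS implementation performs between blocks is a multiplication by the primitive element alpha of GF(2^128); a minimal sketch of that step, modelled on the kernel's gf128mul_x_ble helper used by the generic xts template:

	#include <stdint.h>

	/* Multiply a 16-byte XTS tweak by alpha in GF(2^128), little-endian
	 * convention: shift the 128-bit value left one bit and, on carry-out,
	 * fold in the reduction polynomial x^128 + x^7 + x^2 + x + 1 (0x87). */
	static void gf128mul_x_ble(uint8_t t[16])
	{
		uint8_t carry = 0;
		int i;

		for (i = 0; i < 16; i++) {
			uint8_t next = t[i] >> 7;

			t[i] = (uint8_t)(t[i] << 1) | carry;
			carry = next;
		}
		if (carry)
			t[0] ^= 0x87;
	}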
@@ -535,9 +756,20 @@ static int __init aes_s390_init(void)
 	if (ret)
 		goto cbc_aes_err;
 
+	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
+			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
+	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
+			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
+		ret = crypto_register_alg(&xts_aes_alg);
+		if (ret)
+			goto xts_aes_err;
+	}
+
 out:
 	return ret;
 
+xts_aes_err:
+	crypto_unregister_alg(&cbc_aes_alg);
 cbc_aes_err:
 	crypto_unregister_alg(&ecb_aes_alg);
 ecb_aes_err:
@@ -548,6 +780,7 @@ aes_err:
 
 static void __exit aes_s390_fini(void)
 {
+	crypto_unregister_alg(&xts_aes_alg);
 	crypto_unregister_alg(&cbc_aes_alg);
 	crypto_unregister_alg(&ecb_aes_alg);
 	crypto_unregister_alg(&aes_alg);
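
Because xts_aes_alg registers under the generic name "xts(aes)" at composite priority, existing users such as dm-crypt pick up the accelerated implementation transparently; on machines without the XTS facility the registration above is skipped and the generic xts template serves the same requests. A hypothetical in-kernel caller, sketched against the blkcipher API current at the time of this commit (buffer size and key handling are illustrative only):

	#include <linux/crypto.h>
	#include <linux/err.h>
	#include <linux/random.h>
	#include <linux/scatterlist.h>
	#include <linux/slab.h>

	static int xts_aes_demo(void)
	{
		struct crypto_blkcipher *tfm;
		struct blkcipher_desc desc;
		struct scatterlist sg;
		u8 key[64];		/* two concatenated AES-256 keys */
		u8 iv[16] = { 0 };	/* tweak, e.g. the sector number */
		u8 *buf;
		int ret;

		tfm = crypto_alloc_blkcipher("xts(aes)", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		buf = kzalloc(512, GFP_KERNEL);
		if (!buf) {
			ret = -ENOMEM;
			goto out_tfm;
		}

		get_random_bytes(key, sizeof(key));
		ret = crypto_blkcipher_setkey(tfm, key, sizeof(key));
		if (ret)
			goto out_buf;

		crypto_blkcipher_set_iv(tfm, iv, sizeof(iv));
		desc.tfm = tfm;
		desc.flags = 0;

		sg_init_one(&sg, buf, 512);
		ret = crypto_blkcipher_encrypt(&desc, &sg, &sg, 512);

	out_buf:
		kfree(buf);
	out_tfm:
		crypto_free_blkcipher(tfm);
		return ret;
	}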
diff --git a/arch/s390/crypto/crypt_s390.h b/arch/s390/crypto/crypt_s390.h
index 4b8c96cab252..7cbfaf080a59 100644
--- a/arch/s390/crypto/crypt_s390.h
+++ b/arch/s390/crypto/crypt_s390.h
@@ -55,6 +55,10 @@ enum crypt_s390_km_func {
 	KM_AES_192_DECRYPT = CRYPT_S390_KM | 0x13 | 0x80,
 	KM_AES_256_ENCRYPT = CRYPT_S390_KM | 0x14,
 	KM_AES_256_DECRYPT = CRYPT_S390_KM | 0x14 | 0x80,
+	KM_XTS_128_ENCRYPT = CRYPT_S390_KM | 0x32,
+	KM_XTS_128_DECRYPT = CRYPT_S390_KM | 0x32 | 0x80,
+	KM_XTS_256_ENCRYPT = CRYPT_S390_KM | 0x34,
+	KM_XTS_256_DECRYPT = CRYPT_S390_KM | 0x34 | 0x80,
 };
 
 /*
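
The new enum entries follow the file's existing encoding: the low byte is the CPACF function code (0x32 for XTS-AES-128, 0x34 for XTS-AES-256), 0x80 is the modifier bit that turns the cipher function into decipher, and the CRYPT_S390_KM part is a driver-internal operation tag that is masked off before the instruction executes. A standalone sketch of the decomposition; the CRYPT_S390_KM and CRYPT_S390_FUNC_MASK values are assumptions based on the rest of crypt_s390.h, not part of this hunk:

	#include <stdio.h>

	enum {
		CRYPT_S390_KM        = 0x0100,	/* assumed op tag */
		CRYPT_S390_FUNC_MASK = 0x00ff,	/* assumed GR0 mask */
	};

	int main(void)
	{
		long enc = CRYPT_S390_KM | 0x34;	/* KM_XTS_256_ENCRYPT */
		long dec = CRYPT_S390_KM | 0x34 | 0x80;	/* KM_XTS_256_DECRYPT */

		/* KM keeps the modifier bit: it selects decipher. */
		printf("KM GR0:  enc=%#lx dec=%#lx\n",
		       enc & CRYPT_S390_FUNC_MASK, dec & CRYPT_S390_FUNC_MASK);

		/* crypt_s390_pcc below masks with 0x7f instead: the tweak
		 * computation is identical for encrypt and decrypt. */
		printf("PCC GR0: enc=%#lx dec=%#lx\n", enc & 0x7f, dec & 0x7f);
		return 0;
	}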
@@ -334,4 +338,31 @@ static inline int crypt_s390_func_available(int func,
 	return (status[func >> 3] & (0x80 >> (func & 7))) != 0;
 }
 
+/**
+ * crypt_s390_pcc:
+ * @func: the function code passed to KM; see crypt_s390_km_func
+ * @param: address of parameter block; see POP for details on each func
+ *
+ * Executes the PCC (PERFORM CRYPTOGRAPHIC COMPUTATION) operation of the CPU.
+ *
+ * Returns -1 for failure, 0 for success.
+ */
+static inline int crypt_s390_pcc(long func, void *param)
+{
+	register long __func asm("0") = func & 0x7f; /* encrypt or decrypt */
+	register void *__param asm("1") = param;
+	int ret = -1;
+
+	asm volatile(
+		"0:	.insn	rre,0xb92c0000,0,0 \n" /* PCC opcode */
+		"1:	brc	1,0b \n" /* handle partial completion */
+		"	la	%0,0\n"
+		"2:\n"
+		EX_TABLE(0b,2b) EX_TABLE(1b,2b)
+		: "+d" (ret)
+		: "d" (__func), "a" (__param) : "cc", "memory");
+	return ret;
+}
+
+
 #endif /* _CRYPTO_ARCH_S390_CRYPT_S390_H */