-rw-r--r--   arch/s390/crypto/aes_s390.c      117
-rw-r--r--   arch/s390/crypto/crypt_s390.h    493
-rw-r--r--   arch/s390/crypto/des_s390.c       72
-rw-r--r--   arch/s390/crypto/ghash_s390.c     16
-rw-r--r--   arch/s390/crypto/prng.c           60
-rw-r--r--   arch/s390/crypto/sha1_s390.c      10
-rw-r--r--   arch/s390/crypto/sha256_s390.c    14
-rw-r--r--   arch/s390/crypto/sha512_s390.c    14
-rw-r--r--   arch/s390/crypto/sha_common.c     10
-rw-r--r--   arch/s390/include/asm/cpacf.h    410
10 files changed, 556 insertions, 660 deletions
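
The patch removes the private crypt_s390.h wrapper layer and converts every s390 crypto module to the cpacf_* helpers declared in the new <asm/cpacf.h>. At the call sites the conversion is mechanical: each crypt_s390_func_available(code, facility mask) probe becomes cpacf_query(opcode, function code), and the crypt_s390_km/kmc/kmctr/kimd/ppno wrappers become the corresponding cpacf_* calls with unchanged argument order. A minimal sketch of the resulting pattern, using only calls that appear in the hunks below; the helper itself is hypothetical and not part of the patch:

#include <linux/errno.h>
#include <linux/types.h>
#include <crypto/aes.h>
#include <asm/cpacf.h>

/* Hypothetical helper mirroring the converted aes_s390.c call sites:
 * probe the KM facility for AES-128 and, if present, encrypt one block. */
static int aes128_encrypt_block(void *key_param, u8 *out, const u8 *in)
{
	if (!cpacf_query(CPACF_KM, CPACF_KM_AES_128_ENC))
		return -EOPNOTSUPP;
	/* the call sites expect the number of processed bytes back */
	return cpacf_km(CPACF_KM_AES_128_ENC, key_param, out, in,
			AES_BLOCK_SIZE) == AES_BLOCK_SIZE ? 0 : -EIO;
}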
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index 48e1a2d3e318..7554a8bb2adc 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -28,7 +28,7 @@
28#include <linux/init.h> 28#include <linux/init.h>
29#include <linux/spinlock.h> 29#include <linux/spinlock.h>
30#include <crypto/xts.h> 30#include <crypto/xts.h>
31#include "crypt_s390.h" 31#include <asm/cpacf.h>
32 32
33#define AES_KEYLEN_128 1 33#define AES_KEYLEN_128 1
34#define AES_KEYLEN_192 2 34#define AES_KEYLEN_192 2
@@ -145,16 +145,16 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
145 145
146 switch (sctx->key_len) { 146 switch (sctx->key_len) {
147 case 16: 147 case 16:
148 crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in, 148 cpacf_km(CPACF_KM_AES_128_ENC, &sctx->key, out, in,
149 AES_BLOCK_SIZE); 149 AES_BLOCK_SIZE);
150 break; 150 break;
151 case 24: 151 case 24:
152 crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in, 152 cpacf_km(CPACF_KM_AES_192_ENC, &sctx->key, out, in,
153 AES_BLOCK_SIZE); 153 AES_BLOCK_SIZE);
154 break; 154 break;
155 case 32: 155 case 32:
156 crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in, 156 cpacf_km(CPACF_KM_AES_256_ENC, &sctx->key, out, in,
157 AES_BLOCK_SIZE); 157 AES_BLOCK_SIZE);
158 break; 158 break;
159 } 159 }
160} 160}
@@ -170,16 +170,16 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
170 170
171 switch (sctx->key_len) { 171 switch (sctx->key_len) {
172 case 16: 172 case 16:
173 crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in, 173 cpacf_km(CPACF_KM_AES_128_DEC, &sctx->key, out, in,
174 AES_BLOCK_SIZE); 174 AES_BLOCK_SIZE);
175 break; 175 break;
176 case 24: 176 case 24:
177 crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in, 177 cpacf_km(CPACF_KM_AES_192_DEC, &sctx->key, out, in,
178 AES_BLOCK_SIZE); 178 AES_BLOCK_SIZE);
179 break; 179 break;
180 case 32: 180 case 32:
181 crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in, 181 cpacf_km(CPACF_KM_AES_256_DEC, &sctx->key, out, in,
182 AES_BLOCK_SIZE); 182 AES_BLOCK_SIZE);
183 break; 183 break;
184 } 184 }
185} 185}
@@ -212,7 +212,7 @@ static void fallback_exit_cip(struct crypto_tfm *tfm)
212static struct crypto_alg aes_alg = { 212static struct crypto_alg aes_alg = {
213 .cra_name = "aes", 213 .cra_name = "aes",
214 .cra_driver_name = "aes-s390", 214 .cra_driver_name = "aes-s390",
215 .cra_priority = CRYPT_S390_PRIORITY, 215 .cra_priority = 300,
216 .cra_flags = CRYPTO_ALG_TYPE_CIPHER | 216 .cra_flags = CRYPTO_ALG_TYPE_CIPHER |
217 CRYPTO_ALG_NEED_FALLBACK, 217 CRYPTO_ALG_NEED_FALLBACK,
218 .cra_blocksize = AES_BLOCK_SIZE, 218 .cra_blocksize = AES_BLOCK_SIZE,
@@ -298,16 +298,16 @@ static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
298 298
299 switch (key_len) { 299 switch (key_len) {
300 case 16: 300 case 16:
301 sctx->enc = KM_AES_128_ENCRYPT; 301 sctx->enc = CPACF_KM_AES_128_ENC;
302 sctx->dec = KM_AES_128_DECRYPT; 302 sctx->dec = CPACF_KM_AES_128_DEC;
303 break; 303 break;
304 case 24: 304 case 24:
305 sctx->enc = KM_AES_192_ENCRYPT; 305 sctx->enc = CPACF_KM_AES_192_ENC;
306 sctx->dec = KM_AES_192_DECRYPT; 306 sctx->dec = CPACF_KM_AES_192_DEC;
307 break; 307 break;
308 case 32: 308 case 32:
309 sctx->enc = KM_AES_256_ENCRYPT; 309 sctx->enc = CPACF_KM_AES_256_ENC;
310 sctx->dec = KM_AES_256_DECRYPT; 310 sctx->dec = CPACF_KM_AES_256_DEC;
311 break; 311 break;
312 } 312 }
313 313
@@ -326,7 +326,7 @@ static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
326 u8 *out = walk->dst.virt.addr; 326 u8 *out = walk->dst.virt.addr;
327 u8 *in = walk->src.virt.addr; 327 u8 *in = walk->src.virt.addr;
328 328
329 ret = crypt_s390_km(func, param, out, in, n); 329 ret = cpacf_km(func, param, out, in, n);
330 if (ret < 0 || ret != n) 330 if (ret < 0 || ret != n)
331 return -EIO; 331 return -EIO;
332 332
@@ -393,7 +393,7 @@ static void fallback_exit_blk(struct crypto_tfm *tfm)
393static struct crypto_alg ecb_aes_alg = { 393static struct crypto_alg ecb_aes_alg = {
394 .cra_name = "ecb(aes)", 394 .cra_name = "ecb(aes)",
395 .cra_driver_name = "ecb-aes-s390", 395 .cra_driver_name = "ecb-aes-s390",
396 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY, 396 .cra_priority = 400, /* combo: aes + ecb */
397 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | 397 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
398 CRYPTO_ALG_NEED_FALLBACK, 398 CRYPTO_ALG_NEED_FALLBACK,
399 .cra_blocksize = AES_BLOCK_SIZE, 399 .cra_blocksize = AES_BLOCK_SIZE,
@@ -427,16 +427,16 @@ static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
427 427
428 switch (key_len) { 428 switch (key_len) {
429 case 16: 429 case 16:
430 sctx->enc = KMC_AES_128_ENCRYPT; 430 sctx->enc = CPACF_KMC_AES_128_ENC;
431 sctx->dec = KMC_AES_128_DECRYPT; 431 sctx->dec = CPACF_KMC_AES_128_DEC;
432 break; 432 break;
433 case 24: 433 case 24:
434 sctx->enc = KMC_AES_192_ENCRYPT; 434 sctx->enc = CPACF_KMC_AES_192_ENC;
435 sctx->dec = KMC_AES_192_DECRYPT; 435 sctx->dec = CPACF_KMC_AES_192_DEC;
436 break; 436 break;
437 case 32: 437 case 32:
438 sctx->enc = KMC_AES_256_ENCRYPT; 438 sctx->enc = CPACF_KMC_AES_256_ENC;
439 sctx->dec = KMC_AES_256_DECRYPT; 439 sctx->dec = CPACF_KMC_AES_256_DEC;
440 break; 440 break;
441 } 441 }
442 442
@@ -465,7 +465,7 @@ static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
465 u8 *out = walk->dst.virt.addr; 465 u8 *out = walk->dst.virt.addr;
466 u8 *in = walk->src.virt.addr; 466 u8 *in = walk->src.virt.addr;
467 467
468 ret = crypt_s390_kmc(func, &param, out, in, n); 468 ret = cpacf_kmc(func, &param, out, in, n);
469 if (ret < 0 || ret != n) 469 if (ret < 0 || ret != n)
470 return -EIO; 470 return -EIO;
471 471
@@ -509,7 +509,7 @@ static int cbc_aes_decrypt(struct blkcipher_desc *desc,
509static struct crypto_alg cbc_aes_alg = { 509static struct crypto_alg cbc_aes_alg = {
510 .cra_name = "cbc(aes)", 510 .cra_name = "cbc(aes)",
511 .cra_driver_name = "cbc-aes-s390", 511 .cra_driver_name = "cbc-aes-s390",
512 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY, 512 .cra_priority = 400, /* combo: aes + cbc */
513 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | 513 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
514 CRYPTO_ALG_NEED_FALLBACK, 514 CRYPTO_ALG_NEED_FALLBACK,
515 .cra_blocksize = AES_BLOCK_SIZE, 515 .cra_blocksize = AES_BLOCK_SIZE,
@@ -596,8 +596,8 @@ static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
596 596
597 switch (key_len) { 597 switch (key_len) {
598 case 32: 598 case 32:
599 xts_ctx->enc = KM_XTS_128_ENCRYPT; 599 xts_ctx->enc = CPACF_KM_XTS_128_ENC;
600 xts_ctx->dec = KM_XTS_128_DECRYPT; 600 xts_ctx->dec = CPACF_KM_XTS_128_DEC;
601 memcpy(xts_ctx->key + 16, in_key, 16); 601 memcpy(xts_ctx->key + 16, in_key, 16);
602 memcpy(xts_ctx->pcc_key + 16, in_key + 16, 16); 602 memcpy(xts_ctx->pcc_key + 16, in_key + 16, 16);
603 break; 603 break;
@@ -607,8 +607,8 @@ static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
607 xts_fallback_setkey(tfm, in_key, key_len); 607 xts_fallback_setkey(tfm, in_key, key_len);
608 break; 608 break;
609 case 64: 609 case 64:
610 xts_ctx->enc = KM_XTS_256_ENCRYPT; 610 xts_ctx->enc = CPACF_KM_XTS_256_ENC;
611 xts_ctx->dec = KM_XTS_256_DECRYPT; 611 xts_ctx->dec = CPACF_KM_XTS_256_DEC;
612 memcpy(xts_ctx->key, in_key, 32); 612 memcpy(xts_ctx->key, in_key, 32);
613 memcpy(xts_ctx->pcc_key, in_key + 32, 32); 613 memcpy(xts_ctx->pcc_key, in_key + 32, 32);
614 break; 614 break;
@@ -643,7 +643,8 @@ static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
643 memset(pcc_param.xts, 0, sizeof(pcc_param.xts)); 643 memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
644 memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak)); 644 memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
645 memcpy(pcc_param.key, xts_ctx->pcc_key, 32); 645 memcpy(pcc_param.key, xts_ctx->pcc_key, 32);
646 ret = crypt_s390_pcc(func, &pcc_param.key[offset]); 646 /* remove decipher modifier bit from 'func' and call PCC */
647 ret = cpacf_pcc(func & 0x7f, &pcc_param.key[offset]);
647 if (ret < 0) 648 if (ret < 0)
648 return -EIO; 649 return -EIO;
649 650
@@ -655,7 +656,7 @@ static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
655 out = walk->dst.virt.addr; 656 out = walk->dst.virt.addr;
656 in = walk->src.virt.addr; 657 in = walk->src.virt.addr;
657 658
658 ret = crypt_s390_km(func, &xts_param.key[offset], out, in, n); 659 ret = cpacf_km(func, &xts_param.key[offset], out, in, n);
659 if (ret < 0 || ret != n) 660 if (ret < 0 || ret != n)
660 return -EIO; 661 return -EIO;
661 662
@@ -721,7 +722,7 @@ static void xts_fallback_exit(struct crypto_tfm *tfm)
721static struct crypto_alg xts_aes_alg = { 722static struct crypto_alg xts_aes_alg = {
722 .cra_name = "xts(aes)", 723 .cra_name = "xts(aes)",
723 .cra_driver_name = "xts-aes-s390", 724 .cra_driver_name = "xts-aes-s390",
724 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY, 725 .cra_priority = 400, /* combo: aes + xts */
725 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | 726 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
726 CRYPTO_ALG_NEED_FALLBACK, 727 CRYPTO_ALG_NEED_FALLBACK,
727 .cra_blocksize = AES_BLOCK_SIZE, 728 .cra_blocksize = AES_BLOCK_SIZE,
@@ -751,16 +752,16 @@ static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
751 752
752 switch (key_len) { 753 switch (key_len) {
753 case 16: 754 case 16:
754 sctx->enc = KMCTR_AES_128_ENCRYPT; 755 sctx->enc = CPACF_KMCTR_AES_128_ENC;
755 sctx->dec = KMCTR_AES_128_DECRYPT; 756 sctx->dec = CPACF_KMCTR_AES_128_DEC;
756 break; 757 break;
757 case 24: 758 case 24:
758 sctx->enc = KMCTR_AES_192_ENCRYPT; 759 sctx->enc = CPACF_KMCTR_AES_192_ENC;
759 sctx->dec = KMCTR_AES_192_DECRYPT; 760 sctx->dec = CPACF_KMCTR_AES_192_DEC;
760 break; 761 break;
761 case 32: 762 case 32:
762 sctx->enc = KMCTR_AES_256_ENCRYPT; 763 sctx->enc = CPACF_KMCTR_AES_256_ENC;
763 sctx->dec = KMCTR_AES_256_DECRYPT; 764 sctx->dec = CPACF_KMCTR_AES_256_DEC;
764 break; 765 break;
765 } 766 }
766 767
@@ -804,8 +805,7 @@ static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
804 n = __ctrblk_init(ctrptr, nbytes); 805 n = __ctrblk_init(ctrptr, nbytes);
805 else 806 else
806 n = AES_BLOCK_SIZE; 807 n = AES_BLOCK_SIZE;
807 ret = crypt_s390_kmctr(func, sctx->key, out, in, 808 ret = cpacf_kmctr(func, sctx->key, out, in, n, ctrptr);
808 n, ctrptr);
809 if (ret < 0 || ret != n) { 809 if (ret < 0 || ret != n) {
810 if (ctrptr == ctrblk) 810 if (ctrptr == ctrblk)
811 spin_unlock(&ctrblk_lock); 811 spin_unlock(&ctrblk_lock);
@@ -837,8 +837,8 @@ static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
837 if (nbytes) { 837 if (nbytes) {
838 out = walk->dst.virt.addr; 838 out = walk->dst.virt.addr;
839 in = walk->src.virt.addr; 839 in = walk->src.virt.addr;
840 ret = crypt_s390_kmctr(func, sctx->key, buf, in, 840 ret = cpacf_kmctr(func, sctx->key, buf, in,
841 AES_BLOCK_SIZE, ctrbuf); 841 AES_BLOCK_SIZE, ctrbuf);
842 if (ret < 0 || ret != AES_BLOCK_SIZE) 842 if (ret < 0 || ret != AES_BLOCK_SIZE)
843 return -EIO; 843 return -EIO;
844 memcpy(out, buf, nbytes); 844 memcpy(out, buf, nbytes);
@@ -875,7 +875,7 @@ static int ctr_aes_decrypt(struct blkcipher_desc *desc,
875static struct crypto_alg ctr_aes_alg = { 875static struct crypto_alg ctr_aes_alg = {
876 .cra_name = "ctr(aes)", 876 .cra_name = "ctr(aes)",
877 .cra_driver_name = "ctr-aes-s390", 877 .cra_driver_name = "ctr-aes-s390",
878 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY, 878 .cra_priority = 400, /* combo: aes + ctr */
879 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 879 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
880 .cra_blocksize = 1, 880 .cra_blocksize = 1,
881 .cra_ctxsize = sizeof(struct s390_aes_ctx), 881 .cra_ctxsize = sizeof(struct s390_aes_ctx),
@@ -899,11 +899,11 @@ static int __init aes_s390_init(void)
899{ 899{
900 int ret; 900 int ret;
901 901
902 if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA)) 902 if (cpacf_query(CPACF_KM, CPACF_KM_AES_128_ENC))
903 keylen_flag |= AES_KEYLEN_128; 903 keylen_flag |= AES_KEYLEN_128;
904 if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA)) 904 if (cpacf_query(CPACF_KM, CPACF_KM_AES_192_ENC))
905 keylen_flag |= AES_KEYLEN_192; 905 keylen_flag |= AES_KEYLEN_192;
906 if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA)) 906 if (cpacf_query(CPACF_KM, CPACF_KM_AES_256_ENC))
907 keylen_flag |= AES_KEYLEN_256; 907 keylen_flag |= AES_KEYLEN_256;
908 908
909 if (!keylen_flag) 909 if (!keylen_flag)
@@ -926,22 +926,17 @@ static int __init aes_s390_init(void)
926 if (ret) 926 if (ret)
927 goto cbc_aes_err; 927 goto cbc_aes_err;
928 928
929 if (crypt_s390_func_available(KM_XTS_128_ENCRYPT, 929 if (cpacf_query(CPACF_KM, CPACF_KM_XTS_128_ENC) &&
930 CRYPT_S390_MSA | CRYPT_S390_MSA4) && 930 cpacf_query(CPACF_KM, CPACF_KM_XTS_256_ENC)) {
931 crypt_s390_func_available(KM_XTS_256_ENCRYPT,
932 CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
933 ret = crypto_register_alg(&xts_aes_alg); 931 ret = crypto_register_alg(&xts_aes_alg);
934 if (ret) 932 if (ret)
935 goto xts_aes_err; 933 goto xts_aes_err;
936 xts_aes_alg_reg = 1; 934 xts_aes_alg_reg = 1;
937 } 935 }
938 936
939 if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT, 937 if (cpacf_query(CPACF_KMCTR, CPACF_KMCTR_AES_128_ENC) &&
940 CRYPT_S390_MSA | CRYPT_S390_MSA4) && 938 cpacf_query(CPACF_KMCTR, CPACF_KMCTR_AES_192_ENC) &&
941 crypt_s390_func_available(KMCTR_AES_192_ENCRYPT, 939 cpacf_query(CPACF_KMCTR, CPACF_KMCTR_AES_256_ENC)) {
942 CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
943 crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
944 CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
945 ctrblk = (u8 *) __get_free_page(GFP_KERNEL); 940 ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
946 if (!ctrblk) { 941 if (!ctrblk) {
947 ret = -ENOMEM; 942 ret = -ENOMEM;
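
One hunk above is more than a rename: xts_aes_crypt() now masks off the decipher modifier bit itself before calling cpacf_pcc(), because the old crypt_s390_pcc() wrapper (removed below) applied the func & 0x7f mask internally, and PCC computes the XTS tweak the same way for both directions. A hedged illustration of that bit layout; the macro name is an assumption, the 0x80/0x7f values come from the removed header:

#include <asm/cpacf.h>

#define DECIPHER_MODIFIER_BIT	0x80	/* name assumed; value from crypt_s390.h */

/* Illustrative only: derive the tweak for either direction by dropping the
 * decipher bit, which is what the converted xts_aes_crypt() now does inline
 * (the patch writes it as 'func & 0x7f'). */
static int xts_compute_tweak(unsigned long func, void *pcc_param)
{
	return cpacf_pcc(func & ~DECIPHER_MODIFIER_BIT, pcc_param);
}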
diff --git a/arch/s390/crypto/crypt_s390.h b/arch/s390/crypto/crypt_s390.h
deleted file mode 100644
index d9c4c313fbc6..000000000000
--- a/arch/s390/crypto/crypt_s390.h
+++ /dev/null
@@ -1,493 +0,0 @@
1/*
2 * Cryptographic API.
3 *
4 * Support for s390 cryptographic instructions.
5 *
6 * Copyright IBM Corp. 2003, 2015
7 * Author(s): Thomas Spatzier
8 * Jan Glauber (jan.glauber@de.ibm.com)
9 * Harald Freudenberger (freude@de.ibm.com)
10 *
11 * This program is free software; you can redistribute it and/or modify it
12 * under the terms of the GNU General Public License as published by the Free
13 * Software Foundation; either version 2 of the License, or (at your option)
14 * any later version.
15 *
16 */
17#ifndef _CRYPTO_ARCH_S390_CRYPT_S390_H
18#define _CRYPTO_ARCH_S390_CRYPT_S390_H
19
20#include <asm/errno.h>
21#include <asm/facility.h>
22
23#define CRYPT_S390_OP_MASK 0xFF00
24#define CRYPT_S390_FUNC_MASK 0x00FF
25
26#define CRYPT_S390_PRIORITY 300
27#define CRYPT_S390_COMPOSITE_PRIORITY 400
28
29#define CRYPT_S390_MSA 0x1
30#define CRYPT_S390_MSA3 0x2
31#define CRYPT_S390_MSA4 0x4
32#define CRYPT_S390_MSA5 0x8
33
34/* s390 cryptographic operations */
35enum crypt_s390_operations {
36 CRYPT_S390_KM = 0x0100,
37 CRYPT_S390_KMC = 0x0200,
38 CRYPT_S390_KIMD = 0x0300,
39 CRYPT_S390_KLMD = 0x0400,
40 CRYPT_S390_KMAC = 0x0500,
41 CRYPT_S390_KMCTR = 0x0600,
42 CRYPT_S390_PPNO = 0x0700
43};
44
45/*
46 * function codes for KM (CIPHER MESSAGE) instruction
47 * 0x80 is the decipher modifier bit
48 */
49enum crypt_s390_km_func {
50 KM_QUERY = CRYPT_S390_KM | 0x0,
51 KM_DEA_ENCRYPT = CRYPT_S390_KM | 0x1,
52 KM_DEA_DECRYPT = CRYPT_S390_KM | 0x1 | 0x80,
53 KM_TDEA_128_ENCRYPT = CRYPT_S390_KM | 0x2,
54 KM_TDEA_128_DECRYPT = CRYPT_S390_KM | 0x2 | 0x80,
55 KM_TDEA_192_ENCRYPT = CRYPT_S390_KM | 0x3,
56 KM_TDEA_192_DECRYPT = CRYPT_S390_KM | 0x3 | 0x80,
57 KM_AES_128_ENCRYPT = CRYPT_S390_KM | 0x12,
58 KM_AES_128_DECRYPT = CRYPT_S390_KM | 0x12 | 0x80,
59 KM_AES_192_ENCRYPT = CRYPT_S390_KM | 0x13,
60 KM_AES_192_DECRYPT = CRYPT_S390_KM | 0x13 | 0x80,
61 KM_AES_256_ENCRYPT = CRYPT_S390_KM | 0x14,
62 KM_AES_256_DECRYPT = CRYPT_S390_KM | 0x14 | 0x80,
63 KM_XTS_128_ENCRYPT = CRYPT_S390_KM | 0x32,
64 KM_XTS_128_DECRYPT = CRYPT_S390_KM | 0x32 | 0x80,
65 KM_XTS_256_ENCRYPT = CRYPT_S390_KM | 0x34,
66 KM_XTS_256_DECRYPT = CRYPT_S390_KM | 0x34 | 0x80,
67};
68
69/*
70 * function codes for KMC (CIPHER MESSAGE WITH CHAINING)
71 * instruction
72 */
73enum crypt_s390_kmc_func {
74 KMC_QUERY = CRYPT_S390_KMC | 0x0,
75 KMC_DEA_ENCRYPT = CRYPT_S390_KMC | 0x1,
76 KMC_DEA_DECRYPT = CRYPT_S390_KMC | 0x1 | 0x80,
77 KMC_TDEA_128_ENCRYPT = CRYPT_S390_KMC | 0x2,
78 KMC_TDEA_128_DECRYPT = CRYPT_S390_KMC | 0x2 | 0x80,
79 KMC_TDEA_192_ENCRYPT = CRYPT_S390_KMC | 0x3,
80 KMC_TDEA_192_DECRYPT = CRYPT_S390_KMC | 0x3 | 0x80,
81 KMC_AES_128_ENCRYPT = CRYPT_S390_KMC | 0x12,
82 KMC_AES_128_DECRYPT = CRYPT_S390_KMC | 0x12 | 0x80,
83 KMC_AES_192_ENCRYPT = CRYPT_S390_KMC | 0x13,
84 KMC_AES_192_DECRYPT = CRYPT_S390_KMC | 0x13 | 0x80,
85 KMC_AES_256_ENCRYPT = CRYPT_S390_KMC | 0x14,
86 KMC_AES_256_DECRYPT = CRYPT_S390_KMC | 0x14 | 0x80,
87 KMC_PRNG = CRYPT_S390_KMC | 0x43,
88};
89
90/*
91 * function codes for KMCTR (CIPHER MESSAGE WITH COUNTER)
92 * instruction
93 */
94enum crypt_s390_kmctr_func {
95 KMCTR_QUERY = CRYPT_S390_KMCTR | 0x0,
96 KMCTR_DEA_ENCRYPT = CRYPT_S390_KMCTR | 0x1,
97 KMCTR_DEA_DECRYPT = CRYPT_S390_KMCTR | 0x1 | 0x80,
98 KMCTR_TDEA_128_ENCRYPT = CRYPT_S390_KMCTR | 0x2,
99 KMCTR_TDEA_128_DECRYPT = CRYPT_S390_KMCTR | 0x2 | 0x80,
100 KMCTR_TDEA_192_ENCRYPT = CRYPT_S390_KMCTR | 0x3,
101 KMCTR_TDEA_192_DECRYPT = CRYPT_S390_KMCTR | 0x3 | 0x80,
102 KMCTR_AES_128_ENCRYPT = CRYPT_S390_KMCTR | 0x12,
103 KMCTR_AES_128_DECRYPT = CRYPT_S390_KMCTR | 0x12 | 0x80,
104 KMCTR_AES_192_ENCRYPT = CRYPT_S390_KMCTR | 0x13,
105 KMCTR_AES_192_DECRYPT = CRYPT_S390_KMCTR | 0x13 | 0x80,
106 KMCTR_AES_256_ENCRYPT = CRYPT_S390_KMCTR | 0x14,
107 KMCTR_AES_256_DECRYPT = CRYPT_S390_KMCTR | 0x14 | 0x80,
108};
109
110/*
111 * function codes for KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST)
112 * instruction
113 */
114enum crypt_s390_kimd_func {
115 KIMD_QUERY = CRYPT_S390_KIMD | 0,
116 KIMD_SHA_1 = CRYPT_S390_KIMD | 1,
117 KIMD_SHA_256 = CRYPT_S390_KIMD | 2,
118 KIMD_SHA_512 = CRYPT_S390_KIMD | 3,
119 KIMD_GHASH = CRYPT_S390_KIMD | 65,
120};
121
122/*
123 * function codes for KLMD (COMPUTE LAST MESSAGE DIGEST)
124 * instruction
125 */
126enum crypt_s390_klmd_func {
127 KLMD_QUERY = CRYPT_S390_KLMD | 0,
128 KLMD_SHA_1 = CRYPT_S390_KLMD | 1,
129 KLMD_SHA_256 = CRYPT_S390_KLMD | 2,
130 KLMD_SHA_512 = CRYPT_S390_KLMD | 3,
131};
132
133/*
134 * function codes for KMAC (COMPUTE MESSAGE AUTHENTICATION CODE)
135 * instruction
136 */
137enum crypt_s390_kmac_func {
138 KMAC_QUERY = CRYPT_S390_KMAC | 0,
139 KMAC_DEA = CRYPT_S390_KMAC | 1,
140 KMAC_TDEA_128 = CRYPT_S390_KMAC | 2,
141 KMAC_TDEA_192 = CRYPT_S390_KMAC | 3
142};
143
144/*
145 * function codes for PPNO (PERFORM PSEUDORANDOM NUMBER
146 * OPERATION) instruction
147 */
148enum crypt_s390_ppno_func {
149 PPNO_QUERY = CRYPT_S390_PPNO | 0,
150 PPNO_SHA512_DRNG_GEN = CRYPT_S390_PPNO | 3,
151 PPNO_SHA512_DRNG_SEED = CRYPT_S390_PPNO | 0x83
152};
153
154/**
155 * crypt_s390_km:
156 * @func: the function code passed to KM; see crypt_s390_km_func
157 * @param: address of parameter block; see POP for details on each func
158 * @dest: address of destination memory area
159 * @src: address of source memory area
160 * @src_len: length of src operand in bytes
161 *
162 * Executes the KM (CIPHER MESSAGE) operation of the CPU.
163 *
164 * Returns -1 for failure, 0 for the query func, number of processed
165 * bytes for encryption/decryption funcs
166 */
167static inline int crypt_s390_km(long func, void *param,
168 u8 *dest, const u8 *src, long src_len)
169{
170 register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
171 register void *__param asm("1") = param;
172 register const u8 *__src asm("2") = src;
173 register long __src_len asm("3") = src_len;
174 register u8 *__dest asm("4") = dest;
175 int ret;
176
177 asm volatile(
178 "0: .insn rre,0xb92e0000,%3,%1\n" /* KM opcode */
179 "1: brc 1,0b\n" /* handle partial completion */
180 " la %0,0\n"
181 "2:\n"
182 EX_TABLE(0b, 2b) EX_TABLE(1b, 2b)
183 : "=d" (ret), "+a" (__src), "+d" (__src_len), "+a" (__dest)
184 : "d" (__func), "a" (__param), "0" (-1) : "cc", "memory");
185 if (ret < 0)
186 return ret;
187 return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len;
188}
189
190/**
191 * crypt_s390_kmc:
192 * @func: the function code passed to KM; see crypt_s390_kmc_func
193 * @param: address of parameter block; see POP for details on each func
194 * @dest: address of destination memory area
195 * @src: address of source memory area
196 * @src_len: length of src operand in bytes
197 *
198 * Executes the KMC (CIPHER MESSAGE WITH CHAINING) operation of the CPU.
199 *
200 * Returns -1 for failure, 0 for the query func, number of processed
201 * bytes for encryption/decryption funcs
202 */
203static inline int crypt_s390_kmc(long func, void *param,
204 u8 *dest, const u8 *src, long src_len)
205{
206 register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
207 register void *__param asm("1") = param;
208 register const u8 *__src asm("2") = src;
209 register long __src_len asm("3") = src_len;
210 register u8 *__dest asm("4") = dest;
211 int ret;
212
213 asm volatile(
214 "0: .insn rre,0xb92f0000,%3,%1\n" /* KMC opcode */
215 "1: brc 1,0b\n" /* handle partial completion */
216 " la %0,0\n"
217 "2:\n"
218 EX_TABLE(0b, 2b) EX_TABLE(1b, 2b)
219 : "=d" (ret), "+a" (__src), "+d" (__src_len), "+a" (__dest)
220 : "d" (__func), "a" (__param), "0" (-1) : "cc", "memory");
221 if (ret < 0)
222 return ret;
223 return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len;
224}
225
226/**
227 * crypt_s390_kimd:
228 * @func: the function code passed to KM; see crypt_s390_kimd_func
229 * @param: address of parameter block; see POP for details on each func
230 * @src: address of source memory area
231 * @src_len: length of src operand in bytes
232 *
233 * Executes the KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST) operation
234 * of the CPU.
235 *
236 * Returns -1 for failure, 0 for the query func, number of processed
237 * bytes for digest funcs
238 */
239static inline int crypt_s390_kimd(long func, void *param,
240 const u8 *src, long src_len)
241{
242 register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
243 register void *__param asm("1") = param;
244 register const u8 *__src asm("2") = src;
245 register long __src_len asm("3") = src_len;
246 int ret;
247
248 asm volatile(
249 "0: .insn rre,0xb93e0000,%1,%1\n" /* KIMD opcode */
250 "1: brc 1,0b\n" /* handle partial completion */
251 " la %0,0\n"
252 "2:\n"
253 EX_TABLE(0b, 2b) EX_TABLE(1b, 2b)
254 : "=d" (ret), "+a" (__src), "+d" (__src_len)
255 : "d" (__func), "a" (__param), "0" (-1) : "cc", "memory");
256 if (ret < 0)
257 return ret;
258 return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len;
259}
260
261/**
262 * crypt_s390_klmd:
263 * @func: the function code passed to KM; see crypt_s390_klmd_func
264 * @param: address of parameter block; see POP for details on each func
265 * @src: address of source memory area
266 * @src_len: length of src operand in bytes
267 *
268 * Executes the KLMD (COMPUTE LAST MESSAGE DIGEST) operation of the CPU.
269 *
270 * Returns -1 for failure, 0 for the query func, number of processed
271 * bytes for digest funcs
272 */
273static inline int crypt_s390_klmd(long func, void *param,
274 const u8 *src, long src_len)
275{
276 register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
277 register void *__param asm("1") = param;
278 register const u8 *__src asm("2") = src;
279 register long __src_len asm("3") = src_len;
280 int ret;
281
282 asm volatile(
283 "0: .insn rre,0xb93f0000,%1,%1\n" /* KLMD opcode */
284 "1: brc 1,0b\n" /* handle partial completion */
285 " la %0,0\n"
286 "2:\n"
287 EX_TABLE(0b, 2b) EX_TABLE(1b, 2b)
288 : "=d" (ret), "+a" (__src), "+d" (__src_len)
289 : "d" (__func), "a" (__param), "0" (-1) : "cc", "memory");
290 if (ret < 0)
291 return ret;
292 return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len;
293}
294
295/**
296 * crypt_s390_kmac:
297 * @func: the function code passed to KM; see crypt_s390_klmd_func
298 * @param: address of parameter block; see POP for details on each func
299 * @src: address of source memory area
300 * @src_len: length of src operand in bytes
301 *
302 * Executes the KMAC (COMPUTE MESSAGE AUTHENTICATION CODE) operation
303 * of the CPU.
304 *
305 * Returns -1 for failure, 0 for the query func, number of processed
306 * bytes for digest funcs
307 */
308static inline int crypt_s390_kmac(long func, void *param,
309 const u8 *src, long src_len)
310{
311 register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
312 register void *__param asm("1") = param;
313 register const u8 *__src asm("2") = src;
314 register long __src_len asm("3") = src_len;
315 int ret;
316
317 asm volatile(
318 "0: .insn rre,0xb91e0000,%1,%1\n" /* KLAC opcode */
319 "1: brc 1,0b\n" /* handle partial completion */
320 " la %0,0\n"
321 "2:\n"
322 EX_TABLE(0b, 2b) EX_TABLE(1b, 2b)
323 : "=d" (ret), "+a" (__src), "+d" (__src_len)
324 : "d" (__func), "a" (__param), "0" (-1) : "cc", "memory");
325 if (ret < 0)
326 return ret;
327 return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len;
328}
329
330/**
331 * crypt_s390_kmctr:
332 * @func: the function code passed to KMCTR; see crypt_s390_kmctr_func
333 * @param: address of parameter block; see POP for details on each func
334 * @dest: address of destination memory area
335 * @src: address of source memory area
336 * @src_len: length of src operand in bytes
337 * @counter: address of counter value
338 *
339 * Executes the KMCTR (CIPHER MESSAGE WITH COUNTER) operation of the CPU.
340 *
341 * Returns -1 for failure, 0 for the query func, number of processed
342 * bytes for encryption/decryption funcs
343 */
344static inline int crypt_s390_kmctr(long func, void *param, u8 *dest,
345 const u8 *src, long src_len, u8 *counter)
346{
347 register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
348 register void *__param asm("1") = param;
349 register const u8 *__src asm("2") = src;
350 register long __src_len asm("3") = src_len;
351 register u8 *__dest asm("4") = dest;
352 register u8 *__ctr asm("6") = counter;
353 int ret = -1;
354
355 asm volatile(
356 "0: .insn rrf,0xb92d0000,%3,%1,%4,0\n" /* KMCTR opcode */
357 "1: brc 1,0b\n" /* handle partial completion */
358 " la %0,0\n"
359 "2:\n"
360 EX_TABLE(0b, 2b) EX_TABLE(1b, 2b)
361 : "+d" (ret), "+a" (__src), "+d" (__src_len), "+a" (__dest),
362 "+a" (__ctr)
363 : "d" (__func), "a" (__param) : "cc", "memory");
364 if (ret < 0)
365 return ret;
366 return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len;
367}
368
369/**
370 * crypt_s390_ppno:
371 * @func: the function code passed to PPNO; see crypt_s390_ppno_func
372 * @param: address of parameter block; see POP for details on each func
373 * @dest: address of destination memory area
374 * @dest_len: size of destination memory area in bytes
375 * @seed: address of seed data
376 * @seed_len: size of seed data in bytes
377 *
378 * Executes the PPNO (PERFORM PSEUDORANDOM NUMBER OPERATION)
379 * operation of the CPU.
380 *
381 * Returns -1 for failure, 0 for the query func, number of random
382 * bytes stored in dest buffer for generate function
383 */
384static inline int crypt_s390_ppno(long func, void *param,
385 u8 *dest, long dest_len,
386 const u8 *seed, long seed_len)
387{
388 register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
389 register void *__param asm("1") = param; /* param block (240 bytes) */
390 register u8 *__dest asm("2") = dest; /* buf for recv random bytes */
391 register long __dest_len asm("3") = dest_len; /* requested random bytes */
392 register const u8 *__seed asm("4") = seed; /* buf with seed data */
393 register long __seed_len asm("5") = seed_len; /* bytes in seed buf */
394 int ret = -1;
395
396 asm volatile (
397 "0: .insn rre,0xb93c0000,%1,%5\n" /* PPNO opcode */
398 "1: brc 1,0b\n" /* handle partial completion */
399 " la %0,0\n"
400 "2:\n"
401 EX_TABLE(0b, 2b) EX_TABLE(1b, 2b)
402 : "+d" (ret), "+a"(__dest), "+d"(__dest_len)
403 : "d"(__func), "a"(__param), "a"(__seed), "d"(__seed_len)
404 : "cc", "memory");
405 if (ret < 0)
406 return ret;
407 return (func & CRYPT_S390_FUNC_MASK) ? dest_len - __dest_len : 0;
408}
409
410/**
411 * crypt_s390_func_available:
412 * @func: the function code of the specific function; 0 if op in general
413 *
414 * Tests if a specific crypto function is implemented on the machine.
415 *
416 * Returns 1 if func available; 0 if func or op in general not available
417 */
418static inline int crypt_s390_func_available(int func,
419 unsigned int facility_mask)
420{
421 unsigned char status[16];
422 int ret;
423
424 if (facility_mask & CRYPT_S390_MSA && !test_facility(17))
425 return 0;
426 if (facility_mask & CRYPT_S390_MSA3 && !test_facility(76))
427 return 0;
428 if (facility_mask & CRYPT_S390_MSA4 && !test_facility(77))
429 return 0;
430 if (facility_mask & CRYPT_S390_MSA5 && !test_facility(57))
431 return 0;
432
433 switch (func & CRYPT_S390_OP_MASK) {
434 case CRYPT_S390_KM:
435 ret = crypt_s390_km(KM_QUERY, &status, NULL, NULL, 0);
436 break;
437 case CRYPT_S390_KMC:
438 ret = crypt_s390_kmc(KMC_QUERY, &status, NULL, NULL, 0);
439 break;
440 case CRYPT_S390_KIMD:
441 ret = crypt_s390_kimd(KIMD_QUERY, &status, NULL, 0);
442 break;
443 case CRYPT_S390_KLMD:
444 ret = crypt_s390_klmd(KLMD_QUERY, &status, NULL, 0);
445 break;
446 case CRYPT_S390_KMAC:
447 ret = crypt_s390_kmac(KMAC_QUERY, &status, NULL, 0);
448 break;
449 case CRYPT_S390_KMCTR:
450 ret = crypt_s390_kmctr(KMCTR_QUERY, &status,
451 NULL, NULL, 0, NULL);
452 break;
453 case CRYPT_S390_PPNO:
454 ret = crypt_s390_ppno(PPNO_QUERY, &status,
455 NULL, 0, NULL, 0);
456 break;
457 default:
458 return 0;
459 }
460 if (ret < 0)
461 return 0;
462 func &= CRYPT_S390_FUNC_MASK;
463 func &= 0x7f; /* mask modifier bit */
464 return (status[func >> 3] & (0x80 >> (func & 7))) != 0;
465}
466
467/**
468 * crypt_s390_pcc:
469 * @func: the function code passed to KM; see crypt_s390_km_func
470 * @param: address of parameter block; see POP for details on each func
471 *
472 * Executes the PCC (PERFORM CRYPTOGRAPHIC COMPUTATION) operation of the CPU.
473 *
474 * Returns -1 for failure, 0 for success.
475 */
476static inline int crypt_s390_pcc(long func, void *param)
477{
478 register long __func asm("0") = func & 0x7f; /* encrypt or decrypt */
479 register void *__param asm("1") = param;
480 int ret = -1;
481
482 asm volatile(
483 "0: .insn rre,0xb92c0000,0,0\n" /* PCC opcode */
484 "1: brc 1,0b\n" /* handle partial completion */
485 " la %0,0\n"
486 "2:\n"
487 EX_TABLE(0b, 2b) EX_TABLE(1b, 2b)
488 : "+d" (ret)
489 : "d" (__func), "a" (__param) : "cc", "memory");
490 return ret;
491}
492
493#endif /* _CRYPTO_ARCH_S390_CRYPT_S390_H */
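
The deleted crypt_s390_func_available() documents how availability used to be decided: the QUERY function of each instruction fills a 16-byte status block, and bit n, counted from the most significant bit of byte 0, reports function code n. cpacf_query() presumably wraps the same query-and-test; a sketch of the bit lookup, lifted from the removed helper (the function name here is hypothetical):

/* Test whether function code 'func' is flagged in a query status block. */
static inline int cpacf_status_test(const unsigned char status[16],
				    unsigned int func)
{
	func &= 0x7f;	/* drop the decipher modifier bit */
	return (status[func >> 3] & (0x80 >> (func & 7))) != 0;
}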
diff --git a/arch/s390/crypto/des_s390.c b/arch/s390/crypto/des_s390.c
index fba1c10a2dd0..697e71a75fc2 100644
--- a/arch/s390/crypto/des_s390.c
+++ b/arch/s390/crypto/des_s390.c
@@ -20,8 +20,7 @@
20#include <linux/crypto.h> 20#include <linux/crypto.h>
21#include <crypto/algapi.h> 21#include <crypto/algapi.h>
22#include <crypto/des.h> 22#include <crypto/des.h>
23 23#include <asm/cpacf.h>
24#include "crypt_s390.h"
25 24
26#define DES3_KEY_SIZE (3 * DES_KEY_SIZE) 25#define DES3_KEY_SIZE (3 * DES_KEY_SIZE)
27 26
@@ -54,20 +53,20 @@ static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
54{ 53{
55 struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm); 54 struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
56 55
57 crypt_s390_km(KM_DEA_ENCRYPT, ctx->key, out, in, DES_BLOCK_SIZE); 56 cpacf_km(CPACF_KM_DEA_ENC, ctx->key, out, in, DES_BLOCK_SIZE);
58} 57}
59 58
60static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) 59static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
61{ 60{
62 struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm); 61 struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
63 62
64 crypt_s390_km(KM_DEA_DECRYPT, ctx->key, out, in, DES_BLOCK_SIZE); 63 cpacf_km(CPACF_KM_DEA_DEC, ctx->key, out, in, DES_BLOCK_SIZE);
65} 64}
66 65
67static struct crypto_alg des_alg = { 66static struct crypto_alg des_alg = {
68 .cra_name = "des", 67 .cra_name = "des",
69 .cra_driver_name = "des-s390", 68 .cra_driver_name = "des-s390",
70 .cra_priority = CRYPT_S390_PRIORITY, 69 .cra_priority = 300,
71 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 70 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
72 .cra_blocksize = DES_BLOCK_SIZE, 71 .cra_blocksize = DES_BLOCK_SIZE,
73 .cra_ctxsize = sizeof(struct s390_des_ctx), 72 .cra_ctxsize = sizeof(struct s390_des_ctx),
@@ -95,7 +94,7 @@ static int ecb_desall_crypt(struct blkcipher_desc *desc, long func,
95 u8 *out = walk->dst.virt.addr; 94 u8 *out = walk->dst.virt.addr;
96 u8 *in = walk->src.virt.addr; 95 u8 *in = walk->src.virt.addr;
97 96
98 ret = crypt_s390_km(func, key, out, in, n); 97 ret = cpacf_km(func, key, out, in, n);
99 if (ret < 0 || ret != n) 98 if (ret < 0 || ret != n)
100 return -EIO; 99 return -EIO;
101 100
@@ -128,7 +127,7 @@ static int cbc_desall_crypt(struct blkcipher_desc *desc, long func,
128 u8 *out = walk->dst.virt.addr; 127 u8 *out = walk->dst.virt.addr;
129 u8 *in = walk->src.virt.addr; 128 u8 *in = walk->src.virt.addr;
130 129
131 ret = crypt_s390_kmc(func, &param, out, in, n); 130 ret = cpacf_kmc(func, &param, out, in, n);
132 if (ret < 0 || ret != n) 131 if (ret < 0 || ret != n)
133 return -EIO; 132 return -EIO;
134 133
@@ -149,7 +148,7 @@ static int ecb_des_encrypt(struct blkcipher_desc *desc,
149 struct blkcipher_walk walk; 148 struct blkcipher_walk walk;
150 149
151 blkcipher_walk_init(&walk, dst, src, nbytes); 150 blkcipher_walk_init(&walk, dst, src, nbytes);
152 return ecb_desall_crypt(desc, KM_DEA_ENCRYPT, ctx->key, &walk); 151 return ecb_desall_crypt(desc, CPACF_KM_DEA_ENC, ctx->key, &walk);
153} 152}
154 153
155static int ecb_des_decrypt(struct blkcipher_desc *desc, 154static int ecb_des_decrypt(struct blkcipher_desc *desc,
@@ -160,13 +159,13 @@ static int ecb_des_decrypt(struct blkcipher_desc *desc,
160 struct blkcipher_walk walk; 159 struct blkcipher_walk walk;
161 160
162 blkcipher_walk_init(&walk, dst, src, nbytes); 161 blkcipher_walk_init(&walk, dst, src, nbytes);
163 return ecb_desall_crypt(desc, KM_DEA_DECRYPT, ctx->key, &walk); 162 return ecb_desall_crypt(desc, CPACF_KM_DEA_DEC, ctx->key, &walk);
164} 163}
165 164
166static struct crypto_alg ecb_des_alg = { 165static struct crypto_alg ecb_des_alg = {
167 .cra_name = "ecb(des)", 166 .cra_name = "ecb(des)",
168 .cra_driver_name = "ecb-des-s390", 167 .cra_driver_name = "ecb-des-s390",
169 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY, 168 .cra_priority = 400, /* combo: des + ecb */
170 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 169 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
171 .cra_blocksize = DES_BLOCK_SIZE, 170 .cra_blocksize = DES_BLOCK_SIZE,
172 .cra_ctxsize = sizeof(struct s390_des_ctx), 171 .cra_ctxsize = sizeof(struct s390_des_ctx),
@@ -190,7 +189,7 @@ static int cbc_des_encrypt(struct blkcipher_desc *desc,
190 struct blkcipher_walk walk; 189 struct blkcipher_walk walk;
191 190
192 blkcipher_walk_init(&walk, dst, src, nbytes); 191 blkcipher_walk_init(&walk, dst, src, nbytes);
193 return cbc_desall_crypt(desc, KMC_DEA_ENCRYPT, &walk); 192 return cbc_desall_crypt(desc, CPACF_KMC_DEA_ENC, &walk);
194} 193}
195 194
196static int cbc_des_decrypt(struct blkcipher_desc *desc, 195static int cbc_des_decrypt(struct blkcipher_desc *desc,
@@ -200,13 +199,13 @@ static int cbc_des_decrypt(struct blkcipher_desc *desc,
200 struct blkcipher_walk walk; 199 struct blkcipher_walk walk;
201 200
202 blkcipher_walk_init(&walk, dst, src, nbytes); 201 blkcipher_walk_init(&walk, dst, src, nbytes);
203 return cbc_desall_crypt(desc, KMC_DEA_DECRYPT, &walk); 202 return cbc_desall_crypt(desc, CPACF_KMC_DEA_DEC, &walk);
204} 203}
205 204
206static struct crypto_alg cbc_des_alg = { 205static struct crypto_alg cbc_des_alg = {
207 .cra_name = "cbc(des)", 206 .cra_name = "cbc(des)",
208 .cra_driver_name = "cbc-des-s390", 207 .cra_driver_name = "cbc-des-s390",
209 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY, 208 .cra_priority = 400, /* combo: des + cbc */
210 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 209 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
211 .cra_blocksize = DES_BLOCK_SIZE, 210 .cra_blocksize = DES_BLOCK_SIZE,
212 .cra_ctxsize = sizeof(struct s390_des_ctx), 211 .cra_ctxsize = sizeof(struct s390_des_ctx),
@@ -258,20 +257,20 @@ static void des3_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
258{ 257{
259 struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm); 258 struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
260 259
261 crypt_s390_km(KM_TDEA_192_ENCRYPT, ctx->key, dst, src, DES_BLOCK_SIZE); 260 cpacf_km(CPACF_KM_TDEA_192_ENC, ctx->key, dst, src, DES_BLOCK_SIZE);
262} 261}
263 262
264static void des3_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) 263static void des3_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
265{ 264{
266 struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm); 265 struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
267 266
268 crypt_s390_km(KM_TDEA_192_DECRYPT, ctx->key, dst, src, DES_BLOCK_SIZE); 267 cpacf_km(CPACF_KM_TDEA_192_DEC, ctx->key, dst, src, DES_BLOCK_SIZE);
269} 268}
270 269
271static struct crypto_alg des3_alg = { 270static struct crypto_alg des3_alg = {
272 .cra_name = "des3_ede", 271 .cra_name = "des3_ede",
273 .cra_driver_name = "des3_ede-s390", 272 .cra_driver_name = "des3_ede-s390",
274 .cra_priority = CRYPT_S390_PRIORITY, 273 .cra_priority = 300,
275 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 274 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
276 .cra_blocksize = DES_BLOCK_SIZE, 275 .cra_blocksize = DES_BLOCK_SIZE,
277 .cra_ctxsize = sizeof(struct s390_des_ctx), 276 .cra_ctxsize = sizeof(struct s390_des_ctx),
@@ -295,7 +294,7 @@ static int ecb_des3_encrypt(struct blkcipher_desc *desc,
295 struct blkcipher_walk walk; 294 struct blkcipher_walk walk;
296 295
297 blkcipher_walk_init(&walk, dst, src, nbytes); 296 blkcipher_walk_init(&walk, dst, src, nbytes);
298 return ecb_desall_crypt(desc, KM_TDEA_192_ENCRYPT, ctx->key, &walk); 297 return ecb_desall_crypt(desc, CPACF_KM_TDEA_192_ENC, ctx->key, &walk);
299} 298}
300 299
301static int ecb_des3_decrypt(struct blkcipher_desc *desc, 300static int ecb_des3_decrypt(struct blkcipher_desc *desc,
@@ -306,13 +305,13 @@ static int ecb_des3_decrypt(struct blkcipher_desc *desc,
306 struct blkcipher_walk walk; 305 struct blkcipher_walk walk;
307 306
308 blkcipher_walk_init(&walk, dst, src, nbytes); 307 blkcipher_walk_init(&walk, dst, src, nbytes);
309 return ecb_desall_crypt(desc, KM_TDEA_192_DECRYPT, ctx->key, &walk); 308 return ecb_desall_crypt(desc, CPACF_KM_TDEA_192_DEC, ctx->key, &walk);
310} 309}
311 310
312static struct crypto_alg ecb_des3_alg = { 311static struct crypto_alg ecb_des3_alg = {
313 .cra_name = "ecb(des3_ede)", 312 .cra_name = "ecb(des3_ede)",
314 .cra_driver_name = "ecb-des3_ede-s390", 313 .cra_driver_name = "ecb-des3_ede-s390",
315 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY, 314 .cra_priority = 400, /* combo: des3 + ecb */
316 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 315 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
317 .cra_blocksize = DES_BLOCK_SIZE, 316 .cra_blocksize = DES_BLOCK_SIZE,
318 .cra_ctxsize = sizeof(struct s390_des_ctx), 317 .cra_ctxsize = sizeof(struct s390_des_ctx),
@@ -336,7 +335,7 @@ static int cbc_des3_encrypt(struct blkcipher_desc *desc,
336 struct blkcipher_walk walk; 335 struct blkcipher_walk walk;
337 336
338 blkcipher_walk_init(&walk, dst, src, nbytes); 337 blkcipher_walk_init(&walk, dst, src, nbytes);
339 return cbc_desall_crypt(desc, KMC_TDEA_192_ENCRYPT, &walk); 338 return cbc_desall_crypt(desc, CPACF_KMC_TDEA_192_ENC, &walk);
340} 339}
341 340
342static int cbc_des3_decrypt(struct blkcipher_desc *desc, 341static int cbc_des3_decrypt(struct blkcipher_desc *desc,
@@ -346,13 +345,13 @@ static int cbc_des3_decrypt(struct blkcipher_desc *desc,
346 struct blkcipher_walk walk; 345 struct blkcipher_walk walk;
347 346
348 blkcipher_walk_init(&walk, dst, src, nbytes); 347 blkcipher_walk_init(&walk, dst, src, nbytes);
349 return cbc_desall_crypt(desc, KMC_TDEA_192_DECRYPT, &walk); 348 return cbc_desall_crypt(desc, CPACF_KMC_TDEA_192_DEC, &walk);
350} 349}
351 350
352static struct crypto_alg cbc_des3_alg = { 351static struct crypto_alg cbc_des3_alg = {
353 .cra_name = "cbc(des3_ede)", 352 .cra_name = "cbc(des3_ede)",
354 .cra_driver_name = "cbc-des3_ede-s390", 353 .cra_driver_name = "cbc-des3_ede-s390",
355 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY, 354 .cra_priority = 400, /* combo: des3 + cbc */
356 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 355 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
357 .cra_blocksize = DES_BLOCK_SIZE, 356 .cra_blocksize = DES_BLOCK_SIZE,
358 .cra_ctxsize = sizeof(struct s390_des_ctx), 357 .cra_ctxsize = sizeof(struct s390_des_ctx),
@@ -407,8 +406,7 @@ static int ctr_desall_crypt(struct blkcipher_desc *desc, long func,
407 n = __ctrblk_init(ctrptr, nbytes); 406 n = __ctrblk_init(ctrptr, nbytes);
408 else 407 else
409 n = DES_BLOCK_SIZE; 408 n = DES_BLOCK_SIZE;
410 ret = crypt_s390_kmctr(func, ctx->key, out, in, 409 ret = cpacf_kmctr(func, ctx->key, out, in, n, ctrptr);
411 n, ctrptr);
412 if (ret < 0 || ret != n) { 410 if (ret < 0 || ret != n) {
413 if (ctrptr == ctrblk) 411 if (ctrptr == ctrblk)
414 spin_unlock(&ctrblk_lock); 412 spin_unlock(&ctrblk_lock);
@@ -438,8 +436,8 @@ static int ctr_desall_crypt(struct blkcipher_desc *desc, long func,
438 if (nbytes) { 436 if (nbytes) {
439 out = walk->dst.virt.addr; 437 out = walk->dst.virt.addr;
440 in = walk->src.virt.addr; 438 in = walk->src.virt.addr;
441 ret = crypt_s390_kmctr(func, ctx->key, buf, in, 439 ret = cpacf_kmctr(func, ctx->key, buf, in,
442 DES_BLOCK_SIZE, ctrbuf); 440 DES_BLOCK_SIZE, ctrbuf);
443 if (ret < 0 || ret != DES_BLOCK_SIZE) 441 if (ret < 0 || ret != DES_BLOCK_SIZE)
444 return -EIO; 442 return -EIO;
445 memcpy(out, buf, nbytes); 443 memcpy(out, buf, nbytes);
@@ -458,7 +456,7 @@ static int ctr_des_encrypt(struct blkcipher_desc *desc,
458 struct blkcipher_walk walk; 456 struct blkcipher_walk walk;
459 457
460 blkcipher_walk_init(&walk, dst, src, nbytes); 458 blkcipher_walk_init(&walk, dst, src, nbytes);
461 return ctr_desall_crypt(desc, KMCTR_DEA_ENCRYPT, ctx, &walk); 459 return ctr_desall_crypt(desc, CPACF_KMCTR_DEA_ENC, ctx, &walk);
462} 460}
463 461
464static int ctr_des_decrypt(struct blkcipher_desc *desc, 462static int ctr_des_decrypt(struct blkcipher_desc *desc,
@@ -469,13 +467,13 @@ static int ctr_des_decrypt(struct blkcipher_desc *desc,
469 struct blkcipher_walk walk; 467 struct blkcipher_walk walk;
470 468
471 blkcipher_walk_init(&walk, dst, src, nbytes); 469 blkcipher_walk_init(&walk, dst, src, nbytes);
472 return ctr_desall_crypt(desc, KMCTR_DEA_DECRYPT, ctx, &walk); 470 return ctr_desall_crypt(desc, CPACF_KMCTR_DEA_DEC, ctx, &walk);
473} 471}
474 472
475static struct crypto_alg ctr_des_alg = { 473static struct crypto_alg ctr_des_alg = {
476 .cra_name = "ctr(des)", 474 .cra_name = "ctr(des)",
477 .cra_driver_name = "ctr-des-s390", 475 .cra_driver_name = "ctr-des-s390",
478 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY, 476 .cra_priority = 400, /* combo: des + ctr */
479 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 477 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
480 .cra_blocksize = 1, 478 .cra_blocksize = 1,
481 .cra_ctxsize = sizeof(struct s390_des_ctx), 479 .cra_ctxsize = sizeof(struct s390_des_ctx),
@@ -501,7 +499,7 @@ static int ctr_des3_encrypt(struct blkcipher_desc *desc,
501 struct blkcipher_walk walk; 499 struct blkcipher_walk walk;
502 500
503 blkcipher_walk_init(&walk, dst, src, nbytes); 501 blkcipher_walk_init(&walk, dst, src, nbytes);
504 return ctr_desall_crypt(desc, KMCTR_TDEA_192_ENCRYPT, ctx, &walk); 502 return ctr_desall_crypt(desc, CPACF_KMCTR_TDEA_192_ENC, ctx, &walk);
505} 503}
506 504
507static int ctr_des3_decrypt(struct blkcipher_desc *desc, 505static int ctr_des3_decrypt(struct blkcipher_desc *desc,
@@ -512,13 +510,13 @@ static int ctr_des3_decrypt(struct blkcipher_desc *desc,
512 struct blkcipher_walk walk; 510 struct blkcipher_walk walk;
513 511
514 blkcipher_walk_init(&walk, dst, src, nbytes); 512 blkcipher_walk_init(&walk, dst, src, nbytes);
515 return ctr_desall_crypt(desc, KMCTR_TDEA_192_DECRYPT, ctx, &walk); 513 return ctr_desall_crypt(desc, CPACF_KMCTR_TDEA_192_DEC, ctx, &walk);
516} 514}
517 515
518static struct crypto_alg ctr_des3_alg = { 516static struct crypto_alg ctr_des3_alg = {
519 .cra_name = "ctr(des3_ede)", 517 .cra_name = "ctr(des3_ede)",
520 .cra_driver_name = "ctr-des3_ede-s390", 518 .cra_driver_name = "ctr-des3_ede-s390",
521 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY, 519 .cra_priority = 400, /* combo: des3 + ede */
522 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 520 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
523 .cra_blocksize = 1, 521 .cra_blocksize = 1,
524 .cra_ctxsize = sizeof(struct s390_des_ctx), 522 .cra_ctxsize = sizeof(struct s390_des_ctx),
@@ -540,8 +538,8 @@ static int __init des_s390_init(void)
540{ 538{
541 int ret; 539 int ret;
542 540
543 if (!crypt_s390_func_available(KM_DEA_ENCRYPT, CRYPT_S390_MSA) || 541 if (!cpacf_query(CPACF_KM, CPACF_KM_DEA_ENC) ||
544 !crypt_s390_func_available(KM_TDEA_192_ENCRYPT, CRYPT_S390_MSA)) 542 !cpacf_query(CPACF_KM, CPACF_KM_TDEA_192_ENC))
545 return -EOPNOTSUPP; 543 return -EOPNOTSUPP;
546 544
547 ret = crypto_register_alg(&des_alg); 545 ret = crypto_register_alg(&des_alg);
@@ -563,10 +561,8 @@ static int __init des_s390_init(void)
563 if (ret) 561 if (ret)
564 goto cbc_des3_err; 562 goto cbc_des3_err;
565 563
566 if (crypt_s390_func_available(KMCTR_DEA_ENCRYPT, 564 if (cpacf_query(CPACF_KMCTR, CPACF_KMCTR_DEA_ENC) &&
567 CRYPT_S390_MSA | CRYPT_S390_MSA4) && 565 cpacf_query(CPACF_KMCTR, CPACF_KMCTR_TDEA_192_ENC)) {
568 crypt_s390_func_available(KMCTR_TDEA_192_ENCRYPT,
569 CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
570 ret = crypto_register_alg(&ctr_des_alg); 566 ret = crypto_register_alg(&ctr_des_alg);
571 if (ret) 567 if (ret)
572 goto ctr_des_err; 568 goto ctr_des_err;
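
Both CTR conversions (aes_s390.c above and des_s390.c here) keep the existing tail handling: the final partial block is run through cpacf_kmctr() into a full-size bounce buffer and only the leftover bytes are copied to the destination, so no padding ever reaches the caller. A self-contained sketch of that step; the helper and its signature are illustrative, not part of the patch:

#include <linux/errno.h>
#include <linux/string.h>
#include <linux/types.h>
#include <asm/cpacf.h>

/* Encrypt/decrypt a final partial CTR block (nbytes < blocksize). */
static int ctr_final_block(unsigned long func, void *key, u8 *out,
			   const u8 *in, unsigned int nbytes,
			   u8 *ctrbuf, unsigned int blocksize)
{
	u8 buf[16];	/* big enough for DES (8) and AES (16) blocks */
	int ret;

	ret = cpacf_kmctr(func, key, buf, in, blocksize, ctrbuf);
	if (ret < 0 || ret != blocksize)
		return -EIO;
	memcpy(out, buf, nbytes);
	return 0;
}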
diff --git a/arch/s390/crypto/ghash_s390.c b/arch/s390/crypto/ghash_s390.c
index 26e14efd30a7..ab68de72e795 100644
--- a/arch/s390/crypto/ghash_s390.c
+++ b/arch/s390/crypto/ghash_s390.c
@@ -10,8 +10,7 @@
10#include <crypto/internal/hash.h> 10#include <crypto/internal/hash.h>
11#include <linux/module.h> 11#include <linux/module.h>
12#include <linux/cpufeature.h> 12#include <linux/cpufeature.h>
13 13#include <asm/cpacf.h>
14#include "crypt_s390.h"
15 14
16#define GHASH_BLOCK_SIZE 16 15#define GHASH_BLOCK_SIZE 16
17#define GHASH_DIGEST_SIZE 16 16#define GHASH_DIGEST_SIZE 16
@@ -72,8 +71,8 @@ static int ghash_update(struct shash_desc *desc,
72 src += n; 71 src += n;
73 72
74 if (!dctx->bytes) { 73 if (!dctx->bytes) {
75 ret = crypt_s390_kimd(KIMD_GHASH, dctx, buf, 74 ret = cpacf_kimd(CPACF_KIMD_GHASH, dctx, buf,
76 GHASH_BLOCK_SIZE); 75 GHASH_BLOCK_SIZE);
77 if (ret != GHASH_BLOCK_SIZE) 76 if (ret != GHASH_BLOCK_SIZE)
78 return -EIO; 77 return -EIO;
79 } 78 }
@@ -81,7 +80,7 @@ static int ghash_update(struct shash_desc *desc,
81 80
82 n = srclen & ~(GHASH_BLOCK_SIZE - 1); 81 n = srclen & ~(GHASH_BLOCK_SIZE - 1);
83 if (n) { 82 if (n) {
84 ret = crypt_s390_kimd(KIMD_GHASH, dctx, src, n); 83 ret = cpacf_kimd(CPACF_KIMD_GHASH, dctx, src, n);
85 if (ret != n) 84 if (ret != n)
86 return -EIO; 85 return -EIO;
87 src += n; 86 src += n;
@@ -106,7 +105,7 @@ static int ghash_flush(struct ghash_desc_ctx *dctx)
106 105
107 memset(pos, 0, dctx->bytes); 106 memset(pos, 0, dctx->bytes);
108 107
109 ret = crypt_s390_kimd(KIMD_GHASH, dctx, buf, GHASH_BLOCK_SIZE); 108 ret = cpacf_kimd(CPACF_KIMD_GHASH, dctx, buf, GHASH_BLOCK_SIZE);
110 if (ret != GHASH_BLOCK_SIZE) 109 if (ret != GHASH_BLOCK_SIZE)
111 return -EIO; 110 return -EIO;
112 111
@@ -137,7 +136,7 @@ static struct shash_alg ghash_alg = {
137 .base = { 136 .base = {
138 .cra_name = "ghash", 137 .cra_name = "ghash",
139 .cra_driver_name = "ghash-s390", 138 .cra_driver_name = "ghash-s390",
140 .cra_priority = CRYPT_S390_PRIORITY, 139 .cra_priority = 300,
141 .cra_flags = CRYPTO_ALG_TYPE_SHASH, 140 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
142 .cra_blocksize = GHASH_BLOCK_SIZE, 141 .cra_blocksize = GHASH_BLOCK_SIZE,
143 .cra_ctxsize = sizeof(struct ghash_ctx), 142 .cra_ctxsize = sizeof(struct ghash_ctx),
@@ -147,8 +146,7 @@ static struct shash_alg ghash_alg = {
147 146
148static int __init ghash_mod_init(void) 147static int __init ghash_mod_init(void)
149{ 148{
150 if (!crypt_s390_func_available(KIMD_GHASH, 149 if (!cpacf_query(CPACF_KIMD, CPACF_KIMD_GHASH))
151 CRYPT_S390_MSA | CRYPT_S390_MSA4))
152 return -EOPNOTSUPP; 150 return -EOPNOTSUPP;
153 151
154 return crypto_register_shash(&ghash_alg); 152 return crypto_register_shash(&ghash_alg);
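
As with the wrappers they replace, the cpacf_* data-processing helpers used in this patch are expected to return the number of bytes they processed (negative on failure); that is why every converted ghash call checks the return value against the length it passed in. A minimal sketch of that checking pattern (the helper name is hypothetical):

#include <linux/errno.h>
#include <linux/types.h>
#include <asm/cpacf.h>

/* Feed 'n' bytes (a multiple of the 16-byte GHASH block size) to KIMD. */
static inline int ghash_feed(void *dctx, const u8 *src, unsigned int n)
{
	int ret = cpacf_kimd(CPACF_KIMD_GHASH, dctx, src, n);

	return (ret < 0 || ret != n) ? -EIO : 0;
}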
diff --git a/arch/s390/crypto/prng.c b/arch/s390/crypto/prng.c
index d750cc0dfe30..41527b113f5a 100644
--- a/arch/s390/crypto/prng.c
+++ b/arch/s390/crypto/prng.c
@@ -23,8 +23,7 @@
23#include <asm/debug.h> 23#include <asm/debug.h>
24#include <asm/uaccess.h> 24#include <asm/uaccess.h>
25#include <asm/timex.h> 25#include <asm/timex.h>
26 26#include <asm/cpacf.h>
27#include "crypt_s390.h"
28 27
29MODULE_LICENSE("GPL"); 28MODULE_LICENSE("GPL");
30MODULE_AUTHOR("IBM Corporation"); 29MODULE_AUTHOR("IBM Corporation");
@@ -136,8 +135,8 @@ static int generate_entropy(u8 *ebuf, size_t nbytes)
136 else 135 else
137 h = ebuf; 136 h = ebuf;
138 /* generate sha256 from this page */ 137 /* generate sha256 from this page */
139 if (crypt_s390_kimd(KIMD_SHA_256, h, 138 if (cpacf_kimd(CPACF_KIMD_SHA_256, h,
140 pg, PAGE_SIZE) != PAGE_SIZE) { 139 pg, PAGE_SIZE) != PAGE_SIZE) {
141 prng_errorflag = PRNG_GEN_ENTROPY_FAILED; 140 prng_errorflag = PRNG_GEN_ENTROPY_FAILED;
142 ret = -EIO; 141 ret = -EIO;
143 goto out; 142 goto out;
@@ -164,9 +163,9 @@ static void prng_tdes_add_entropy(void)
164 int ret; 163 int ret;
165 164
166 for (i = 0; i < 16; i++) { 165 for (i = 0; i < 16; i++) {
167 ret = crypt_s390_kmc(KMC_PRNG, prng_data->prngws.parm_block, 166 ret = cpacf_kmc(CPACF_KMC_PRNG, prng_data->prngws.parm_block,
168 (char *)entropy, (char *)entropy, 167 (char *)entropy, (char *)entropy,
169 sizeof(entropy)); 168 sizeof(entropy));
170 BUG_ON(ret < 0 || ret != sizeof(entropy)); 169 BUG_ON(ret < 0 || ret != sizeof(entropy));
171 memcpy(prng_data->prngws.parm_block, entropy, sizeof(entropy)); 170 memcpy(prng_data->prngws.parm_block, entropy, sizeof(entropy));
172 } 171 }
@@ -311,9 +310,8 @@ static int __init prng_sha512_selftest(void)
311 memset(&ws, 0, sizeof(ws)); 310 memset(&ws, 0, sizeof(ws));
312 311
313 /* initial seed */ 312 /* initial seed */
314 ret = crypt_s390_ppno(PPNO_SHA512_DRNG_SEED, 313 ret = cpacf_ppno(CPACF_PPNO_SHA512_DRNG_SEED, &ws, NULL, 0,
315 &ws, NULL, 0, 314 seed, sizeof(seed));
316 seed, sizeof(seed));
317 if (ret < 0) { 315 if (ret < 0) {
318 pr_err("The prng self test seed operation for the " 316 pr_err("The prng self test seed operation for the "
319 "SHA-512 mode failed with rc=%d\n", ret); 317 "SHA-512 mode failed with rc=%d\n", ret);
@@ -331,18 +329,16 @@ static int __init prng_sha512_selftest(void)
331 } 329 }
332 330
333 /* generate random bytes */ 331 /* generate random bytes */
334 ret = crypt_s390_ppno(PPNO_SHA512_DRNG_GEN, 332 ret = cpacf_ppno(CPACF_PPNO_SHA512_DRNG_GEN,
335 &ws, buf, sizeof(buf), 333 &ws, buf, sizeof(buf), NULL, 0);
336 NULL, 0);
337 if (ret < 0) { 334 if (ret < 0) {
338 pr_err("The prng self test generate operation for " 335 pr_err("The prng self test generate operation for "
339 "the SHA-512 mode failed with rc=%d\n", ret); 336 "the SHA-512 mode failed with rc=%d\n", ret);
340 prng_errorflag = PRNG_SELFTEST_FAILED; 337 prng_errorflag = PRNG_SELFTEST_FAILED;
341 return -EIO; 338 return -EIO;
342 } 339 }
343 ret = crypt_s390_ppno(PPNO_SHA512_DRNG_GEN, 340 ret = cpacf_ppno(CPACF_PPNO_SHA512_DRNG_GEN,
344 &ws, buf, sizeof(buf), 341 &ws, buf, sizeof(buf), NULL, 0);
345 NULL, 0);
346 if (ret < 0) { 342 if (ret < 0) {
347 pr_err("The prng self test generate operation for " 343 pr_err("The prng self test generate operation for "
348 "the SHA-512 mode failed with rc=%d\n", ret); 344 "the SHA-512 mode failed with rc=%d\n", ret);
@@ -396,9 +392,8 @@ static int __init prng_sha512_instantiate(void)
396 get_tod_clock_ext(seed + 48); 392 get_tod_clock_ext(seed + 48);
397 393
398 /* initial seed of the ppno drng */ 394 /* initial seed of the ppno drng */
399 ret = crypt_s390_ppno(PPNO_SHA512_DRNG_SEED, 395 ret = cpacf_ppno(CPACF_PPNO_SHA512_DRNG_SEED,
400 &prng_data->ppnows, NULL, 0, 396 &prng_data->ppnows, NULL, 0, seed, sizeof(seed));
401 seed, sizeof(seed));
402 if (ret < 0) { 397 if (ret < 0) {
403 prng_errorflag = PRNG_SEED_FAILED; 398 prng_errorflag = PRNG_SEED_FAILED;
404 ret = -EIO; 399 ret = -EIO;
@@ -409,11 +404,9 @@ static int __init prng_sha512_instantiate(void)
409 bytes for the FIPS 140-2 Conditional Self Test */ 404 bytes for the FIPS 140-2 Conditional Self Test */
410 if (fips_enabled) { 405 if (fips_enabled) {
411 prng_data->prev = prng_data->buf + prng_chunk_size; 406 prng_data->prev = prng_data->buf + prng_chunk_size;
412 ret = crypt_s390_ppno(PPNO_SHA512_DRNG_GEN, 407 ret = cpacf_ppno(CPACF_PPNO_SHA512_DRNG_GEN,
413 &prng_data->ppnows, 408 &prng_data->ppnows,
414 prng_data->prev, 409 prng_data->prev, prng_chunk_size, NULL, 0);
415 prng_chunk_size,
416 NULL, 0);
417 if (ret < 0 || ret != prng_chunk_size) { 410 if (ret < 0 || ret != prng_chunk_size) {
418 prng_errorflag = PRNG_GEN_FAILED; 411 prng_errorflag = PRNG_GEN_FAILED;
419 ret = -EIO; 412 ret = -EIO;
@@ -447,9 +440,8 @@ static int prng_sha512_reseed(void)
447 return ret; 440 return ret;
448 441
449 /* do a reseed of the ppno drng with this bytestring */ 442 /* do a reseed of the ppno drng with this bytestring */
450 ret = crypt_s390_ppno(PPNO_SHA512_DRNG_SEED, 443 ret = cpacf_ppno(CPACF_PPNO_SHA512_DRNG_SEED,
451 &prng_data->ppnows, NULL, 0, 444 &prng_data->ppnows, NULL, 0, seed, sizeof(seed));
452 seed, sizeof(seed));
453 if (ret) { 445 if (ret) {
454 prng_errorflag = PRNG_RESEED_FAILED; 446 prng_errorflag = PRNG_RESEED_FAILED;
455 return -EIO; 447 return -EIO;
@@ -471,9 +463,8 @@ static int prng_sha512_generate(u8 *buf, size_t nbytes)
471 } 463 }
472 464
473 /* PPNO generate */ 465 /* PPNO generate */
474 ret = crypt_s390_ppno(PPNO_SHA512_DRNG_GEN, 466 ret = cpacf_ppno(CPACF_PPNO_SHA512_DRNG_GEN,
475 &prng_data->ppnows, buf, nbytes, 467 &prng_data->ppnows, buf, nbytes, NULL, 0);
476 NULL, 0);
477 if (ret < 0 || ret != nbytes) { 468 if (ret < 0 || ret != nbytes) {
478 prng_errorflag = PRNG_GEN_FAILED; 469 prng_errorflag = PRNG_GEN_FAILED;
479 return -EIO; 470 return -EIO;
@@ -555,8 +546,8 @@ static ssize_t prng_tdes_read(struct file *file, char __user *ubuf,
555 * Note: you can still get strict X9.17 conformity by setting 546 * Note: you can still get strict X9.17 conformity by setting
556 * prng_chunk_size to 8 bytes. 547 * prng_chunk_size to 8 bytes.
557 */ 548 */
558 tmp = crypt_s390_kmc(KMC_PRNG, prng_data->prngws.parm_block, 549 tmp = cpacf_kmc(CPACF_KMC_PRNG, prng_data->prngws.parm_block,
559 prng_data->buf, prng_data->buf, n); 550 prng_data->buf, prng_data->buf, n);
560 if (tmp < 0 || tmp != n) { 551 if (tmp < 0 || tmp != n) {
561 ret = -EIO; 552 ret = -EIO;
562 break; 553 break;
@@ -815,14 +806,13 @@ static int __init prng_init(void)
815 int ret; 806 int ret;
816 807
817 /* check if the CPU has a PRNG */ 808 /* check if the CPU has a PRNG */
818 if (!crypt_s390_func_available(KMC_PRNG, CRYPT_S390_MSA)) 809 if (!cpacf_query(CPACF_KMC, CPACF_KMC_PRNG))
819 return -EOPNOTSUPP; 810 return -EOPNOTSUPP;
820 811
821 /* choose prng mode */ 812 /* choose prng mode */
822 if (prng_mode != PRNG_MODE_TDES) { 813 if (prng_mode != PRNG_MODE_TDES) {
823 /* check for MSA5 support for PPNO operations */ 814 /* check for MSA5 support for PPNO operations */
824 if (!crypt_s390_func_available(PPNO_SHA512_DRNG_GEN, 815 if (!cpacf_query(CPACF_PPNO, CPACF_PPNO_SHA512_DRNG_GEN)) {
825 CRYPT_S390_MSA5)) {
826 if (prng_mode == PRNG_MODE_SHA512) { 816 if (prng_mode == PRNG_MODE_SHA512) {
827 pr_err("The prng module cannot " 817 pr_err("The prng module cannot "
828 "start in SHA-512 mode\n"); 818 "start in SHA-512 mode\n");
diff --git a/arch/s390/crypto/sha1_s390.c b/arch/s390/crypto/sha1_s390.c
index 9208eadae9f0..5fbf91bbb478 100644
--- a/arch/s390/crypto/sha1_s390.c
+++ b/arch/s390/crypto/sha1_s390.c
@@ -28,8 +28,8 @@
28#include <linux/module.h> 28#include <linux/module.h>
29#include <linux/cpufeature.h> 29#include <linux/cpufeature.h>
30#include <crypto/sha.h> 30#include <crypto/sha.h>
31#include <asm/cpacf.h>
31 32
32#include "crypt_s390.h"
33#include "sha.h" 33#include "sha.h"
34 34
35static int sha1_init(struct shash_desc *desc) 35static int sha1_init(struct shash_desc *desc)
@@ -42,7 +42,7 @@ static int sha1_init(struct shash_desc *desc)
42 sctx->state[3] = SHA1_H3; 42 sctx->state[3] = SHA1_H3;
43 sctx->state[4] = SHA1_H4; 43 sctx->state[4] = SHA1_H4;
44 sctx->count = 0; 44 sctx->count = 0;
45 sctx->func = KIMD_SHA_1; 45 sctx->func = CPACF_KIMD_SHA_1;
46 46
47 return 0; 47 return 0;
48} 48}
@@ -66,7 +66,7 @@ static int sha1_import(struct shash_desc *desc, const void *in)
66 sctx->count = ictx->count; 66 sctx->count = ictx->count;
67 memcpy(sctx->state, ictx->state, sizeof(ictx->state)); 67 memcpy(sctx->state, ictx->state, sizeof(ictx->state));
68 memcpy(sctx->buf, ictx->buffer, sizeof(ictx->buffer)); 68 memcpy(sctx->buf, ictx->buffer, sizeof(ictx->buffer));
69 sctx->func = KIMD_SHA_1; 69 sctx->func = CPACF_KIMD_SHA_1;
70 return 0; 70 return 0;
71} 71}
72 72
@@ -82,7 +82,7 @@ static struct shash_alg alg = {
82 .base = { 82 .base = {
83 .cra_name = "sha1", 83 .cra_name = "sha1",
84 .cra_driver_name= "sha1-s390", 84 .cra_driver_name= "sha1-s390",
85 .cra_priority = CRYPT_S390_PRIORITY, 85 .cra_priority = 300,
86 .cra_flags = CRYPTO_ALG_TYPE_SHASH, 86 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
87 .cra_blocksize = SHA1_BLOCK_SIZE, 87 .cra_blocksize = SHA1_BLOCK_SIZE,
88 .cra_module = THIS_MODULE, 88 .cra_module = THIS_MODULE,
@@ -91,7 +91,7 @@ static struct shash_alg alg = {
91 91
92static int __init sha1_s390_init(void) 92static int __init sha1_s390_init(void)
93{ 93{
94 if (!crypt_s390_func_available(KIMD_SHA_1, CRYPT_S390_MSA)) 94 if (!cpacf_query(CPACF_KIMD, CPACF_KIMD_SHA_1))
95 return -EOPNOTSUPP; 95 return -EOPNOTSUPP;
96 return crypto_register_shash(&alg); 96 return crypto_register_shash(&alg);
97} 97}
diff --git a/arch/s390/crypto/sha256_s390.c b/arch/s390/crypto/sha256_s390.c
index 667888f5c964..10aac0b11988 100644
--- a/arch/s390/crypto/sha256_s390.c
+++ b/arch/s390/crypto/sha256_s390.c
@@ -18,8 +18,8 @@
18#include <linux/module.h> 18#include <linux/module.h>
19#include <linux/cpufeature.h> 19#include <linux/cpufeature.h>
20#include <crypto/sha.h> 20#include <crypto/sha.h>
21#include <asm/cpacf.h>
21 22
22#include "crypt_s390.h"
23#include "sha.h" 23#include "sha.h"
24 24
25static int sha256_init(struct shash_desc *desc) 25static int sha256_init(struct shash_desc *desc)
@@ -35,7 +35,7 @@ static int sha256_init(struct shash_desc *desc)
35 sctx->state[6] = SHA256_H6; 35 sctx->state[6] = SHA256_H6;
36 sctx->state[7] = SHA256_H7; 36 sctx->state[7] = SHA256_H7;
37 sctx->count = 0; 37 sctx->count = 0;
38 sctx->func = KIMD_SHA_256; 38 sctx->func = CPACF_KIMD_SHA_256;
39 39
40 return 0; 40 return 0;
41} 41}
@@ -59,7 +59,7 @@ static int sha256_import(struct shash_desc *desc, const void *in)
59 sctx->count = ictx->count; 59 sctx->count = ictx->count;
60 memcpy(sctx->state, ictx->state, sizeof(ictx->state)); 60 memcpy(sctx->state, ictx->state, sizeof(ictx->state));
61 memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf)); 61 memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf));
62 sctx->func = KIMD_SHA_256; 62 sctx->func = CPACF_KIMD_SHA_256;
63 return 0; 63 return 0;
64} 64}
65 65
@@ -75,7 +75,7 @@ static struct shash_alg sha256_alg = {
75 .base = { 75 .base = {
76 .cra_name = "sha256", 76 .cra_name = "sha256",
77 .cra_driver_name= "sha256-s390", 77 .cra_driver_name= "sha256-s390",
78 .cra_priority = CRYPT_S390_PRIORITY, 78 .cra_priority = 300,
79 .cra_flags = CRYPTO_ALG_TYPE_SHASH, 79 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
80 .cra_blocksize = SHA256_BLOCK_SIZE, 80 .cra_blocksize = SHA256_BLOCK_SIZE,
81 .cra_module = THIS_MODULE, 81 .cra_module = THIS_MODULE,
@@ -95,7 +95,7 @@ static int sha224_init(struct shash_desc *desc)
95 sctx->state[6] = SHA224_H6; 95 sctx->state[6] = SHA224_H6;
96 sctx->state[7] = SHA224_H7; 96 sctx->state[7] = SHA224_H7;
97 sctx->count = 0; 97 sctx->count = 0;
98 sctx->func = KIMD_SHA_256; 98 sctx->func = CPACF_KIMD_SHA_256;
99 99
100 return 0; 100 return 0;
101} 101}
@@ -112,7 +112,7 @@ static struct shash_alg sha224_alg = {
112 .base = { 112 .base = {
113 .cra_name = "sha224", 113 .cra_name = "sha224",
114 .cra_driver_name= "sha224-s390", 114 .cra_driver_name= "sha224-s390",
115 .cra_priority = CRYPT_S390_PRIORITY, 115 .cra_priority = 300,
116 .cra_flags = CRYPTO_ALG_TYPE_SHASH, 116 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
117 .cra_blocksize = SHA224_BLOCK_SIZE, 117 .cra_blocksize = SHA224_BLOCK_SIZE,
118 .cra_module = THIS_MODULE, 118 .cra_module = THIS_MODULE,
@@ -123,7 +123,7 @@ static int __init sha256_s390_init(void)
123{ 123{
124 int ret; 124 int ret;
125 125
126 if (!crypt_s390_func_available(KIMD_SHA_256, CRYPT_S390_MSA)) 126 if (!cpacf_query(CPACF_KIMD, CPACF_KIMD_SHA_256))
127 return -EOPNOTSUPP; 127 return -EOPNOTSUPP;
128 ret = crypto_register_shash(&sha256_alg); 128 ret = crypto_register_shash(&sha256_alg);
129 if (ret < 0) 129 if (ret < 0)
diff --git a/arch/s390/crypto/sha512_s390.c b/arch/s390/crypto/sha512_s390.c
index 2ba66b1518f0..ea85757be407 100644
--- a/arch/s390/crypto/sha512_s390.c
+++ b/arch/s390/crypto/sha512_s390.c
@@ -19,9 +19,9 @@
19#include <linux/kernel.h> 19#include <linux/kernel.h>
20#include <linux/module.h> 20#include <linux/module.h>
21#include <linux/cpufeature.h> 21#include <linux/cpufeature.h>
22#include <asm/cpacf.h>
22 23
23#include "sha.h" 24#include "sha.h"
24#include "crypt_s390.h"
25 25
26static int sha512_init(struct shash_desc *desc) 26static int sha512_init(struct shash_desc *desc)
27{ 27{
@@ -36,7 +36,7 @@ static int sha512_init(struct shash_desc *desc)
36 *(__u64 *)&ctx->state[12] = 0x1f83d9abfb41bd6bULL; 36 *(__u64 *)&ctx->state[12] = 0x1f83d9abfb41bd6bULL;
37 *(__u64 *)&ctx->state[14] = 0x5be0cd19137e2179ULL; 37 *(__u64 *)&ctx->state[14] = 0x5be0cd19137e2179ULL;
38 ctx->count = 0; 38 ctx->count = 0;
39 ctx->func = KIMD_SHA_512; 39 ctx->func = CPACF_KIMD_SHA_512;
40 40
41 return 0; 41 return 0;
42} 42}
@@ -64,7 +64,7 @@ static int sha512_import(struct shash_desc *desc, const void *in)
64 64
65 memcpy(sctx->state, ictx->state, sizeof(ictx->state)); 65 memcpy(sctx->state, ictx->state, sizeof(ictx->state));
66 memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf)); 66 memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf));
67 sctx->func = KIMD_SHA_512; 67 sctx->func = CPACF_KIMD_SHA_512;
68 return 0; 68 return 0;
69} 69}
70 70
@@ -80,7 +80,7 @@ static struct shash_alg sha512_alg = {
80 .base = { 80 .base = {
81 .cra_name = "sha512", 81 .cra_name = "sha512",
82 .cra_driver_name= "sha512-s390", 82 .cra_driver_name= "sha512-s390",
83 .cra_priority = CRYPT_S390_PRIORITY, 83 .cra_priority = 300,
84 .cra_flags = CRYPTO_ALG_TYPE_SHASH, 84 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
85 .cra_blocksize = SHA512_BLOCK_SIZE, 85 .cra_blocksize = SHA512_BLOCK_SIZE,
86 .cra_module = THIS_MODULE, 86 .cra_module = THIS_MODULE,
@@ -102,7 +102,7 @@ static int sha384_init(struct shash_desc *desc)
102 *(__u64 *)&ctx->state[12] = 0xdb0c2e0d64f98fa7ULL; 102 *(__u64 *)&ctx->state[12] = 0xdb0c2e0d64f98fa7ULL;
103 *(__u64 *)&ctx->state[14] = 0x47b5481dbefa4fa4ULL; 103 *(__u64 *)&ctx->state[14] = 0x47b5481dbefa4fa4ULL;
104 ctx->count = 0; 104 ctx->count = 0;
105 ctx->func = KIMD_SHA_512; 105 ctx->func = CPACF_KIMD_SHA_512;
106 106
107 return 0; 107 return 0;
108} 108}
@@ -119,7 +119,7 @@ static struct shash_alg sha384_alg = {
119 .base = { 119 .base = {
120 .cra_name = "sha384", 120 .cra_name = "sha384",
121 .cra_driver_name= "sha384-s390", 121 .cra_driver_name= "sha384-s390",
122 .cra_priority = CRYPT_S390_PRIORITY, 122 .cra_priority = 300,
123 .cra_flags = CRYPTO_ALG_TYPE_SHASH, 123 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
124 .cra_blocksize = SHA384_BLOCK_SIZE, 124 .cra_blocksize = SHA384_BLOCK_SIZE,
125 .cra_ctxsize = sizeof(struct s390_sha_ctx), 125 .cra_ctxsize = sizeof(struct s390_sha_ctx),
@@ -133,7 +133,7 @@ static int __init init(void)
133{ 133{
134 int ret; 134 int ret;
135 135
136 if (!crypt_s390_func_available(KIMD_SHA_512, CRYPT_S390_MSA)) 136 if (!cpacf_query(CPACF_KIMD, CPACF_KIMD_SHA_512))
137 return -EOPNOTSUPP; 137 return -EOPNOTSUPP;
138 if ((ret = crypto_register_shash(&sha512_alg)) < 0) 138 if ((ret = crypto_register_shash(&sha512_alg)) < 0)
139 goto out; 139 goto out;
diff --git a/arch/s390/crypto/sha_common.c b/arch/s390/crypto/sha_common.c
index 8620b0ec9c42..8e908166c3ee 100644
--- a/arch/s390/crypto/sha_common.c
+++ b/arch/s390/crypto/sha_common.c
@@ -15,8 +15,8 @@
15 15
16#include <crypto/internal/hash.h> 16#include <crypto/internal/hash.h>
17#include <linux/module.h> 17#include <linux/module.h>
18#include <asm/cpacf.h>
18#include "sha.h" 19#include "sha.h"
19#include "crypt_s390.h"
20 20
21int s390_sha_update(struct shash_desc *desc, const u8 *data, unsigned int len) 21int s390_sha_update(struct shash_desc *desc, const u8 *data, unsigned int len)
22{ 22{
@@ -35,7 +35,7 @@ int s390_sha_update(struct shash_desc *desc, const u8 *data, unsigned int len)
35 /* process one stored block */ 35 /* process one stored block */
36 if (index) { 36 if (index) {
37 memcpy(ctx->buf + index, data, bsize - index); 37 memcpy(ctx->buf + index, data, bsize - index);
38 ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, bsize); 38 ret = cpacf_kimd(ctx->func, ctx->state, ctx->buf, bsize);
39 if (ret != bsize) 39 if (ret != bsize)
40 return -EIO; 40 return -EIO;
41 data += bsize - index; 41 data += bsize - index;
@@ -45,8 +45,8 @@ int s390_sha_update(struct shash_desc *desc, const u8 *data, unsigned int len)
45 45
46 /* process as many blocks as possible */ 46 /* process as many blocks as possible */
47 if (len >= bsize) { 47 if (len >= bsize) {
48 ret = crypt_s390_kimd(ctx->func, ctx->state, data, 48 ret = cpacf_kimd(ctx->func, ctx->state, data,
49 len & ~(bsize - 1)); 49 len & ~(bsize - 1));
50 if (ret != (len & ~(bsize - 1))) 50 if (ret != (len & ~(bsize - 1)))
51 return -EIO; 51 return -EIO;
52 data += ret; 52 data += ret;
@@ -89,7 +89,7 @@ int s390_sha_final(struct shash_desc *desc, u8 *out)
89 bits = ctx->count * 8; 89 bits = ctx->count * 8;
90 memcpy(ctx->buf + end - 8, &bits, sizeof(bits)); 90 memcpy(ctx->buf + end - 8, &bits, sizeof(bits));
91 91
92 ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, end); 92 ret = cpacf_kimd(ctx->func, ctx->state, ctx->buf, end);
93 if (ret != end) 93 if (ret != end)
94 return -EIO; 94 return -EIO;
95 95
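In the update path above, only whole blocks are ever handed to cpacf_kimd(): len & ~(bsize - 1) rounds the length down to a multiple of the block size (a power of two), and the tail stays in ctx->buf until the next update or the final call. A small stand-alone sketch of that rounding, with made-up numbers:

#include <stdio.h>

int main(void)
{
	unsigned long bsize = 64;                 /* e.g. SHA-256 block size */
	unsigned long len = 200;                  /* bytes handed to update() */
	unsigned long full = len & ~(bsize - 1);  /* 192 bytes go to cpacf_kimd() */
	unsigned long rest = len & (bsize - 1);   /* 8 bytes stay buffered */

	printf("process %lu bytes now, buffer %lu bytes\n", full, rest);
	return 0;
}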
diff --git a/arch/s390/include/asm/cpacf.h b/arch/s390/include/asm/cpacf.h
new file mode 100644
index 000000000000..1a82cf26ee11
--- /dev/null
+++ b/arch/s390/include/asm/cpacf.h
@@ -0,0 +1,410 @@
1/*
2 * CP Assist for Cryptographic Functions (CPACF)
3 *
4 * Copyright IBM Corp. 2003, 2016
5 * Author(s): Thomas Spatzier
6 * Jan Glauber
7 * Harald Freudenberger (freude@de.ibm.com)
8 * Martin Schwidefsky <schwidefsky@de.ibm.com>
9 */
10#ifndef _ASM_S390_CPACF_H
11#define _ASM_S390_CPACF_H
12
13#include <asm/facility.h>
14
15/*
16 * Instruction opcodes for the CPACF instructions
17 */
18#define CPACF_KMAC 0xb91e /* MSA */
19#define CPACF_KM 0xb92e /* MSA */
20#define CPACF_KMC 0xb92f /* MSA */
21#define CPACF_KIMD 0xb93e /* MSA */
22#define CPACF_KLMD 0xb93f /* MSA */
23#define CPACF_PCC 0xb92c /* MSA4 */
24#define CPACF_KMCTR 0xb92d /* MSA4 */
25#define CPACF_PPNO 0xb93c /* MSA5 */
26
27/*
28 * Function codes for the KM (CIPHER MESSAGE)
29 * instruction (0x80 is the decipher modifier bit)
30 */
31#define CPACF_KM_QUERY 0x00
32#define CPACF_KM_DEA_ENC 0x01
33#define CPACF_KM_DEA_DEC 0x81
34#define CPACF_KM_TDEA_128_ENC 0x02
35#define CPACF_KM_TDEA_128_DEC 0x82
36#define CPACF_KM_TDEA_192_ENC 0x03
37#define CPACF_KM_TDEA_192_DEC 0x83
38#define CPACF_KM_AES_128_ENC 0x12
39#define CPACF_KM_AES_128_DEC 0x92
40#define CPACF_KM_AES_192_ENC 0x13
41#define CPACF_KM_AES_192_DEC 0x93
42#define CPACF_KM_AES_256_ENC 0x14
43#define CPACF_KM_AES_256_DEC 0x94
44#define CPACF_KM_XTS_128_ENC 0x32
45#define CPACF_KM_XTS_128_DEC 0xb2
46#define CPACF_KM_XTS_256_ENC 0x34
47#define CPACF_KM_XTS_256_DEC 0xb4
48
49/*
50 * Function codes for the KMC (CIPHER MESSAGE WITH CHAINING)
51 * instruction (0x80 is the decipher modifier bit)
52 */
53#define CPACF_KMC_QUERY 0x00
54#define CPACF_KMC_DEA_ENC 0x01
55#define CPACF_KMC_DEA_DEC 0x81
56#define CPACF_KMC_TDEA_128_ENC 0x02
57#define CPACF_KMC_TDEA_128_DEC 0x82
58#define CPACF_KMC_TDEA_192_ENC 0x03
59#define CPACF_KMC_TDEA_192_DEC 0x83
60#define CPACF_KMC_AES_128_ENC 0x12
61#define CPACF_KMC_AES_128_DEC 0x92
62#define CPACF_KMC_AES_192_ENC 0x13
63#define CPACF_KMC_AES_192_DEC 0x93
64#define CPACF_KMC_AES_256_ENC 0x14
65#define CPACF_KMC_AES_256_DEC 0x94
66#define CPACF_KMC_PRNG 0x43
67
68/*
69 * Function codes for the KMCTR (CIPHER MESSAGE WITH COUNTER)
70 * instruction (0x80 is the decipher modifier bit)
71 */
72#define CPACF_KMCTR_QUERY 0x00
73#define CPACF_KMCTR_DEA_ENC 0x01
74#define CPACF_KMCTR_DEA_DEC 0x81
75#define CPACF_KMCTR_TDEA_128_ENC 0x02
76#define CPACF_KMCTR_TDEA_128_DEC 0x82
77#define CPACF_KMCTR_TDEA_192_ENC 0x03
78#define CPACF_KMCTR_TDEA_192_DEC 0x83
79#define CPACF_KMCTR_AES_128_ENC 0x12
80#define CPACF_KMCTR_AES_128_DEC 0x92
81#define CPACF_KMCTR_AES_192_ENC 0x13
82#define CPACF_KMCTR_AES_192_DEC 0x93
83#define CPACF_KMCTR_AES_256_ENC 0x14
84#define CPACF_KMCTR_AES_256_DEC 0x94
85
86/*
87 * Function codes for the KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST)
 88 * instruction
89 */
90#define CPACF_KIMD_QUERY 0x00
91#define CPACF_KIMD_SHA_1 0x01
92#define CPACF_KIMD_SHA_256 0x02
93#define CPACF_KIMD_SHA_512 0x03
94#define CPACF_KIMD_GHASH 0x41
95
96/*
97 * Function codes for the KLMD (COMPUTE LAST MESSAGE DIGEST)
 98 * instruction
99 */
100#define CPACF_KLMD_QUERY 0x00
101#define CPACF_KLMD_SHA_1 0x01
102#define CPACF_KLMD_SHA_256 0x02
103#define CPACF_KLMD_SHA_512 0x03
104
105/*
106 * Function codes for the KMAC (COMPUTE MESSAGE AUTHENTICATION CODE)
107 * instruction
108 */
109#define CPACF_KMAC_QUERY 0x00
110#define CPACF_KMAC_DEA 0x01
111#define CPACF_KMAC_TDEA_128 0x02
112#define CPACF_KMAC_TDEA_192 0x03
113
114/*
115 * Function codes for the PPNO (PERFORM PSEUDORANDOM NUMBER OPERATION)
116 * instruction (0x80 is the modifier bit that selects the seed function)
117 */
118#define CPACF_PPNO_QUERY 0x00
119#define CPACF_PPNO_SHA512_DRNG_GEN 0x03
120#define CPACF_PPNO_SHA512_DRNG_SEED 0x83
121
122/**
123 * cpacf_query() - check if a specific CPACF function is available
124 * @opcode: the opcode of the crypto instruction
125 * @func: the function code to test for
126 *
127 * Executes the query function for the given crypto instruction @opcode
128 * and checks if @func is available
129 *
130 * Returns 1 if @func is available for @opcode, 0 otherwise
131 */
132static inline void __cpacf_query(unsigned int opcode, unsigned char *status)
133{
134 typedef struct { unsigned char _[16]; } status_type;
135 register unsigned long r0 asm("0") = 0; /* query function */
136 register unsigned long r1 asm("1") = (unsigned long) status;
137
138 asm volatile(
139 /* Parameter registers are ignored, but may not be 0 */
140 "0: .insn rrf,%[opc] << 16,2,2,2,0\n"
141 " brc 1,0b\n" /* handle partial completion */
142 : "=m" (*(status_type *) status)
143 : [fc] "d" (r0), [pba] "a" (r1), [opc] "i" (opcode)
144 : "cc");
145}
146
147static inline int cpacf_query(unsigned int opcode, unsigned int func)
148{
149 unsigned char status[16];
150
151 switch (opcode) {
152 case CPACF_KMAC:
153 case CPACF_KM:
154 case CPACF_KMC:
155 case CPACF_KIMD:
156 case CPACF_KLMD:
157 if (!test_facility(17)) /* check for MSA */
158 return 0;
159 break;
160 case CPACF_PCC:
161 case CPACF_KMCTR:
162 if (!test_facility(77)) /* check for MSA4 */
163 return 0;
164 break;
165 case CPACF_PPNO:
166 if (!test_facility(57)) /* check for MSA5 */
167 return 0;
168 break;
169 default:
170 BUG();
171 }
172 __cpacf_query(opcode, status);
173 return (status[func >> 3] & (0x80 >> (func & 7))) != 0;
174}
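The status block returned by the query function is a 128-bit map in which bit n, counted from the leftmost bit of byte 0, is set when function code n is installed; that is what the status[func >> 3] & (0x80 >> (func & 7)) test above decodes. A self-contained check of that indexing with a hand-filled status buffer (func_installed() is only an illustration, not part of the header):

#include <stdio.h>

/* Same bit-map lookup as cpacf_query(), on a caller-supplied status block. */
static int func_installed(const unsigned char status[16], unsigned int func)
{
	return (status[func >> 3] & (0x80 >> (func & 7))) != 0;
}

int main(void)
{
	unsigned char status[16] = { 0 };

	/* Pretend function code 0x43 (KMC-PRNG) is installed: byte 8, mask 0x10. */
	status[8] = 0x10;
	printf("0x43: %d\n", func_installed(status, 0x43)); /* prints 1 */
	printf("0x12: %d\n", func_installed(status, 0x12)); /* prints 0 */
	return 0;
}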
175
176/**
177 * cpacf_km() - executes the KM (CIPHER MESSAGE) instruction
178 * @func: the function code passed to KM; see CPACF_KM_xxx defines
179 * @param: address of parameter block; see POP for details on each func
180 * @dest: address of destination memory area
181 * @src: address of source memory area
182 * @src_len: length of src operand in bytes
183 *
184 * Returns 0 for the query func, number of processed bytes for
185 * encryption/decryption funcs
186 */
187static inline int cpacf_km(long func, void *param,
188 u8 *dest, const u8 *src, long src_len)
189{
190 register unsigned long r0 asm("0") = (unsigned long) func;
191 register unsigned long r1 asm("1") = (unsigned long) param;
192 register unsigned long r2 asm("2") = (unsigned long) src;
193 register unsigned long r3 asm("3") = (unsigned long) src_len;
194 register unsigned long r4 asm("4") = (unsigned long) dest;
195
196 asm volatile(
197 "0: .insn rre,%[opc] << 16,%[dst],%[src]\n"
198 " brc 1,0b\n" /* handle partial completion */
199 : [src] "+a" (r2), [len] "+d" (r3), [dst] "+a" (r4)
200 : [fc] "d" (r0), [pba] "a" (r1), [opc] "i" (CPACF_KM)
201 : "cc", "memory");
202
203 return src_len - r3;
204}
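A minimal sketch of a single-block AES-128 ECB encryption through the wrapper above. The parameter block layout (just the 16-byte key for KM-AES-128) is an assumption taken from the Principles of Operation, and example_km_aes128() is not part of the patch:

#include <linux/types.h>
#include <linux/string.h>
#include <linux/errno.h>
#include <asm/cpacf.h>

/* Encrypt one 16-byte block in place; returns 0 on success. */
static int example_km_aes128(const u8 key[16], u8 block[16])
{
	u8 pb[16];	/* assumed KM-AES-128 parameter block: the key */

	memcpy(pb, key, 16);
	if (cpacf_km(CPACF_KM_AES_128_ENC, pb, block, block, 16) != 16)
		return -EIO;
	return 0;
}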
205
206/**
207 * cpacf_kmc() - executes the KMC (CIPHER MESSAGE WITH CHAINING) instruction
208 * @func: the function code passed to KMC; see CPACF_KMC_xxx defines
209 * @param: address of parameter block; see POP for details on each func
210 * @dest: address of destination memory area
211 * @src: address of source memory area
212 * @src_len: length of src operand in bytes
213 *
214 * Returns 0 for the query func, number of processed bytes for
215 * encryption/decryption funcs
216 */
217static inline int cpacf_kmc(long func, void *param,
218 u8 *dest, const u8 *src, long src_len)
219{
220 register unsigned long r0 asm("0") = (unsigned long) func;
221 register unsigned long r1 asm("1") = (unsigned long) param;
222 register unsigned long r2 asm("2") = (unsigned long) src;
223 register unsigned long r3 asm("3") = (unsigned long) src_len;
224 register unsigned long r4 asm("4") = (unsigned long) dest;
225
226 asm volatile(
227 "0: .insn rre,%[opc] << 16,%[dst],%[src]\n"
228 " brc 1,0b\n" /* handle partial completion */
229 : [src] "+a" (r2), [len] "+d" (r3), [dst] "+a" (r4)
230 : [fc] "d" (r0), [pba] "a" (r1), [opc] "i" (CPACF_KMC)
231 : "cc", "memory");
232
233 return src_len - r3;
234}
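KMC differs from KM in that the chaining value lives in the parameter block and is updated by the instruction, so CBC state carries across calls without extra copying. A hedged sketch; the IV-before-key layout for KMC-AES-128 is an assumption, and the struct and function names are made up:

#include <linux/types.h>
#include <linux/errno.h>
#include <asm/cpacf.h>

struct example_cbc_param {
	u8 iv[16];	/* chaining value, updated in place by KMC */
	u8 key[16];	/* AES-128 key */
};

/* CBC-encrypt len bytes (a multiple of 16) in place. */
static int example_kmc_aes128(struct example_cbc_param *pb, u8 *buf, long len)
{
	if (cpacf_kmc(CPACF_KMC_AES_128_ENC, pb, buf, buf, len) != len)
		return -EIO;
	return 0;
}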
235
236/**
237 * cpacf_kimd() - executes the KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST)
238 * instruction
239 * @func: the function code passed to KIMD; see CPACF_KIMD_xxx defines
240 * @param: address of parameter block; see POP for details on each func
241 * @src: address of source memory area
242 * @src_len: length of src operand in bytes
243 *
244 * Returns 0 for the query func, number of processed bytes for digest funcs
245 */
246static inline int cpacf_kimd(long func, void *param,
247 const u8 *src, long src_len)
248{
249 register unsigned long r0 asm("0") = (unsigned long) func;
250 register unsigned long r1 asm("1") = (unsigned long) param;
251 register unsigned long r2 asm("2") = (unsigned long) src;
252 register unsigned long r3 asm("3") = (unsigned long) src_len;
253
254 asm volatile(
255 "0: .insn rre,%[opc] << 16,0,%[src]\n"
256 " brc 1,0b\n" /* handle partial completion */
257 : [src] "+a" (r2), [len] "+d" (r3)
258 : [fc] "d" (r0), [pba] "a" (r1), [opc] "i" (CPACF_KIMD)
259 : "cc", "memory");
260
261 return src_len - r3;
262}
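KIMD digests complete blocks only and keeps the running hash in the parameter block; the drivers above therefore do the final padding themselves (s390_sha_final() in the sha_common.c hunk) rather than using KLMD. A sketch under the assumption that the KIMD-SHA-256 parameter block is the eight 32-bit H words, initialised by the caller to SHA256_H0..SHA256_H7:

#include <linux/types.h>
#include <linux/errno.h>
#include <crypto/sha.h>
#include <asm/cpacf.h>

/* Fold nblocks complete 64-byte blocks into an assumed SHA-256 state. */
static int example_kimd_sha256(u32 state[8], const u8 *data, long nblocks)
{
	long len = nblocks * SHA256_BLOCK_SIZE;

	if (cpacf_kimd(CPACF_KIMD_SHA_256, state, data, len) != len)
		return -EIO;
	return 0;	/* state[] now holds the intermediate digest */
}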
263
264/**
265 * cpacf_klmd() - executes the KLMD (COMPUTE LAST MESSAGE DIGEST) instruction
266 * @func: the function code passed to KLMD; see CPACF_KLMD_xxx defines
267 * @param: address of parameter block; see POP for details on each func
268 * @src: address of source memory area
269 * @src_len: length of src operand in bytes
270 *
271 * Returns 0 for the query func, number of processed bytes for digest funcs
272 */
273static inline int cpacf_klmd(long func, void *param,
274 const u8 *src, long src_len)
275{
276 register unsigned long r0 asm("0") = (unsigned long) func;
277 register unsigned long r1 asm("1") = (unsigned long) param;
278 register unsigned long r2 asm("2") = (unsigned long) src;
279 register unsigned long r3 asm("3") = (unsigned long) src_len;
280
281 asm volatile(
282 "0: .insn rre,%[opc] << 16,0,%[src]\n"
283 " brc 1,0b\n" /* handle partial completion */
284 : [src] "+a" (r2), [len] "+d" (r3)
285 : [fc] "d" (r0), [pba] "a" (r1), [opc] "i" (CPACF_KLMD)
286 : "cc", "memory");
287
288 return src_len - r3;
289}
290
291/**
292 * cpacf_kmac() - executes the KMAC (COMPUTE MESSAGE AUTHENTICATION CODE)
293 * instruction
294 * @func: the function code passed to KMAC; see CPACF_KMAC_xxx defines
295 * @param: address of parameter block; see POP for details on each func
296 * @src: address of source memory area
297 * @src_len: length of src operand in bytes
298 *
299 * Returns 0 for the query func, number of processed bytes for the MAC funcs
300 */
301static inline int cpacf_kmac(long func, void *param,
302 const u8 *src, long src_len)
303{
304 register unsigned long r0 asm("0") = (unsigned long) func;
305 register unsigned long r1 asm("1") = (unsigned long) param;
306 register unsigned long r2 asm("2") = (unsigned long) src;
307 register unsigned long r3 asm("3") = (unsigned long) src_len;
308
309 asm volatile(
310 "0: .insn rre,%[opc] << 16,0,%[src]\n"
311 " brc 1,0b\n" /* handle partial completion */
312 : [src] "+a" (r2), [len] "+d" (r3)
313 : [fc] "d" (r0), [pba] "a" (r1), [opc] "i" (CPACF_KMAC)
314 : "cc", "memory");
315
316 return src_len - r3;
317}
318
319/**
320 * cpacf_kmctr() - executes the KMCTR (CIPHER MESSAGE WITH COUNTER) instruction
321 * @func: the function code passed to KMCTR; see CPACF_KMCTR_xxx defines
322 * @param: address of parameter block; see POP for details on each func
323 * @dest: address of destination memory area
324 * @src: address of source memory area
325 * @src_len: length of src operand in bytes
326 * @counter: address of counter value
327 *
328 * Returns 0 for the query func, number of processed bytes for
329 * encryption/decryption funcs
330 */
331static inline int cpacf_kmctr(long func, void *param, u8 *dest,
332 const u8 *src, long src_len, u8 *counter)
333{
334 register unsigned long r0 asm("0") = (unsigned long) func;
335 register unsigned long r1 asm("1") = (unsigned long) param;
336 register unsigned long r2 asm("2") = (unsigned long) src;
337 register unsigned long r3 asm("3") = (unsigned long) src_len;
338 register unsigned long r4 asm("4") = (unsigned long) dest;
339 register unsigned long r6 asm("6") = (unsigned long) counter;
340
341 asm volatile(
342 "0: .insn rrf,%[opc] << 16,%[dst],%[src],%[ctr],0\n"
343 " brc 1,0b\n" /* handle partial completion */
344 : [src] "+a" (r2), [len] "+d" (r3),
345 [dst] "+a" (r4), [ctr] "+a" (r6)
346 : [fc] "d" (r0), [pba] "a" (r1), [opc] "i" (CPACF_KMCTR)
347 : "cc", "memory");
348
349 return src_len - r3;
350}
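Unlike KM and KMC, KMCTR receives the counter through a separate operand, and for multi-block requests the caller must supply one pre-incremented counter value per block. To keep the assumptions small, this sketch handles exactly one block; the key-only parameter block for KMCTR-AES-128 is an assumption and the helper name is made up:

#include <linux/types.h>
#include <linux/string.h>
#include <linux/errno.h>
#include <asm/cpacf.h>

/* CTR-encrypt a single 16-byte block in place with the given counter block. */
static int example_kmctr_aes128(const u8 key[16], u8 ctr[16], u8 block[16])
{
	u8 pb[16];	/* assumed KMCTR-AES-128 parameter block: the key */

	memcpy(pb, key, 16);
	if (cpacf_kmctr(CPACF_KMCTR_AES_128_ENC, pb, block, block, 16, ctr) != 16)
		return -EIO;
	return 0;
}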
351
352/**
353 * cpacf_ppno() - executes the PPNO (PERFORM PSEUDORANDOM NUMBER OPERATION)
354 * instruction
355 * @func: the function code passed to PPNO; see CPACF_PPNO_xxx defines
356 * @param: address of parameter block; see POP for details on each func
357 * @dest: address of destination memory area
358 * @dest_len: size of destination memory area in bytes
359 * @seed: address of seed data
360 * @seed_len: size of seed data in bytes
361 *
362 * Returns 0 for the query func, number of random bytes stored in
363 * dest buffer for generate function
364 */
365static inline int cpacf_ppno(long func, void *param,
366 u8 *dest, long dest_len,
367 const u8 *seed, long seed_len)
368{
369 register unsigned long r0 asm("0") = (unsigned long) func;
370 register unsigned long r1 asm("1") = (unsigned long) param;
371 register unsigned long r2 asm("2") = (unsigned long) dest;
372 register unsigned long r3 asm("3") = (unsigned long) dest_len;
373 register unsigned long r4 asm("4") = (unsigned long) seed;
374 register unsigned long r5 asm("5") = (unsigned long) seed_len;
375
376 asm volatile (
377 "0: .insn rre,%[opc] << 16,%[dst],%[seed]\n"
378 " brc 1,0b\n" /* handle partial completion */
379 : [dst] "+a" (r2), [dlen] "+d" (r3)
380 : [fc] "d" (r0), [pba] "a" (r1),
381 [seed] "a" (r4), [slen] "d" (r5), [opc] "i" (CPACF_PPNO)
382 : "cc", "memory");
383
384 return dest_len - r3;
385}
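The prng.c hunks earlier in the patch use this wrapper as a seed-then-generate pair against a persistent parameter block. A compressed sketch of that call sequence; the 240-byte working-state size and the quality of the seed are assumptions here, and the prng driver above is the authoritative user:

#include <linux/types.h>
#include <linux/errno.h>
#include <asm/cpacf.h>

/* Seed the SHA-512 DRNG state, then pull nbytes of pseudo-random output. */
static int example_ppno(u8 ws[240], const u8 *seed, long slen,
			u8 *out, long nbytes)
{
	if (cpacf_ppno(CPACF_PPNO_SHA512_DRNG_SEED, ws, NULL, 0, seed, slen) != 0)
		return -EIO;
	if (cpacf_ppno(CPACF_PPNO_SHA512_DRNG_GEN, ws, out, nbytes, NULL, 0) != nbytes)
		return -EIO;
	return 0;
}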
386
387/**
388 * cpacf_pcc() - executes the PCC (PERFORM CRYPTOGRAPHIC COMPUTATION)
389 * instruction
390 * @func: the function code passed to PCC; see CPACF_KM_xxx defines
391 * @param: address of parameter block; see POP for details on each func
392 *
393 * Returns 0.
394 */
395static inline int cpacf_pcc(long func, void *param)
396{
397 register unsigned long r0 asm("0") = (unsigned long) func;
398 register unsigned long r1 asm("1") = (unsigned long) param;
399
400 asm volatile(
401 "0: .insn rre,%[opc] << 16,0,0\n" /* PCC opcode */
402 " brc 1,0b\n" /* handle partial completion */
403 :
404 : [fc] "d" (r0), [pba] "a" (r1), [opc] "i" (CPACF_PCC)
405 : "cc", "memory");
406
407 return 0;
408}
409
410#endif /* _ASM_S390_CPACF_H */