diff options
author | Jan Glauber <jang@linux.vnet.ibm.com> | 2012-10-26 09:06:12 -0400 |
---|---|---|
committer | Martin Schwidefsky <schwidefsky@de.ibm.com> | 2012-11-23 05:14:27 -0500 |
commit | 36eb2caa7bace31b7868a57f77cb148e58d1c9f9 (patch) | |
tree | 440ee2134281ce81fec701f1f2ca4f2393f5659c /arch/s390 | |
parent | ce1d801462ce75f9ba84e0bb32a05e1a7c881efe (diff) |
s390/crypto: Don't panic after crypto instruction failures
Remove the BUG_ON()s that check for failures or incomplete
results of the s390 hardware crypto instructions.
Instead, report these errors as -EIO to the crypto layer.
Signed-off-by: Jan Glauber <jang@linux.vnet.ibm.com>
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
Diffstat (limited to 'arch/s390')
-rw-r--r-- | arch/s390/crypto/aes_s390.c | 18 | ||||
-rw-r--r-- | arch/s390/crypto/des_s390.c | 12 | ||||
-rw-r--r-- | arch/s390/crypto/ghash_s390.c | 21 | ||||
-rw-r--r-- | arch/s390/crypto/sha_common.c | 9 |
4 files changed, 39 insertions, 21 deletions
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c index da3c1a7dcd8e..b4dbade8ca24 100644 --- a/arch/s390/crypto/aes_s390.c +++ b/arch/s390/crypto/aes_s390.c | |||
@@ -325,7 +325,8 @@ static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param, | |||
325 | u8 *in = walk->src.virt.addr; | 325 | u8 *in = walk->src.virt.addr; |
326 | 326 | ||
327 | ret = crypt_s390_km(func, param, out, in, n); | 327 | ret = crypt_s390_km(func, param, out, in, n); |
328 | BUG_ON((ret < 0) || (ret != n)); | 328 | if (ret < 0 || ret != n) |
329 | return -EIO; | ||
329 | 330 | ||
330 | nbytes &= AES_BLOCK_SIZE - 1; | 331 | nbytes &= AES_BLOCK_SIZE - 1; |
331 | ret = blkcipher_walk_done(desc, walk, nbytes); | 332 | ret = blkcipher_walk_done(desc, walk, nbytes); |
@@ -457,7 +458,8 @@ static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param, | |||
457 | u8 *in = walk->src.virt.addr; | 458 | u8 *in = walk->src.virt.addr; |
458 | 459 | ||
459 | ret = crypt_s390_kmc(func, param, out, in, n); | 460 | ret = crypt_s390_kmc(func, param, out, in, n); |
460 | BUG_ON((ret < 0) || (ret != n)); | 461 | if (ret < 0 || ret != n) |
462 | return -EIO; | ||
461 | 463 | ||
462 | nbytes &= AES_BLOCK_SIZE - 1; | 464 | nbytes &= AES_BLOCK_SIZE - 1; |
463 | ret = blkcipher_walk_done(desc, walk, nbytes); | 465 | ret = blkcipher_walk_done(desc, walk, nbytes); |
@@ -625,7 +627,8 @@ static int xts_aes_crypt(struct blkcipher_desc *desc, long func, | |||
625 | memcpy(xts_ctx->pcc.tweak, walk->iv, sizeof(xts_ctx->pcc.tweak)); | 627 | memcpy(xts_ctx->pcc.tweak, walk->iv, sizeof(xts_ctx->pcc.tweak)); |
626 | param = xts_ctx->pcc.key + offset; | 628 | param = xts_ctx->pcc.key + offset; |
627 | ret = crypt_s390_pcc(func, param); | 629 | ret = crypt_s390_pcc(func, param); |
628 | BUG_ON(ret < 0); | 630 | if (ret < 0) |
631 | return -EIO; | ||
629 | 632 | ||
630 | memcpy(xts_ctx->xts_param, xts_ctx->pcc.xts, 16); | 633 | memcpy(xts_ctx->xts_param, xts_ctx->pcc.xts, 16); |
631 | param = xts_ctx->key + offset; | 634 | param = xts_ctx->key + offset; |
@@ -636,7 +639,8 @@ static int xts_aes_crypt(struct blkcipher_desc *desc, long func, | |||
636 | in = walk->src.virt.addr; | 639 | in = walk->src.virt.addr; |
637 | 640 | ||
638 | ret = crypt_s390_km(func, param, out, in, n); | 641 | ret = crypt_s390_km(func, param, out, in, n); |
639 | BUG_ON(ret < 0 || ret != n); | 642 | if (ret < 0 || ret != n) |
643 | return -EIO; | ||
640 | 644 | ||
641 | nbytes &= AES_BLOCK_SIZE - 1; | 645 | nbytes &= AES_BLOCK_SIZE - 1; |
642 | ret = blkcipher_walk_done(desc, walk, nbytes); | 646 | ret = blkcipher_walk_done(desc, walk, nbytes); |
@@ -769,7 +773,8 @@ static int ctr_aes_crypt(struct blkcipher_desc *desc, long func, | |||
769 | crypto_inc(ctrblk + i, AES_BLOCK_SIZE); | 773 | crypto_inc(ctrblk + i, AES_BLOCK_SIZE); |
770 | } | 774 | } |
771 | ret = crypt_s390_kmctr(func, sctx->key, out, in, n, ctrblk); | 775 | ret = crypt_s390_kmctr(func, sctx->key, out, in, n, ctrblk); |
772 | BUG_ON(ret < 0 || ret != n); | 776 | if (ret < 0 || ret != n) |
777 | return -EIO; | ||
773 | if (n > AES_BLOCK_SIZE) | 778 | if (n > AES_BLOCK_SIZE) |
774 | memcpy(ctrblk, ctrblk + n - AES_BLOCK_SIZE, | 779 | memcpy(ctrblk, ctrblk + n - AES_BLOCK_SIZE, |
775 | AES_BLOCK_SIZE); | 780 | AES_BLOCK_SIZE); |
@@ -788,7 +793,8 @@ static int ctr_aes_crypt(struct blkcipher_desc *desc, long func, | |||
788 | in = walk->src.virt.addr; | 793 | in = walk->src.virt.addr; |
789 | ret = crypt_s390_kmctr(func, sctx->key, buf, in, | 794 | ret = crypt_s390_kmctr(func, sctx->key, buf, in, |
790 | AES_BLOCK_SIZE, ctrblk); | 795 | AES_BLOCK_SIZE, ctrblk); |
791 | BUG_ON(ret < 0 || ret != AES_BLOCK_SIZE); | 796 | if (ret < 0 || ret != AES_BLOCK_SIZE) |
797 | return -EIO; | ||
792 | memcpy(out, buf, nbytes); | 798 | memcpy(out, buf, nbytes); |
793 | crypto_inc(ctrblk, AES_BLOCK_SIZE); | 799 | crypto_inc(ctrblk, AES_BLOCK_SIZE); |
794 | ret = blkcipher_walk_done(desc, walk, 0); | 800 | ret = blkcipher_walk_done(desc, walk, 0); |
diff --git a/arch/s390/crypto/des_s390.c b/arch/s390/crypto/des_s390.c index b49fb96f4207..bcca01c9989d 100644 --- a/arch/s390/crypto/des_s390.c +++ b/arch/s390/crypto/des_s390.c | |||
@@ -94,7 +94,8 @@ static int ecb_desall_crypt(struct blkcipher_desc *desc, long func, | |||
94 | u8 *in = walk->src.virt.addr; | 94 | u8 *in = walk->src.virt.addr; |
95 | 95 | ||
96 | ret = crypt_s390_km(func, key, out, in, n); | 96 | ret = crypt_s390_km(func, key, out, in, n); |
97 | BUG_ON((ret < 0) || (ret != n)); | 97 | if (ret < 0 || ret != n) |
98 | return -EIO; | ||
98 | 99 | ||
99 | nbytes &= DES_BLOCK_SIZE - 1; | 100 | nbytes &= DES_BLOCK_SIZE - 1; |
100 | ret = blkcipher_walk_done(desc, walk, nbytes); | 101 | ret = blkcipher_walk_done(desc, walk, nbytes); |
@@ -120,7 +121,8 @@ static int cbc_desall_crypt(struct blkcipher_desc *desc, long func, | |||
120 | u8 *in = walk->src.virt.addr; | 121 | u8 *in = walk->src.virt.addr; |
121 | 122 | ||
122 | ret = crypt_s390_kmc(func, iv, out, in, n); | 123 | ret = crypt_s390_kmc(func, iv, out, in, n); |
123 | BUG_ON((ret < 0) || (ret != n)); | 124 | if (ret < 0 || ret != n) |
125 | return -EIO; | ||
124 | 126 | ||
125 | nbytes &= DES_BLOCK_SIZE - 1; | 127 | nbytes &= DES_BLOCK_SIZE - 1; |
126 | ret = blkcipher_walk_done(desc, walk, nbytes); | 128 | ret = blkcipher_walk_done(desc, walk, nbytes); |
@@ -386,7 +388,8 @@ static int ctr_desall_crypt(struct blkcipher_desc *desc, long func, | |||
386 | crypto_inc(ctrblk + i, DES_BLOCK_SIZE); | 388 | crypto_inc(ctrblk + i, DES_BLOCK_SIZE); |
387 | } | 389 | } |
388 | ret = crypt_s390_kmctr(func, ctx->key, out, in, n, ctrblk); | 390 | ret = crypt_s390_kmctr(func, ctx->key, out, in, n, ctrblk); |
389 | BUG_ON((ret < 0) || (ret != n)); | 391 | if (ret < 0 || ret != n) |
392 | return -EIO; | ||
390 | if (n > DES_BLOCK_SIZE) | 393 | if (n > DES_BLOCK_SIZE) |
391 | memcpy(ctrblk, ctrblk + n - DES_BLOCK_SIZE, | 394 | memcpy(ctrblk, ctrblk + n - DES_BLOCK_SIZE, |
392 | DES_BLOCK_SIZE); | 395 | DES_BLOCK_SIZE); |
@@ -404,7 +407,8 @@ static int ctr_desall_crypt(struct blkcipher_desc *desc, long func, | |||
404 | in = walk->src.virt.addr; | 407 | in = walk->src.virt.addr; |
405 | ret = crypt_s390_kmctr(func, ctx->key, buf, in, | 408 | ret = crypt_s390_kmctr(func, ctx->key, buf, in, |
406 | DES_BLOCK_SIZE, ctrblk); | 409 | DES_BLOCK_SIZE, ctrblk); |
407 | BUG_ON(ret < 0 || ret != DES_BLOCK_SIZE); | 410 | if (ret < 0 || ret != DES_BLOCK_SIZE) |
411 | return -EIO; | ||
408 | memcpy(out, buf, nbytes); | 412 | memcpy(out, buf, nbytes); |
409 | crypto_inc(ctrblk, DES_BLOCK_SIZE); | 413 | crypto_inc(ctrblk, DES_BLOCK_SIZE); |
410 | ret = blkcipher_walk_done(desc, walk, 0); | 414 | ret = blkcipher_walk_done(desc, walk, 0); |
diff --git a/arch/s390/crypto/ghash_s390.c b/arch/s390/crypto/ghash_s390.c index 1ebd3a15cca4..d43485d142e9 100644 --- a/arch/s390/crypto/ghash_s390.c +++ b/arch/s390/crypto/ghash_s390.c | |||
@@ -72,14 +72,16 @@ static int ghash_update(struct shash_desc *desc, | |||
72 | if (!dctx->bytes) { | 72 | if (!dctx->bytes) { |
73 | ret = crypt_s390_kimd(KIMD_GHASH, ctx, buf, | 73 | ret = crypt_s390_kimd(KIMD_GHASH, ctx, buf, |
74 | GHASH_BLOCK_SIZE); | 74 | GHASH_BLOCK_SIZE); |
75 | BUG_ON(ret != GHASH_BLOCK_SIZE); | 75 | if (ret != GHASH_BLOCK_SIZE) |
76 | return -EIO; | ||
76 | } | 77 | } |
77 | } | 78 | } |
78 | 79 | ||
79 | n = srclen & ~(GHASH_BLOCK_SIZE - 1); | 80 | n = srclen & ~(GHASH_BLOCK_SIZE - 1); |
80 | if (n) { | 81 | if (n) { |
81 | ret = crypt_s390_kimd(KIMD_GHASH, ctx, src, n); | 82 | ret = crypt_s390_kimd(KIMD_GHASH, ctx, src, n); |
82 | BUG_ON(ret != n); | 83 | if (ret != n) |
84 | return -EIO; | ||
83 | src += n; | 85 | src += n; |
84 | srclen -= n; | 86 | srclen -= n; |
85 | } | 87 | } |
@@ -92,7 +94,7 @@ static int ghash_update(struct shash_desc *desc, | |||
92 | return 0; | 94 | return 0; |
93 | } | 95 | } |
94 | 96 | ||
95 | static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx) | 97 | static int ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx) |
96 | { | 98 | { |
97 | u8 *buf = dctx->buffer; | 99 | u8 *buf = dctx->buffer; |
98 | int ret; | 100 | int ret; |
@@ -103,21 +105,24 @@ static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx) | |||
103 | memset(pos, 0, dctx->bytes); | 105 | memset(pos, 0, dctx->bytes); |
104 | 106 | ||
105 | ret = crypt_s390_kimd(KIMD_GHASH, ctx, buf, GHASH_BLOCK_SIZE); | 107 | ret = crypt_s390_kimd(KIMD_GHASH, ctx, buf, GHASH_BLOCK_SIZE); |
106 | BUG_ON(ret != GHASH_BLOCK_SIZE); | 108 | if (ret != GHASH_BLOCK_SIZE) |
109 | return -EIO; | ||
107 | } | 110 | } |
108 | 111 | ||
109 | dctx->bytes = 0; | 112 | dctx->bytes = 0; |
113 | return 0; | ||
110 | } | 114 | } |
111 | 115 | ||
112 | static int ghash_final(struct shash_desc *desc, u8 *dst) | 116 | static int ghash_final(struct shash_desc *desc, u8 *dst) |
113 | { | 117 | { |
114 | struct ghash_desc_ctx *dctx = shash_desc_ctx(desc); | 118 | struct ghash_desc_ctx *dctx = shash_desc_ctx(desc); |
115 | struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm); | 119 | struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm); |
120 | int ret; | ||
116 | 121 | ||
117 | ghash_flush(ctx, dctx); | 122 | ret = ghash_flush(ctx, dctx); |
118 | memcpy(dst, ctx->icv, GHASH_BLOCK_SIZE); | 123 | if (!ret) |
119 | 124 | memcpy(dst, ctx->icv, GHASH_BLOCK_SIZE); | |
120 | return 0; | 125 | return ret; |
121 | } | 126 | } |
122 | 127 | ||
123 | static struct shash_alg ghash_alg = { | 128 | static struct shash_alg ghash_alg = { |
diff --git a/arch/s390/crypto/sha_common.c b/arch/s390/crypto/sha_common.c index bd37d09b9d3c..8620b0ec9c42 100644 --- a/arch/s390/crypto/sha_common.c +++ b/arch/s390/crypto/sha_common.c | |||
@@ -36,7 +36,8 @@ int s390_sha_update(struct shash_desc *desc, const u8 *data, unsigned int len) | |||
36 | if (index) { | 36 | if (index) { |
37 | memcpy(ctx->buf + index, data, bsize - index); | 37 | memcpy(ctx->buf + index, data, bsize - index); |
38 | ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, bsize); | 38 | ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, bsize); |
39 | BUG_ON(ret != bsize); | 39 | if (ret != bsize) |
40 | return -EIO; | ||
40 | data += bsize - index; | 41 | data += bsize - index; |
41 | len -= bsize - index; | 42 | len -= bsize - index; |
42 | index = 0; | 43 | index = 0; |
@@ -46,7 +47,8 @@ int s390_sha_update(struct shash_desc *desc, const u8 *data, unsigned int len) | |||
46 | if (len >= bsize) { | 47 | if (len >= bsize) { |
47 | ret = crypt_s390_kimd(ctx->func, ctx->state, data, | 48 | ret = crypt_s390_kimd(ctx->func, ctx->state, data, |
48 | len & ~(bsize - 1)); | 49 | len & ~(bsize - 1)); |
49 | BUG_ON(ret != (len & ~(bsize - 1))); | 50 | if (ret != (len & ~(bsize - 1))) |
51 | return -EIO; | ||
50 | data += ret; | 52 | data += ret; |
51 | len -= ret; | 53 | len -= ret; |
52 | } | 54 | } |
@@ -88,7 +90,8 @@ int s390_sha_final(struct shash_desc *desc, u8 *out) | |||
88 | memcpy(ctx->buf + end - 8, &bits, sizeof(bits)); | 90 | memcpy(ctx->buf + end - 8, &bits, sizeof(bits)); |
89 | 91 | ||
90 | ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, end); | 92 | ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, end); |
91 | BUG_ON(ret != end); | 93 | if (ret != end) |
94 | return -EIO; | ||
92 | 95 | ||
93 | /* copy digest to out */ | 96 | /* copy digest to out */ |
94 | memcpy(out, ctx->state, crypto_shash_digestsize(desc->tfm)); | 97 | memcpy(out, ctx->state, crypto_shash_digestsize(desc->tfm)); |