author	Linus Torvalds <torvalds@linux-foundation.org>	2014-10-15 01:30:52 -0400
committer	Linus Torvalds <torvalds@linux-foundation.org>	2014-10-15 01:30:52 -0400
commit	6929c358972facf2999f8768815c40dd88514fc2 (patch)
tree	b7180709c0d16ef5f2e7344b94b1ca6cfa7461bb /drivers
parent	23971bdffff5f7c904131dfb41c186711dc2c418 (diff)
parent	4c5c30249452aaebf258751ea4222eba3dd3da4c (diff)
Merge tag 'llvmlinux-for-v3.18' of git://git.linuxfoundation.org/llvmlinux/kernel
Pull LLVM updates from Behan Webster:
 "These patches remove the use of VLAIS using a new SHASH_DESC_ON_STACK
  macro.

  Some of the previously accepted VLAIS removal patches haven't used
  this macro.  I will push new patches to consistently use this macro
  in all those older cases for 3.19"

[ More LLVM patches coming in through subsystem trees, and LLVM itself
  needs some fixes that are already in many distributions but not in
  released versions of LLVM.  Some day this will all "just work"  - Linus ]

* tag 'llvmlinux-for-v3.18' of git://git.linuxfoundation.org/llvmlinux/kernel:
  crypto: LLVMLinux: Remove VLAIS usage from crypto/testmgr.c
  security, crypto: LLVMLinux: Remove VLAIS from ima_crypto.c
  crypto: LLVMLinux: Remove VLAIS usage from libcrc32c.c
  crypto: LLVMLinux: Remove VLAIS usage from crypto/hmac.c
  crypto, dm: LLVMLinux: Remove VLAIS usage from dm-crypt
  crypto: LLVMLinux: Remove VLAIS from crypto/.../qat_algs.c
  crypto: LLVMLinux: Remove VLAIS from crypto/omap_sham.c
  crypto: LLVMLinux: Remove VLAIS from crypto/n2_core.c
  crypto: LLVMLinux: Remove VLAIS from crypto/mv_cesa.c
  crypto: LLVMLinux: Remove VLAIS from crypto/ccp/ccp-crypto-sha.c
  btrfs: LLVMLinux: Remove VLAIS
  crypto: LLVMLinux: Add macro to remove use of VLAIS in crypto code
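For reference, the helper these patches switch to was added to include/crypto/hash.h by the last patch in the series. VLAIS (a variable-length array inside a struct) is a GCC extension that clang does not implement; the macro replaces it with a plain C99 VLA on the stack plus a shash_desc pointer aliasing its start. Quoted from memory of that patch, so treat the exact text as a sketch:

/* Sketch of the helper as introduced by this series (from memory):
 * a C99 VLA sized at runtime, with the descriptor at its head. */
#define SHASH_DESC_ON_STACK(shash, ctx)				  \
	char __##shash##_desc[sizeof(struct shash_desc) +	  \
		crypto_shash_descsize(ctx)] CRYPTO_MINALIGN_ATTR; \
	struct shash_desc *shash = (struct shash_desc *)__##shash##_desc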
Diffstat (limited to 'drivers')
-rw-r--r--	drivers/crypto/ccp/ccp-crypto-sha.c	13
-rw-r--r--	drivers/crypto/mv_cesa.c	41
-rw-r--r--	drivers/crypto/n2_core.c	11
-rw-r--r--	drivers/crypto/omap-sham.c	28
-rw-r--r--	drivers/crypto/qat/qat_common/qat_algs.c	31
-rw-r--r--	drivers/md/dm-crypt.c	34
6 files changed, 67 insertions, 91 deletions
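Every hunk below is the same mechanical conversion. As a summary of the pattern (identifiers borrowed from the ccp driver; the hypothetical `out` buffer stands in for the various destinations):

	/* Before: VLAIS - an array sized at runtime inside a struct,
	 * a GCC extension that clang rejects. */
	struct {
		struct shash_desc sdesc;
		char ctx[crypto_shash_descsize(shash)];
	} desc;
	desc.sdesc.tfm = shash;
	ret = crypto_shash_digest(&desc.sdesc, key, key_len, out);

	/* After: the macro declares the buffer and a ready-made pointer,
	 * so call sites pass sdesc directly instead of &desc.sdesc. */
	SHASH_DESC_ON_STACK(sdesc, shash);
	sdesc->tfm = shash;
	ret = crypto_shash_digest(sdesc, key, key_len, out);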
diff --git a/drivers/crypto/ccp/ccp-crypto-sha.c b/drivers/crypto/ccp/ccp-crypto-sha.c
index 873f23425245..96531571f7cf 100644
--- a/drivers/crypto/ccp/ccp-crypto-sha.c
+++ b/drivers/crypto/ccp/ccp-crypto-sha.c
@@ -198,10 +198,9 @@ static int ccp_sha_setkey(struct crypto_ahash *tfm, const u8 *key,
 {
 	struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
 	struct crypto_shash *shash = ctx->u.sha.hmac_tfm;
-	struct {
-		struct shash_desc sdesc;
-		char ctx[crypto_shash_descsize(shash)];
-	} desc;
+
+	SHASH_DESC_ON_STACK(sdesc, shash);
+
 	unsigned int block_size = crypto_shash_blocksize(shash);
 	unsigned int digest_size = crypto_shash_digestsize(shash);
 	int i, ret;
@@ -216,11 +215,11 @@ static int ccp_sha_setkey(struct crypto_ahash *tfm, const u8 *key,
 
 	if (key_len > block_size) {
 		/* Must hash the input key */
-		desc.sdesc.tfm = shash;
-		desc.sdesc.flags = crypto_ahash_get_flags(tfm) &
+		sdesc->tfm = shash;
+		sdesc->flags = crypto_ahash_get_flags(tfm) &
 			CRYPTO_TFM_REQ_MAY_SLEEP;
 
-		ret = crypto_shash_digest(&desc.sdesc, key, key_len,
+		ret = crypto_shash_digest(sdesc, key, key_len,
 					  ctx->u.sha.key);
 		if (ret) {
 			crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
diff --git a/drivers/crypto/mv_cesa.c b/drivers/crypto/mv_cesa.c
index 29d0ee504907..032c72c1f953 100644
--- a/drivers/crypto/mv_cesa.c
+++ b/drivers/crypto/mv_cesa.c
@@ -402,26 +402,23 @@ static int mv_hash_final_fallback(struct ahash_request *req)
 {
 	const struct mv_tfm_hash_ctx *tfm_ctx = crypto_tfm_ctx(req->base.tfm);
 	struct mv_req_hash_ctx *req_ctx = ahash_request_ctx(req);
-	struct {
-		struct shash_desc shash;
-		char ctx[crypto_shash_descsize(tfm_ctx->fallback)];
-	} desc;
+	SHASH_DESC_ON_STACK(shash, tfm_ctx->fallback);
 	int rc;
 
-	desc.shash.tfm = tfm_ctx->fallback;
-	desc.shash.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+	shash->tfm = tfm_ctx->fallback;
+	shash->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
 	if (unlikely(req_ctx->first_hash)) {
-		crypto_shash_init(&desc.shash);
-		crypto_shash_update(&desc.shash, req_ctx->buffer,
+		crypto_shash_init(shash);
+		crypto_shash_update(shash, req_ctx->buffer,
 				    req_ctx->extra_bytes);
 	} else {
 		/* only SHA1 for now....
 		 */
-		rc = mv_hash_import_sha1_ctx(req_ctx, &desc.shash);
+		rc = mv_hash_import_sha1_ctx(req_ctx, shash);
 		if (rc)
 			goto out;
 	}
-	rc = crypto_shash_final(&desc.shash, req->result);
+	rc = crypto_shash_final(shash, req->result);
 out:
 	return rc;
 }
@@ -794,23 +791,21 @@ static int mv_hash_setkey(struct crypto_ahash *tfm, const u8 * key,
 	ss = crypto_shash_statesize(ctx->base_hash);
 
 	{
-		struct {
-			struct shash_desc shash;
-			char ctx[crypto_shash_descsize(ctx->base_hash)];
-		} desc;
+		SHASH_DESC_ON_STACK(shash, ctx->base_hash);
+
 		unsigned int i;
 		char ipad[ss];
 		char opad[ss];
 
-		desc.shash.tfm = ctx->base_hash;
-		desc.shash.flags = crypto_shash_get_flags(ctx->base_hash) &
+		shash->tfm = ctx->base_hash;
+		shash->flags = crypto_shash_get_flags(ctx->base_hash) &
 			CRYPTO_TFM_REQ_MAY_SLEEP;
 
 		if (keylen > bs) {
 			int err;
 
 			err =
-			    crypto_shash_digest(&desc.shash, key, keylen, ipad);
+			    crypto_shash_digest(shash, key, keylen, ipad);
 			if (err)
 				return err;
 
@@ -826,12 +821,12 @@ static int mv_hash_setkey(struct crypto_ahash *tfm, const u8 * key,
 			opad[i] ^= 0x5c;
 		}
 
-		rc = crypto_shash_init(&desc.shash) ? :
-		     crypto_shash_update(&desc.shash, ipad, bs) ? :
-		     crypto_shash_export(&desc.shash, ipad) ? :
-		     crypto_shash_init(&desc.shash) ? :
-		     crypto_shash_update(&desc.shash, opad, bs) ? :
-		     crypto_shash_export(&desc.shash, opad);
+		rc = crypto_shash_init(shash) ? :
+		     crypto_shash_update(shash, ipad, bs) ? :
+		     crypto_shash_export(shash, ipad) ? :
+		     crypto_shash_init(shash) ? :
+		     crypto_shash_update(shash, opad, bs) ? :
+		     crypto_shash_export(shash, opad);
 
 		if (rc == 0)
 			mv_hash_init_ivs(ctx, ipad, opad);
diff --git a/drivers/crypto/n2_core.c b/drivers/crypto/n2_core.c
index 7263c10a56ee..f8e3207fecb1 100644
--- a/drivers/crypto/n2_core.c
+++ b/drivers/crypto/n2_core.c
@@ -445,10 +445,7 @@ static int n2_hmac_async_setkey(struct crypto_ahash *tfm, const u8 *key,
 	struct n2_hmac_ctx *ctx = crypto_ahash_ctx(tfm);
 	struct crypto_shash *child_shash = ctx->child_shash;
 	struct crypto_ahash *fallback_tfm;
-	struct {
-		struct shash_desc shash;
-		char ctx[crypto_shash_descsize(child_shash)];
-	} desc;
+	SHASH_DESC_ON_STACK(shash, child_shash);
 	int err, bs, ds;
 
 	fallback_tfm = ctx->base.fallback_tfm;
@@ -456,15 +453,15 @@ static int n2_hmac_async_setkey(struct crypto_ahash *tfm, const u8 *key,
 	if (err)
 		return err;
 
-	desc.shash.tfm = child_shash;
-	desc.shash.flags = crypto_ahash_get_flags(tfm) &
+	shash->tfm = child_shash;
+	shash->flags = crypto_ahash_get_flags(tfm) &
 		CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	bs = crypto_shash_blocksize(child_shash);
 	ds = crypto_shash_digestsize(child_shash);
 	BUG_ON(ds > N2_HASH_KEY_MAX);
 	if (keylen > bs) {
-		err = crypto_shash_digest(&desc.shash, key, keylen,
+		err = crypto_shash_digest(shash, key, keylen,
 					  ctx->hash_key);
 		if (err)
 			return err;
diff --git a/drivers/crypto/omap-sham.c b/drivers/crypto/omap-sham.c
index 710d86386965..24ef48965e45 100644
--- a/drivers/crypto/omap-sham.c
+++ b/drivers/crypto/omap-sham.c
@@ -949,17 +949,14 @@ static int omap_sham_finish_hmac(struct ahash_request *req)
 	struct omap_sham_hmac_ctx *bctx = tctx->base;
 	int bs = crypto_shash_blocksize(bctx->shash);
 	int ds = crypto_shash_digestsize(bctx->shash);
-	struct {
-		struct shash_desc shash;
-		char ctx[crypto_shash_descsize(bctx->shash)];
-	} desc;
+	SHASH_DESC_ON_STACK(shash, bctx->shash);
 
-	desc.shash.tfm = bctx->shash;
-	desc.shash.flags = 0;	/* not CRYPTO_TFM_REQ_MAY_SLEEP */
+	shash->tfm = bctx->shash;
+	shash->flags = 0;	/* not CRYPTO_TFM_REQ_MAY_SLEEP */
 
-	return crypto_shash_init(&desc.shash) ?:
-	       crypto_shash_update(&desc.shash, bctx->opad, bs) ?:
-	       crypto_shash_finup(&desc.shash, req->result, ds, req->result);
+	return crypto_shash_init(shash) ?:
+	       crypto_shash_update(shash, bctx->opad, bs) ?:
+	       crypto_shash_finup(shash, req->result, ds, req->result);
 }
 
 static int omap_sham_finish(struct ahash_request *req)
@@ -1118,18 +1115,15 @@ static int omap_sham_update(struct ahash_request *req)
 	return omap_sham_enqueue(req, OP_UPDATE);
 }
 
-static int omap_sham_shash_digest(struct crypto_shash *shash, u32 flags,
+static int omap_sham_shash_digest(struct crypto_shash *tfm, u32 flags,
 				  const u8 *data, unsigned int len, u8 *out)
 {
-	struct {
-		struct shash_desc shash;
-		char ctx[crypto_shash_descsize(shash)];
-	} desc;
+	SHASH_DESC_ON_STACK(shash, tfm);
 
-	desc.shash.tfm = shash;
-	desc.shash.flags = flags & CRYPTO_TFM_REQ_MAY_SLEEP;
+	shash->tfm = tfm;
+	shash->flags = flags & CRYPTO_TFM_REQ_MAY_SLEEP;
 
-	return crypto_shash_digest(&desc.shash, data, len, out);
+	return crypto_shash_digest(shash, data, len, out);
 }
 
 static int omap_sham_final_shash(struct ahash_request *req)
diff --git a/drivers/crypto/qat/qat_common/qat_algs.c b/drivers/crypto/qat/qat_common/qat_algs.c
index 3e26fa2b293f..f2e2f158cfbe 100644
--- a/drivers/crypto/qat/qat_common/qat_algs.c
+++ b/drivers/crypto/qat/qat_common/qat_algs.c
@@ -149,10 +149,7 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 			       unsigned int auth_keylen)
 {
 	struct qat_auth_state auth_state;
-	struct {
-		struct shash_desc shash;
-		char ctx[crypto_shash_descsize(ctx->hash_tfm)];
-	} desc;
+	SHASH_DESC_ON_STACK(shash, ctx->hash_tfm);
 	struct sha1_state sha1;
 	struct sha256_state sha256;
 	struct sha512_state sha512;
@@ -165,12 +162,12 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 	int i, offset;
 
 	memset(auth_state.data, '\0', MAX_AUTH_STATE_SIZE + 64);
-	desc.shash.tfm = ctx->hash_tfm;
-	desc.shash.flags = 0x0;
+	shash->tfm = ctx->hash_tfm;
+	shash->flags = 0x0;
 
 	if (auth_keylen > block_size) {
 		char buff[SHA512_BLOCK_SIZE];
-		int ret = crypto_shash_digest(&desc.shash, auth_key,
+		int ret = crypto_shash_digest(shash, auth_key,
 					      auth_keylen, buff);
 		if (ret)
 			return ret;
@@ -193,10 +190,10 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 		*opad_ptr ^= 0x5C;
 	}
 
-	if (crypto_shash_init(&desc.shash))
+	if (crypto_shash_init(shash))
 		return -EFAULT;
 
-	if (crypto_shash_update(&desc.shash, ipad, block_size))
+	if (crypto_shash_update(shash, ipad, block_size))
 		return -EFAULT;
 
 	hash_state_out = (__be32 *)hash->sha.state1;
@@ -204,19 +201,19 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 
 	switch (ctx->qat_hash_alg) {
 	case ICP_QAT_HW_AUTH_ALGO_SHA1:
-		if (crypto_shash_export(&desc.shash, &sha1))
+		if (crypto_shash_export(shash, &sha1))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
 			*hash_state_out = cpu_to_be32(*(sha1.state + i));
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA256:
-		if (crypto_shash_export(&desc.shash, &sha256))
+		if (crypto_shash_export(shash, &sha256))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
 			*hash_state_out = cpu_to_be32(*(sha256.state + i));
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA512:
-		if (crypto_shash_export(&desc.shash, &sha512))
+		if (crypto_shash_export(shash, &sha512))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 3; i++, hash512_state_out++)
 			*hash512_state_out = cpu_to_be64(*(sha512.state + i));
@@ -225,10 +222,10 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 		return -EFAULT;
 	}
 
-	if (crypto_shash_init(&desc.shash))
+	if (crypto_shash_init(shash))
 		return -EFAULT;
 
-	if (crypto_shash_update(&desc.shash, opad, block_size))
+	if (crypto_shash_update(shash, opad, block_size))
 		return -EFAULT;
 
 	offset = round_up(qat_get_inter_state_size(ctx->qat_hash_alg), 8);
@@ -237,19 +234,19 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 
 	switch (ctx->qat_hash_alg) {
 	case ICP_QAT_HW_AUTH_ALGO_SHA1:
-		if (crypto_shash_export(&desc.shash, &sha1))
+		if (crypto_shash_export(shash, &sha1))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
 			*hash_state_out = cpu_to_be32(*(sha1.state + i));
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA256:
-		if (crypto_shash_export(&desc.shash, &sha256))
+		if (crypto_shash_export(shash, &sha256))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
 			*hash_state_out = cpu_to_be32(*(sha256.state + i));
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA512:
-		if (crypto_shash_export(&desc.shash, &sha512))
+		if (crypto_shash_export(shash, &sha512))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 3; i++, hash512_state_out++)
 			*hash512_state_out = cpu_to_be64(*(sha512.state + i));
diff --git a/drivers/md/dm-crypt.c b/drivers/md/dm-crypt.c
index cd15e0801228..fc93b9330af4 100644
--- a/drivers/md/dm-crypt.c
+++ b/drivers/md/dm-crypt.c
@@ -526,29 +526,26 @@ static int crypt_iv_lmk_one(struct crypt_config *cc, u8 *iv,
 			    u8 *data)
 {
 	struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk;
-	struct {
-		struct shash_desc desc;
-		char ctx[crypto_shash_descsize(lmk->hash_tfm)];
-	} sdesc;
+	SHASH_DESC_ON_STACK(desc, lmk->hash_tfm);
 	struct md5_state md5state;
 	__le32 buf[4];
 	int i, r;
 
-	sdesc.desc.tfm = lmk->hash_tfm;
-	sdesc.desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+	desc->tfm = lmk->hash_tfm;
+	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
 
-	r = crypto_shash_init(&sdesc.desc);
+	r = crypto_shash_init(desc);
 	if (r)
 		return r;
 
 	if (lmk->seed) {
-		r = crypto_shash_update(&sdesc.desc, lmk->seed, LMK_SEED_SIZE);
+		r = crypto_shash_update(desc, lmk->seed, LMK_SEED_SIZE);
 		if (r)
 			return r;
 	}
 
 	/* Sector is always 512B, block size 16, add data of blocks 1-31 */
-	r = crypto_shash_update(&sdesc.desc, data + 16, 16 * 31);
+	r = crypto_shash_update(desc, data + 16, 16 * 31);
 	if (r)
 		return r;
 
@@ -557,12 +554,12 @@ static int crypt_iv_lmk_one(struct crypt_config *cc, u8 *iv,
 	buf[1] = cpu_to_le32((((u64)dmreq->iv_sector >> 32) & 0x00FFFFFF) | 0x80000000);
 	buf[2] = cpu_to_le32(4024);
 	buf[3] = 0;
-	r = crypto_shash_update(&sdesc.desc, (u8 *)buf, sizeof(buf));
+	r = crypto_shash_update(desc, (u8 *)buf, sizeof(buf));
 	if (r)
 		return r;
 
 	/* No MD5 padding here */
-	r = crypto_shash_export(&sdesc.desc, &md5state);
+	r = crypto_shash_export(desc, &md5state);
 	if (r)
 		return r;
 
@@ -679,10 +676,7 @@ static int crypt_iv_tcw_whitening(struct crypt_config *cc,
 	struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw;
 	u64 sector = cpu_to_le64((u64)dmreq->iv_sector);
 	u8 buf[TCW_WHITENING_SIZE];
-	struct {
-		struct shash_desc desc;
-		char ctx[crypto_shash_descsize(tcw->crc32_tfm)];
-	} sdesc;
+	SHASH_DESC_ON_STACK(desc, tcw->crc32_tfm);
 	int i, r;
 
 	/* xor whitening with sector number */
@@ -691,16 +685,16 @@ static int crypt_iv_tcw_whitening(struct crypt_config *cc,
 	crypto_xor(&buf[8], (u8 *)&sector, 8);
 
 	/* calculate crc32 for every 32bit part and xor it */
-	sdesc.desc.tfm = tcw->crc32_tfm;
-	sdesc.desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+	desc->tfm = tcw->crc32_tfm;
+	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
 	for (i = 0; i < 4; i++) {
-		r = crypto_shash_init(&sdesc.desc);
+		r = crypto_shash_init(desc);
 		if (r)
 			goto out;
-		r = crypto_shash_update(&sdesc.desc, &buf[i * 4], 4);
+		r = crypto_shash_update(desc, &buf[i * 4], 4);
 		if (r)
 			goto out;
-		r = crypto_shash_final(&sdesc.desc, &buf[i * 4]);
+		r = crypto_shash_final(desc, &buf[i * 4]);
 		if (r)
 			goto out;
 	}