author    Linus Torvalds <torvalds@linux-foundation.org>  2014-10-15 01:30:52 -0400
committer Linus Torvalds <torvalds@linux-foundation.org>  2014-10-15 01:30:52 -0400
commit    6929c358972facf2999f8768815c40dd88514fc2
tree      b7180709c0d16ef5f2e7344b94b1ca6cfa7461bb
parent    23971bdffff5f7c904131dfb41c186711dc2c418
parent    4c5c30249452aaebf258751ea4222eba3dd3da4c
Merge tag 'llvmlinux-for-v3.18' of git://git.linuxfoundation.org/llvmlinux/kernel
Pull LLVM updates from Behan Webster:
 "These patches remove the use of VLAIS using a new SHASH_DESC_ON_STACK
  macro.  Some of the previously accepted VLAIS removal patches haven't
  used this macro.  I will push new patches to consistently use this
  macro in all those older cases for 3.19"

[ More LLVM patches coming in through subsystem trees, and LLVM itself
  needs some fixes that are already in many distributions but not in
  released versions of LLVM.  Some day this will all "just work" - Linus ]

* tag 'llvmlinux-for-v3.18' of git://git.linuxfoundation.org/llvmlinux/kernel:
  crypto: LLVMLinux: Remove VLAIS usage from crypto/testmgr.c
  security, crypto: LLVMLinux: Remove VLAIS from ima_crypto.c
  crypto: LLVMLinux: Remove VLAIS usage from libcrc32c.c
  crypto: LLVMLinux: Remove VLAIS usage from crypto/hmac.c
  crypto, dm: LLVMLinux: Remove VLAIS usage from dm-crypt
  crypto: LLVMLinux: Remove VLAIS from crypto/.../qat_algs.c
  crypto: LLVMLinux: Remove VLAIS from crypto/omap_sham.c
  crypto: LLVMLinux: Remove VLAIS from crypto/n2_core.c
  crypto: LLVMLinux: Remove VLAIS from crypto/mv_cesa.c
  crypto: LLVMLinux: Remove VLAIS from crypto/ccp/ccp-crypto-sha.c
  btrfs: LLVMLinux: Remove VLAIS
  crypto: LLVMLinux: Add macro to remove use of VLAIS in crypto code
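For context: VLAIS (variable length array in struct) is the GNU C extension used in the old dm-crypt code below, where a struct member's size depends on a runtime value such as crypto_shash_descsize(); Clang rejects it, which is what motivates this series. The hunks that follow replace that pattern with the new SHASH_DESC_ON_STACK macro from <crypto/hash.h>. As a minimal sketch of the resulting calling convention, here is a hypothetical example_digest() helper (not part of this patch) written against the shash API as it looks in this kernel version:

#include <linux/types.h>
#include <crypto/hash.h>	/* struct shash_desc, SHASH_DESC_ON_STACK */

/*
 * Hypothetical helper, for illustration only: hash "len" bytes of "data"
 * with an already-allocated synchronous hash transform and write the
 * digest to "out".
 */
static int example_digest(struct crypto_shash *tfm,
			  const u8 *data, unsigned int len, u8 *out)
{
	/*
	 * Old pattern (VLAIS), rejected by Clang:
	 *
	 *	struct {
	 *		struct shash_desc desc;
	 *		char ctx[crypto_shash_descsize(tfm)];
	 *	} sdesc;
	 *
	 * New pattern: the macro declares a suitably sized and aligned
	 * on-stack buffer plus a "struct shash_desc *desc" pointing at it,
	 * so the descriptor is accessed as desc-> instead of sdesc.desc.
	 */
	SHASH_DESC_ON_STACK(desc, tfm);
	int r;

	desc->tfm = tfm;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	r = crypto_shash_init(desc);
	if (r)
		return r;
	r = crypto_shash_update(desc, data, len);
	if (r)
		return r;
	return crypto_shash_final(desc, out);
}

Because the buffer still lives on the caller's stack, allocation and lifetime are unchanged relative to the old VLAIS struct; only the spelling of the descriptor accesses changes, which is exactly what the hunks below do in crypt_iv_lmk_one() and crypt_iv_tcw_whitening().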
Diffstat (limited to 'drivers/md/dm-crypt.c')
-rw-r--r--  drivers/md/dm-crypt.c  |  34 ++++++++++++++--------------------
1 file changed, 14 insertions(+), 20 deletions(-)
diff --git a/drivers/md/dm-crypt.c b/drivers/md/dm-crypt.c
index cd15e0801228..fc93b9330af4 100644
--- a/drivers/md/dm-crypt.c
+++ b/drivers/md/dm-crypt.c
@@ -526,29 +526,26 @@ static int crypt_iv_lmk_one(struct crypt_config *cc, u8 *iv,
 			    u8 *data)
 {
 	struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk;
-	struct {
-		struct shash_desc desc;
-		char ctx[crypto_shash_descsize(lmk->hash_tfm)];
-	} sdesc;
+	SHASH_DESC_ON_STACK(desc, lmk->hash_tfm);
 	struct md5_state md5state;
 	__le32 buf[4];
 	int i, r;
 
-	sdesc.desc.tfm = lmk->hash_tfm;
-	sdesc.desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+	desc->tfm = lmk->hash_tfm;
+	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
 
-	r = crypto_shash_init(&sdesc.desc);
+	r = crypto_shash_init(desc);
 	if (r)
 		return r;
 
 	if (lmk->seed) {
-		r = crypto_shash_update(&sdesc.desc, lmk->seed, LMK_SEED_SIZE);
+		r = crypto_shash_update(desc, lmk->seed, LMK_SEED_SIZE);
 		if (r)
 			return r;
 	}
 
 	/* Sector is always 512B, block size 16, add data of blocks 1-31 */
-	r = crypto_shash_update(&sdesc.desc, data + 16, 16 * 31);
+	r = crypto_shash_update(desc, data + 16, 16 * 31);
 	if (r)
 		return r;
 
@@ -557,12 +554,12 @@ static int crypt_iv_lmk_one(struct crypt_config *cc, u8 *iv,
 	buf[1] = cpu_to_le32((((u64)dmreq->iv_sector >> 32) & 0x00FFFFFF) | 0x80000000);
 	buf[2] = cpu_to_le32(4024);
 	buf[3] = 0;
-	r = crypto_shash_update(&sdesc.desc, (u8 *)buf, sizeof(buf));
+	r = crypto_shash_update(desc, (u8 *)buf, sizeof(buf));
 	if (r)
 		return r;
 
 	/* No MD5 padding here */
-	r = crypto_shash_export(&sdesc.desc, &md5state);
+	r = crypto_shash_export(desc, &md5state);
 	if (r)
 		return r;
 
@@ -679,10 +676,7 @@ static int crypt_iv_tcw_whitening(struct crypt_config *cc,
 	struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw;
 	u64 sector = cpu_to_le64((u64)dmreq->iv_sector);
 	u8 buf[TCW_WHITENING_SIZE];
-	struct {
-		struct shash_desc desc;
-		char ctx[crypto_shash_descsize(tcw->crc32_tfm)];
-	} sdesc;
+	SHASH_DESC_ON_STACK(desc, tcw->crc32_tfm);
 	int i, r;
 
 	/* xor whitening with sector number */
@@ -691,16 +685,16 @@ static int crypt_iv_tcw_whitening(struct crypt_config *cc,
 	crypto_xor(&buf[8], (u8 *)&sector, 8);
 
 	/* calculate crc32 for every 32bit part and xor it */
-	sdesc.desc.tfm = tcw->crc32_tfm;
-	sdesc.desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+	desc->tfm = tcw->crc32_tfm;
+	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
 	for (i = 0; i < 4; i++) {
-		r = crypto_shash_init(&sdesc.desc);
+		r = crypto_shash_init(desc);
 		if (r)
 			goto out;
-		r = crypto_shash_update(&sdesc.desc, &buf[i * 4], 4);
+		r = crypto_shash_update(desc, &buf[i * 4], 4);
 		if (r)
 			goto out;
-		r = crypto_shash_final(&sdesc.desc, &buf[i * 4]);
+		r = crypto_shash_final(desc, &buf[i * 4]);
 		if (r)
 			goto out;
 	}