diff options
author:    Kees Cook <keescook@chromium.org>          2018-07-15 23:59:12 -0400
committer: Mike Snitzer <snitzer@redhat.com>          2018-07-27 15:24:28 -0400
commit:    c07c88f54f2323516e8038aa9301ab0db4812c81 (patch)
tree:      d4ce17f49ce595adb276aee60aa02684516f27d1
parent:    c7329eff72aa237d6bedef6dc57c93dc048d2a16 (diff)
dm crypt: convert essiv from ahash to shash
In preparing to remove all stack VLA usage from the kernel[1], remove
the discouraged use of AHASH_REQUEST_ON_STACK in favor of the smaller
SHASH_DESC_ON_STACK by converting from ahash-wrapped-shash to direct
shash. The stack allocation will be made a fixed size in a later patch
to the crypto subsystem.
[1] https://lkml.kernel.org/r/CA+55aFzCG-zNmZwX4A2FQpadafLfEzK6CC=qPXydAacU1RqZWA@mail.gmail.com
Signed-off-by: Kees Cook <keescook@chromium.org>
Reviewed-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Mike Snitzer <snitzer@redhat.com>
 drivers/md/dm-crypt.c | 31 ++++++++++++++-----------------
 1 file changed, 14 insertions(+), 17 deletions(-)
diff --git a/drivers/md/dm-crypt.c b/drivers/md/dm-crypt.c
index c406767cb9b7..d412bd4b911c 100644
--- a/drivers/md/dm-crypt.c
+++ b/drivers/md/dm-crypt.c
@@ -99,7 +99,7 @@ struct crypt_iv_operations {
 };
 
 struct iv_essiv_private {
-	struct crypto_ahash *hash_tfm;
+	struct crypto_shash *hash_tfm;
 	u8 *salt;
 };
 
@@ -327,25 +327,22 @@ static int crypt_iv_plain64be_gen(struct crypt_config *cc, u8 *iv, | |||
327 | static int crypt_iv_essiv_init(struct crypt_config *cc) | 327 | static int crypt_iv_essiv_init(struct crypt_config *cc) |
328 | { | 328 | { |
329 | struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv; | 329 | struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv; |
330 | AHASH_REQUEST_ON_STACK(req, essiv->hash_tfm); | 330 | SHASH_DESC_ON_STACK(desc, essiv->hash_tfm); |
331 | struct scatterlist sg; | ||
332 | struct crypto_cipher *essiv_tfm; | 331 | struct crypto_cipher *essiv_tfm; |
333 | int err; | 332 | int err; |
334 | 333 | ||
335 | sg_init_one(&sg, cc->key, cc->key_size); | 334 | desc->tfm = essiv->hash_tfm; |
336 | ahash_request_set_tfm(req, essiv->hash_tfm); | 335 | desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP; |
337 | ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP, NULL, NULL); | ||
338 | ahash_request_set_crypt(req, &sg, essiv->salt, cc->key_size); | ||
339 | 336 | ||
340 | err = crypto_ahash_digest(req); | 337 | err = crypto_shash_digest(desc, cc->key, cc->key_size, essiv->salt); |
341 | ahash_request_zero(req); | 338 | shash_desc_zero(desc); |
342 | if (err) | 339 | if (err) |
343 | return err; | 340 | return err; |
344 | 341 | ||
345 | essiv_tfm = cc->iv_private; | 342 | essiv_tfm = cc->iv_private; |
346 | 343 | ||
347 | err = crypto_cipher_setkey(essiv_tfm, essiv->salt, | 344 | err = crypto_cipher_setkey(essiv_tfm, essiv->salt, |
348 | crypto_ahash_digestsize(essiv->hash_tfm)); | 345 | crypto_shash_digestsize(essiv->hash_tfm)); |
349 | if (err) | 346 | if (err) |
350 | return err; | 347 | return err; |
351 | 348 | ||
@@ -356,7 +353,7 @@ static int crypt_iv_essiv_init(struct crypt_config *cc)
 static int crypt_iv_essiv_wipe(struct crypt_config *cc)
 {
 	struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv;
-	unsigned salt_size = crypto_ahash_digestsize(essiv->hash_tfm);
+	unsigned salt_size = crypto_shash_digestsize(essiv->hash_tfm);
 	struct crypto_cipher *essiv_tfm;
 	int r, err = 0;
 
@@ -408,7 +405,7 @@ static void crypt_iv_essiv_dtr(struct crypt_config *cc)
 	struct crypto_cipher *essiv_tfm;
 	struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv;
 
-	crypto_free_ahash(essiv->hash_tfm);
+	crypto_free_shash(essiv->hash_tfm);
 	essiv->hash_tfm = NULL;
 
 	kzfree(essiv->salt);
@@ -426,7 +423,7 @@ static int crypt_iv_essiv_ctr(struct crypt_config *cc, struct dm_target *ti,
 			      const char *opts)
 {
 	struct crypto_cipher *essiv_tfm = NULL;
-	struct crypto_ahash *hash_tfm = NULL;
+	struct crypto_shash *hash_tfm = NULL;
 	u8 *salt = NULL;
 	int err;
 
@@ -436,14 +433,14 @@ static int crypt_iv_essiv_ctr(struct crypt_config *cc, struct dm_target *ti,
 	}
 
 	/* Allocate hash algorithm */
-	hash_tfm = crypto_alloc_ahash(opts, 0, CRYPTO_ALG_ASYNC);
+	hash_tfm = crypto_alloc_shash(opts, 0, 0);
 	if (IS_ERR(hash_tfm)) {
 		ti->error = "Error initializing ESSIV hash";
 		err = PTR_ERR(hash_tfm);
 		goto bad;
 	}
 
-	salt = kzalloc(crypto_ahash_digestsize(hash_tfm), GFP_KERNEL);
+	salt = kzalloc(crypto_shash_digestsize(hash_tfm), GFP_KERNEL);
 	if (!salt) {
 		ti->error = "Error kmallocing salt storage in ESSIV";
 		err = -ENOMEM;
@@ -454,7 +451,7 @@ static int crypt_iv_essiv_ctr(struct crypt_config *cc, struct dm_target *ti,
 	cc->iv_gen_private.essiv.hash_tfm = hash_tfm;
 
 	essiv_tfm = alloc_essiv_cipher(cc, ti, salt,
-				       crypto_ahash_digestsize(hash_tfm));
+				       crypto_shash_digestsize(hash_tfm));
 	if (IS_ERR(essiv_tfm)) {
 		crypt_iv_essiv_dtr(cc);
 		return PTR_ERR(essiv_tfm);
@@ -465,7 +462,7 @@ static int crypt_iv_essiv_ctr(struct crypt_config *cc, struct dm_target *ti,
 
 bad:
 	if (hash_tfm && !IS_ERR(hash_tfm))
-		crypto_free_ahash(hash_tfm);
+		crypto_free_shash(hash_tfm);
 	kfree(salt);
 	return err;
 }