Diffstat (limited to 'drivers/md/dm-crypt.c')
 -rw-r--r--  drivers/md/dm-crypt.c | 146
 1 file changed, 67 insertions(+), 79 deletions(-)
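The patch below converts dm-crypt from the legacy crypto_tfm interface to the crypto_blkcipher, crypto_hash and crypto_cipher APIs, where the chaining mode is part of the algorithm name and allocation failures come back as ERR_PTR values instead of NULL. As a minimal sketch of that allocation pattern (written for this note, not taken from the patch — the "cbc(aes)" string, the function name and the error handling are illustrative assumptions):

#include <linux/err.h>
#include <linux/crypto.h>

/* Sketch only: allocate and key a block cipher the way the new API
 * expects.  The algorithm string carries the chaining mode, and errors
 * are reported as ERR_PTR values rather than NULL. */
static struct crypto_blkcipher *example_alloc_tfm(const u8 *key,
						  unsigned int key_size)
{
	struct crypto_blkcipher *tfm;

	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return tfm;

	if (crypto_blkcipher_setkey(tfm, key, key_size)) {
		crypto_free_blkcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	return tfm;
}

Transforms obtained this way are what the converted crypt_ctr() below stores in cc->tfm.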
diff --git a/drivers/md/dm-crypt.c b/drivers/md/dm-crypt.c
index 6022ed12a795..bdbd34993a80 100644
--- a/drivers/md/dm-crypt.c
+++ b/drivers/md/dm-crypt.c
@@ -5,6 +5,7 @@
  * This file is released under the GPL.
  */
 
+#include <linux/err.h>
 #include <linux/module.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
@@ -78,11 +79,13 @@ struct crypt_config {
  */
 	struct crypt_iv_operations *iv_gen_ops;
 	char *iv_mode;
-	void *iv_gen_private;
+	struct crypto_cipher *iv_gen_private;
 	sector_t iv_offset;
 	unsigned int iv_size;
 
-	struct crypto_tfm *tfm;
+	char cipher[CRYPTO_MAX_ALG_NAME];
+	char chainmode[CRYPTO_MAX_ALG_NAME];
+	struct crypto_blkcipher *tfm;
 	unsigned int key_size;
 	u8 key[0];
 };
@@ -96,12 +99,12 @@ static kmem_cache_t *_crypt_io_pool;
 /*
  * Different IV generation algorithms:
  *
- * plain: the initial vector is the 32-bit low-endian version of the sector
+ * plain: the initial vector is the 32-bit little-endian version of the sector
  *        number, padded with zeros if neccessary.
  *
- * ess_iv: "encrypted sector|salt initial vector", the sector number is
+ * essiv: "encrypted sector|salt initial vector", the sector number is
  *        encrypted with the bulk cipher using a salt as key. The salt
  *        should be derived from the bulk cipher's key via hashing.
  *
  * plumb: unimplemented, see:
  *   http://article.gmane.org/gmane.linux.kernel.device-mapper.dm-crypt/454
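An aside on the comment above: the "plain" scheme amounts to writing the low 32 bits of the sector number into the IV buffer in little-endian order and zero-filling the rest. A sketch of that behaviour, written for this note and assuming the usual kernel byteorder helpers (the helper name is made up; this is not a hunk of the patch):

/* Sketch of the "plain" IV scheme described above: low 32 bits of the
 * sector number, little-endian, remainder of the buffer zeroed. */
static int plain_iv_sketch(u8 *iv, unsigned int iv_size, sector_t sector)
{
	memset(iv, 0, iv_size);
	*(u32 *)iv = cpu_to_le32(sector & 0xffffffff);
	return 0;
}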
@@ -118,11 +121,13 @@ static int crypt_iv_plain_gen(struct crypt_config *cc, u8 *iv, sector_t sector)
 static int crypt_iv_essiv_ctr(struct crypt_config *cc, struct dm_target *ti,
 			      const char *opts)
 {
-	struct crypto_tfm *essiv_tfm;
-	struct crypto_tfm *hash_tfm;
+	struct crypto_cipher *essiv_tfm;
+	struct crypto_hash *hash_tfm;
+	struct hash_desc desc;
 	struct scatterlist sg;
 	unsigned int saltsize;
 	u8 *salt;
+	int err;
 
 	if (opts == NULL) {
 		ti->error = "Digest algorithm missing for ESSIV mode";
@@ -130,76 +135,70 @@ static int crypt_iv_essiv_ctr(struct crypt_config *cc, struct dm_target *ti,
 	}
 
 	/* Hash the cipher key with the given hash algorithm */
-	hash_tfm = crypto_alloc_tfm(opts, CRYPTO_TFM_REQ_MAY_SLEEP);
-	if (hash_tfm == NULL) {
+	hash_tfm = crypto_alloc_hash(opts, 0, CRYPTO_ALG_ASYNC);
+	if (IS_ERR(hash_tfm)) {
 		ti->error = "Error initializing ESSIV hash";
-		return -EINVAL;
+		return PTR_ERR(hash_tfm);
 	}
 
-	if (crypto_tfm_alg_type(hash_tfm) != CRYPTO_ALG_TYPE_DIGEST) {
-		ti->error = "Expected digest algorithm for ESSIV hash";
-		crypto_free_tfm(hash_tfm);
-		return -EINVAL;
-	}
-
-	saltsize = crypto_tfm_alg_digestsize(hash_tfm);
+	saltsize = crypto_hash_digestsize(hash_tfm);
 	salt = kmalloc(saltsize, GFP_KERNEL);
 	if (salt == NULL) {
 		ti->error = "Error kmallocing salt storage in ESSIV";
-		crypto_free_tfm(hash_tfm);
+		crypto_free_hash(hash_tfm);
 		return -ENOMEM;
 	}
 
 	sg_set_buf(&sg, cc->key, cc->key_size);
-	crypto_digest_digest(hash_tfm, &sg, 1, salt);
-	crypto_free_tfm(hash_tfm);
+	desc.tfm = hash_tfm;
+	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+	err = crypto_hash_digest(&desc, &sg, cc->key_size, salt);
+	crypto_free_hash(hash_tfm);
+
+	if (err) {
+		ti->error = "Error calculating hash in ESSIV";
+		return err;
+	}
 
 	/* Setup the essiv_tfm with the given salt */
-	essiv_tfm = crypto_alloc_tfm(crypto_tfm_alg_name(cc->tfm),
-				     CRYPTO_TFM_MODE_ECB |
-				     CRYPTO_TFM_REQ_MAY_SLEEP);
-	if (essiv_tfm == NULL) {
+	essiv_tfm = crypto_alloc_cipher(cc->cipher, 0, CRYPTO_ALG_ASYNC);
+	if (IS_ERR(essiv_tfm)) {
 		ti->error = "Error allocating crypto tfm for ESSIV";
 		kfree(salt);
-		return -EINVAL;
+		return PTR_ERR(essiv_tfm);
 	}
-	if (crypto_tfm_alg_blocksize(essiv_tfm)
-	    != crypto_tfm_alg_ivsize(cc->tfm)) {
+	if (crypto_cipher_blocksize(essiv_tfm) !=
+	    crypto_blkcipher_ivsize(cc->tfm)) {
 		ti->error = "Block size of ESSIV cipher does "
 			    "not match IV size of block cipher";
-		crypto_free_tfm(essiv_tfm);
+		crypto_free_cipher(essiv_tfm);
 		kfree(salt);
 		return -EINVAL;
 	}
-	if (crypto_cipher_setkey(essiv_tfm, salt, saltsize) < 0) {
+	err = crypto_cipher_setkey(essiv_tfm, salt, saltsize);
+	if (err) {
 		ti->error = "Failed to set key for ESSIV cipher";
-		crypto_free_tfm(essiv_tfm);
+		crypto_free_cipher(essiv_tfm);
 		kfree(salt);
-		return -EINVAL;
+		return err;
 	}
 	kfree(salt);
 
-	cc->iv_gen_private = (void *)essiv_tfm;
+	cc->iv_gen_private = essiv_tfm;
 	return 0;
 }
 
 static void crypt_iv_essiv_dtr(struct crypt_config *cc)
 {
-	crypto_free_tfm((struct crypto_tfm *)cc->iv_gen_private);
+	crypto_free_cipher(cc->iv_gen_private);
 	cc->iv_gen_private = NULL;
 }
 
 static int crypt_iv_essiv_gen(struct crypt_config *cc, u8 *iv, sector_t sector)
 {
-	struct scatterlist sg;
-
 	memset(iv, 0, cc->iv_size);
 	*(u64 *)iv = cpu_to_le64(sector);
-
-	sg_set_buf(&sg, iv, cc->iv_size);
-	crypto_cipher_encrypt((struct crypto_tfm *)cc->iv_gen_private,
-			      &sg, &sg, cc->iv_size);
-
+	crypto_cipher_encrypt_one(cc->iv_gen_private, iv, iv);
 	return 0;
 }
 
@@ -220,6 +219,11 @@ crypt_convert_scatterlist(struct crypt_config *cc, struct scatterlist *out,
 		       int write, sector_t sector)
 {
 	u8 iv[cc->iv_size];
+	struct blkcipher_desc desc = {
+		.tfm = cc->tfm,
+		.info = iv,
+		.flags = CRYPTO_TFM_REQ_MAY_SLEEP,
+	};
 	int r;
 
 	if (cc->iv_gen_ops) {
@@ -228,14 +232,14 @@ crypt_convert_scatterlist(struct crypt_config *cc, struct scatterlist *out,
 			return r;
 
 		if (write)
-			r = crypto_cipher_encrypt_iv(cc->tfm, out, in, length, iv);
+			r = crypto_blkcipher_encrypt_iv(&desc, out, in, length);
 		else
-			r = crypto_cipher_decrypt_iv(cc->tfm, out, in, length, iv);
+			r = crypto_blkcipher_decrypt_iv(&desc, out, in, length);
 	} else {
 		if (write)
-			r = crypto_cipher_encrypt(cc->tfm, out, in, length);
+			r = crypto_blkcipher_encrypt(&desc, out, in, length);
 		else
-			r = crypto_cipher_decrypt(cc->tfm, out, in, length);
+			r = crypto_blkcipher_decrypt(&desc, out, in, length);
 	}
 
 	return r;
@@ -510,13 +514,12 @@ static void crypt_encode_key(char *hex, u8 *key, unsigned int size)
 static int crypt_ctr(struct dm_target *ti, unsigned int argc, char **argv)
 {
 	struct crypt_config *cc;
-	struct crypto_tfm *tfm;
+	struct crypto_blkcipher *tfm;
 	char *tmp;
 	char *cipher;
 	char *chainmode;
 	char *ivmode;
 	char *ivopts;
-	unsigned int crypto_flags;
 	unsigned int key_size;
 	unsigned long long tmpll;
 
@@ -556,31 +559,25 @@ static int crypt_ctr(struct dm_target *ti, unsigned int argc, char **argv)
 		ivmode = "plain";
 	}
 
-	/* Choose crypto_flags according to chainmode */
-	if (strcmp(chainmode, "cbc") == 0)
-		crypto_flags = CRYPTO_TFM_MODE_CBC;
-	else if (strcmp(chainmode, "ecb") == 0)
-		crypto_flags = CRYPTO_TFM_MODE_ECB;
-	else {
-		ti->error = "Unknown chaining mode";
+	if (strcmp(chainmode, "ecb") && !ivmode) {
+		ti->error = "This chaining mode requires an IV mechanism";
 		goto bad1;
 	}
 
-	if (crypto_flags != CRYPTO_TFM_MODE_ECB && !ivmode) {
-		ti->error = "This chaining mode requires an IV mechanism";
+	if (snprintf(cc->cipher, CRYPTO_MAX_ALG_NAME, "%s(%s)", chainmode,
+		     cipher) >= CRYPTO_MAX_ALG_NAME) {
+		ti->error = "Chain mode + cipher name is too long";
 		goto bad1;
 	}
 
-	tfm = crypto_alloc_tfm(cipher, crypto_flags | CRYPTO_TFM_REQ_MAY_SLEEP);
-	if (!tfm) {
+	tfm = crypto_alloc_blkcipher(cc->cipher, 0, CRYPTO_ALG_ASYNC);
+	if (IS_ERR(tfm)) {
 		ti->error = "Error allocating crypto tfm";
 		goto bad1;
 	}
-	if (crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER) {
-		ti->error = "Expected cipher algorithm";
-		goto bad2;
-	}
 
+	strcpy(cc->cipher, cipher);
+	strcpy(cc->chainmode, chainmode);
 	cc->tfm = tfm;
 
 	/*
@@ -603,12 +600,12 @@ static int crypt_ctr(struct dm_target *ti, unsigned int argc, char **argv)
 	    cc->iv_gen_ops->ctr(cc, ti, ivopts) < 0)
 		goto bad2;
 
-	if (tfm->crt_cipher.cit_decrypt_iv && tfm->crt_cipher.cit_encrypt_iv)
+	cc->iv_size = crypto_blkcipher_ivsize(tfm);
+	if (cc->iv_size)
 		/* at least a 64 bit sector number should fit in our buffer */
-		cc->iv_size = max(crypto_tfm_alg_ivsize(tfm),
+		cc->iv_size = max(cc->iv_size,
 				  (unsigned int)(sizeof(u64) / sizeof(u8)));
 	else {
-		cc->iv_size = 0;
 		if (cc->iv_gen_ops) {
 			DMWARN("Selected cipher does not support IVs");
 			if (cc->iv_gen_ops->dtr)
@@ -629,7 +626,7 @@ static int crypt_ctr(struct dm_target *ti, unsigned int argc, char **argv)
 		goto bad4;
 	}
 
-	if (tfm->crt_cipher.cit_setkey(tfm, cc->key, key_size) < 0) {
+	if (crypto_blkcipher_setkey(tfm, cc->key, key_size) < 0) {
 		ti->error = "Error setting key";
 		goto bad5;
 	}
@@ -675,7 +672,7 @@ bad3:
 	if (cc->iv_gen_ops && cc->iv_gen_ops->dtr)
 		cc->iv_gen_ops->dtr(cc);
 bad2:
-	crypto_free_tfm(tfm);
+	crypto_free_blkcipher(tfm);
 bad1:
 	/* Must zero key material before freeing */
 	memset(cc, 0, sizeof(*cc) + cc->key_size * sizeof(u8));
@@ -693,7 +690,7 @@ static void crypt_dtr(struct dm_target *ti)
 	kfree(cc->iv_mode);
 	if (cc->iv_gen_ops && cc->iv_gen_ops->dtr)
 		cc->iv_gen_ops->dtr(cc);
-	crypto_free_tfm(cc->tfm);
+	crypto_free_blkcipher(cc->tfm);
 	dm_put_device(ti, cc->dev);
 
 	/* Must zero key material before freeing */
@@ -858,18 +855,9 @@ static int crypt_status(struct dm_target *ti, status_type_t type,
 		break;
 
 	case STATUSTYPE_TABLE:
-		cipher = crypto_tfm_alg_name(cc->tfm);
+		cipher = crypto_blkcipher_name(cc->tfm);
 
-		switch(cc->tfm->crt_cipher.cit_mode) {
-		case CRYPTO_TFM_MODE_CBC:
-			chainmode = "cbc";
-			break;
-		case CRYPTO_TFM_MODE_ECB:
-			chainmode = "ecb";
-			break;
-		default:
-			BUG();
-		}
+		chainmode = cc->chainmode;
 
 		if (cc->iv_mode)
 			DMEMIT("%s-%s-%s ", cipher, chainmode, cc->iv_mode);