author		Herbert Xu <herbert@gondor.apana.org.au>	2007-11-20 04:36:00 -0500
committer	Herbert Xu <herbert@gondor.apana.org.au>	2008-01-10 16:16:19 -0500
commit		50b6544e1371bfe884f787107a8de0c2f8546e8f (patch)
tree		c4147931acf41f6e24b19ee0c4dafb8a84ef6a8b /crypto
parent		3c7f076da557eadb37240d70b0399ff9763fa2ae (diff)
[CRYPTO] cbc: Require block size to be a power of 2
All common block ciphers have a block size that's a power of 2. In fact,
all of our block ciphers obey this rule.
If we require this, then CBC can be optimised to avoid an expensive divide
on the in-place decryption path.
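For illustration only (this snippet is not part of the patch): once bsize is
guaranteed to be a power of two, the remainder used to locate the last block
can be computed with a single AND instead of a division:

	#include <assert.h>
	#include <stddef.h>

	/* Illustrative only: two ways to count the bytes past the last full block. */
	static size_t tail_div(size_t nbytes, size_t bsize)
	{
		return nbytes % bsize;		/* general form, may compile to a divide */
	}

	static size_t tail_mask(size_t nbytes, size_t bsize)
	{
		return nbytes & (bsize - 1);	/* valid only when bsize is a power of 2 */
	}

	int main(void)
	{
		assert(tail_div(100, 16) == tail_mask(100, 16));	/* both are 4 */
		return 0;
	}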
I've also changed the in-place decryption case to save the last IV instead
of the first IV, because that lets us use walk->iv (which is already
aligned) for the xor operation where alignment is required.
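A rough standalone sketch of that in-place flow, using a stand-in block
cipher rather than the kernel blkcipher API (block_decrypt and
cbc_decrypt_inplace_sketch are illustrative names, not kernel symbols):
blocks are decrypted from the end of the buffer backwards so each one can
still be XORed with the previous, not-yet-overwritten ciphertext block; the
first block is XORed with the caller's IV, and the saved ciphertext of the
last block becomes the IV for the next chunk.

	#include <string.h>

	#define BSIZE 16	/* assumed power-of-2 block size */

	/* Stand-in for the real single-block decrypt (cia_decrypt in the kernel). */
	static void block_decrypt(unsigned char *blk)
	{
		int i;

		/* placeholder transform so the sketch is self-contained */
		for (i = 0; i < BSIZE; i++)
			blk[i] ^= 0xAA;
	}

	/* Sketch: in-place CBC decryption of nbytes (a multiple of BSIZE). */
	static void cbc_decrypt_inplace_sketch(unsigned char *buf, size_t nbytes,
					       unsigned char iv[BSIZE])
	{
		unsigned char last_iv[BSIZE];
		unsigned char *src = buf + nbytes - BSIZE;	/* start of last block */
		int i;

		memcpy(last_iv, src, BSIZE);	/* save last ciphertext before overwrite */

		for (;;) {
			block_decrypt(src);
			if (src == buf)
				break;
			for (i = 0; i < BSIZE; i++)
				src[i] ^= src[i - BSIZE];	/* previous ciphertext block */
			src -= BSIZE;
		}

		for (i = 0; i < BSIZE; i++)
			src[i] ^= iv[i];		/* first block uses the caller's IV */
		memcpy(iv, last_iv, BSIZE);		/* chain to the next chunk */
	}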
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'crypto')
-rw-r--r--	crypto/cbc.c	18
1 file changed, 10 insertions, 8 deletions
diff --git a/crypto/cbc.c b/crypto/cbc.c
index b013d6fec1eb..6affff882cf8 100644
--- a/crypto/cbc.c
+++ b/crypto/cbc.c
@@ -14,6 +14,7 @@
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
+#include <linux/log2.h>
 #include <linux/module.h>
 #include <linux/scatterlist.h>
 #include <linux/slab.h>
@@ -143,17 +144,13 @@ static int crypto_cbc_decrypt_inplace(struct blkcipher_desc *desc,
 	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
 		crypto_cipher_alg(tfm)->cia_decrypt;
 	int bsize = crypto_cipher_blocksize(tfm);
-	unsigned long alignmask = crypto_cipher_alignmask(tfm);
 	unsigned int nbytes = walk->nbytes;
 	u8 *src = walk->src.virt.addr;
-	u8 stack[bsize + alignmask];
-	u8 *first_iv = (u8 *)ALIGN((unsigned long)stack, alignmask + 1);
-
-	memcpy(first_iv, walk->iv, bsize);
+	u8 last_iv[bsize];
 
 	/* Start of the last block. */
-	src += nbytes - nbytes % bsize - bsize;
-	memcpy(walk->iv, src, bsize);
+	src += nbytes - (nbytes & (bsize - 1)) - bsize;
+	memcpy(last_iv, src, bsize);
 
 	for (;;) {
 		fn(crypto_cipher_tfm(tfm), src, src);
@@ -163,7 +160,8 @@ static int crypto_cbc_decrypt_inplace(struct blkcipher_desc *desc,
 		src -= bsize;
 	}
 
-	crypto_xor(src, first_iv, bsize);
+	crypto_xor(src, walk->iv, bsize);
+	memcpy(walk->iv, last_iv, bsize);
 
 	return nbytes;
 }
@@ -228,6 +226,10 @@ static struct crypto_instance *crypto_cbc_alloc(struct rtattr **tb)
 	if (IS_ERR(alg))
 		return ERR_PTR(PTR_ERR(alg));
 
+	inst = ERR_PTR(-EINVAL);
+	if (!is_power_of_2(alg->cra_blocksize))
+		goto out_put_alg;
+
 	inst = crypto_alloc_instance("cbc", alg);
 	if (IS_ERR(inst))
 		goto out_put_alg;
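For context, and not part of this diff: the is_power_of_2() helper pulled in
via <linux/log2.h> amounts to the usual bit test, roughly:

	/* Rough equivalent of the check used above; illustrative only. */
	static inline int is_pow2_sketch(unsigned long n)
	{
		return n != 0 && (n & (n - 1)) == 0;
	}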