author     Herbert Xu <herbert@gondor.apana.org.au>        2007-10-04 03:24:05 -0400
committer  David S. Miller <davem@sunset.davemloft.net>    2007-10-10 19:55:48 -0400
commit     7607bd8ff03b8af5af887931318cb2bb20361856 (patch)
tree       f714390d344511d65f7b866857102fbeb1d13741
parent     2614de1b9af5a9e49cda64b394e1348159565bd5 (diff)
[CRYPTO] blkcipher: Added blkcipher_walk_virt_block

This patch adds the helper blkcipher_walk_virt_block which is similar to
blkcipher_walk_virt but uses a supplied block size instead of the block
size of the block cipher.  This is useful for CTR where the block size is
1 but we still want to walk by the block size of the underlying cipher.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-rw-r--r--  crypto/blkcipher.c       | 34
-rw-r--r--  include/crypto/algapi.h  |  4
2 files changed, 28 insertions(+), 10 deletions(-)
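
Before the patch itself, a hedged sketch of the intended caller pattern: a
simplified CTR-style encrypt routine that walks by the underlying cipher's
block size even though CTR's own block size is 1, so every chunk returned in
walk.nbytes (except the final tail) is a multiple of that size.  This is
illustrative only and not part of the patch; the function name
ctr_crypt_sketch, its parameter list, the fixed 16-byte keystream buffer and
the open-coded big-endian counter increment are all assumptions made for the
example.

#include <crypto/algapi.h>
#include <linux/crypto.h>
#include <linux/kernel.h>

/* Illustrative only: a minimal CTR-style walk using the new helper. */
static int ctr_crypt_sketch(struct blkcipher_desc *desc,
                            struct scatterlist *dst, struct scatterlist *src,
                            unsigned int nbytes, struct crypto_cipher *cipher)
{
        unsigned int bsize = crypto_cipher_blocksize(cipher);
        u8 keystream[16];               /* assumes bsize <= 16 in this sketch */
        struct blkcipher_walk walk;
        u8 *ctrblk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        /* Walk in units of the underlying cipher, not CTR's block size of 1. */
        err = blkcipher_walk_virt_block(desc, &walk, bsize);
        ctrblk = walk.iv;               /* counter block lives in the walk IV */

        while ((nbytes = walk.nbytes) > 0) {
                u8 *s = walk.src.virt.addr;
                u8 *d = walk.dst.virt.addr;

                do {
                        unsigned int n = min(nbytes, bsize);
                        unsigned int i;
                        int j;

                        /* keystream = E_K(counter); dst = src XOR keystream */
                        crypto_cipher_encrypt_one(cipher, keystream, ctrblk);
                        for (i = 0; i < n; i++)
                                d[i] = s[i] ^ keystream[i];

                        /* increment the big-endian counter block */
                        for (j = bsize - 1; j >= 0; j--)
                                if (++ctrblk[j])
                                        break;

                        s += n;
                        d += n;
                        nbytes -= n;
                } while (nbytes >= bsize);

                /* hand any sub-block remainder back to the walker */
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        return err;
}

Because remainders smaller than bsize are always handed back to
blkcipher_walk_done, every chunk except the final tail starts on a block
boundary of the data stream, which is exactly the guarantee the new helper is
meant to provide.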
diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c
index 3d05586a8f34..f6c67f9d4e5c 100644
--- a/crypto/blkcipher.c
+++ b/crypto/blkcipher.c
@@ -84,8 +84,6 @@ static inline unsigned int blkcipher_done_slow(struct crypto_blkcipher *tfm,
 static inline unsigned int blkcipher_done_fast(struct blkcipher_walk *walk,
                                                unsigned int n)
 {
-        n = walk->nbytes - n;
-
         if (walk->flags & BLKCIPHER_WALK_COPY) {
                 blkcipher_map_dst(walk);
                 memcpy(walk->dst.virt.addr, walk->page, n);
@@ -109,13 +107,15 @@ int blkcipher_walk_done(struct blkcipher_desc *desc,
         unsigned int nbytes = 0;
 
         if (likely(err >= 0)) {
-                unsigned int bsize = crypto_blkcipher_blocksize(tfm);
-                unsigned int n;
+                unsigned int n = walk->nbytes - err;
 
                 if (likely(!(walk->flags & BLKCIPHER_WALK_SLOW)))
-                        n = blkcipher_done_fast(walk, err);
-                else
-                        n = blkcipher_done_slow(tfm, walk, bsize);
+                        n = blkcipher_done_fast(walk, n);
+                else if (WARN_ON(err)) {
+                        err = -EINVAL;
+                        goto err;
+                } else
+                        n = blkcipher_done_slow(tfm, walk, n);
 
                 nbytes = walk->total - n;
                 err = 0;
@@ -132,6 +132,7 @@ int blkcipher_walk_done(struct blkcipher_desc *desc,
                 return blkcipher_walk_next(desc, walk);
         }
 
+err:
         if (walk->iv != desc->info)
                 memcpy(desc->info, walk->iv, crypto_blkcipher_ivsize(tfm));
         if (walk->buffer != walk->page)
@@ -225,12 +226,12 @@ static int blkcipher_walk_next(struct blkcipher_desc *desc,
 {
         struct crypto_blkcipher *tfm = desc->tfm;
         unsigned int alignmask = crypto_blkcipher_alignmask(tfm);
-        unsigned int bsize = crypto_blkcipher_blocksize(tfm);
+        unsigned int bsize;
         unsigned int n;
         int err;
 
         n = walk->total;
-        if (unlikely(n < bsize)) {
+        if (unlikely(n < crypto_blkcipher_blocksize(tfm))) {
                 desc->flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
                 return blkcipher_walk_done(desc, walk, -EINVAL);
         }
@@ -247,6 +248,7 @@ static int blkcipher_walk_next(struct blkcipher_desc *desc,
                 }
         }
 
+        bsize = min(walk->blocksize, n);
         n = scatterwalk_clamp(&walk->in, n);
         n = scatterwalk_clamp(&walk->out, n);
 
@@ -277,7 +279,7 @@ static inline int blkcipher_copy_iv(struct blkcipher_walk *walk,
                                     struct crypto_blkcipher *tfm,
                                     unsigned int alignmask)
 {
-        unsigned bs = crypto_blkcipher_blocksize(tfm);
+        unsigned bs = walk->blocksize;
         unsigned int ivsize = crypto_blkcipher_ivsize(tfm);
         unsigned aligned_bs = ALIGN(bs, alignmask + 1);
         unsigned int size = aligned_bs * 2 + ivsize + max(aligned_bs, ivsize) -
@@ -302,6 +304,7 @@ int blkcipher_walk_virt(struct blkcipher_desc *desc,
                         struct blkcipher_walk *walk)
 {
         walk->flags &= ~BLKCIPHER_WALK_PHYS;
+        walk->blocksize = crypto_blkcipher_blocksize(desc->tfm);
         return blkcipher_walk_first(desc, walk);
 }
 EXPORT_SYMBOL_GPL(blkcipher_walk_virt);
@@ -310,6 +313,7 @@ int blkcipher_walk_phys(struct blkcipher_desc *desc,
                         struct blkcipher_walk *walk)
 {
         walk->flags |= BLKCIPHER_WALK_PHYS;
+        walk->blocksize = crypto_blkcipher_blocksize(desc->tfm);
         return blkcipher_walk_first(desc, walk);
 }
 EXPORT_SYMBOL_GPL(blkcipher_walk_phys);
@@ -342,6 +346,16 @@ static int blkcipher_walk_first(struct blkcipher_desc *desc,
         return blkcipher_walk_next(desc, walk);
 }
 
+int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
+                              struct blkcipher_walk *walk,
+                              unsigned int blocksize)
+{
+        walk->flags &= ~BLKCIPHER_WALK_PHYS;
+        walk->blocksize = blocksize;
+        return blkcipher_walk_first(desc, walk);
+}
+EXPORT_SYMBOL_GPL(blkcipher_walk_virt_block);
+
 static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key,
                             unsigned int keylen)
 {
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index 4af72dc21202..b9b05d399d2b 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -91,6 +91,7 @@ struct blkcipher_walk {
         u8 *iv;
 
         int flags;
+        unsigned int blocksize;
 };
 
 extern const struct crypto_type crypto_ablkcipher_type;
@@ -129,6 +130,9 @@ int blkcipher_walk_virt(struct blkcipher_desc *desc,
                         struct blkcipher_walk *walk);
 int blkcipher_walk_phys(struct blkcipher_desc *desc,
                         struct blkcipher_walk *walk);
+int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
+                              struct blkcipher_walk *walk,
+                              unsigned int blocksize);
 
 static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
 {