about summary refs log tree commit diff stats
path: root/crypto/blkcipher.c
diff options
context:
space:
mode:
authorArd Biesheuvel <ard.biesheuvel@linaro.org>2014-03-04 00:28:38 -0500
committerHerbert Xu <herbert@gondor.apana.org.au>2014-03-10 08:17:10 -0400
commit822be00fe67105a90e536df52d1e4d688f34b5b2 (patch)
treef7e4844d1c560c621af5cba54aa36ea57c257e83 /crypto/blkcipher.c
parentd9e79726193346569af7953369a638ee2275ade5 (diff)
crypto: remove direct blkcipher_walk dependency on transform
In order to allow other uses of the blkcipher walk API than the blkcipher algos themselves, this patch copies some of the transform data members to the walk struct so the transform is only accessed at walk init time. Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'crypto/blkcipher.c')
-rw-r--r-- crypto/blkcipher.c | 67
1 file changed, 33 insertions(+), 34 deletions(-)
diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c
index a79e7e9ab86e..46fdab5e9cc7 100644
--- a/crypto/blkcipher.c
+++ b/crypto/blkcipher.c
@@ -70,14 +70,12 @@ static inline u8 *blkcipher_get_spot(u8 *start, unsigned int len)
70 return max(start, end_page); 70 return max(start, end_page);
71} 71}
72 72
73static inline unsigned int blkcipher_done_slow(struct crypto_blkcipher *tfm, 73static inline unsigned int blkcipher_done_slow(struct blkcipher_walk *walk,
74 struct blkcipher_walk *walk,
75 unsigned int bsize) 74 unsigned int bsize)
76{ 75{
77 u8 *addr; 76 u8 *addr;
78 unsigned int alignmask = crypto_blkcipher_alignmask(tfm);
79 77
80 addr = (u8 *)ALIGN((unsigned long)walk->buffer, alignmask + 1); 78 addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);
81 addr = blkcipher_get_spot(addr, bsize); 79 addr = blkcipher_get_spot(addr, bsize);
82 scatterwalk_copychunks(addr, &walk->out, bsize, 1); 80 scatterwalk_copychunks(addr, &walk->out, bsize, 1);
83 return bsize; 81 return bsize;
@@ -105,7 +103,6 @@ static inline unsigned int blkcipher_done_fast(struct blkcipher_walk *walk,
105int blkcipher_walk_done(struct blkcipher_desc *desc, 103int blkcipher_walk_done(struct blkcipher_desc *desc,
106 struct blkcipher_walk *walk, int err) 104 struct blkcipher_walk *walk, int err)
107{ 105{
108 struct crypto_blkcipher *tfm = desc->tfm;
109 unsigned int nbytes = 0; 106 unsigned int nbytes = 0;
110 107
111 if (likely(err >= 0)) { 108 if (likely(err >= 0)) {
@@ -117,7 +114,7 @@ int blkcipher_walk_done(struct blkcipher_desc *desc,
117 err = -EINVAL; 114 err = -EINVAL;
118 goto err; 115 goto err;
119 } else 116 } else
120 n = blkcipher_done_slow(tfm, walk, n); 117 n = blkcipher_done_slow(walk, n);
121 118
122 nbytes = walk->total - n; 119 nbytes = walk->total - n;
123 err = 0; 120 err = 0;
@@ -136,7 +133,7 @@ err:
136 } 133 }
137 134
138 if (walk->iv != desc->info) 135 if (walk->iv != desc->info)
139 memcpy(desc->info, walk->iv, crypto_blkcipher_ivsize(tfm)); 136 memcpy(desc->info, walk->iv, walk->ivsize);
140 if (walk->buffer != walk->page) 137 if (walk->buffer != walk->page)
141 kfree(walk->buffer); 138 kfree(walk->buffer);
142 if (walk->page) 139 if (walk->page)
@@ -226,22 +223,20 @@ static inline int blkcipher_next_fast(struct blkcipher_desc *desc,
226static int blkcipher_walk_next(struct blkcipher_desc *desc, 223static int blkcipher_walk_next(struct blkcipher_desc *desc,
227 struct blkcipher_walk *walk) 224 struct blkcipher_walk *walk)
228{ 225{
229 struct crypto_blkcipher *tfm = desc->tfm;
230 unsigned int alignmask = crypto_blkcipher_alignmask(tfm);
231 unsigned int bsize; 226 unsigned int bsize;
232 unsigned int n; 227 unsigned int n;
233 int err; 228 int err;
234 229
235 n = walk->total; 230 n = walk->total;
236 if (unlikely(n < crypto_blkcipher_blocksize(tfm))) { 231 if (unlikely(n < walk->cipher_blocksize)) {
237 desc->flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN; 232 desc->flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
238 return blkcipher_walk_done(desc, walk, -EINVAL); 233 return blkcipher_walk_done(desc, walk, -EINVAL);
239 } 234 }
240 235
241 walk->flags &= ~(BLKCIPHER_WALK_SLOW | BLKCIPHER_WALK_COPY | 236 walk->flags &= ~(BLKCIPHER_WALK_SLOW | BLKCIPHER_WALK_COPY |
242 BLKCIPHER_WALK_DIFF); 237 BLKCIPHER_WALK_DIFF);
243 if (!scatterwalk_aligned(&walk->in, alignmask) || 238 if (!scatterwalk_aligned(&walk->in, walk->alignmask) ||
244 !scatterwalk_aligned(&walk->out, alignmask)) { 239 !scatterwalk_aligned(&walk->out, walk->alignmask)) {
245 walk->flags |= BLKCIPHER_WALK_COPY; 240 walk->flags |= BLKCIPHER_WALK_COPY;
246 if (!walk->page) { 241 if (!walk->page) {
247 walk->page = (void *)__get_free_page(GFP_ATOMIC); 242 walk->page = (void *)__get_free_page(GFP_ATOMIC);
@@ -250,12 +245,12 @@ static int blkcipher_walk_next(struct blkcipher_desc *desc,
250 } 245 }
251 } 246 }
252 247
253 bsize = min(walk->blocksize, n); 248 bsize = min(walk->walk_blocksize, n);
254 n = scatterwalk_clamp(&walk->in, n); 249 n = scatterwalk_clamp(&walk->in, n);
255 n = scatterwalk_clamp(&walk->out, n); 250 n = scatterwalk_clamp(&walk->out, n);
256 251
257 if (unlikely(n < bsize)) { 252 if (unlikely(n < bsize)) {
258 err = blkcipher_next_slow(desc, walk, bsize, alignmask); 253 err = blkcipher_next_slow(desc, walk, bsize, walk->alignmask);
259 goto set_phys_lowmem; 254 goto set_phys_lowmem;
260 } 255 }
261 256
@@ -277,28 +272,26 @@ set_phys_lowmem:
277 return err; 272 return err;
278} 273}
279 274
280static inline int blkcipher_copy_iv(struct blkcipher_walk *walk, 275static inline int blkcipher_copy_iv(struct blkcipher_walk *walk)
281 struct crypto_blkcipher *tfm,
282 unsigned int alignmask)
283{ 276{
284 unsigned bs = walk->blocksize; 277 unsigned bs = walk->walk_blocksize;
285 unsigned int ivsize = crypto_blkcipher_ivsize(tfm); 278 unsigned aligned_bs = ALIGN(bs, walk->alignmask + 1);
286 unsigned aligned_bs = ALIGN(bs, alignmask + 1); 279 unsigned int size = aligned_bs * 2 +
287 unsigned int size = aligned_bs * 2 + ivsize + max(aligned_bs, ivsize) - 280 walk->ivsize + max(aligned_bs, walk->ivsize) -
288 (alignmask + 1); 281 (walk->alignmask + 1);
289 u8 *iv; 282 u8 *iv;
290 283
291 size += alignmask & ~(crypto_tfm_ctx_alignment() - 1); 284 size += walk->alignmask & ~(crypto_tfm_ctx_alignment() - 1);
292 walk->buffer = kmalloc(size, GFP_ATOMIC); 285 walk->buffer = kmalloc(size, GFP_ATOMIC);
293 if (!walk->buffer) 286 if (!walk->buffer)
294 return -ENOMEM; 287 return -ENOMEM;
295 288
296 iv = (u8 *)ALIGN((unsigned long)walk->buffer, alignmask + 1); 289 iv = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);
297 iv = blkcipher_get_spot(iv, bs) + aligned_bs; 290 iv = blkcipher_get_spot(iv, bs) + aligned_bs;
298 iv = blkcipher_get_spot(iv, bs) + aligned_bs; 291 iv = blkcipher_get_spot(iv, bs) + aligned_bs;
299 iv = blkcipher_get_spot(iv, ivsize); 292 iv = blkcipher_get_spot(iv, walk->ivsize);
300 293
301 walk->iv = memcpy(iv, walk->iv, ivsize); 294 walk->iv = memcpy(iv, walk->iv, walk->ivsize);
302 return 0; 295 return 0;
303} 296}
304 297
@@ -306,7 +299,10 @@ int blkcipher_walk_virt(struct blkcipher_desc *desc,
306 struct blkcipher_walk *walk) 299 struct blkcipher_walk *walk)
307{ 300{
308 walk->flags &= ~BLKCIPHER_WALK_PHYS; 301 walk->flags &= ~BLKCIPHER_WALK_PHYS;
309 walk->blocksize = crypto_blkcipher_blocksize(desc->tfm); 302 walk->walk_blocksize = crypto_blkcipher_blocksize(desc->tfm);
303 walk->cipher_blocksize = walk->walk_blocksize;
304 walk->ivsize = crypto_blkcipher_ivsize(desc->tfm);
305 walk->alignmask = crypto_blkcipher_alignmask(desc->tfm);
310 return blkcipher_walk_first(desc, walk); 306 return blkcipher_walk_first(desc, walk);
311} 307}
312EXPORT_SYMBOL_GPL(blkcipher_walk_virt); 308EXPORT_SYMBOL_GPL(blkcipher_walk_virt);
@@ -315,7 +311,10 @@ int blkcipher_walk_phys(struct blkcipher_desc *desc,
315 struct blkcipher_walk *walk) 311 struct blkcipher_walk *walk)
316{ 312{
317 walk->flags |= BLKCIPHER_WALK_PHYS; 313 walk->flags |= BLKCIPHER_WALK_PHYS;
318 walk->blocksize = crypto_blkcipher_blocksize(desc->tfm); 314 walk->walk_blocksize = crypto_blkcipher_blocksize(desc->tfm);
315 walk->cipher_blocksize = walk->walk_blocksize;
316 walk->ivsize = crypto_blkcipher_ivsize(desc->tfm);
317 walk->alignmask = crypto_blkcipher_alignmask(desc->tfm);
319 return blkcipher_walk_first(desc, walk); 318 return blkcipher_walk_first(desc, walk);
320} 319}
321EXPORT_SYMBOL_GPL(blkcipher_walk_phys); 320EXPORT_SYMBOL_GPL(blkcipher_walk_phys);
@@ -323,9 +322,6 @@ EXPORT_SYMBOL_GPL(blkcipher_walk_phys);
323static int blkcipher_walk_first(struct blkcipher_desc *desc, 322static int blkcipher_walk_first(struct blkcipher_desc *desc,
324 struct blkcipher_walk *walk) 323 struct blkcipher_walk *walk)
325{ 324{
326 struct crypto_blkcipher *tfm = desc->tfm;
327 unsigned int alignmask = crypto_blkcipher_alignmask(tfm);
328
329 if (WARN_ON_ONCE(in_irq())) 325 if (WARN_ON_ONCE(in_irq()))
330 return -EDEADLK; 326 return -EDEADLK;
331 327
@@ -335,8 +331,8 @@ static int blkcipher_walk_first(struct blkcipher_desc *desc,
335 331
336 walk->buffer = NULL; 332 walk->buffer = NULL;
337 walk->iv = desc->info; 333 walk->iv = desc->info;
338 if (unlikely(((unsigned long)walk->iv & alignmask))) { 334 if (unlikely(((unsigned long)walk->iv & walk->alignmask))) {
339 int err = blkcipher_copy_iv(walk, tfm, alignmask); 335 int err = blkcipher_copy_iv(walk);
340 if (err) 336 if (err)
341 return err; 337 return err;
342 } 338 }
@@ -353,7 +349,10 @@ int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
353 unsigned int blocksize) 349 unsigned int blocksize)
354{ 350{
355 walk->flags &= ~BLKCIPHER_WALK_PHYS; 351 walk->flags &= ~BLKCIPHER_WALK_PHYS;
356 walk->blocksize = blocksize; 352 walk->walk_blocksize = blocksize;
353 walk->cipher_blocksize = crypto_blkcipher_blocksize(desc->tfm);
354 walk->ivsize = crypto_blkcipher_ivsize(desc->tfm);
355 walk->alignmask = crypto_blkcipher_alignmask(desc->tfm);
357 return blkcipher_walk_first(desc, walk); 356 return blkcipher_walk_first(desc, walk);
358} 357}
359EXPORT_SYMBOL_GPL(blkcipher_walk_virt_block); 358EXPORT_SYMBOL_GPL(blkcipher_walk_virt_block);