summaryrefslogtreecommitdiffstats
path: root/crypto
diff options
context:
space:
mode:
authorEric Biggers <ebiggers@google.com>2019-02-01 02:51:36 -0500
committerHerbert Xu <herbert@gondor.apana.org.au>2019-02-08 02:30:08 -0500
commit0f533e67d26f228ea5dfdacc8a4bdeb487af5208 (patch)
tree2124e65bceaf4d859803284f4e98f99beceee967 /crypto
parent42e95d1f10dcf8b18b1d7f52f7068985b3dc5b79 (diff)
crypto: aegis - fix handling chunked inputs
The generic AEGIS implementations all fail the improved AEAD tests because they produce the wrong result with some data layouts. The issue is that they assume that if the skcipher_walk API gives 'nbytes' not aligned to the walksize (a.k.a. walk.stride), then it is the end of the data. In fact, this can happen before the end. Fix them. Fixes: f606a88e5823 ("crypto: aegis - Add generic AEGIS AEAD implementations") Cc: <stable@vger.kernel.org> # v4.18+ Cc: Ondrej Mosnacek <omosnace@redhat.com> Signed-off-by: Eric Biggers <ebiggers@google.com> Reviewed-by: Ondrej Mosnacek <omosnace@redhat.com> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'crypto')
-rw-r--r--crypto/aegis128.c14
-rw-r--r--crypto/aegis128l.c14
-rw-r--r--crypto/aegis256.c14
3 files changed, 21 insertions, 21 deletions
diff --git a/crypto/aegis128.c b/crypto/aegis128.c
index 96e078a8a00a..3718a8341303 100644
--- a/crypto/aegis128.c
+++ b/crypto/aegis128.c
@@ -286,19 +286,19 @@ static void crypto_aegis128_process_crypt(struct aegis_state *state,
 					  const struct aegis128_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
 
diff --git a/crypto/aegis128l.c b/crypto/aegis128l.c
index a210e779b911..275a8616d71b 100644
--- a/crypto/aegis128l.c
+++ b/crypto/aegis128l.c
@@ -349,19 +349,19 @@ static void crypto_aegis128l_process_crypt(struct aegis_state *state,
 					   const struct aegis128l_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
 
diff --git a/crypto/aegis256.c b/crypto/aegis256.c
index 49882a28e93e..ecd6b7f34a2d 100644
--- a/crypto/aegis256.c
+++ b/crypto/aegis256.c
@@ -299,19 +299,19 @@ static void crypto_aegis256_process_crypt(struct aegis_state *state,
 					  const struct aegis256_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
 