| author | David S. Miller <davem@davemloft.net> | 2012-12-19 18:20:23 -0500 |
|---|---|---|
| committer | David S. Miller <davem@davemloft.net> | 2012-12-19 18:20:23 -0500 |
| commit | a8d97cef2168ffe5af1aeed6bf6cdc3ce53f3d0b (patch) | |
| tree | 0f5e90df34046af96bf7cc3ef500b9f51a65fcc2 /arch/sparc | |
| parent | 9f28ffc03e93343ac04874fda9edb7affea45165 (diff) | |
sparc64: Fix AES ctr mode block size.
Like the generic versions, we need to support a block size
of '1' for CTR mode AES.
This was discovered thanks to all of the new test cases added by
Jussi Kivilinna.
Signed-off-by: David S. Miller <davem@davemloft.net>
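For background, CTR mode turns the AES block cipher into a stream cipher: each 16-byte counter block is encrypted to produce keystream, which is XORed into the data, so a request whose length is not a multiple of 16 simply uses part of the last keystream block. The sketch below illustrates that final-partial-block handling in plain C, structured along the lines of the new `ctr_crypt_final()` in this patch; it is only an illustration, not the driver code. `aes_encrypt_block()` is a hypothetical stand-in for the driver's `ctx->ops->ecb_encrypt` op, and `ctr_inc()` is a simplified big-endian counter increment in the spirit of the kernel's `crypto_inc()`.

```c
#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Hypothetical AES primitive: encrypt one 16-byte block with the given key
 * schedule.  Stands in for the hardware ecb_encrypt op used by the driver. */
void aes_encrypt_block(const void *key, const uint8_t in[AES_BLOCK_SIZE],
		       uint8_t out[AES_BLOCK_SIZE]);

/* Increment the counter block as a big-endian integer, as crypto_inc() does. */
static void ctr_inc(uint8_t *ctr, unsigned int size)
{
	while (size--)
		if (++ctr[size] != 0)
			break;
}

/* Handle a trailing partial block (nbytes < AES_BLOCK_SIZE): generate one
 * keystream block from the counter, XOR only the first nbytes of it into the
 * data, then bump the counter. */
static void ctr_partial_final(const void *key, uint8_t *ctr,
			      const uint8_t *src, uint8_t *dst,
			      unsigned int nbytes)
{
	uint8_t keystream[AES_BLOCK_SIZE];
	unsigned int i;

	aes_encrypt_block(key, ctr, keystream);
	for (i = 0; i < nbytes; i++)
		dst[i] = src[i] ^ keystream[i];
	ctr_inc(ctr, AES_BLOCK_SIZE);
}
```

Because only keystream bytes are XORed in, any request length is valid, which is why the algorithm can advertise `.cra_blocksize = 1` like the generic CTR template, as the last hunk of the diff does.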
Diffstat (limited to 'arch/sparc')
-rw-r--r-- | arch/sparc/crypto/aes_glue.c | 27
1 file changed, 24 insertions(+), 3 deletions(-)
diff --git a/arch/sparc/crypto/aes_glue.c b/arch/sparc/crypto/aes_glue.c
index 3965d1d36dfa..d26e75126fb5 100644
--- a/arch/sparc/crypto/aes_glue.c
+++ b/arch/sparc/crypto/aes_glue.c
@@ -329,6 +329,22 @@ static int cbc_decrypt(struct blkcipher_desc *desc,
 	return err;
 }
 
+static void ctr_crypt_final(struct crypto_sparc64_aes_ctx *ctx,
+			    struct blkcipher_walk *walk)
+{
+	u8 *ctrblk = walk->iv;
+	u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
+	u8 *src = walk->src.virt.addr;
+	u8 *dst = walk->dst.virt.addr;
+	unsigned int nbytes = walk->nbytes;
+
+	ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
+			      keystream, AES_BLOCK_SIZE);
+	crypto_xor((u8 *) keystream, src, nbytes);
+	memcpy(dst, keystream, nbytes);
+	crypto_inc(ctrblk, AES_BLOCK_SIZE);
+}
+
 static int ctr_crypt(struct blkcipher_desc *desc,
 		     struct scatterlist *dst, struct scatterlist *src,
 		     unsigned int nbytes)
@@ -338,10 +354,11 @@ static int ctr_crypt(struct blkcipher_desc *desc,
 	int err;
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
+	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	ctx->ops->load_encrypt_keys(&ctx->key[0]);
-	while ((nbytes = walk.nbytes)) {
+	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
 		unsigned int block_len = nbytes & AES_BLOCK_MASK;
 
 		if (likely(block_len)) {
@@ -353,6 +370,10 @@ static int ctr_crypt(struct blkcipher_desc *desc,
 		nbytes &= AES_BLOCK_SIZE - 1;
 		err = blkcipher_walk_done(desc, &walk, nbytes);
 	}
+	if (walk.nbytes) {
+		ctr_crypt_final(ctx, &walk);
+		err = blkcipher_walk_done(desc, &walk, 0);
+	}
 	fprs_write(0);
 	return err;
 }
@@ -418,7 +439,7 @@ static struct crypto_alg algs[] = { {
 	.cra_driver_name	= "ctr-aes-sparc64",
 	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
 	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
-	.cra_blocksize		= AES_BLOCK_SIZE,
+	.cra_blocksize		= 1,
 	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
 	.cra_alignmask		= 7,
 	.cra_type		= &crypto_blkcipher_type,