Diffstat (limited to 'arch/sparc/crypto/aes_glue.c')
-rw-r--r--  arch/sparc/crypto/aes_glue.c  31
1 file changed, 28 insertions, 3 deletions
diff --git a/arch/sparc/crypto/aes_glue.c b/arch/sparc/crypto/aes_glue.c
index 3965d1d36dfa..503e6d96ad4e 100644
--- a/arch/sparc/crypto/aes_glue.c
+++ b/arch/sparc/crypto/aes_glue.c
@@ -222,6 +222,7 @@ static int ecb_encrypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	ctx->ops->load_encrypt_keys(&ctx->key[0]);
 	while ((nbytes = walk.nbytes)) {
@@ -251,6 +252,7 @@ static int ecb_decrypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	ctx->ops->load_decrypt_keys(&ctx->key[0]);
 	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
@@ -280,6 +282,7 @@ static int cbc_encrypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	ctx->ops->load_encrypt_keys(&ctx->key[0]);
 	while ((nbytes = walk.nbytes)) {
@@ -309,6 +312,7 @@ static int cbc_decrypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	ctx->ops->load_decrypt_keys(&ctx->key[0]);
 	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
@@ -329,6 +333,22 @@ static int cbc_decrypt(struct blkcipher_desc *desc,
 	return err;
 }
 
+static void ctr_crypt_final(struct crypto_sparc64_aes_ctx *ctx,
+			    struct blkcipher_walk *walk)
+{
+	u8 *ctrblk = walk->iv;
+	u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
+	u8 *src = walk->src.virt.addr;
+	u8 *dst = walk->dst.virt.addr;
+	unsigned int nbytes = walk->nbytes;
+
+	ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
+			      keystream, AES_BLOCK_SIZE);
+	crypto_xor((u8 *) keystream, src, nbytes);
+	memcpy(dst, keystream, nbytes);
+	crypto_inc(ctrblk, AES_BLOCK_SIZE);
+}
+
 static int ctr_crypt(struct blkcipher_desc *desc,
 		     struct scatterlist *dst, struct scatterlist *src,
 		     unsigned int nbytes)
@@ -338,10 +358,11 @@ static int ctr_crypt(struct blkcipher_desc *desc,
 	int err;
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
+	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	ctx->ops->load_encrypt_keys(&ctx->key[0]);
-	while ((nbytes = walk.nbytes)) {
+	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
 		unsigned int block_len = nbytes & AES_BLOCK_MASK;
 
 		if (likely(block_len)) {
@@ -353,6 +374,10 @@ static int ctr_crypt(struct blkcipher_desc *desc,
 		nbytes &= AES_BLOCK_SIZE - 1;
 		err = blkcipher_walk_done(desc, &walk, nbytes);
 	}
+	if (walk.nbytes) {
+		ctr_crypt_final(ctx, &walk);
+		err = blkcipher_walk_done(desc, &walk, 0);
+	}
 	fprs_write(0);
 	return err;
 }
@@ -418,7 +443,7 @@ static struct crypto_alg algs[] = { {
 	.cra_driver_name	= "ctr-aes-sparc64",
 	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
 	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
-	.cra_blocksize		= AES_BLOCK_SIZE,
+	.cra_blocksize		= 1,
 	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
 	.cra_alignmask		= 7,
 	.cra_type		= &crypto_blkcipher_type,
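
Note on the change: ctr_crypt() now walks the request in AES_BLOCK_SIZE granularity, handles full blocks with the hardware-assisted routine, and passes any trailing sub-block to the new ctr_crypt_final(), which encrypts the counter once, XORs only the leftover bytes of keystream into the output, and then increments the counter with crypto_inc(). Advertising .cra_blocksize = 1 matches this: CTR behaves as a stream cipher and accepts requests of any length. The added desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP lines prevent the blkcipher walk from sleeping between load_*_keys() and fprs_write(0), while the keys are live in the FPU registers. The following is a minimal, stand-alone user-space sketch of the counter/tail technique only; block_encrypt() is a dummy stand-in for the sparc64 AES opcodes (an illustrative assumption, not the kernel code), and ctr_inc() plays the role of crypto_inc().

/*
 * Sketch of CTR mode with partial final block handling.
 * NOT the kernel implementation: the "cipher" below is a placeholder.
 */
#include <stdio.h>
#include <string.h>
#include <stdint.h>
#include <stddef.h>

#define BLOCK_SIZE 16

/* Placeholder "block cipher": XOR with the key and a constant.
 * Stands in for the real AES block encryption; not secure. */
static void block_encrypt(const uint8_t key[BLOCK_SIZE],
			  const uint8_t in[BLOCK_SIZE],
			  uint8_t out[BLOCK_SIZE])
{
	for (int i = 0; i < BLOCK_SIZE; i++)
		out[i] = in[i] ^ key[i] ^ 0x5a;
}

/* Big-endian increment of the counter block (what crypto_inc() does). */
static void ctr_inc(uint8_t ctr[BLOCK_SIZE])
{
	for (int i = BLOCK_SIZE - 1; i >= 0; i--)
		if (++ctr[i] != 0)
			break;
}

/* CTR over an arbitrary-length buffer: full blocks first, then a final
 * partial block that consumes only part of one keystream block -- the
 * case the patch adds ctr_crypt_final() for. */
static void ctr_crypt(const uint8_t key[BLOCK_SIZE],
		      uint8_t ctr[BLOCK_SIZE],
		      const uint8_t *src, uint8_t *dst, size_t nbytes)
{
	uint8_t keystream[BLOCK_SIZE];

	while (nbytes >= BLOCK_SIZE) {
		block_encrypt(key, ctr, keystream);
		for (int i = 0; i < BLOCK_SIZE; i++)
			dst[i] = src[i] ^ keystream[i];
		ctr_inc(ctr);
		src += BLOCK_SIZE;
		dst += BLOCK_SIZE;
		nbytes -= BLOCK_SIZE;
	}

	if (nbytes) {	/* trailing sub-block */
		block_encrypt(key, ctr, keystream);
		for (size_t i = 0; i < nbytes; i++)
			dst[i] = src[i] ^ keystream[i];
		ctr_inc(ctr);
	}
}

int main(void)
{
	uint8_t key[BLOCK_SIZE] = { 0 };
	uint8_t ctr[BLOCK_SIZE] = { 0 };
	uint8_t ctr2[BLOCK_SIZE] = { 0 };
	uint8_t msg[20] = "16+4 byte message..";	/* not block aligned */
	uint8_t enc[20], dec[20];

	ctr_crypt(key, ctr, msg, enc, sizeof(msg));
	ctr_crypt(key, ctr2, enc, dec, sizeof(dec));	/* CTR decrypt == encrypt */

	printf("round trip %s\n",
	       memcmp(msg, dec, sizeof(msg)) == 0 ? "ok" : "FAILED");
	return 0;
}

The 20-byte message exercises both paths: one full block through the bulk loop and a 4-byte tail through the partial-block branch, with decryption being the same operation run again from the same starting counter.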