path: root/drivers/crypto/nx/nx-aes-ccm.c
author    Herbert Xu <herbert@gondor.apana.org.au>  2015-07-14 04:53:21 -0400
committer Herbert Xu <herbert@gondor.apana.org.au>  2015-07-17 09:20:13 -0400
commit    cc81565307822a062820da294c17d9f3b6f49ecd (patch)
tree      06ffc7475efc203a42a77f60dd61b2b984c5f90d /drivers/crypto/nx/nx-aes-ccm.c
parent    2642d6abca9bc3404b7b967914300169135dcd31 (diff)
crypto: nx - Convert ccm to new AEAD interface
This patch converts the nx ccm and 4309 implementations to the new
AEAD interface.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
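Note: under the new interface the associated data no longer lives in a
separate req->assoc scatterlist; it occupies the front of req->src and
req->dst, with req->assoclen giving its length. That is why the hunks
below replace req->assoc with req->src and offset data and tag
positions by req->assoclen. A minimal caller-side sketch of the new
single-buffer layout (hypothetical names: buf, key, iv, assoclen,
ptlen, authsize; error handling elided):

	/* buf = [ AAD | plaintext ] in, [ AAD | ciphertext | tag ] out */
	struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
	struct aead_request *req = aead_request_alloc(tfm, GFP_KERNEL);
	struct scatterlist sg;

	crypto_aead_setkey(tfm, key, 16);
	crypto_aead_setauthsize(tfm, authsize);

	sg_init_one(&sg, buf, assoclen + ptlen + authsize);
	aead_request_set_ad(req, assoclen);	/* AAD prefixes src/dst */
	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);
	crypto_aead_encrypt(req);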
Diffstat (limited to 'drivers/crypto/nx/nx-aes-ccm.c')
-rw-r--r--	drivers/crypto/nx/nx-aes-ccm.c	153
1 file changed, 72 insertions(+), 81 deletions(-)
diff --git a/drivers/crypto/nx/nx-aes-ccm.c b/drivers/crypto/nx/nx-aes-ccm.c
index e4311ce0cd78..195c9207a98d 100644
--- a/drivers/crypto/nx/nx-aes-ccm.c
+++ b/drivers/crypto/nx/nx-aes-ccm.c
@@ -94,8 +94,6 @@ static int ccm_aes_nx_setauthsize(struct crypto_aead *tfm,
 		return -EINVAL;
 	}
 
-	crypto_aead_crt(tfm)->authsize = authsize;
-
 	return 0;
 }
 
@@ -111,8 +109,6 @@ static int ccm4309_aes_nx_setauthsize(struct crypto_aead *tfm,
 		return -EINVAL;
 	}
 
-	crypto_aead_crt(tfm)->authsize = authsize;
-
 	return 0;
 }
 
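Note: both setauthsize hunks can drop the crypto_aead_crt(tfm)->authsize
assignment because the new AEAD core records the tag length itself once
the driver callback has validated it. Roughly, as a sketch of the
core-side helper (not this driver's code):

	int crypto_aead_setauthsize(struct crypto_aead *tfm,
				    unsigned int authsize)
	{
		int err;

		if (authsize > crypto_aead_maxauthsize(tfm))
			return -EINVAL;

		if (crypto_aead_alg(tfm)->setauthsize) {
			err = crypto_aead_alg(tfm)->setauthsize(tfm, authsize);
			if (err)
				return err;
		}

		tfm->authsize = authsize;	/* core, not driver, stores it */
		return 0;
	}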
@@ -174,6 +170,7 @@ static int generate_pat(u8 *iv,
 			struct nx_crypto_ctx *nx_ctx,
 			unsigned int          authsize,
 			unsigned int          nbytes,
+			unsigned int          assoclen,
 			u8                    *out)
 {
 	struct nx_sg *nx_insg = nx_ctx->in_sg;
@@ -200,16 +197,16 @@ static int generate_pat(u8 *iv,
 	 * greater than 2^32.
 	 */
 
-	if (!req->assoclen) {
+	if (!assoclen) {
 		b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0;
-	} else if (req->assoclen <= 14) {
+	} else if (assoclen <= 14) {
 		/* if associated data is 14 bytes or less, we do 1 GCM
 		 * operation on 2 AES blocks, B0 (stored in the csbcpb) and B1,
 		 * which is fed in through the source buffers here */
 		b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0;
 		b1 = nx_ctx->priv.ccm.iauth_tag;
-		iauth_len = req->assoclen;
-	} else if (req->assoclen <= 65280) {
+		iauth_len = assoclen;
+	} else if (assoclen <= 65280) {
 		/* if associated data is less than (2^16 - 2^8), we construct
 		 * B1 differently and feed in the associated data to a CCA
 		 * operation */
@@ -223,7 +220,7 @@ static int generate_pat(u8 *iv,
 	}
 
 	/* generate B0 */
-	rc = generate_b0(iv, req->assoclen, authsize, nbytes, b0);
+	rc = generate_b0(iv, assoclen, authsize, nbytes, b0);
 	if (rc)
 		return rc;
 
@@ -233,22 +230,22 @@ static int generate_pat(u8 *iv,
 	 */
 	if (b1) {
 		memset(b1, 0, 16);
-		if (req->assoclen <= 65280) {
-			*(u16 *)b1 = (u16)req->assoclen;
-			scatterwalk_map_and_copy(b1 + 2, req->assoc, 0,
+		if (assoclen <= 65280) {
+			*(u16 *)b1 = assoclen;
+			scatterwalk_map_and_copy(b1 + 2, req->src, 0,
 					 iauth_len, SCATTERWALK_FROM_SG);
 		} else {
 			*(u16 *)b1 = (u16)(0xfffe);
-			*(u32 *)&b1[2] = (u32)req->assoclen;
-			scatterwalk_map_and_copy(b1 + 6, req->assoc, 0,
+			*(u32 *)&b1[2] = assoclen;
+			scatterwalk_map_and_copy(b1 + 6, req->src, 0,
 					 iauth_len, SCATTERWALK_FROM_SG);
 		}
 	}
 
 	/* now copy any remaining AAD to scatterlist and call nx... */
-	if (!req->assoclen) {
+	if (!assoclen) {
 		return rc;
-	} else if (req->assoclen <= 14) {
+	} else if (assoclen <= 14) {
 		unsigned int len = 16;
 
 		nx_insg = nx_build_sg_list(nx_insg, b1, &len, nx_ctx->ap->sglen);
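Note: the b1 branches implement CCM's AAD length encoding from RFC 3610:
short lengths are encoded as two octets, lengths up to 2^32 as the
marker 0xff 0xfe followed by four octets. A portable sketch of that
encoding as a hypothetical helper (the nx driver can use plain stores
because POWER is big-endian):

	static unsigned int ccm_encode_alen(u8 *b1, u32 assoclen)
	{
		if (assoclen < 65280) {			/* < 2^16 - 2^8 */
			*(__be16 *)b1 = cpu_to_be16(assoclen);
			return 2;
		}
		*(__be16 *)b1 = cpu_to_be16(0xfffe);	/* escape marker */
		*(__be32 *)(b1 + 2) = cpu_to_be32(assoclen);
		return 6;
	}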
@@ -280,7 +277,7 @@ static int generate_pat(u8 *iv,
 			return rc;
 
 		atomic_inc(&(nx_ctx->stats->aes_ops));
-		atomic64_add(req->assoclen, &(nx_ctx->stats->aes_bytes));
+		atomic64_add(assoclen, &nx_ctx->stats->aes_bytes);
 
 	} else {
 		unsigned int processed = 0, to_process;
@@ -294,15 +291,15 @@ static int generate_pat(u8 *iv,
 				nx_ctx->ap->databytelen/NX_PAGE_SIZE);
 
 		do {
-			to_process = min_t(u32, req->assoclen - processed,
+			to_process = min_t(u32, assoclen - processed,
 					   nx_ctx->ap->databytelen);
 
 			nx_insg = nx_walk_and_build(nx_ctx->in_sg,
 						    nx_ctx->ap->sglen,
-						    req->assoc, processed,
+						    req->src, processed,
 						    &to_process);
 
-			if ((to_process + processed) < req->assoclen) {
+			if ((to_process + processed) < assoclen) {
 				NX_CPB_FDM(nx_ctx->csbcpb_aead) |=
 					NX_FDM_INTERMEDIATE;
 			} else {
@@ -328,11 +325,10 @@ static int generate_pat(u8 *iv,
 			NX_CPB_FDM(nx_ctx->csbcpb_aead) |= NX_FDM_CONTINUATION;
 
 			atomic_inc(&(nx_ctx->stats->aes_ops));
-			atomic64_add(req->assoclen,
-					&(nx_ctx->stats->aes_bytes));
+			atomic64_add(assoclen, &nx_ctx->stats->aes_bytes);
 
 			processed += to_process;
-		} while (processed < req->assoclen);
+		} while (processed < assoclen);
 
 		result = nx_ctx->csbcpb_aead->cpb.aes_cca.out_pat_or_b0;
 	}
@@ -343,7 +339,8 @@ static int generate_pat(u8 *iv,
 }
 
 static int ccm_nx_decrypt(struct aead_request   *req,
-			  struct blkcipher_desc *desc)
+			  struct blkcipher_desc *desc,
+			  unsigned int assoclen)
 {
 	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
 	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
@@ -360,10 +357,10 @@ static int ccm_nx_decrypt(struct aead_request *req,
 
 	/* copy out the auth tag to compare with later */
 	scatterwalk_map_and_copy(priv->oauth_tag,
-				 req->src, nbytes, authsize,
+				 req->src, nbytes + req->assoclen, authsize,
 				 SCATTERWALK_FROM_SG);
 
-	rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes,
+	rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes, assoclen,
 			  csbcpb->cpb.aes_ccm.in_pat_or_b0);
 	if (rc)
 		goto out;
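Note: with AAD at the head of req->src, the tag carried by a decrypt
request sits past both the AAD and the ciphertext, which is the new
nbytes + req->assoclen offset above. Illustratively:

	/*
	 * req->src for CCM decrypt under the new interface:
	 *
	 *   [0, assoclen)                        associated data
	 *   [assoclen, assoclen + nbytes)        ciphertext
	 *   [assoclen + nbytes, ... + authsize)  auth tag
	 */
	unsigned int tag_offset = req->assoclen + nbytes;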
@@ -383,8 +380,8 @@ static int ccm_nx_decrypt(struct aead_request *req,
 		NX_CPB_FDM(nx_ctx->csbcpb) &= ~NX_FDM_ENDE_ENCRYPT;
 
 		rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src,
-				       &to_process, processed,
+				       &to_process, processed + req->assoclen,
 				       csbcpb->cpb.aes_ccm.iv_or_ctr);
 		if (rc)
 			goto out;
 
@@ -420,7 +417,8 @@ out:
 }
 
 static int ccm_nx_encrypt(struct aead_request   *req,
-			  struct blkcipher_desc *desc)
+			  struct blkcipher_desc *desc,
+			  unsigned int assoclen)
 {
 	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
 	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
@@ -432,7 +430,7 @@ static int ccm_nx_encrypt(struct aead_request *req,
 
 	spin_lock_irqsave(&nx_ctx->lock, irq_flags);
 
-	rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes,
+	rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes, assoclen,
 			  csbcpb->cpb.aes_ccm.in_pat_or_b0);
 	if (rc)
 		goto out;
@@ -451,7 +449,7 @@ static int ccm_nx_encrypt(struct aead_request *req,
 		NX_CPB_FDM(csbcpb) |= NX_FDM_ENDE_ENCRYPT;
 
 		rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src,
-				       &to_process, processed,
+				       &to_process, processed + req->assoclen,
 				       csbcpb->cpb.aes_ccm.iv_or_ctr);
 		if (rc)
 			goto out;
@@ -483,7 +481,7 @@ static int ccm_nx_encrypt(struct aead_request *req,
 
 	/* copy out the auth tag */
 	scatterwalk_map_and_copy(csbcpb->cpb.aes_ccm.out_pat_or_mac,
-				 req->dst, nbytes, authsize,
+				 req->dst, nbytes + req->assoclen, authsize,
 				 SCATTERWALK_TO_SG);
 
 out:
@@ -503,9 +501,8 @@ static int ccm4309_aes_nx_encrypt(struct aead_request *req)
 	memcpy(iv + 4, req->iv, 8);
 
 	desc.info = iv;
-	desc.tfm = (struct crypto_blkcipher *)req->base.tfm;
 
-	return ccm_nx_encrypt(req, &desc);
+	return ccm_nx_encrypt(req, &desc, req->assoclen - 8);
 }
 
 static int ccm_aes_nx_encrypt(struct aead_request *req)
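Note: for rfc4309 the new interface counts the 8-byte explicit IV as
part of the AAD region, hence req->assoclen - 8 above. The 16-byte
counter block assembled just before this hunk is, as a sketch based on
the surrounding driver code rather than lines shown in this diff:

	iv[0] = 3;	/* flags: L - 1, for a 4-byte length field */
	/* iv[1..3]   3-byte salt, saved from the key tail at setkey time */
	/* iv[4..11]  8-byte explicit IV, copied from req->iv above */
	/* iv[12..15] block counter, managed by the nx coprocessor */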
@@ -514,13 +511,12 @@ static int ccm_aes_nx_encrypt(struct aead_request *req)
 	int rc;
 
 	desc.info = req->iv;
-	desc.tfm = (struct crypto_blkcipher *)req->base.tfm;
 
 	rc = crypto_ccm_check_iv(desc.info);
 	if (rc)
 		return rc;
 
-	return ccm_nx_encrypt(req, &desc);
+	return ccm_nx_encrypt(req, &desc, req->assoclen);
 }
 
 static int ccm4309_aes_nx_decrypt(struct aead_request *req)
@@ -535,9 +531,8 @@ static int ccm4309_aes_nx_decrypt(struct aead_request *req)
 	memcpy(iv + 4, req->iv, 8);
 
 	desc.info = iv;
-	desc.tfm = (struct crypto_blkcipher *)req->base.tfm;
 
-	return ccm_nx_decrypt(req, &desc);
+	return ccm_nx_decrypt(req, &desc, req->assoclen - 8);
 }
 
 static int ccm_aes_nx_decrypt(struct aead_request *req)
@@ -546,13 +541,12 @@ static int ccm_aes_nx_decrypt(struct aead_request *req)
 	int rc;
 
 	desc.info = req->iv;
-	desc.tfm = (struct crypto_blkcipher *)req->base.tfm;
 
 	rc = crypto_ccm_check_iv(desc.info);
 	if (rc)
 		return rc;
 
-	return ccm_nx_decrypt(req, &desc);
+	return ccm_nx_decrypt(req, &desc, req->assoclen);
 }
 
 /* tell the block cipher walk routines that this is a stream cipher by
@@ -560,47 +554,44 @@ static int ccm_aes_nx_decrypt(struct aead_request *req)
  * during encrypt/decrypt doesn't solve this problem, because it calls
  * blkcipher_walk_done under the covers, which doesn't use walk->blocksize,
  * but instead uses this tfm->blocksize. */
-struct crypto_alg nx_ccm_aes_alg = {
-	.cra_name        = "ccm(aes)",
-	.cra_driver_name = "ccm-aes-nx",
-	.cra_priority    = 300,
-	.cra_flags       = CRYPTO_ALG_TYPE_AEAD |
-			   CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize   = 1,
-	.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
-	.cra_type        = &crypto_aead_type,
-	.cra_module      = THIS_MODULE,
-	.cra_init        = nx_crypto_ctx_aes_ccm_init,
-	.cra_exit        = nx_crypto_ctx_exit,
-	.cra_aead = {
-		.ivsize      = AES_BLOCK_SIZE,
-		.maxauthsize = AES_BLOCK_SIZE,
-		.setkey      = ccm_aes_nx_set_key,
-		.setauthsize = ccm_aes_nx_setauthsize,
-		.encrypt     = ccm_aes_nx_encrypt,
-		.decrypt     = ccm_aes_nx_decrypt,
-	}
+struct aead_alg nx_ccm_aes_alg = {
+	.base = {
+		.cra_name        = "ccm(aes)",
+		.cra_driver_name = "ccm-aes-nx",
+		.cra_priority    = 300,
+		.cra_flags       = CRYPTO_ALG_NEED_FALLBACK |
+				   CRYPTO_ALG_AEAD_NEW,
+		.cra_blocksize   = 1,
+		.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
+		.cra_module      = THIS_MODULE,
+	},
+	.init        = nx_crypto_ctx_aes_ccm_init,
+	.exit        = nx_crypto_ctx_aead_exit,
+	.ivsize      = AES_BLOCK_SIZE,
+	.maxauthsize = AES_BLOCK_SIZE,
+	.setkey      = ccm_aes_nx_set_key,
+	.setauthsize = ccm_aes_nx_setauthsize,
+	.encrypt     = ccm_aes_nx_encrypt,
+	.decrypt     = ccm_aes_nx_decrypt,
 };
 
-struct crypto_alg nx_ccm4309_aes_alg = {
-	.cra_name        = "rfc4309(ccm(aes))",
-	.cra_driver_name = "rfc4309-ccm-aes-nx",
-	.cra_priority    = 300,
-	.cra_flags       = CRYPTO_ALG_TYPE_AEAD |
-			   CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize   = 1,
-	.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
-	.cra_type        = &crypto_nivaead_type,
-	.cra_module      = THIS_MODULE,
-	.cra_init        = nx_crypto_ctx_aes_ccm_init,
-	.cra_exit        = nx_crypto_ctx_exit,
-	.cra_aead = {
-		.ivsize      = 8,
-		.maxauthsize = AES_BLOCK_SIZE,
-		.setkey      = ccm4309_aes_nx_set_key,
-		.setauthsize = ccm4309_aes_nx_setauthsize,
-		.encrypt     = ccm4309_aes_nx_encrypt,
-		.decrypt     = ccm4309_aes_nx_decrypt,
-		.geniv       = "seqiv",
-	}
+struct aead_alg nx_ccm4309_aes_alg = {
+	.base = {
+		.cra_name        = "rfc4309(ccm(aes))",
+		.cra_driver_name = "rfc4309-ccm-aes-nx",
+		.cra_priority    = 300,
+		.cra_flags       = CRYPTO_ALG_NEED_FALLBACK |
+				   CRYPTO_ALG_AEAD_NEW,
+		.cra_blocksize   = 1,
+		.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
+		.cra_module      = THIS_MODULE,
+	},
+	.init        = nx_crypto_ctx_aes_ccm_init,
+	.exit        = nx_crypto_ctx_aead_exit,
+	.ivsize      = 8,
+	.maxauthsize = AES_BLOCK_SIZE,
+	.setkey      = ccm4309_aes_nx_set_key,
+	.setauthsize = ccm4309_aes_nx_setauthsize,
+	.encrypt     = ccm4309_aes_nx_encrypt,
+	.decrypt     = ccm4309_aes_nx_decrypt,
 };
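Note: with both entries now struct aead_alg, the driver's probe path is
expected to register them through the AEAD-specific API instead of
crypto_register_alg(). A sketch of the matching nx.c side, assumed
rather than shown in this diff:

	rc = crypto_register_aead(&nx_ccm_aes_alg);
	if (rc)
		goto out;

	rc = crypto_register_aead(&nx_ccm4309_aes_alg);
	if (rc)
		crypto_unregister_aead(&nx_ccm_aes_alg);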