about summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorYuan Kang <Yuan.Kang@freescale.com>2011-07-14 23:21:41 -0400
committerHerbert Xu <herbert@gondor.apana.org.au>2011-07-14 23:21:41 -0400
commit0e47930020081b07047842be3e4552edb2395352 (patch)
treeb32e13048f0db79b41c8fff653fee416632aa134
parent1c2b4abb19285368808f303ad61e17b172f1a108 (diff)
crypto: caam - shorter names
"aead_authenc" and "ipsec_esp" changed to "aead," except for function "ipsec_esp," which is changed to "init_aead_job." Variable name of aead_request structures changed to "req," and name of aead_givcrypt_request structure changed to "areq." Signed-off-by: Yuan Kang <Yuan.Kang@freescale.com> Signed-off-by: Kim Phillips <kim.phillips@freescale.com> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-rw-r--r--drivers/crypto/caam/caamalg.c274
1 file changed, 137 insertions, 137 deletions
diff --git a/drivers/crypto/caam/caamalg.c b/drivers/crypto/caam/caamalg.c
index 676d957c22b0..4786a204633b 100644
--- a/drivers/crypto/caam/caamalg.c
+++ b/drivers/crypto/caam/caamalg.c
@@ -94,7 +94,7 @@ struct caam_ctx {
94 unsigned int authsize; 94 unsigned int authsize;
95}; 95};
96 96
97static int aead_authenc_setauthsize(struct crypto_aead *authenc, 97static int aead_setauthsize(struct crypto_aead *authenc,
98 unsigned int authsize) 98 unsigned int authsize)
99{ 99{
100 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 100 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
@@ -286,7 +286,7 @@ static int build_sh_desc_ipsec(struct caam_ctx *ctx)
286 return 0; 286 return 0;
287} 287}
288 288
289static int aead_authenc_setkey(struct crypto_aead *aead, 289static int aead_setkey(struct crypto_aead *aead,
290 const u8 *key, unsigned int keylen) 290 const u8 *key, unsigned int keylen)
291{ 291{
292 /* Sizes for MDHA pads (*not* keys): MD5, SHA1, 224, 256, 384, 512 */ 292 /* Sizes for MDHA pads (*not* keys): MD5, SHA1, 224, 256, 384, 512 */
@@ -379,7 +379,7 @@ struct link_tbl_entry {
379}; 379};
380 380
381/* 381/*
382 * ipsec_esp_edesc - s/w-extended ipsec_esp descriptor 382 * aead_edesc - s/w-extended ipsec_esp descriptor
383 * @src_nents: number of segments in input scatterlist 383 * @src_nents: number of segments in input scatterlist
384 * @dst_nents: number of segments in output scatterlist 384 * @dst_nents: number of segments in output scatterlist
385 * @assoc_nents: number of segments in associated data (SPI+Seq) scatterlist 385 * @assoc_nents: number of segments in associated data (SPI+Seq) scatterlist
@@ -388,7 +388,7 @@ struct link_tbl_entry {
388 * @link_tbl_dma: bus physical mapped address of h/w link table 388 * @link_tbl_dma: bus physical mapped address of h/w link table
389 * @hw_desc: the h/w job descriptor followed by any referenced link tables 389 * @hw_desc: the h/w job descriptor followed by any referenced link tables
390 */ 390 */
391struct ipsec_esp_edesc { 391struct aead_edesc {
392 int assoc_nents; 392 int assoc_nents;
393 int src_nents; 393 int src_nents;
394 int dst_nents; 394 int dst_nents;
@@ -398,19 +398,19 @@ struct ipsec_esp_edesc {
398 u32 hw_desc[0]; 398 u32 hw_desc[0];
399}; 399};
400 400
401static void ipsec_esp_unmap(struct device *dev, 401static void aead_unmap(struct device *dev,
402 struct ipsec_esp_edesc *edesc, 402 struct aead_edesc *edesc,
403 struct aead_request *areq) 403 struct aead_request *req)
404{ 404{
405 dma_unmap_sg(dev, areq->assoc, edesc->assoc_nents, DMA_TO_DEVICE); 405 dma_unmap_sg(dev, req->assoc, edesc->assoc_nents, DMA_TO_DEVICE);
406 406
407 if (unlikely(areq->dst != areq->src)) { 407 if (unlikely(req->dst != req->src)) {
408 dma_unmap_sg(dev, areq->src, edesc->src_nents, 408 dma_unmap_sg(dev, req->src, edesc->src_nents,
409 DMA_TO_DEVICE); 409 DMA_TO_DEVICE);
410 dma_unmap_sg(dev, areq->dst, edesc->dst_nents, 410 dma_unmap_sg(dev, req->dst, edesc->dst_nents,
411 DMA_FROM_DEVICE); 411 DMA_FROM_DEVICE);
412 } else { 412 } else {
413 dma_unmap_sg(dev, areq->src, edesc->src_nents, 413 dma_unmap_sg(dev, req->src, edesc->src_nents,
414 DMA_BIDIRECTIONAL); 414 DMA_BIDIRECTIONAL);
415 } 415 }
416 416
@@ -423,20 +423,20 @@ static void ipsec_esp_unmap(struct device *dev,
423/* 423/*
424 * ipsec_esp descriptor callbacks 424 * ipsec_esp descriptor callbacks
425 */ 425 */
426static void ipsec_esp_encrypt_done(struct device *jrdev, u32 *desc, u32 err, 426static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
427 void *context) 427 void *context)
428{ 428{
429 struct aead_request *areq = context; 429 struct aead_request *req = context;
430 struct ipsec_esp_edesc *edesc; 430 struct aead_edesc *edesc;
431#ifdef DEBUG 431#ifdef DEBUG
432 struct crypto_aead *aead = crypto_aead_reqtfm(areq); 432 struct crypto_aead *aead = crypto_aead_reqtfm(req);
433 int ivsize = crypto_aead_ivsize(aead); 433 int ivsize = crypto_aead_ivsize(aead);
434 struct caam_ctx *ctx = crypto_aead_ctx(aead); 434 struct caam_ctx *ctx = crypto_aead_ctx(aead);
435 435
436 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 436 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
437#endif 437#endif
438 edesc = (struct ipsec_esp_edesc *)((char *)desc - 438 edesc = (struct aead_edesc *)((char *)desc -
439 offsetof(struct ipsec_esp_edesc, hw_desc)); 439 offsetof(struct aead_edesc, hw_desc));
440 440
441 if (err) { 441 if (err) {
442 char tmp[CAAM_ERROR_STR_MAX]; 442 char tmp[CAAM_ERROR_STR_MAX];
@@ -444,39 +444,39 @@ static void ipsec_esp_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
444 dev_err(jrdev, "%08x: %s\n", err, caam_jr_strstatus(tmp, err)); 444 dev_err(jrdev, "%08x: %s\n", err, caam_jr_strstatus(tmp, err));
445 } 445 }
446 446
447 ipsec_esp_unmap(jrdev, edesc, areq); 447 aead_unmap(jrdev, edesc, req);
448 448
449#ifdef DEBUG 449#ifdef DEBUG
450 print_hex_dump(KERN_ERR, "assoc @"xstr(__LINE__)": ", 450 print_hex_dump(KERN_ERR, "assoc @"xstr(__LINE__)": ",
451 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(areq->assoc), 451 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->assoc),
452 areq->assoclen , 1); 452 req->assoclen , 1);
453 print_hex_dump(KERN_ERR, "dstiv @"xstr(__LINE__)": ", 453 print_hex_dump(KERN_ERR, "dstiv @"xstr(__LINE__)": ",
454 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(areq->src) - ivsize, 454 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src) - ivsize,
455 edesc->src_nents ? 100 : ivsize, 1); 455 edesc->src_nents ? 100 : ivsize, 1);
456 print_hex_dump(KERN_ERR, "dst @"xstr(__LINE__)": ", 456 print_hex_dump(KERN_ERR, "dst @"xstr(__LINE__)": ",
457 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(areq->src), 457 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
458 edesc->src_nents ? 100 : areq->cryptlen + 458 edesc->src_nents ? 100 : req->cryptlen +
459 ctx->authsize + 4, 1); 459 ctx->authsize + 4, 1);
460#endif 460#endif
461 461
462 kfree(edesc); 462 kfree(edesc);
463 463
464 aead_request_complete(areq, err); 464 aead_request_complete(req, err);
465} 465}
466 466
467static void ipsec_esp_decrypt_done(struct device *jrdev, u32 *desc, u32 err, 467static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
468 void *context) 468 void *context)
469{ 469{
470 struct aead_request *areq = context; 470 struct aead_request *req = context;
471 struct ipsec_esp_edesc *edesc; 471 struct aead_edesc *edesc;
472#ifdef DEBUG 472#ifdef DEBUG
473 struct crypto_aead *aead = crypto_aead_reqtfm(areq); 473 struct crypto_aead *aead = crypto_aead_reqtfm(req);
474 struct caam_ctx *ctx = crypto_aead_ctx(aead); 474 struct caam_ctx *ctx = crypto_aead_ctx(aead);
475 475
476 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 476 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
477#endif 477#endif
478 edesc = (struct ipsec_esp_edesc *)((char *)desc - 478 edesc = (struct aead_edesc *)((char *)desc -
479 offsetof(struct ipsec_esp_edesc, hw_desc)); 479 offsetof(struct aead_edesc, hw_desc));
480 480
481 if (err) { 481 if (err) {
482 char tmp[CAAM_ERROR_STR_MAX]; 482 char tmp[CAAM_ERROR_STR_MAX];
@@ -484,7 +484,7 @@ static void ipsec_esp_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
484 dev_err(jrdev, "%08x: %s\n", err, caam_jr_strstatus(tmp, err)); 484 dev_err(jrdev, "%08x: %s\n", err, caam_jr_strstatus(tmp, err));
485 } 485 }
486 486
487 ipsec_esp_unmap(jrdev, edesc, areq); 487 aead_unmap(jrdev, edesc, req);
488 488
489 /* 489 /*
490 * verify hw auth check passed else return -EBADMSG 490 * verify hw auth check passed else return -EBADMSG
@@ -495,12 +495,12 @@ static void ipsec_esp_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
495#ifdef DEBUG 495#ifdef DEBUG
496 print_hex_dump(KERN_ERR, "iphdrout@"xstr(__LINE__)": ", 496 print_hex_dump(KERN_ERR, "iphdrout@"xstr(__LINE__)": ",
497 DUMP_PREFIX_ADDRESS, 16, 4, 497 DUMP_PREFIX_ADDRESS, 16, 4,
498 ((char *)sg_virt(areq->assoc) - sizeof(struct iphdr)), 498 ((char *)sg_virt(req->assoc) - sizeof(struct iphdr)),
499 sizeof(struct iphdr) + areq->assoclen + 499 sizeof(struct iphdr) + req->assoclen +
500 ((areq->cryptlen > 1500) ? 1500 : areq->cryptlen) + 500 ((req->cryptlen > 1500) ? 1500 : req->cryptlen) +
501 ctx->authsize + 36, 1); 501 ctx->authsize + 36, 1);
502 if (!err && edesc->link_tbl_bytes) { 502 if (!err && edesc->link_tbl_bytes) {
503 struct scatterlist *sg = sg_last(areq->src, edesc->src_nents); 503 struct scatterlist *sg = sg_last(req->src, edesc->src_nents);
504 print_hex_dump(KERN_ERR, "sglastout@"xstr(__LINE__)": ", 504 print_hex_dump(KERN_ERR, "sglastout@"xstr(__LINE__)": ",
505 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(sg), 505 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(sg),
506 sg->length + ctx->authsize + 16, 1); 506 sg->length + ctx->authsize + 16, 1);
@@ -508,7 +508,7 @@ static void ipsec_esp_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
508#endif 508#endif
509 kfree(edesc); 509 kfree(edesc);
510 510
511 aead_request_complete(areq, err); 511 aead_request_complete(req, err);
512} 512}
513 513
514/* 514/*
@@ -537,12 +537,12 @@ static void sg_to_link_tbl(struct scatterlist *sg, int sg_count,
537/* 537/*
538 * fill in and submit ipsec_esp job descriptor 538 * fill in and submit ipsec_esp job descriptor
539 */ 539 */
540static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq, 540static int init_aead_job(struct aead_edesc *edesc, struct aead_request *req,
541 u32 encrypt, 541 u32 encrypt,
542 void (*callback) (struct device *dev, u32 *desc, 542 void (*callback) (struct device *dev, u32 *desc,
543 u32 err, void *context)) 543 u32 err, void *context))
544{ 544{
545 struct crypto_aead *aead = crypto_aead_reqtfm(areq); 545 struct crypto_aead *aead = crypto_aead_reqtfm(req);
546 struct caam_ctx *ctx = crypto_aead_ctx(aead); 546 struct caam_ctx *ctx = crypto_aead_ctx(aead);
547 struct device *jrdev = ctx->jrdev; 547 struct device *jrdev = ctx->jrdev;
548 u32 *desc = edesc->hw_desc, options; 548 u32 *desc = edesc->hw_desc, options;
@@ -554,27 +554,27 @@ static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq,
554 u32 *sh_desc = ctx->sh_desc; 554 u32 *sh_desc = ctx->sh_desc;
555 555
556 debug("assoclen %d cryptlen %d authsize %d\n", 556 debug("assoclen %d cryptlen %d authsize %d\n",
557 areq->assoclen, areq->cryptlen, authsize); 557 req->assoclen, req->cryptlen, authsize);
558 print_hex_dump(KERN_ERR, "assoc @"xstr(__LINE__)": ", 558 print_hex_dump(KERN_ERR, "assoc @"xstr(__LINE__)": ",
559 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(areq->assoc), 559 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->assoc),
560 areq->assoclen , 1); 560 req->assoclen , 1);
561 print_hex_dump(KERN_ERR, "presciv@"xstr(__LINE__)": ", 561 print_hex_dump(KERN_ERR, "presciv@"xstr(__LINE__)": ",
562 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(areq->src) - ivsize, 562 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src) - ivsize,
563 edesc->src_nents ? 100 : ivsize, 1); 563 edesc->src_nents ? 100 : ivsize, 1);
564 print_hex_dump(KERN_ERR, "src @"xstr(__LINE__)": ", 564 print_hex_dump(KERN_ERR, "src @"xstr(__LINE__)": ",
565 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(areq->src), 565 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
566 edesc->src_nents ? 100 : areq->cryptlen + authsize, 1); 566 edesc->src_nents ? 100 : req->cryptlen + authsize, 1);
567 print_hex_dump(KERN_ERR, "shrdesc@"xstr(__LINE__)": ", 567 print_hex_dump(KERN_ERR, "shrdesc@"xstr(__LINE__)": ",
568 DUMP_PREFIX_ADDRESS, 16, 4, sh_desc, 568 DUMP_PREFIX_ADDRESS, 16, 4, sh_desc,
569 desc_bytes(sh_desc), 1); 569 desc_bytes(sh_desc), 1);
570#endif 570#endif
571 assoc_sg_count = dma_map_sg(jrdev, areq->assoc, edesc->assoc_nents ?: 1, 571 assoc_sg_count = dma_map_sg(jrdev, req->assoc, edesc->assoc_nents ?: 1,
572 DMA_TO_DEVICE); 572 DMA_TO_DEVICE);
573 if (areq->src == areq->dst) 573 if (req->src == req->dst)
574 sg_count = dma_map_sg(jrdev, areq->src, edesc->src_nents ? : 1, 574 sg_count = dma_map_sg(jrdev, req->src, edesc->src_nents ? : 1,
575 DMA_BIDIRECTIONAL); 575 DMA_BIDIRECTIONAL);
576 else 576 else
577 sg_count = dma_map_sg(jrdev, areq->src, edesc->src_nents ? : 1, 577 sg_count = dma_map_sg(jrdev, req->src, edesc->src_nents ? : 1,
578 DMA_TO_DEVICE); 578 DMA_TO_DEVICE);
579 579
580 /* start auth operation */ 580 /* start auth operation */
@@ -584,14 +584,14 @@ static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq,
584 /* Load FIFO with data for Class 2 CHA */ 584 /* Load FIFO with data for Class 2 CHA */
585 options = FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG; 585 options = FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG;
586 if (!edesc->assoc_nents) { 586 if (!edesc->assoc_nents) {
587 ptr = sg_dma_address(areq->assoc); 587 ptr = sg_dma_address(req->assoc);
588 } else { 588 } else {
589 sg_to_link_tbl(areq->assoc, edesc->assoc_nents, 589 sg_to_link_tbl(req->assoc, edesc->assoc_nents,
590 edesc->link_tbl, 0); 590 edesc->link_tbl, 0);
591 ptr = edesc->link_tbl_dma; 591 ptr = edesc->link_tbl_dma;
592 options |= LDST_SGF; 592 options |= LDST_SGF;
593 } 593 }
594 append_fifo_load(desc, ptr, areq->assoclen, options); 594 append_fifo_load(desc, ptr, req->assoclen, options);
595 595
596 /* copy iv from cipher/class1 input context to class2 infifo */ 596 /* copy iv from cipher/class1 input context to class2 infifo */
597 append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO | ivsize); 597 append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO | ivsize);
@@ -621,31 +621,31 @@ static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq,
621 /* load payload & instruct to class2 to snoop class 1 if encrypting */ 621 /* load payload & instruct to class2 to snoop class 1 if encrypting */
622 options = 0; 622 options = 0;
623 if (!edesc->src_nents) { 623 if (!edesc->src_nents) {
624 src_dma = sg_dma_address(areq->src); 624 src_dma = sg_dma_address(req->src);
625 } else { 625 } else {
626 sg_to_link_tbl(areq->src, edesc->src_nents, edesc->link_tbl + 626 sg_to_link_tbl(req->src, edesc->src_nents, edesc->link_tbl +
627 edesc->assoc_nents, 0); 627 edesc->assoc_nents, 0);
628 src_dma = edesc->link_tbl_dma + edesc->assoc_nents * 628 src_dma = edesc->link_tbl_dma + edesc->assoc_nents *
629 sizeof(struct link_tbl_entry); 629 sizeof(struct link_tbl_entry);
630 options |= LDST_SGF; 630 options |= LDST_SGF;
631 } 631 }
632 append_seq_in_ptr(desc, src_dma, areq->cryptlen + authsize, options); 632 append_seq_in_ptr(desc, src_dma, req->cryptlen + authsize, options);
633 append_seq_fifo_load(desc, areq->cryptlen, FIFOLD_CLASS_BOTH | 633 append_seq_fifo_load(desc, req->cryptlen, FIFOLD_CLASS_BOTH |
634 FIFOLD_TYPE_LASTBOTH | 634 FIFOLD_TYPE_LASTBOTH |
635 (encrypt ? FIFOLD_TYPE_MSG1OUT2 635 (encrypt ? FIFOLD_TYPE_MSG1OUT2
636 : FIFOLD_TYPE_MSG)); 636 : FIFOLD_TYPE_MSG));
637 637
638 /* specify destination */ 638 /* specify destination */
639 if (areq->src == areq->dst) { 639 if (req->src == req->dst) {
640 dst_dma = src_dma; 640 dst_dma = src_dma;
641 } else { 641 } else {
642 sg_count = dma_map_sg(jrdev, areq->dst, edesc->dst_nents ? : 1, 642 sg_count = dma_map_sg(jrdev, req->dst, edesc->dst_nents ? : 1,
643 DMA_FROM_DEVICE); 643 DMA_FROM_DEVICE);
644 if (!edesc->dst_nents) { 644 if (!edesc->dst_nents) {
645 dst_dma = sg_dma_address(areq->dst); 645 dst_dma = sg_dma_address(req->dst);
646 options = 0; 646 options = 0;
647 } else { 647 } else {
648 sg_to_link_tbl(areq->dst, edesc->dst_nents, 648 sg_to_link_tbl(req->dst, edesc->dst_nents,
649 edesc->link_tbl + edesc->assoc_nents + 649 edesc->link_tbl + edesc->assoc_nents +
650 edesc->src_nents, 0); 650 edesc->src_nents, 0);
651 dst_dma = edesc->link_tbl_dma + (edesc->assoc_nents + 651 dst_dma = edesc->link_tbl_dma + (edesc->assoc_nents +
@@ -654,8 +654,8 @@ static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq,
654 options = LDST_SGF; 654 options = LDST_SGF;
655 } 655 }
656 } 656 }
657 append_seq_out_ptr(desc, dst_dma, areq->cryptlen + authsize, options); 657 append_seq_out_ptr(desc, dst_dma, req->cryptlen + authsize, options);
658 append_seq_fifo_store(desc, areq->cryptlen, FIFOST_TYPE_MESSAGE_DATA); 658 append_seq_fifo_store(desc, req->cryptlen, FIFOST_TYPE_MESSAGE_DATA);
659 659
660 /* ICV */ 660 /* ICV */
661 if (encrypt) 661 if (encrypt)
@@ -674,11 +674,11 @@ static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq,
674 edesc->link_tbl_bytes, 1); 674 edesc->link_tbl_bytes, 1);
675#endif 675#endif
676 676
677 ret = caam_jr_enqueue(jrdev, desc, callback, areq); 677 ret = caam_jr_enqueue(jrdev, desc, callback, req);
678 if (!ret) 678 if (!ret)
679 ret = -EINPROGRESS; 679 ret = -EINPROGRESS;
680 else { 680 else {
681 ipsec_esp_unmap(jrdev, edesc, areq); 681 aead_unmap(jrdev, edesc, req);
682 kfree(edesc); 682 kfree(edesc);
683 } 683 }
684 684
@@ -708,30 +708,30 @@ static int sg_count(struct scatterlist *sg_list, int nbytes, int *chained)
708/* 708/*
709 * allocate and map the ipsec_esp extended descriptor 709 * allocate and map the ipsec_esp extended descriptor
710 */ 710 */
711static struct ipsec_esp_edesc *ipsec_esp_edesc_alloc(struct aead_request *areq, 711static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
712 int desc_bytes) 712 int desc_bytes)
713{ 713{
714 struct crypto_aead *aead = crypto_aead_reqtfm(areq); 714 struct crypto_aead *aead = crypto_aead_reqtfm(req);
715 struct caam_ctx *ctx = crypto_aead_ctx(aead); 715 struct caam_ctx *ctx = crypto_aead_ctx(aead);
716 struct device *jrdev = ctx->jrdev; 716 struct device *jrdev = ctx->jrdev;
717 gfp_t flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL : 717 gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
718 GFP_ATOMIC; 718 GFP_ATOMIC;
719 int assoc_nents, src_nents, dst_nents = 0, chained, link_tbl_bytes; 719 int assoc_nents, src_nents, dst_nents = 0, chained, link_tbl_bytes;
720 struct ipsec_esp_edesc *edesc; 720 struct aead_edesc *edesc;
721 721
722 assoc_nents = sg_count(areq->assoc, areq->assoclen, &chained); 722 assoc_nents = sg_count(req->assoc, req->assoclen, &chained);
723 BUG_ON(chained); 723 BUG_ON(chained);
724 if (likely(assoc_nents == 1)) 724 if (likely(assoc_nents == 1))
725 assoc_nents = 0; 725 assoc_nents = 0;
726 726
727 src_nents = sg_count(areq->src, areq->cryptlen + ctx->authsize, 727 src_nents = sg_count(req->src, req->cryptlen + ctx->authsize,
728 &chained); 728 &chained);
729 BUG_ON(chained); 729 BUG_ON(chained);
730 if (src_nents == 1) 730 if (src_nents == 1)
731 src_nents = 0; 731 src_nents = 0;
732 732
733 if (unlikely(areq->dst != areq->src)) { 733 if (unlikely(req->dst != req->src)) {
734 dst_nents = sg_count(areq->dst, areq->cryptlen + ctx->authsize, 734 dst_nents = sg_count(req->dst, req->cryptlen + ctx->authsize,
735 &chained); 735 &chained);
736 BUG_ON(chained); 736 BUG_ON(chained);
737 if (dst_nents == 1) 737 if (dst_nents == 1)
@@ -743,7 +743,7 @@ static struct ipsec_esp_edesc *ipsec_esp_edesc_alloc(struct aead_request *areq,
743 debug("link_tbl_bytes %d\n", link_tbl_bytes); 743 debug("link_tbl_bytes %d\n", link_tbl_bytes);
744 744
745 /* allocate space for base edesc and hw desc commands, link tables */ 745 /* allocate space for base edesc and hw desc commands, link tables */
746 edesc = kmalloc(sizeof(struct ipsec_esp_edesc) + desc_bytes + 746 edesc = kmalloc(sizeof(struct aead_edesc) + desc_bytes +
747 link_tbl_bytes, GFP_DMA | flags); 747 link_tbl_bytes, GFP_DMA | flags);
748 if (!edesc) { 748 if (!edesc) {
749 dev_err(jrdev, "could not allocate extended descriptor\n"); 749 dev_err(jrdev, "could not allocate extended descriptor\n");
@@ -753,7 +753,7 @@ static struct ipsec_esp_edesc *ipsec_esp_edesc_alloc(struct aead_request *areq,
753 edesc->assoc_nents = assoc_nents; 753 edesc->assoc_nents = assoc_nents;
754 edesc->src_nents = src_nents; 754 edesc->src_nents = src_nents;
755 edesc->dst_nents = dst_nents; 755 edesc->dst_nents = dst_nents;
756 edesc->link_tbl = (void *)edesc + sizeof(struct ipsec_esp_edesc) + 756 edesc->link_tbl = (void *)edesc + sizeof(struct aead_edesc) +
757 desc_bytes; 757 desc_bytes;
758 edesc->link_tbl_dma = dma_map_single(jrdev, edesc->link_tbl, 758 edesc->link_tbl_dma = dma_map_single(jrdev, edesc->link_tbl,
759 link_tbl_bytes, DMA_TO_DEVICE); 759 link_tbl_bytes, DMA_TO_DEVICE);
@@ -762,10 +762,10 @@ static struct ipsec_esp_edesc *ipsec_esp_edesc_alloc(struct aead_request *areq,
762 return edesc; 762 return edesc;
763} 763}
764 764
765static int aead_authenc_encrypt(struct aead_request *areq) 765static int aead_encrypt(struct aead_request *req)
766{ 766{
767 struct ipsec_esp_edesc *edesc; 767 struct aead_edesc *edesc;
768 struct crypto_aead *aead = crypto_aead_reqtfm(areq); 768 struct crypto_aead *aead = crypto_aead_reqtfm(req);
769 struct caam_ctx *ctx = crypto_aead_ctx(aead); 769 struct caam_ctx *ctx = crypto_aead_ctx(aead);
770 struct device *jrdev = ctx->jrdev; 770 struct device *jrdev = ctx->jrdev;
771 int ivsize = crypto_aead_ivsize(aead); 771 int ivsize = crypto_aead_ivsize(aead);
@@ -773,7 +773,7 @@ static int aead_authenc_encrypt(struct aead_request *areq)
773 dma_addr_t iv_dma; 773 dma_addr_t iv_dma;
774 774
775 /* allocate extended descriptor */ 775 /* allocate extended descriptor */
776 edesc = ipsec_esp_edesc_alloc(areq, DESC_AEAD_ENCRYPT_TEXT_LEN * 776 edesc = aead_edesc_alloc(req, DESC_AEAD_ENCRYPT_TEXT_LEN *
777 CAAM_CMD_SZ); 777 CAAM_CMD_SZ);
778 if (IS_ERR(edesc)) 778 if (IS_ERR(edesc))
779 return PTR_ERR(edesc); 779 return PTR_ERR(edesc);
@@ -784,29 +784,29 @@ static int aead_authenc_encrypt(struct aead_request *areq)
784 init_job_desc_shared(desc, ctx->shared_desc_phys, 784 init_job_desc_shared(desc, ctx->shared_desc_phys,
785 desc_len(ctx->sh_desc), HDR_SHARE_DEFER); 785 desc_len(ctx->sh_desc), HDR_SHARE_DEFER);
786 786
787 iv_dma = dma_map_single(jrdev, areq->iv, ivsize, DMA_TO_DEVICE); 787 iv_dma = dma_map_single(jrdev, req->iv, ivsize, DMA_TO_DEVICE);
788 /* check dma error */ 788 /* check dma error */
789 789
790 append_load(desc, iv_dma, ivsize, 790 append_load(desc, iv_dma, ivsize,
791 LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT); 791 LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT);
792 792
793 return ipsec_esp(edesc, areq, OP_ALG_ENCRYPT, ipsec_esp_encrypt_done); 793 return init_aead_job(edesc, req, OP_ALG_ENCRYPT, aead_encrypt_done);
794} 794}
795 795
796static int aead_authenc_decrypt(struct aead_request *req) 796static int aead_decrypt(struct aead_request *req)
797{ 797{
798 struct crypto_aead *aead = crypto_aead_reqtfm(req); 798 struct crypto_aead *aead = crypto_aead_reqtfm(req);
799 int ivsize = crypto_aead_ivsize(aead); 799 int ivsize = crypto_aead_ivsize(aead);
800 struct caam_ctx *ctx = crypto_aead_ctx(aead); 800 struct caam_ctx *ctx = crypto_aead_ctx(aead);
801 struct device *jrdev = ctx->jrdev; 801 struct device *jrdev = ctx->jrdev;
802 struct ipsec_esp_edesc *edesc; 802 struct aead_edesc *edesc;
803 u32 *desc; 803 u32 *desc;
804 dma_addr_t iv_dma; 804 dma_addr_t iv_dma;
805 805
806 req->cryptlen -= ctx->authsize; 806 req->cryptlen -= ctx->authsize;
807 807
808 /* allocate extended descriptor */ 808 /* allocate extended descriptor */
809 edesc = ipsec_esp_edesc_alloc(req, DESC_AEAD_DECRYPT_TEXT_LEN * 809 edesc = aead_edesc_alloc(req, DESC_AEAD_DECRYPT_TEXT_LEN *
810 CAAM_CMD_SZ); 810 CAAM_CMD_SZ);
811 if (IS_ERR(edesc)) 811 if (IS_ERR(edesc))
812 return PTR_ERR(edesc); 812 return PTR_ERR(edesc);
@@ -823,26 +823,26 @@ static int aead_authenc_decrypt(struct aead_request *req)
823 append_load(desc, iv_dma, ivsize, 823 append_load(desc, iv_dma, ivsize,
824 LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT); 824 LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT);
825 825
826 return ipsec_esp(edesc, req, !OP_ALG_ENCRYPT, ipsec_esp_decrypt_done); 826 return init_aead_job(edesc, req, !OP_ALG_ENCRYPT, aead_decrypt_done);
827} 827}
828 828
829static int aead_authenc_givencrypt(struct aead_givcrypt_request *req) 829static int aead_givencrypt(struct aead_givcrypt_request *areq)
830{ 830{
831 struct aead_request *areq = &req->areq; 831 struct aead_request *req = &areq->areq;
832 struct ipsec_esp_edesc *edesc; 832 struct aead_edesc *edesc;
833 struct crypto_aead *aead = crypto_aead_reqtfm(areq); 833 struct crypto_aead *aead = crypto_aead_reqtfm(req);
834 struct caam_ctx *ctx = crypto_aead_ctx(aead); 834 struct caam_ctx *ctx = crypto_aead_ctx(aead);
835 struct device *jrdev = ctx->jrdev; 835 struct device *jrdev = ctx->jrdev;
836 int ivsize = crypto_aead_ivsize(aead); 836 int ivsize = crypto_aead_ivsize(aead);
837 dma_addr_t iv_dma; 837 dma_addr_t iv_dma;
838 u32 *desc; 838 u32 *desc;
839 839
840 iv_dma = dma_map_single(jrdev, req->giv, ivsize, DMA_FROM_DEVICE); 840 iv_dma = dma_map_single(jrdev, areq->giv, ivsize, DMA_FROM_DEVICE);
841 841
842 debug("%s: giv %p\n", __func__, req->giv); 842 debug("%s: giv %p\n", __func__, areq->giv);
843 843
844 /* allocate extended descriptor */ 844 /* allocate extended descriptor */
845 edesc = ipsec_esp_edesc_alloc(areq, DESC_AEAD_GIVENCRYPT_TEXT_LEN * 845 edesc = aead_edesc_alloc(req, DESC_AEAD_GIVENCRYPT_TEXT_LEN *
846 CAAM_CMD_SZ); 846 CAAM_CMD_SZ);
847 if (IS_ERR(edesc)) 847 if (IS_ERR(edesc))
848 return PTR_ERR(edesc); 848 return PTR_ERR(edesc);
@@ -881,7 +881,7 @@ static int aead_authenc_givencrypt(struct aead_givcrypt_request *req)
881 881
882 append_fifo_store(desc, iv_dma, ivsize, FIFOST_TYPE_MESSAGE_DATA); 882 append_fifo_store(desc, iv_dma, ivsize, FIFOST_TYPE_MESSAGE_DATA);
883 883
884 return ipsec_esp(edesc, areq, OP_ALG_ENCRYPT, ipsec_esp_encrypt_done); 884 return init_aead_job(edesc, req, OP_ALG_ENCRYPT, aead_encrypt_done);
885} 885}
886 886
887struct caam_alg_template { 887struct caam_alg_template {
@@ -901,11 +901,11 @@ static struct caam_alg_template driver_algs[] = {
901 .driver_name = "authenc-hmac-sha1-cbc-aes-caam", 901 .driver_name = "authenc-hmac-sha1-cbc-aes-caam",
902 .blocksize = AES_BLOCK_SIZE, 902 .blocksize = AES_BLOCK_SIZE,
903 .aead = { 903 .aead = {
904 .setkey = aead_authenc_setkey, 904 .setkey = aead_setkey,
905 .setauthsize = aead_authenc_setauthsize, 905 .setauthsize = aead_setauthsize,
906 .encrypt = aead_authenc_encrypt, 906 .encrypt = aead_encrypt,
907 .decrypt = aead_authenc_decrypt, 907 .decrypt = aead_decrypt,
908 .givencrypt = aead_authenc_givencrypt, 908 .givencrypt = aead_givencrypt,
909 .geniv = "<built-in>", 909 .geniv = "<built-in>",
910 .ivsize = AES_BLOCK_SIZE, 910 .ivsize = AES_BLOCK_SIZE,
911 .maxauthsize = SHA1_DIGEST_SIZE, 911 .maxauthsize = SHA1_DIGEST_SIZE,
@@ -919,11 +919,11 @@ static struct caam_alg_template driver_algs[] = {
919 .driver_name = "authenc-hmac-sha256-cbc-aes-caam", 919 .driver_name = "authenc-hmac-sha256-cbc-aes-caam",
920 .blocksize = AES_BLOCK_SIZE, 920 .blocksize = AES_BLOCK_SIZE,
921 .aead = { 921 .aead = {
922 .setkey = aead_authenc_setkey, 922 .setkey = aead_setkey,
923 .setauthsize = aead_authenc_setauthsize, 923 .setauthsize = aead_setauthsize,
924 .encrypt = aead_authenc_encrypt, 924 .encrypt = aead_encrypt,
925 .decrypt = aead_authenc_decrypt, 925 .decrypt = aead_decrypt,
926 .givencrypt = aead_authenc_givencrypt, 926 .givencrypt = aead_givencrypt,
927 .geniv = "<built-in>", 927 .geniv = "<built-in>",
928 .ivsize = AES_BLOCK_SIZE, 928 .ivsize = AES_BLOCK_SIZE,
929 .maxauthsize = SHA256_DIGEST_SIZE, 929 .maxauthsize = SHA256_DIGEST_SIZE,
@@ -938,11 +938,11 @@ static struct caam_alg_template driver_algs[] = {
938 .driver_name = "authenc-hmac-sha512-cbc-aes-caam", 938 .driver_name = "authenc-hmac-sha512-cbc-aes-caam",
939 .blocksize = AES_BLOCK_SIZE, 939 .blocksize = AES_BLOCK_SIZE,
940 .aead = { 940 .aead = {
941 .setkey = aead_authenc_setkey, 941 .setkey = aead_setkey,
942 .setauthsize = aead_authenc_setauthsize, 942 .setauthsize = aead_setauthsize,
943 .encrypt = aead_authenc_encrypt, 943 .encrypt = aead_encrypt,
944 .decrypt = aead_authenc_decrypt, 944 .decrypt = aead_decrypt,
945 .givencrypt = aead_authenc_givencrypt, 945 .givencrypt = aead_givencrypt,
946 .geniv = "<built-in>", 946 .geniv = "<built-in>",
947 .ivsize = AES_BLOCK_SIZE, 947 .ivsize = AES_BLOCK_SIZE,
948 .maxauthsize = SHA512_DIGEST_SIZE, 948 .maxauthsize = SHA512_DIGEST_SIZE,
@@ -957,11 +957,11 @@ static struct caam_alg_template driver_algs[] = {
957 .driver_name = "authenc-hmac-sha1-cbc-des3_ede-caam", 957 .driver_name = "authenc-hmac-sha1-cbc-des3_ede-caam",
958 .blocksize = DES3_EDE_BLOCK_SIZE, 958 .blocksize = DES3_EDE_BLOCK_SIZE,
959 .aead = { 959 .aead = {
960 .setkey = aead_authenc_setkey, 960 .setkey = aead_setkey,
961 .setauthsize = aead_authenc_setauthsize, 961 .setauthsize = aead_setauthsize,
962 .encrypt = aead_authenc_encrypt, 962 .encrypt = aead_encrypt,
963 .decrypt = aead_authenc_decrypt, 963 .decrypt = aead_decrypt,
964 .givencrypt = aead_authenc_givencrypt, 964 .givencrypt = aead_givencrypt,
965 .geniv = "<built-in>", 965 .geniv = "<built-in>",
966 .ivsize = DES3_EDE_BLOCK_SIZE, 966 .ivsize = DES3_EDE_BLOCK_SIZE,
967 .maxauthsize = SHA1_DIGEST_SIZE, 967 .maxauthsize = SHA1_DIGEST_SIZE,
@@ -975,11 +975,11 @@ static struct caam_alg_template driver_algs[] = {
975 .driver_name = "authenc-hmac-sha256-cbc-des3_ede-caam", 975 .driver_name = "authenc-hmac-sha256-cbc-des3_ede-caam",
976 .blocksize = DES3_EDE_BLOCK_SIZE, 976 .blocksize = DES3_EDE_BLOCK_SIZE,
977 .aead = { 977 .aead = {
978 .setkey = aead_authenc_setkey, 978 .setkey = aead_setkey,
979 .setauthsize = aead_authenc_setauthsize, 979 .setauthsize = aead_setauthsize,
980 .encrypt = aead_authenc_encrypt, 980 .encrypt = aead_encrypt,
981 .decrypt = aead_authenc_decrypt, 981 .decrypt = aead_decrypt,
982 .givencrypt = aead_authenc_givencrypt, 982 .givencrypt = aead_givencrypt,
983 .geniv = "<built-in>", 983 .geniv = "<built-in>",
984 .ivsize = DES3_EDE_BLOCK_SIZE, 984 .ivsize = DES3_EDE_BLOCK_SIZE,
985 .maxauthsize = SHA256_DIGEST_SIZE, 985 .maxauthsize = SHA256_DIGEST_SIZE,
@@ -994,11 +994,11 @@ static struct caam_alg_template driver_algs[] = {
994 .driver_name = "authenc-hmac-sha512-cbc-des3_ede-caam", 994 .driver_name = "authenc-hmac-sha512-cbc-des3_ede-caam",
995 .blocksize = DES3_EDE_BLOCK_SIZE, 995 .blocksize = DES3_EDE_BLOCK_SIZE,
996 .aead = { 996 .aead = {
997 .setkey = aead_authenc_setkey, 997 .setkey = aead_setkey,
998 .setauthsize = aead_authenc_setauthsize, 998 .setauthsize = aead_setauthsize,
999 .encrypt = aead_authenc_encrypt, 999 .encrypt = aead_encrypt,
1000 .decrypt = aead_authenc_decrypt, 1000 .decrypt = aead_decrypt,
1001 .givencrypt = aead_authenc_givencrypt, 1001 .givencrypt = aead_givencrypt,
1002 .geniv = "<built-in>", 1002 .geniv = "<built-in>",
1003 .ivsize = DES3_EDE_BLOCK_SIZE, 1003 .ivsize = DES3_EDE_BLOCK_SIZE,
1004 .maxauthsize = SHA512_DIGEST_SIZE, 1004 .maxauthsize = SHA512_DIGEST_SIZE,
@@ -1013,11 +1013,11 @@ static struct caam_alg_template driver_algs[] = {
1013 .driver_name = "authenc-hmac-sha1-cbc-des-caam", 1013 .driver_name = "authenc-hmac-sha1-cbc-des-caam",
1014 .blocksize = DES_BLOCK_SIZE, 1014 .blocksize = DES_BLOCK_SIZE,
1015 .aead = { 1015 .aead = {
1016 .setkey = aead_authenc_setkey, 1016 .setkey = aead_setkey,
1017 .setauthsize = aead_authenc_setauthsize, 1017 .setauthsize = aead_setauthsize,
1018 .encrypt = aead_authenc_encrypt, 1018 .encrypt = aead_encrypt,
1019 .decrypt = aead_authenc_decrypt, 1019 .decrypt = aead_decrypt,
1020 .givencrypt = aead_authenc_givencrypt, 1020 .givencrypt = aead_givencrypt,
1021 .geniv = "<built-in>", 1021 .geniv = "<built-in>",
1022 .ivsize = DES_BLOCK_SIZE, 1022 .ivsize = DES_BLOCK_SIZE,
1023 .maxauthsize = SHA1_DIGEST_SIZE, 1023 .maxauthsize = SHA1_DIGEST_SIZE,
@@ -1031,11 +1031,11 @@ static struct caam_alg_template driver_algs[] = {
1031 .driver_name = "authenc-hmac-sha256-cbc-des-caam", 1031 .driver_name = "authenc-hmac-sha256-cbc-des-caam",
1032 .blocksize = DES_BLOCK_SIZE, 1032 .blocksize = DES_BLOCK_SIZE,
1033 .aead = { 1033 .aead = {
1034 .setkey = aead_authenc_setkey, 1034 .setkey = aead_setkey,
1035 .setauthsize = aead_authenc_setauthsize, 1035 .setauthsize = aead_setauthsize,
1036 .encrypt = aead_authenc_encrypt, 1036 .encrypt = aead_encrypt,
1037 .decrypt = aead_authenc_decrypt, 1037 .decrypt = aead_decrypt,
1038 .givencrypt = aead_authenc_givencrypt, 1038 .givencrypt = aead_givencrypt,
1039 .geniv = "<built-in>", 1039 .geniv = "<built-in>",
1040 .ivsize = DES_BLOCK_SIZE, 1040 .ivsize = DES_BLOCK_SIZE,
1041 .maxauthsize = SHA256_DIGEST_SIZE, 1041 .maxauthsize = SHA256_DIGEST_SIZE,
@@ -1050,11 +1050,11 @@ static struct caam_alg_template driver_algs[] = {
1050 .driver_name = "authenc-hmac-sha512-cbc-des-caam", 1050 .driver_name = "authenc-hmac-sha512-cbc-des-caam",
1051 .blocksize = DES_BLOCK_SIZE, 1051 .blocksize = DES_BLOCK_SIZE,
1052 .aead = { 1052 .aead = {
1053 .setkey = aead_authenc_setkey, 1053 .setkey = aead_setkey,
1054 .setauthsize = aead_authenc_setauthsize, 1054 .setauthsize = aead_setauthsize,
1055 .encrypt = aead_authenc_encrypt, 1055 .encrypt = aead_encrypt,
1056 .decrypt = aead_authenc_decrypt, 1056 .decrypt = aead_decrypt,
1057 .givencrypt = aead_authenc_givencrypt, 1057 .givencrypt = aead_givencrypt,
1058 .geniv = "<built-in>", 1058 .geniv = "<built-in>",
1059 .ivsize = DES_BLOCK_SIZE, 1059 .ivsize = DES_BLOCK_SIZE,
1060 .maxauthsize = SHA512_DIGEST_SIZE, 1060 .maxauthsize = SHA512_DIGEST_SIZE,