aboutsummaryrefslogtreecommitdiffstats
path: root/drivers/crypto
diff options
context:
space:
mode:
authorTudor Ambarus <tudor.ambarus@freescale.com>2014-10-23 09:11:23 -0400
committerHerbert Xu <herbert@gondor.apana.org.au>2014-10-24 10:52:28 -0400
commit3ef8d945d0dafd272e77c01099bc4975c5297a5a (patch)
tree42ec2aaaf31f4fa619aaf46fa98536c4fcbb2630 /drivers/crypto
parent61daf055ea42342dea1f7006833bc7cde20eeb34 (diff)
crypto: caam - add support for gcm(aes)
Add support for AES working in Galois Counter Mode. There is a limitation related to IV size, similar to the one present in SW implementation (crypto/gcm.c): The only IV size allowed is 12 bytes. It will be padded by HW to the right with 0x0000_0001 (up to 16 bytes - AES block size), according to the GCM specification. Signed-off-by: Tudor Ambarus <tudor.ambarus@freescale.com> Signed-off-by: Horia Geanta <horia.geanta@freescale.com> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'drivers/crypto')
-rw-r--r--drivers/crypto/caam/caamalg.c342
1 file changed, 331 insertions, 11 deletions
diff --git a/drivers/crypto/caam/caamalg.c b/drivers/crypto/caam/caamalg.c
index a80ea853701d..66e35efcedfa 100644
--- a/drivers/crypto/caam/caamalg.c
+++ b/drivers/crypto/caam/caamalg.c
@@ -74,6 +74,10 @@
74#define DESC_AEAD_NULL_ENC_LEN (DESC_AEAD_NULL_BASE + 14 * CAAM_CMD_SZ) 74#define DESC_AEAD_NULL_ENC_LEN (DESC_AEAD_NULL_BASE + 14 * CAAM_CMD_SZ)
75#define DESC_AEAD_NULL_DEC_LEN (DESC_AEAD_NULL_BASE + 17 * CAAM_CMD_SZ) 75#define DESC_AEAD_NULL_DEC_LEN (DESC_AEAD_NULL_BASE + 17 * CAAM_CMD_SZ)
76 76
77#define DESC_GCM_BASE (3 * CAAM_CMD_SZ)
78#define DESC_GCM_ENC_LEN (DESC_GCM_BASE + 23 * CAAM_CMD_SZ)
79#define DESC_GCM_DEC_LEN (DESC_GCM_BASE + 19 * CAAM_CMD_SZ)
80
77#define DESC_ABLKCIPHER_BASE (3 * CAAM_CMD_SZ) 81#define DESC_ABLKCIPHER_BASE (3 * CAAM_CMD_SZ)
78#define DESC_ABLKCIPHER_ENC_LEN (DESC_ABLKCIPHER_BASE + \ 82#define DESC_ABLKCIPHER_ENC_LEN (DESC_ABLKCIPHER_BASE + \
79 20 * CAAM_CMD_SZ) 83 20 * CAAM_CMD_SZ)
@@ -630,6 +634,236 @@ static int aead_setauthsize(struct crypto_aead *authenc,
630 return 0; 634 return 0;
631} 635}
632 636
/*
 * gcm_set_sh_desc - (re)build the gcm(aes) encrypt and decrypt shared
 * descriptors and DMA-map them for use by the CAAM job ring.
 *
 * Called whenever the key or the authentication tag size changes
 * (gcm_setkey() / gcm_setauthsize()). Until both ctx->enckeylen and
 * ctx->authsize are known, there is nothing to build and the function
 * returns early with success.
 *
 * Returns 0 on success, -ENOMEM if a shared descriptor cannot be
 * DMA-mapped.
 */
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct aead_tfm *tfm = &aead->base.crt_aead;
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd, *zero_payload_jump_cmd,
	    *zero_assoc_jump_cmd1, *zero_assoc_jump_cmd2;
	u32 *desc;

	/* Nothing to do until both key and authsize have been set. */
	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer;
	 * inline the key only if the total still fits.
	 */
	if (DESC_GCM_ENC_LEN + DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD | JUMP_COND_SELF);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* cryptlen = seqoutlen - authsize */
	append_math_sub_imm_u32(desc, REG3, SEQOUTLEN, IMM, ctx->authsize);

	/* assoclen + cryptlen = seqinlen - ivsize */
	append_math_sub_imm_u32(desc, REG2, SEQINLEN, IMM, tfm->ivsize);

	/* assoclen = (assoclen + cryptlen) - cryptlen */
	append_math_sub(desc, REG1, REG2, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);
	/* read IV */
	append_seq_fifo_load(desc, tfm->ivsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG1, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/*
	 * jump the zero-payload commands
	 * NOTE(review): "7" is a relative jump over the next 7 descriptor
	 * words - keep in sync with the zero-payload command block below.
	 */
	append_jump(desc, JUMP_TEST_ALL | 7);

	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* if assoclen is ZERO, jump to IV reading - is the only input data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG1, CAAM_CMD_SZ);
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);
	/* read IV */
	append_seq_fifo_load(desc, tfm->ivsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);

	/*
	 * jump to ICV writing
	 * NOTE(review): relative jump over the next 2 words - keep in sync
	 * with the IV-only load below.
	 */
	append_jump(desc, JUMP_TEST_ALL | 2);

	/* read IV - is the only input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
	append_seq_fifo_load(desc, tfm->ivsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
			     FIFOLD_TYPE_LAST1);

	/* write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * AES GCM decrypt shared descriptor
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_GCM_DEC_LEN + DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD |
				   JUMP_COND_SELF);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqinlen - ivsize - icvsize */
	append_math_sub_imm_u32(desc, REG3, SEQINLEN, IMM,
				ctx->authsize + tfm->ivsize);

	/* assoclen = (assoclen + cryptlen) - cryptlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_sub(desc, REG1, REG3, REG2, CAAM_CMD_SZ);

	/* read IV */
	append_seq_fifo_load(desc, tfm->ivsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);

	/* jump to zero-payload command if cryptlen is zero */
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQINLEN, ZERO, REG1, CAAM_CMD_SZ);
	/* if assoclen is ZERO, skip reading assoc data */
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);
	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);

	/* store encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * jump the zero-payload commands
	 * NOTE(review): relative jump over the next 4 descriptor words -
	 * keep in sync with the zero-payload block below.
	 */
	append_jump(desc, JUMP_TEST_ALL | 4);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* if assoclen is ZERO, jump to ICV reading */
	append_math_add(desc, VARSEQINLEN, ZERO, REG1, CAAM_CMD_SZ);
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);
	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

	/* read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
856
857static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
858{
859 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
860
861 ctx->authsize = authsize;
862 gcm_set_sh_desc(authenc);
863
864 return 0;
865}
866
633static u32 gen_split_aead_key(struct caam_ctx *ctx, const u8 *key_in, 867static u32 gen_split_aead_key(struct caam_ctx *ctx, const u8 *key_in,
634 u32 authkeylen) 868 u32 authkeylen)
635{ 869{
@@ -703,6 +937,36 @@ badkey:
703 return -EINVAL; 937 return -EINVAL;
704} 938}
705 939
940static int gcm_setkey(struct crypto_aead *aead,
941 const u8 *key, unsigned int keylen)
942{
943 struct caam_ctx *ctx = crypto_aead_ctx(aead);
944 struct device *jrdev = ctx->jrdev;
945 int ret = 0;
946
947#ifdef DEBUG
948 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
949 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
950#endif
951
952 memcpy(ctx->key, key, keylen);
953 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
954 DMA_TO_DEVICE);
955 if (dma_mapping_error(jrdev, ctx->key_dma)) {
956 dev_err(jrdev, "unable to map key i/o memory\n");
957 return -ENOMEM;
958 }
959 ctx->enckeylen = keylen;
960
961 ret = gcm_set_sh_desc(aead);
962 if (ret) {
963 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
964 DMA_TO_DEVICE);
965 }
966
967 return ret;
968}
969
706static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher, 970static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
707 const u8 *key, unsigned int keylen) 971 const u8 *key, unsigned int keylen)
708{ 972{
@@ -1088,6 +1352,7 @@ static void init_aead_job(u32 *sh_desc, dma_addr_t ptr,
1088 u32 out_options = 0, in_options; 1352 u32 out_options = 0, in_options;
1089 dma_addr_t dst_dma, src_dma; 1353 dma_addr_t dst_dma, src_dma;
1090 int len, sec4_sg_index = 0; 1354 int len, sec4_sg_index = 0;
1355 bool is_gcm = false;
1091 1356
1092#ifdef DEBUG 1357#ifdef DEBUG
1093 debug("assoclen %d cryptlen %d authsize %d\n", 1358 debug("assoclen %d cryptlen %d authsize %d\n",
@@ -1106,11 +1371,19 @@ static void init_aead_job(u32 *sh_desc, dma_addr_t ptr,
1106 desc_bytes(sh_desc), 1); 1371 desc_bytes(sh_desc), 1);
1107#endif 1372#endif
1108 1373
1374 if (((ctx->class1_alg_type & OP_ALG_ALGSEL_MASK) ==
1375 OP_ALG_ALGSEL_AES) &&
1376 ((ctx->class1_alg_type & OP_ALG_AAI_MASK) == OP_ALG_AAI_GCM))
1377 is_gcm = true;
1378
1109 len = desc_len(sh_desc); 1379 len = desc_len(sh_desc);
1110 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1380 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1111 1381
1112 if (all_contig) { 1382 if (all_contig) {
1113 src_dma = sg_dma_address(req->assoc); 1383 if (is_gcm)
1384 src_dma = edesc->iv_dma;
1385 else
1386 src_dma = sg_dma_address(req->assoc);
1114 in_options = 0; 1387 in_options = 0;
1115 } else { 1388 } else {
1116 src_dma = edesc->sec4_sg_dma; 1389 src_dma = edesc->sec4_sg_dma;
@@ -1292,6 +1565,7 @@ static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1292 int ivsize = crypto_aead_ivsize(aead); 1565 int ivsize = crypto_aead_ivsize(aead);
1293 int sec4_sg_index, sec4_sg_len = 0, sec4_sg_bytes; 1566 int sec4_sg_index, sec4_sg_len = 0, sec4_sg_bytes;
1294 unsigned int authsize = ctx->authsize; 1567 unsigned int authsize = ctx->authsize;
1568 bool is_gcm = false;
1295 1569
1296 assoc_nents = sg_count(req->assoc, req->assoclen, &assoc_chained); 1570 assoc_nents = sg_count(req->assoc, req->assoclen, &assoc_chained);
1297 1571
@@ -1326,15 +1600,31 @@ static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1326 return ERR_PTR(-ENOMEM); 1600 return ERR_PTR(-ENOMEM);
1327 } 1601 }
1328 1602
1329 /* Check if data are contiguous */ 1603 if (((ctx->class1_alg_type & OP_ALG_ALGSEL_MASK) ==
1330 if (assoc_nents || sg_dma_address(req->assoc) + req->assoclen != 1604 OP_ALG_ALGSEL_AES) &&
1331 iv_dma || src_nents || iv_dma + ivsize != 1605 ((ctx->class1_alg_type & OP_ALG_AAI_MASK) == OP_ALG_AAI_GCM))
1332 sg_dma_address(req->src)) { 1606 is_gcm = true;
1333 all_contig = false; 1607
1608 /*
1609 * Check if data are contiguous.
1610 * GCM expected input sequence: IV, AAD, text
1611 * All other - expected input sequence: AAD, IV, text
1612 */
1613 if (is_gcm)
1614 all_contig = (!assoc_nents &&
1615 iv_dma + ivsize == sg_dma_address(req->assoc) &&
1616 !src_nents && sg_dma_address(req->assoc) +
1617 req->assoclen == sg_dma_address(req->src));
1618 else
1619 all_contig = (!assoc_nents && sg_dma_address(req->assoc) +
1620 req->assoclen == iv_dma && !src_nents &&
1621 iv_dma + ivsize == sg_dma_address(req->src));
1622 if (!all_contig) {
1334 assoc_nents = assoc_nents ? : 1; 1623 assoc_nents = assoc_nents ? : 1;
1335 src_nents = src_nents ? : 1; 1624 src_nents = src_nents ? : 1;
1336 sec4_sg_len = assoc_nents + 1 + src_nents; 1625 sec4_sg_len = assoc_nents + 1 + src_nents;
1337 } 1626 }
1627
1338 sec4_sg_len += dst_nents; 1628 sec4_sg_len += dst_nents;
1339 1629
1340 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry); 1630 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
@@ -1361,14 +1651,26 @@ static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1361 1651
1362 sec4_sg_index = 0; 1652 sec4_sg_index = 0;
1363 if (!all_contig) { 1653 if (!all_contig) {
1364 sg_to_sec4_sg(req->assoc, 1654 if (!is_gcm) {
1365 (assoc_nents ? : 1), 1655 sg_to_sec4_sg(req->assoc,
1366 edesc->sec4_sg + 1656 (assoc_nents ? : 1),
1367 sec4_sg_index, 0); 1657 edesc->sec4_sg +
1368 sec4_sg_index += assoc_nents ? : 1; 1658 sec4_sg_index, 0);
1659 sec4_sg_index += assoc_nents ? : 1;
1660 }
1661
1369 dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index, 1662 dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index,
1370 iv_dma, ivsize, 0); 1663 iv_dma, ivsize, 0);
1371 sec4_sg_index += 1; 1664 sec4_sg_index += 1;
1665
1666 if (is_gcm) {
1667 sg_to_sec4_sg(req->assoc,
1668 (assoc_nents ? : 1),
1669 edesc->sec4_sg +
1670 sec4_sg_index, 0);
1671 sec4_sg_index += assoc_nents ? : 1;
1672 }
1673
1372 sg_to_sec4_sg_last(req->src, 1674 sg_to_sec4_sg_last(req->src,
1373 (src_nents ? : 1), 1675 (src_nents ? : 1),
1374 edesc->sec4_sg + 1676 edesc->sec4_sg +
@@ -2309,6 +2611,24 @@ static struct caam_alg_template driver_algs[] = {
2309 OP_ALG_AAI_HMAC_PRECOMP, 2611 OP_ALG_AAI_HMAC_PRECOMP,
2310 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC, 2612 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
2311 }, 2613 },
2614 /* Galois Counter Mode */
2615 {
2616 .name = "gcm(aes)",
2617 .driver_name = "gcm-aes-caam",
2618 .blocksize = 1,
2619 .type = CRYPTO_ALG_TYPE_AEAD,
2620 .template_aead = {
2621 .setkey = gcm_setkey,
2622 .setauthsize = gcm_setauthsize,
2623 .encrypt = aead_encrypt,
2624 .decrypt = aead_decrypt,
2625 .givencrypt = NULL,
2626 .geniv = "<built-in>",
2627 .ivsize = 12,
2628 .maxauthsize = AES_BLOCK_SIZE,
2629 },
2630 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2631 },
2312 /* ablkcipher descriptor */ 2632 /* ablkcipher descriptor */
2313 { 2633 {
2314 .name = "cbc(aes)", 2634 .name = "cbc(aes)",