author     Salvatore Benedetto <salvatore.benedetto@intel.com>   2016-07-04 12:21:40 -0400
committer  Herbert Xu <herbert@gondor.apana.org.au>              2016-07-05 11:05:28 -0400
commit     879f77e9071f029e1c9bd5a75814ecf51370f846 (patch)
tree       8b290a6e0dee16e8c29c601fabf9b795377b65ad
parent     c8afbc84042980ca6978ea4b140f848e2820b96a (diff)
crypto: qat - Add RSA CRT mode
Extend the qat driver to use RSA CRT mode when all CRT-related components are
present in the private key. Simplify the code in qat_rsa_setkey by adding
qat_rsa_clear_ctx.
Signed-off-by: Salvatore Benedetto <salvatore.benedetto@intel.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-rw-r--r--   drivers/crypto/qat/qat_common/qat_asym_algs.c   234
1 file changed, 209 insertions, 25 deletions
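In CRT mode the decrypt path hands the firmware six flat inputs (c, p, q, dp, dq, qinv) in place of the three (c, d, n) used otherwise, selected through the new PKE_RSA_DP2_* function IDs. As a reminder of the math behind those parameters, below is a toy user-space sketch of standard RSA-CRT decryption checked against the plain c^d mod n path; it uses the textbook demo key (p = 61, q = 53, e = 17, d = 2753) and none of the driver's code or APIs.

/*
 * Toy illustration of RSA-CRT decryption with p, q, dp, dq, qinv,
 * checked against the plain m = c^d mod n path.  Demo-sized values
 * only; nothing here comes from the qat driver.
 */
#include <stdint.h>
#include <stdio.h>

static uint64_t modexp(uint64_t b, uint64_t e, uint64_t m)
{
	uint64_t r = 1;

	b %= m;
	while (e) {
		if (e & 1)
			r = r * b % m;
		b = b * b % m;
		e >>= 1;
	}
	return r;
}

int main(void)
{
	/* textbook demo key: n = p * q = 3233, e = 17, d = 2753 */
	uint64_t p = 61, q = 53, n = 3233, e = 17, d = 2753;
	uint64_t dp = d % (p - 1);	/* 53 */
	uint64_t dq = d % (q - 1);	/* 49 */
	uint64_t qinv = 38;		/* q^-1 mod p */
	uint64_t m = 65, c = modexp(m, e, n);

	/* CRT: two half-size exponentiations plus recombination */
	uint64_t m1 = modexp(c, dp, p);
	uint64_t m2 = modexp(c, dq, q);
	uint64_t h = qinv * ((m1 + p - m2 % p) % p) % p;
	uint64_t m_crt = m2 + h * q;

	printf("plain: %llu, crt: %llu\n",
	       (unsigned long long)modexp(c, d, n),
	       (unsigned long long)m_crt);
	return 0;
}

Both paths recover m = 65; the CRT exponents dp and dq are half the size of d, which is what the half_key_sz (key_sz / 2) buffers allocated below in qat_rsa_setkey_crt correspond to.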
diff --git a/drivers/crypto/qat/qat_common/qat_asym_algs.c b/drivers/crypto/qat/qat_common/qat_asym_algs.c
index 04b0ef8cfaa1..eaff02a3b1ac 100644
--- a/drivers/crypto/qat/qat_common/qat_asym_algs.c
+++ b/drivers/crypto/qat/qat_common/qat_asym_algs.c
@@ -73,6 +73,14 @@ struct qat_rsa_input_params {
 			dma_addr_t d;
 			dma_addr_t n;
 		} dec;
+		struct {
+			dma_addr_t c;
+			dma_addr_t p;
+			dma_addr_t q;
+			dma_addr_t dp;
+			dma_addr_t dq;
+			dma_addr_t qinv;
+		} dec_crt;
 		u64 in_tab[8];
 	};
 } __packed __aligned(64);
@@ -93,10 +101,21 @@ struct qat_rsa_ctx {
 	char *n;
 	char *e;
 	char *d;
+	char *p;
+	char *q;
+	char *dp;
+	char *dq;
+	char *qinv;
 	dma_addr_t dma_n;
 	dma_addr_t dma_e;
 	dma_addr_t dma_d;
+	dma_addr_t dma_p;
+	dma_addr_t dma_q;
+	dma_addr_t dma_dp;
+	dma_addr_t dma_dq;
+	dma_addr_t dma_qinv;
 	unsigned int key_sz;
+	bool crt_mode;
 	struct qat_crypto_instance *inst;
 } __packed __aligned(64);
 
@@ -235,6 +254,35 @@ static unsigned long qat_rsa_dec_fn_id(unsigned int len)
 	};
 }
 
+#define PKE_RSA_DP2_512 0x1c131b57
+#define PKE_RSA_DP2_1024 0x26131c2d
+#define PKE_RSA_DP2_1536 0x45111d12
+#define PKE_RSA_DP2_2048 0x59121dfa
+#define PKE_RSA_DP2_3072 0x81121ed9
+#define PKE_RSA_DP2_4096 0xb1111fb2
+
+static unsigned long qat_rsa_dec_fn_id_crt(unsigned int len)
+{
+	unsigned int bitslen = len << 3;
+
+	switch (bitslen) {
+	case 512:
+		return PKE_RSA_DP2_512;
+	case 1024:
+		return PKE_RSA_DP2_1024;
+	case 1536:
+		return PKE_RSA_DP2_1536;
+	case 2048:
+		return PKE_RSA_DP2_2048;
+	case 3072:
+		return PKE_RSA_DP2_3072;
+	case 4096:
+		return PKE_RSA_DP2_4096;
+	default:
+		return 0;
+	};
+}
+
 static int qat_rsa_enc(struct akcipher_request *req)
 {
 	struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
@@ -388,7 +436,9 @@ static int qat_rsa_dec(struct akcipher_request *req)
 	memset(msg, '\0', sizeof(*msg));
 	ICP_QAT_FW_PKE_HDR_VALID_FLAG_SET(msg->pke_hdr,
 					  ICP_QAT_FW_COMN_REQ_FLAG_SET);
-	msg->pke_hdr.cd_pars.func_id = qat_rsa_dec_fn_id(ctx->key_sz);
+	msg->pke_hdr.cd_pars.func_id = ctx->crt_mode ?
+		qat_rsa_dec_fn_id_crt(ctx->key_sz) :
+		qat_rsa_dec_fn_id(ctx->key_sz);
 	if (unlikely(!msg->pke_hdr.cd_pars.func_id))
 		return -EINVAL;
 
@@ -398,8 +448,16 @@ static int qat_rsa_dec(struct akcipher_request *req)
 		ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_PTR_TYPE_FLAT,
 					    QAT_COMN_CD_FLD_TYPE_64BIT_ADR);
 
-	qat_req->in.dec.d = ctx->dma_d;
-	qat_req->in.dec.n = ctx->dma_n;
+	if (ctx->crt_mode) {
+		qat_req->in.dec_crt.p = ctx->dma_p;
+		qat_req->in.dec_crt.q = ctx->dma_q;
+		qat_req->in.dec_crt.dp = ctx->dma_dp;
+		qat_req->in.dec_crt.dq = ctx->dma_dq;
+		qat_req->in.dec_crt.qinv = ctx->dma_qinv;
+	} else {
+		qat_req->in.dec.d = ctx->dma_d;
+		qat_req->in.dec.n = ctx->dma_n;
+	}
 	ret = -ENOMEM;
 
 	/*
@@ -446,7 +504,10 @@ static int qat_rsa_dec(struct akcipher_request *req)
 
 	}
 
-	qat_req->in.in_tab[3] = 0;
+	if (ctx->crt_mode)
+		qat_req->in.in_tab[6] = 0;
+	else
+		qat_req->in.in_tab[3] = 0;
 	qat_req->out.out_tab[1] = 0;
 	qat_req->phy_in = dma_map_single(dev, &qat_req->in.dec.c,
 					 sizeof(struct qat_rsa_input_params),
@@ -463,7 +524,11 @@ static int qat_rsa_dec(struct akcipher_request *req)
 	msg->pke_mid.src_data_addr = qat_req->phy_in;
 	msg->pke_mid.dest_data_addr = qat_req->phy_out;
 	msg->pke_mid.opaque = (uint64_t)(__force long)req;
-	msg->input_param_count = 3;
+	if (ctx->crt_mode)
+		msg->input_param_count = 6;
+	else
+		msg->input_param_count = 3;
+
 	msg->output_param_count = 1;
 	do {
 		ret = adf_send_message(ctx->inst->pke_tx, (uint32_t *)msg);
@@ -583,13 +648,106 @@ err:
 	return ret;
 }
 
-static int qat_rsa_setkey(struct crypto_akcipher *tfm, const void *key,
-			  unsigned int keylen, bool private)
+static void qat_rsa_drop_leading_zeros(const char **ptr, unsigned int *len)
 {
-	struct qat_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
-	struct device *dev = &GET_DEV(ctx->inst->accel_dev);
-	struct rsa_key rsa_key;
-	int ret;
+	while (!**ptr && *len) {
+		(*ptr)++;
+		(*len)--;
+	}
+}
+
+static void qat_rsa_setkey_crt(struct qat_rsa_ctx *ctx, struct rsa_key *rsa_key)
+{
+	struct qat_crypto_instance *inst = ctx->inst;
+	struct device *dev = &GET_DEV(inst->accel_dev);
+	const char *ptr;
+	unsigned int len;
+	unsigned int half_key_sz = ctx->key_sz / 2;
+
+	/* p */
+	ptr = rsa_key->p;
+	len = rsa_key->p_sz;
+	qat_rsa_drop_leading_zeros(&ptr, &len);
+	if (!len)
+		goto err;
+	ctx->p = dma_zalloc_coherent(dev, half_key_sz, &ctx->dma_p, GFP_KERNEL);
+	if (!ctx->p)
+		goto err;
+	memcpy(ctx->p + (half_key_sz - len), ptr, len);
+
+	/* q */
+	ptr = rsa_key->q;
+	len = rsa_key->q_sz;
+	qat_rsa_drop_leading_zeros(&ptr, &len);
+	if (!len)
+		goto free_p;
+	ctx->q = dma_zalloc_coherent(dev, half_key_sz, &ctx->dma_q, GFP_KERNEL);
+	if (!ctx->q)
+		goto free_p;
+	memcpy(ctx->q + (half_key_sz - len), ptr, len);
+
+	/* dp */
+	ptr = rsa_key->dp;
+	len = rsa_key->dp_sz;
+	qat_rsa_drop_leading_zeros(&ptr, &len);
+	if (!len)
+		goto free_q;
+	ctx->dp = dma_zalloc_coherent(dev, half_key_sz, &ctx->dma_dp,
+				      GFP_KERNEL);
+	if (!ctx->dp)
+		goto free_q;
+	memcpy(ctx->dp + (half_key_sz - len), ptr, len);
+
+	/* dq */
+	ptr = rsa_key->dq;
+	len = rsa_key->dq_sz;
+	qat_rsa_drop_leading_zeros(&ptr, &len);
+	if (!len)
+		goto free_dp;
+	ctx->dq = dma_zalloc_coherent(dev, half_key_sz, &ctx->dma_dq,
+				      GFP_KERNEL);
+	if (!ctx->dq)
+		goto free_dp;
+	memcpy(ctx->dq + (half_key_sz - len), ptr, len);
+
+	/* qinv */
+	ptr = rsa_key->qinv;
+	len = rsa_key->qinv_sz;
+	qat_rsa_drop_leading_zeros(&ptr, &len);
+	if (!len)
+		goto free_dq;
+	ctx->qinv = dma_zalloc_coherent(dev, half_key_sz, &ctx->dma_qinv,
+					GFP_KERNEL);
+	if (!ctx->qinv)
+		goto free_dq;
+	memcpy(ctx->qinv + (half_key_sz - len), ptr, len);
+
+	ctx->crt_mode = true;
+	return;
+
+free_dq:
+	memset(ctx->dq, '\0', half_key_sz);
+	dma_free_coherent(dev, half_key_sz, ctx->dq, ctx->dma_dq);
+	ctx->dq = NULL;
+free_dp:
+	memset(ctx->dp, '\0', half_key_sz);
+	dma_free_coherent(dev, half_key_sz, ctx->dp, ctx->dma_dp);
+	ctx->dp = NULL;
+free_q:
+	memset(ctx->q, '\0', half_key_sz);
+	dma_free_coherent(dev, half_key_sz, ctx->q, ctx->dma_q);
+	ctx->q = NULL;
+free_p:
+	memset(ctx->p, '\0', half_key_sz);
+	dma_free_coherent(dev, half_key_sz, ctx->p, ctx->dma_p);
+	ctx->p = NULL;
+err:
+	ctx->crt_mode = false;
+}
+
+static void qat_rsa_clear_ctx(struct device *dev, struct qat_rsa_ctx *ctx)
+{
+	unsigned int half_key_sz = ctx->key_sz / 2;
 
 	/* Free the old key if any */
 	if (ctx->n)
@@ -600,10 +758,48 @@ static int qat_rsa_setkey(struct crypto_akcipher *tfm, const void *key,
 		memset(ctx->d, '\0', ctx->key_sz);
 		dma_free_coherent(dev, ctx->key_sz, ctx->d, ctx->dma_d);
 	}
+	if (ctx->p) {
+		memset(ctx->p, '\0', half_key_sz);
+		dma_free_coherent(dev, half_key_sz, ctx->p, ctx->dma_p);
+	}
+	if (ctx->q) {
+		memset(ctx->q, '\0', half_key_sz);
+		dma_free_coherent(dev, half_key_sz, ctx->q, ctx->dma_q);
+	}
+	if (ctx->dp) {
+		memset(ctx->dp, '\0', half_key_sz);
+		dma_free_coherent(dev, half_key_sz, ctx->dp, ctx->dma_dp);
+	}
+	if (ctx->dq) {
+		memset(ctx->dq, '\0', half_key_sz);
+		dma_free_coherent(dev, half_key_sz, ctx->dq, ctx->dma_dq);
+	}
+	if (ctx->qinv) {
+		memset(ctx->qinv, '\0', half_key_sz);
+		dma_free_coherent(dev, half_key_sz, ctx->qinv, ctx->dma_qinv);
+	}
 
 	ctx->n = NULL;
 	ctx->e = NULL;
 	ctx->d = NULL;
+	ctx->p = NULL;
+	ctx->q = NULL;
+	ctx->dp = NULL;
+	ctx->dq = NULL;
+	ctx->qinv = NULL;
+	ctx->crt_mode = false;
+	ctx->key_sz = 0;
+}
+
+static int qat_rsa_setkey(struct crypto_akcipher *tfm, const void *key,
+			  unsigned int keylen, bool private)
+{
+	struct qat_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
+	struct device *dev = &GET_DEV(ctx->inst->accel_dev);
+	struct rsa_key rsa_key;
+	int ret;
+
+	qat_rsa_clear_ctx(dev, ctx);
 
 	if (private)
 		ret = rsa_parse_priv_key(&rsa_key, key, keylen);
@@ -622,6 +818,7 @@ static int qat_rsa_setkey(struct crypto_akcipher *tfm, const void *key,
 		ret = qat_rsa_set_d(ctx, rsa_key.d, rsa_key.d_sz);
 		if (ret < 0)
 			goto free;
+		qat_rsa_setkey_crt(ctx, &rsa_key);
 	}
 
 	if (!ctx->n || !ctx->e) {
@@ -637,20 +834,7 @@ static int qat_rsa_setkey(struct crypto_akcipher *tfm, const void *key,
 
 	return 0;
 free:
-	if (ctx->d) {
-		memset(ctx->d, '\0', ctx->key_sz);
-		dma_free_coherent(dev, ctx->key_sz, ctx->d, ctx->dma_d);
-		ctx->d = NULL;
-	}
-	if (ctx->e) {
-		dma_free_coherent(dev, ctx->key_sz, ctx->e, ctx->dma_e);
-		ctx->e = NULL;
-	}
-	if (ctx->n) {
-		dma_free_coherent(dev, ctx->key_sz, ctx->n, ctx->dma_n);
-		ctx->n = NULL;
-		ctx->key_sz = 0;
-	}
+	qat_rsa_clear_ctx(dev, ctx);
 	return ret;
 }
 