Diffstat (limited to 'drivers/crypto')
-rw-r--r--   drivers/crypto/Kconfig        |    2
-rw-r--r--   drivers/crypto/hifn_795x.c    |    8
-rw-r--r--   drivers/crypto/padlock-aes.c  |   13
-rw-r--r--   drivers/crypto/talitos.c      |  713
4 files changed, 536 insertions, 200 deletions
diff --git a/drivers/crypto/Kconfig b/drivers/crypto/Kconfig
index 3e72a6a96d73..5b27692372bf 100644
--- a/drivers/crypto/Kconfig
+++ b/drivers/crypto/Kconfig
@@ -12,7 +12,7 @@ if CRYPTO_HW | |||
12 | 12 | ||
13 | config CRYPTO_DEV_PADLOCK | 13 | config CRYPTO_DEV_PADLOCK |
14 | tristate "Support for VIA PadLock ACE" | 14 | tristate "Support for VIA PadLock ACE" |
15 | depends on X86_32 && !UML | 15 | depends on X86 && !UML |
16 | select CRYPTO_ALGAPI | 16 | select CRYPTO_ALGAPI |
17 | help | 17 | help |
18 | Some VIA processors come with an integrated crypto engine | 18 | Some VIA processors come with an integrated crypto engine |
diff --git a/drivers/crypto/hifn_795x.c b/drivers/crypto/hifn_795x.c
index 2bef086fb342..5f753fc08730 100644
--- a/drivers/crypto/hifn_795x.c
+++ b/drivers/crypto/hifn_795x.c
@@ -2564,7 +2564,7 @@ static void hifn_tasklet_callback(unsigned long data) | |||
2564 | hifn_process_queue(dev); | 2564 | hifn_process_queue(dev); |
2565 | } | 2565 | } |
2566 | 2566 | ||
2567 | static int hifn_probe(struct pci_dev *pdev, const struct pci_device_id *id) | 2567 | static int __devinit hifn_probe(struct pci_dev *pdev, const struct pci_device_id *id) |
2568 | { | 2568 | { |
2569 | int err, i; | 2569 | int err, i; |
2570 | struct hifn_device *dev; | 2570 | struct hifn_device *dev; |
@@ -2696,7 +2696,7 @@ err_out_disable_pci_device: | |||
2696 | return err; | 2696 | return err; |
2697 | } | 2697 | } |
2698 | 2698 | ||
2699 | static void hifn_remove(struct pci_dev *pdev) | 2699 | static void __devexit hifn_remove(struct pci_dev *pdev) |
2700 | { | 2700 | { |
2701 | int i; | 2701 | int i; |
2702 | struct hifn_device *dev; | 2702 | struct hifn_device *dev; |
@@ -2744,7 +2744,7 @@ static struct pci_driver hifn_pci_driver = { | |||
2744 | .remove = __devexit_p(hifn_remove), | 2744 | .remove = __devexit_p(hifn_remove), |
2745 | }; | 2745 | }; |
2746 | 2746 | ||
2747 | static int __devinit hifn_init(void) | 2747 | static int __init hifn_init(void) |
2748 | { | 2748 | { |
2749 | unsigned int freq; | 2749 | unsigned int freq; |
2750 | int err; | 2750 | int err; |
@@ -2789,7 +2789,7 @@ static int __devinit hifn_init(void) | |||
2789 | return 0; | 2789 | return 0; |
2790 | } | 2790 | } |
2791 | 2791 | ||
2792 | static void __devexit hifn_fini(void) | 2792 | static void __exit hifn_fini(void) |
2793 | { | 2793 | { |
2794 | pci_unregister_driver(&hifn_pci_driver); | 2794 | pci_unregister_driver(&hifn_pci_driver); |
2795 | 2795 | ||
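The hifn_795x hunks above apply the conventional PCI hotplug section annotations: the probe and remove paths gain __devinit/__devexit (the remove pointer already goes through __devexit_p() so it compiles away in non-hotplug builds), while the module entry and exit points go back to plain __init/__exit. A minimal sketch of that convention, using a hypothetical example_* driver rather than hifn's own code (.id_table omitted for brevity):

#include <linux/init.h>
#include <linux/module.h>
#include <linux/pci.h>

/* hotplug path: may run after init sections are discarded */
static int __devinit example_probe(struct pci_dev *pdev,
				   const struct pci_device_id *id)
{
	return pci_enable_device(pdev);
}

static void __devexit example_remove(struct pci_dev *pdev)
{
	pci_disable_device(pdev);
}

static struct pci_driver example_pci_driver = {
	.name	= "example",
	.probe	= example_probe,
	/* __devexit_p() becomes NULL when CONFIG_HOTPLUG is off */
	.remove	= __devexit_p(example_remove),
};

/* module load/unload path: plain __init/__exit */
static int __init example_init(void)
{
	return pci_register_driver(&example_pci_driver);
}

static void __exit example_fini(void)
{
	pci_unregister_driver(&example_pci_driver);
}

module_init(example_init);
module_exit(example_fini);
MODULE_LICENSE("GPL");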
diff --git a/drivers/crypto/padlock-aes.c b/drivers/crypto/padlock-aes.c
index 856b3cc25583..87f92c39b5f0 100644
--- a/drivers/crypto/padlock-aes.c
+++ b/drivers/crypto/padlock-aes.c
@@ -154,7 +154,11 @@ static inline void padlock_reset_key(struct cword *cword) | |||
154 | int cpu = raw_smp_processor_id(); | 154 | int cpu = raw_smp_processor_id(); |
155 | 155 | ||
156 | if (cword != per_cpu(last_cword, cpu)) | 156 | if (cword != per_cpu(last_cword, cpu)) |
157 | #ifndef CONFIG_X86_64 | ||
157 | asm volatile ("pushfl; popfl"); | 158 | asm volatile ("pushfl; popfl"); |
159 | #else | ||
160 | asm volatile ("pushfq; popfq"); | ||
161 | #endif | ||
158 | } | 162 | } |
159 | 163 | ||
160 | static inline void padlock_store_cword(struct cword *cword) | 164 | static inline void padlock_store_cword(struct cword *cword) |
@@ -208,10 +212,19 @@ static inline void padlock_xcrypt_ecb(const u8 *input, u8 *output, void *key, | |||
208 | 212 | ||
209 | asm volatile ("test $1, %%cl;" | 213 | asm volatile ("test $1, %%cl;" |
210 | "je 1f;" | 214 | "je 1f;" |
215 | #ifndef CONFIG_X86_64 | ||
211 | "lea -1(%%ecx), %%eax;" | 216 | "lea -1(%%ecx), %%eax;" |
212 | "mov $1, %%ecx;" | 217 | "mov $1, %%ecx;" |
218 | #else | ||
219 | "lea -1(%%rcx), %%rax;" | ||
220 | "mov $1, %%rcx;" | ||
221 | #endif | ||
213 | ".byte 0xf3,0x0f,0xa7,0xc8;" /* rep xcryptecb */ | 222 | ".byte 0xf3,0x0f,0xa7,0xc8;" /* rep xcryptecb */ |
223 | #ifndef CONFIG_X86_64 | ||
214 | "mov %%eax, %%ecx;" | 224 | "mov %%eax, %%ecx;" |
225 | #else | ||
226 | "mov %%rax, %%rcx;" | ||
227 | #endif | ||
215 | "1:" | 228 | "1:" |
216 | ".byte 0xf3,0x0f,0xa7,0xc8" /* rep xcryptecb */ | 229 | ".byte 0xf3,0x0f,0xa7,0xc8" /* rep xcryptecb */ |
217 | : "+S"(input), "+D"(output) | 230 | : "+S"(input), "+D"(output) |
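The padlock-aes.c hunks make the inline assembly build on x86_64 as well as i386, matching the relaxed Kconfig dependency above: the EFLAGS reload that forces the xcrypt unit to pick up a new control word needs the 64-bit push/pop form, and the block counter moves from %ecx/%eax to %rcx/%rax. A minimal sketch of the #ifdef pattern for the flags reload, written as a stand-alone helper rather than the driver's padlock_reset_key():

static inline void example_reload_flags(void)
{
#ifndef CONFIG_X86_64
	/* 32-bit: reload EFLAGS so the next xcrypt reloads its control word */
	asm volatile ("pushfl; popfl");
#else
	/* 64-bit: same effect via the RFLAGS forms */
	asm volatile ("pushfq; popfq");
#endif
}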
diff --git a/drivers/crypto/talitos.c b/drivers/crypto/talitos.c
index a3918c16b3db..c70775fd3ce2 100644
--- a/drivers/crypto/talitos.c
+++ b/drivers/crypto/talitos.c
@@ -44,6 +44,8 @@ | |||
44 | #include <crypto/sha.h> | 44 | #include <crypto/sha.h> |
45 | #include <crypto/aead.h> | 45 | #include <crypto/aead.h> |
46 | #include <crypto/authenc.h> | 46 | #include <crypto/authenc.h> |
47 | #include <crypto/skcipher.h> | ||
48 | #include <crypto/scatterwalk.h> | ||
47 | 49 | ||
48 | #include "talitos.h" | 50 | #include "talitos.h" |
49 | 51 | ||
@@ -339,7 +341,8 @@ static void flush_channel(struct device *dev, int ch, int error, int reset_ch) | |||
339 | status = error; | 341 | status = error; |
340 | 342 | ||
341 | dma_unmap_single(dev, request->dma_desc, | 343 | dma_unmap_single(dev, request->dma_desc, |
342 | sizeof(struct talitos_desc), DMA_BIDIRECTIONAL); | 344 | sizeof(struct talitos_desc), |
345 | DMA_BIDIRECTIONAL); | ||
343 | 346 | ||
344 | /* copy entries so we can call callback outside lock */ | 347 | /* copy entries so we can call callback outside lock */ |
345 | saved_req.desc = request->desc; | 348 | saved_req.desc = request->desc; |
@@ -413,7 +416,8 @@ static struct talitos_desc *current_desc(struct device *dev, int ch) | |||
413 | /* | 416 | /* |
414 | * user diagnostics; report root cause of error based on execution unit status | 417 | * user diagnostics; report root cause of error based on execution unit status |
415 | */ | 418 | */ |
416 | static void report_eu_error(struct device *dev, int ch, struct talitos_desc *desc) | 419 | static void report_eu_error(struct device *dev, int ch, |
420 | struct talitos_desc *desc) | ||
417 | { | 421 | { |
418 | struct talitos_private *priv = dev_get_drvdata(dev); | 422 | struct talitos_private *priv = dev_get_drvdata(dev); |
419 | int i; | 423 | int i; |
@@ -684,8 +688,8 @@ struct talitos_ctx { | |||
684 | unsigned int authsize; | 688 | unsigned int authsize; |
685 | }; | 689 | }; |
686 | 690 | ||
687 | static int aead_authenc_setauthsize(struct crypto_aead *authenc, | 691 | static int aead_setauthsize(struct crypto_aead *authenc, |
688 | unsigned int authsize) | 692 | unsigned int authsize) |
689 | { | 693 | { |
690 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); | 694 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); |
691 | 695 | ||
@@ -694,8 +698,8 @@ static int aead_authenc_setauthsize(struct crypto_aead *authenc, | |||
694 | return 0; | 698 | return 0; |
695 | } | 699 | } |
696 | 700 | ||
697 | static int aead_authenc_setkey(struct crypto_aead *authenc, | 701 | static int aead_setkey(struct crypto_aead *authenc, |
698 | const u8 *key, unsigned int keylen) | 702 | const u8 *key, unsigned int keylen) |
699 | { | 703 | { |
700 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); | 704 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); |
701 | struct rtattr *rta = (void *)key; | 705 | struct rtattr *rta = (void *)key; |
@@ -740,7 +744,7 @@ badkey: | |||
740 | } | 744 | } |
741 | 745 | ||
742 | /* | 746 | /* |
743 | * ipsec_esp_edesc - s/w-extended ipsec_esp descriptor | 747 | * talitos_edesc - s/w-extended descriptor |
744 | * @src_nents: number of segments in input scatterlist | 748 | * @src_nents: number of segments in input scatterlist |
745 | * @dst_nents: number of segments in output scatterlist | 749 | * @dst_nents: number of segments in output scatterlist |
746 | * @dma_len: length of dma mapped link_tbl space | 750 | * @dma_len: length of dma mapped link_tbl space |
@@ -752,17 +756,67 @@ badkey: | |||
752 | * is greater than 1, an integrity check value is concatenated to the end | 756 | * is greater than 1, an integrity check value is concatenated to the end |
753 | * of link_tbl data | 757 | * of link_tbl data |
754 | */ | 758 | */ |
755 | struct ipsec_esp_edesc { | 759 | struct talitos_edesc { |
756 | int src_nents; | 760 | int src_nents; |
757 | int dst_nents; | 761 | int dst_nents; |
762 | int src_is_chained; | ||
763 | int dst_is_chained; | ||
758 | int dma_len; | 764 | int dma_len; |
759 | dma_addr_t dma_link_tbl; | 765 | dma_addr_t dma_link_tbl; |
760 | struct talitos_desc desc; | 766 | struct talitos_desc desc; |
761 | struct talitos_ptr link_tbl[0]; | 767 | struct talitos_ptr link_tbl[0]; |
762 | }; | 768 | }; |
763 | 769 | ||
770 | static int talitos_map_sg(struct device *dev, struct scatterlist *sg, | ||
771 | unsigned int nents, enum dma_data_direction dir, | ||
772 | int chained) | ||
773 | { | ||
774 | if (unlikely(chained)) | ||
775 | while (sg) { | ||
776 | dma_map_sg(dev, sg, 1, dir); | ||
777 | sg = scatterwalk_sg_next(sg); | ||
778 | } | ||
779 | else | ||
780 | dma_map_sg(dev, sg, nents, dir); | ||
781 | return nents; | ||
782 | } | ||
783 | |||
784 | static void talitos_unmap_sg_chain(struct device *dev, struct scatterlist *sg, | ||
785 | enum dma_data_direction dir) | ||
786 | { | ||
787 | while (sg) { | ||
788 | dma_unmap_sg(dev, sg, 1, dir); | ||
789 | sg = scatterwalk_sg_next(sg); | ||
790 | } | ||
791 | } | ||
792 | |||
793 | static void talitos_sg_unmap(struct device *dev, | ||
794 | struct talitos_edesc *edesc, | ||
795 | struct scatterlist *src, | ||
796 | struct scatterlist *dst) | ||
797 | { | ||
798 | unsigned int src_nents = edesc->src_nents ? : 1; | ||
799 | unsigned int dst_nents = edesc->dst_nents ? : 1; | ||
800 | |||
801 | if (src != dst) { | ||
802 | if (edesc->src_is_chained) | ||
803 | talitos_unmap_sg_chain(dev, src, DMA_TO_DEVICE); | ||
804 | else | ||
805 | dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); | ||
806 | |||
807 | if (edesc->dst_is_chained) | ||
808 | talitos_unmap_sg_chain(dev, dst, DMA_FROM_DEVICE); | ||
809 | else | ||
810 | dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE); | ||
811 | } else | ||
812 | if (edesc->src_is_chained) | ||
813 | talitos_unmap_sg_chain(dev, src, DMA_BIDIRECTIONAL); | ||
814 | else | ||
815 | dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); | ||
816 | } | ||
817 | |||
764 | static void ipsec_esp_unmap(struct device *dev, | 818 | static void ipsec_esp_unmap(struct device *dev, |
765 | struct ipsec_esp_edesc *edesc, | 819 | struct talitos_edesc *edesc, |
766 | struct aead_request *areq) | 820 | struct aead_request *areq) |
767 | { | 821 | { |
768 | unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6], DMA_FROM_DEVICE); | 822 | unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6], DMA_FROM_DEVICE); |
@@ -772,15 +826,7 @@ static void ipsec_esp_unmap(struct device *dev, | |||
772 | 826 | ||
773 | dma_unmap_sg(dev, areq->assoc, 1, DMA_TO_DEVICE); | 827 | dma_unmap_sg(dev, areq->assoc, 1, DMA_TO_DEVICE); |
774 | 828 | ||
775 | if (areq->src != areq->dst) { | 829 | talitos_sg_unmap(dev, edesc, areq->src, areq->dst); |
776 | dma_unmap_sg(dev, areq->src, edesc->src_nents ? : 1, | ||
777 | DMA_TO_DEVICE); | ||
778 | dma_unmap_sg(dev, areq->dst, edesc->dst_nents ? : 1, | ||
779 | DMA_FROM_DEVICE); | ||
780 | } else { | ||
781 | dma_unmap_sg(dev, areq->src, edesc->src_nents ? : 1, | ||
782 | DMA_BIDIRECTIONAL); | ||
783 | } | ||
784 | 830 | ||
785 | if (edesc->dma_len) | 831 | if (edesc->dma_len) |
786 | dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len, | 832 | dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len, |
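The new talitos_map_sg()/talitos_sg_unmap() helpers, together with the src_is_chained/dst_is_chained flags stored in talitos_edesc, let the driver accept chained scatterlists that a single dma_map_sg() call cannot walk. A hedged sketch of how they fit together for the source list; example_map_src() is hypothetical, while talitos_map_sg(), talitos_sg_unmap() and the chained detection added to sg_count() (reworked further down in this patch) are the driver's own:

static int example_map_src(struct device *dev, struct talitos_edesc *edesc,
			   struct scatterlist *src, unsigned int len)
{
	int chained;
	int nents = sg_count(src, len, &chained);

	edesc->src_nents = (nents == 1) ? 0 : nents;
	edesc->src_is_chained = chained;

	/* chained lists are DMA-mapped one entry at a time, flat ones in bulk */
	return talitos_map_sg(dev, src, nents, DMA_TO_DEVICE, chained);
}

/* later, once the descriptor completes: */
/*	talitos_sg_unmap(dev, edesc, areq->src, areq->dst);	*/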
@@ -795,13 +841,14 @@ static void ipsec_esp_encrypt_done(struct device *dev, | |||
795 | int err) | 841 | int err) |
796 | { | 842 | { |
797 | struct aead_request *areq = context; | 843 | struct aead_request *areq = context; |
798 | struct ipsec_esp_edesc *edesc = | ||
799 | container_of(desc, struct ipsec_esp_edesc, desc); | ||
800 | struct crypto_aead *authenc = crypto_aead_reqtfm(areq); | 844 | struct crypto_aead *authenc = crypto_aead_reqtfm(areq); |
801 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); | 845 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); |
846 | struct talitos_edesc *edesc; | ||
802 | struct scatterlist *sg; | 847 | struct scatterlist *sg; |
803 | void *icvdata; | 848 | void *icvdata; |
804 | 849 | ||
850 | edesc = container_of(desc, struct talitos_edesc, desc); | ||
851 | |||
805 | ipsec_esp_unmap(dev, edesc, areq); | 852 | ipsec_esp_unmap(dev, edesc, areq); |
806 | 853 | ||
807 | /* copy the generated ICV to dst */ | 854 | /* copy the generated ICV to dst */ |
@@ -819,17 +866,18 @@ static void ipsec_esp_encrypt_done(struct device *dev, | |||
819 | } | 866 | } |
820 | 867 | ||
821 | static void ipsec_esp_decrypt_swauth_done(struct device *dev, | 868 | static void ipsec_esp_decrypt_swauth_done(struct device *dev, |
822 | struct talitos_desc *desc, void *context, | 869 | struct talitos_desc *desc, |
823 | int err) | 870 | void *context, int err) |
824 | { | 871 | { |
825 | struct aead_request *req = context; | 872 | struct aead_request *req = context; |
826 | struct ipsec_esp_edesc *edesc = | ||
827 | container_of(desc, struct ipsec_esp_edesc, desc); | ||
828 | struct crypto_aead *authenc = crypto_aead_reqtfm(req); | 873 | struct crypto_aead *authenc = crypto_aead_reqtfm(req); |
829 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); | 874 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); |
875 | struct talitos_edesc *edesc; | ||
830 | struct scatterlist *sg; | 876 | struct scatterlist *sg; |
831 | void *icvdata; | 877 | void *icvdata; |
832 | 878 | ||
879 | edesc = container_of(desc, struct talitos_edesc, desc); | ||
880 | |||
833 | ipsec_esp_unmap(dev, edesc, req); | 881 | ipsec_esp_unmap(dev, edesc, req); |
834 | 882 | ||
835 | if (!err) { | 883 | if (!err) { |
@@ -851,20 +899,20 @@ static void ipsec_esp_decrypt_swauth_done(struct device *dev, | |||
851 | } | 899 | } |
852 | 900 | ||
853 | static void ipsec_esp_decrypt_hwauth_done(struct device *dev, | 901 | static void ipsec_esp_decrypt_hwauth_done(struct device *dev, |
854 | struct talitos_desc *desc, void *context, | 902 | struct talitos_desc *desc, |
855 | int err) | 903 | void *context, int err) |
856 | { | 904 | { |
857 | struct aead_request *req = context; | 905 | struct aead_request *req = context; |
858 | struct ipsec_esp_edesc *edesc = | 906 | struct talitos_edesc *edesc; |
859 | container_of(desc, struct ipsec_esp_edesc, desc); | 907 | |
908 | edesc = container_of(desc, struct talitos_edesc, desc); | ||
860 | 909 | ||
861 | ipsec_esp_unmap(dev, edesc, req); | 910 | ipsec_esp_unmap(dev, edesc, req); |
862 | 911 | ||
863 | /* check ICV auth status */ | 912 | /* check ICV auth status */ |
864 | if (!err) | 913 | if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) != |
865 | if ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) != | 914 | DESC_HDR_LO_ICCR1_PASS)) |
866 | DESC_HDR_LO_ICCR1_PASS) | 915 | err = -EBADMSG; |
867 | err = -EBADMSG; | ||
868 | 916 | ||
869 | kfree(edesc); | 917 | kfree(edesc); |
870 | 918 | ||
@@ -886,7 +934,7 @@ static int sg_to_link_tbl(struct scatterlist *sg, int sg_count, | |||
886 | link_tbl_ptr->j_extent = 0; | 934 | link_tbl_ptr->j_extent = 0; |
887 | link_tbl_ptr++; | 935 | link_tbl_ptr++; |
888 | cryptlen -= sg_dma_len(sg); | 936 | cryptlen -= sg_dma_len(sg); |
889 | sg = sg_next(sg); | 937 | sg = scatterwalk_sg_next(sg); |
890 | } | 938 | } |
891 | 939 | ||
892 | /* adjust (decrease) last one (or two) entry's len to cryptlen */ | 940 | /* adjust (decrease) last one (or two) entry's len to cryptlen */ |
@@ -910,7 +958,7 @@ static int sg_to_link_tbl(struct scatterlist *sg, int sg_count, | |||
910 | /* | 958 | /* |
911 | * fill in and submit ipsec_esp descriptor | 959 | * fill in and submit ipsec_esp descriptor |
912 | */ | 960 | */ |
913 | static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq, | 961 | static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq, |
914 | u8 *giv, u64 seq, | 962 | u8 *giv, u64 seq, |
915 | void (*callback) (struct device *dev, | 963 | void (*callback) (struct device *dev, |
916 | struct talitos_desc *desc, | 964 | struct talitos_desc *desc, |
@@ -952,32 +1000,31 @@ static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq, | |||
952 | desc->ptr[4].len = cpu_to_be16(cryptlen); | 1000 | desc->ptr[4].len = cpu_to_be16(cryptlen); |
953 | desc->ptr[4].j_extent = authsize; | 1001 | desc->ptr[4].j_extent = authsize; |
954 | 1002 | ||
955 | if (areq->src == areq->dst) | 1003 | sg_count = talitos_map_sg(dev, areq->src, edesc->src_nents ? : 1, |
956 | sg_count = dma_map_sg(dev, areq->src, edesc->src_nents ? : 1, | 1004 | (areq->src == areq->dst) ? DMA_BIDIRECTIONAL |
957 | DMA_BIDIRECTIONAL); | 1005 | : DMA_TO_DEVICE, |
958 | else | 1006 | edesc->src_is_chained); |
959 | sg_count = dma_map_sg(dev, areq->src, edesc->src_nents ? : 1, | ||
960 | DMA_TO_DEVICE); | ||
961 | 1007 | ||
962 | if (sg_count == 1) { | 1008 | if (sg_count == 1) { |
963 | desc->ptr[4].ptr = cpu_to_be32(sg_dma_address(areq->src)); | 1009 | desc->ptr[4].ptr = cpu_to_be32(sg_dma_address(areq->src)); |
964 | } else { | 1010 | } else { |
965 | sg_link_tbl_len = cryptlen; | 1011 | sg_link_tbl_len = cryptlen; |
966 | 1012 | ||
967 | if ((edesc->desc.hdr & DESC_HDR_MODE1_MDEU_CICV) && | 1013 | if (edesc->desc.hdr & DESC_HDR_MODE1_MDEU_CICV) |
968 | (edesc->desc.hdr & DESC_HDR_MODE0_ENCRYPT) == 0) { | ||
969 | sg_link_tbl_len = cryptlen + authsize; | 1014 | sg_link_tbl_len = cryptlen + authsize; |
970 | } | 1015 | |
971 | sg_count = sg_to_link_tbl(areq->src, sg_count, sg_link_tbl_len, | 1016 | sg_count = sg_to_link_tbl(areq->src, sg_count, sg_link_tbl_len, |
972 | &edesc->link_tbl[0]); | 1017 | &edesc->link_tbl[0]); |
973 | if (sg_count > 1) { | 1018 | if (sg_count > 1) { |
974 | desc->ptr[4].j_extent |= DESC_PTR_LNKTBL_JUMP; | 1019 | desc->ptr[4].j_extent |= DESC_PTR_LNKTBL_JUMP; |
975 | desc->ptr[4].ptr = cpu_to_be32(edesc->dma_link_tbl); | 1020 | desc->ptr[4].ptr = cpu_to_be32(edesc->dma_link_tbl); |
976 | dma_sync_single_for_device(ctx->dev, edesc->dma_link_tbl, | 1021 | dma_sync_single_for_device(dev, edesc->dma_link_tbl, |
977 | edesc->dma_len, DMA_BIDIRECTIONAL); | 1022 | edesc->dma_len, |
1023 | DMA_BIDIRECTIONAL); | ||
978 | } else { | 1024 | } else { |
979 | /* Only one segment now, so no link tbl needed */ | 1025 | /* Only one segment now, so no link tbl needed */ |
980 | desc->ptr[4].ptr = cpu_to_be32(sg_dma_address(areq->src)); | 1026 | desc->ptr[4].ptr = cpu_to_be32(sg_dma_address(areq-> |
1027 | src)); | ||
981 | } | 1028 | } |
982 | } | 1029 | } |
983 | 1030 | ||
@@ -985,10 +1032,11 @@ static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq, | |||
985 | desc->ptr[5].len = cpu_to_be16(cryptlen); | 1032 | desc->ptr[5].len = cpu_to_be16(cryptlen); |
986 | desc->ptr[5].j_extent = authsize; | 1033 | desc->ptr[5].j_extent = authsize; |
987 | 1034 | ||
988 | if (areq->src != areq->dst) { | 1035 | if (areq->src != areq->dst) |
989 | sg_count = dma_map_sg(dev, areq->dst, edesc->dst_nents ? : 1, | 1036 | sg_count = talitos_map_sg(dev, areq->dst, |
990 | DMA_FROM_DEVICE); | 1037 | edesc->dst_nents ? : 1, |
991 | } | 1038 | DMA_FROM_DEVICE, |
1039 | edesc->dst_is_chained); | ||
992 | 1040 | ||
993 | if (sg_count == 1) { | 1041 | if (sg_count == 1) { |
994 | desc->ptr[5].ptr = cpu_to_be32(sg_dma_address(areq->dst)); | 1042 | desc->ptr[5].ptr = cpu_to_be32(sg_dma_address(areq->dst)); |
@@ -1033,49 +1081,55 @@ static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq, | |||
1033 | return ret; | 1081 | return ret; |
1034 | } | 1082 | } |
1035 | 1083 | ||
1036 | |||
1037 | /* | 1084 | /* |
1038 | * derive number of elements in scatterlist | 1085 | * derive number of elements in scatterlist |
1039 | */ | 1086 | */ |
1040 | static int sg_count(struct scatterlist *sg_list, int nbytes) | 1087 | static int sg_count(struct scatterlist *sg_list, int nbytes, int *chained) |
1041 | { | 1088 | { |
1042 | struct scatterlist *sg = sg_list; | 1089 | struct scatterlist *sg = sg_list; |
1043 | int sg_nents = 0; | 1090 | int sg_nents = 0; |
1044 | 1091 | ||
1045 | while (nbytes) { | 1092 | *chained = 0; |
1093 | while (nbytes > 0) { | ||
1046 | sg_nents++; | 1094 | sg_nents++; |
1047 | nbytes -= sg->length; | 1095 | nbytes -= sg->length; |
1048 | sg = sg_next(sg); | 1096 | if (!sg_is_last(sg) && (sg + 1)->length == 0) |
1097 | *chained = 1; | ||
1098 | sg = scatterwalk_sg_next(sg); | ||
1049 | } | 1099 | } |
1050 | 1100 | ||
1051 | return sg_nents; | 1101 | return sg_nents; |
1052 | } | 1102 | } |
1053 | 1103 | ||
1054 | /* | 1104 | /* |
1055 | * allocate and map the ipsec_esp extended descriptor | 1105 | * allocate and map the extended descriptor |
1056 | */ | 1106 | */ |
1057 | static struct ipsec_esp_edesc *ipsec_esp_edesc_alloc(struct aead_request *areq, | 1107 | static struct talitos_edesc *talitos_edesc_alloc(struct device *dev, |
1058 | int icv_stashing) | 1108 | struct scatterlist *src, |
1109 | struct scatterlist *dst, | ||
1110 | unsigned int cryptlen, | ||
1111 | unsigned int authsize, | ||
1112 | int icv_stashing, | ||
1113 | u32 cryptoflags) | ||
1059 | { | 1114 | { |
1060 | struct crypto_aead *authenc = crypto_aead_reqtfm(areq); | 1115 | struct talitos_edesc *edesc; |
1061 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); | ||
1062 | struct ipsec_esp_edesc *edesc; | ||
1063 | int src_nents, dst_nents, alloc_len, dma_len; | 1116 | int src_nents, dst_nents, alloc_len, dma_len; |
1064 | gfp_t flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL : | 1117 | int src_chained, dst_chained = 0; |
1118 | gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL : | ||
1065 | GFP_ATOMIC; | 1119 | GFP_ATOMIC; |
1066 | 1120 | ||
1067 | if (areq->cryptlen + ctx->authsize > TALITOS_MAX_DATA_LEN) { | 1121 | if (cryptlen + authsize > TALITOS_MAX_DATA_LEN) { |
1068 | dev_err(ctx->dev, "cryptlen exceeds h/w max limit\n"); | 1122 | dev_err(dev, "length exceeds h/w max limit\n"); |
1069 | return ERR_PTR(-EINVAL); | 1123 | return ERR_PTR(-EINVAL); |
1070 | } | 1124 | } |
1071 | 1125 | ||
1072 | src_nents = sg_count(areq->src, areq->cryptlen + ctx->authsize); | 1126 | src_nents = sg_count(src, cryptlen + authsize, &src_chained); |
1073 | src_nents = (src_nents == 1) ? 0 : src_nents; | 1127 | src_nents = (src_nents == 1) ? 0 : src_nents; |
1074 | 1128 | ||
1075 | if (areq->dst == areq->src) { | 1129 | if (dst == src) { |
1076 | dst_nents = src_nents; | 1130 | dst_nents = src_nents; |
1077 | } else { | 1131 | } else { |
1078 | dst_nents = sg_count(areq->dst, areq->cryptlen + ctx->authsize); | 1132 | dst_nents = sg_count(dst, cryptlen + authsize, &dst_chained); |
1079 | dst_nents = (dst_nents == 1) ? 0 : dst_nents; | 1133 | dst_nents = (dst_nents == 1) ? 0 : dst_nents; |
1080 | } | 1134 | } |
1081 | 1135 | ||
@@ -1084,39 +1138,52 @@ static struct ipsec_esp_edesc *ipsec_esp_edesc_alloc(struct aead_request *areq, | |||
1084 | * allowing for two separate entries for ICV and generated ICV (+ 2), | 1138 | * allowing for two separate entries for ICV and generated ICV (+ 2), |
1085 | * and the ICV data itself | 1139 | * and the ICV data itself |
1086 | */ | 1140 | */ |
1087 | alloc_len = sizeof(struct ipsec_esp_edesc); | 1141 | alloc_len = sizeof(struct talitos_edesc); |
1088 | if (src_nents || dst_nents) { | 1142 | if (src_nents || dst_nents) { |
1089 | dma_len = (src_nents + dst_nents + 2) * | 1143 | dma_len = (src_nents + dst_nents + 2) * |
1090 | sizeof(struct talitos_ptr) + ctx->authsize; | 1144 | sizeof(struct talitos_ptr) + authsize; |
1091 | alloc_len += dma_len; | 1145 | alloc_len += dma_len; |
1092 | } else { | 1146 | } else { |
1093 | dma_len = 0; | 1147 | dma_len = 0; |
1094 | alloc_len += icv_stashing ? ctx->authsize : 0; | 1148 | alloc_len += icv_stashing ? authsize : 0; |
1095 | } | 1149 | } |
1096 | 1150 | ||
1097 | edesc = kmalloc(alloc_len, GFP_DMA | flags); | 1151 | edesc = kmalloc(alloc_len, GFP_DMA | flags); |
1098 | if (!edesc) { | 1152 | if (!edesc) { |
1099 | dev_err(ctx->dev, "could not allocate edescriptor\n"); | 1153 | dev_err(dev, "could not allocate edescriptor\n"); |
1100 | return ERR_PTR(-ENOMEM); | 1154 | return ERR_PTR(-ENOMEM); |
1101 | } | 1155 | } |
1102 | 1156 | ||
1103 | edesc->src_nents = src_nents; | 1157 | edesc->src_nents = src_nents; |
1104 | edesc->dst_nents = dst_nents; | 1158 | edesc->dst_nents = dst_nents; |
1159 | edesc->src_is_chained = src_chained; | ||
1160 | edesc->dst_is_chained = dst_chained; | ||
1105 | edesc->dma_len = dma_len; | 1161 | edesc->dma_len = dma_len; |
1106 | edesc->dma_link_tbl = dma_map_single(ctx->dev, &edesc->link_tbl[0], | 1162 | edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0], |
1107 | edesc->dma_len, DMA_BIDIRECTIONAL); | 1163 | edesc->dma_len, DMA_BIDIRECTIONAL); |
1108 | 1164 | ||
1109 | return edesc; | 1165 | return edesc; |
1110 | } | 1166 | } |
1111 | 1167 | ||
1112 | static int aead_authenc_encrypt(struct aead_request *req) | 1168 | static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, |
1169 | int icv_stashing) | ||
1170 | { | ||
1171 | struct crypto_aead *authenc = crypto_aead_reqtfm(areq); | ||
1172 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); | ||
1173 | |||
1174 | return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst, | ||
1175 | areq->cryptlen, ctx->authsize, icv_stashing, | ||
1176 | areq->base.flags); | ||
1177 | } | ||
1178 | |||
1179 | static int aead_encrypt(struct aead_request *req) | ||
1113 | { | 1180 | { |
1114 | struct crypto_aead *authenc = crypto_aead_reqtfm(req); | 1181 | struct crypto_aead *authenc = crypto_aead_reqtfm(req); |
1115 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); | 1182 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); |
1116 | struct ipsec_esp_edesc *edesc; | 1183 | struct talitos_edesc *edesc; |
1117 | 1184 | ||
1118 | /* allocate extended descriptor */ | 1185 | /* allocate extended descriptor */ |
1119 | edesc = ipsec_esp_edesc_alloc(req, 0); | 1186 | edesc = aead_edesc_alloc(req, 0); |
1120 | if (IS_ERR(edesc)) | 1187 | if (IS_ERR(edesc)) |
1121 | return PTR_ERR(edesc); | 1188 | return PTR_ERR(edesc); |
1122 | 1189 | ||
@@ -1126,70 +1193,67 @@ static int aead_authenc_encrypt(struct aead_request *req) | |||
1126 | return ipsec_esp(edesc, req, NULL, 0, ipsec_esp_encrypt_done); | 1193 | return ipsec_esp(edesc, req, NULL, 0, ipsec_esp_encrypt_done); |
1127 | } | 1194 | } |
1128 | 1195 | ||
1129 | 1196 | static int aead_decrypt(struct aead_request *req) | |
1130 | |||
1131 | static int aead_authenc_decrypt(struct aead_request *req) | ||
1132 | { | 1197 | { |
1133 | struct crypto_aead *authenc = crypto_aead_reqtfm(req); | 1198 | struct crypto_aead *authenc = crypto_aead_reqtfm(req); |
1134 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); | 1199 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); |
1135 | unsigned int authsize = ctx->authsize; | 1200 | unsigned int authsize = ctx->authsize; |
1136 | struct talitos_private *priv = dev_get_drvdata(ctx->dev); | 1201 | struct talitos_private *priv = dev_get_drvdata(ctx->dev); |
1137 | struct ipsec_esp_edesc *edesc; | 1202 | struct talitos_edesc *edesc; |
1138 | struct scatterlist *sg; | 1203 | struct scatterlist *sg; |
1139 | void *icvdata; | 1204 | void *icvdata; |
1140 | 1205 | ||
1141 | req->cryptlen -= authsize; | 1206 | req->cryptlen -= authsize; |
1142 | 1207 | ||
1143 | /* allocate extended descriptor */ | 1208 | /* allocate extended descriptor */ |
1144 | edesc = ipsec_esp_edesc_alloc(req, 1); | 1209 | edesc = aead_edesc_alloc(req, 1); |
1145 | if (IS_ERR(edesc)) | 1210 | if (IS_ERR(edesc)) |
1146 | return PTR_ERR(edesc); | 1211 | return PTR_ERR(edesc); |
1147 | 1212 | ||
1148 | if ((priv->features & TALITOS_FTR_HW_AUTH_CHECK) && | 1213 | if ((priv->features & TALITOS_FTR_HW_AUTH_CHECK) && |
1149 | (((!edesc->src_nents && !edesc->dst_nents) || | 1214 | ((!edesc->src_nents && !edesc->dst_nents) || |
1150 | priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT))) { | 1215 | priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) { |
1151 | 1216 | ||
1152 | /* decrypt and check the ICV */ | 1217 | /* decrypt and check the ICV */ |
1153 | edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND | | 1218 | edesc->desc.hdr = ctx->desc_hdr_template | |
1219 | DESC_HDR_DIR_INBOUND | | ||
1154 | DESC_HDR_MODE1_MDEU_CICV; | 1220 | DESC_HDR_MODE1_MDEU_CICV; |
1155 | 1221 | ||
1156 | /* reset integrity check result bits */ | 1222 | /* reset integrity check result bits */ |
1157 | edesc->desc.hdr_lo = 0; | 1223 | edesc->desc.hdr_lo = 0; |
1158 | 1224 | ||
1159 | return ipsec_esp(edesc, req, NULL, 0, ipsec_esp_decrypt_hwauth_done); | 1225 | return ipsec_esp(edesc, req, NULL, 0, |
1226 | ipsec_esp_decrypt_hwauth_done); | ||
1160 | 1227 | ||
1161 | } else { | 1228 | } |
1162 | |||
1163 | /* Have to check the ICV with software */ | ||
1164 | 1229 | ||
1165 | edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND; | 1230 | /* Have to check the ICV with software */ |
1231 | edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND; | ||
1166 | 1232 | ||
1167 | /* stash incoming ICV for later cmp with ICV generated by the h/w */ | 1233 | /* stash incoming ICV for later cmp with ICV generated by the h/w */ |
1168 | if (edesc->dma_len) | 1234 | if (edesc->dma_len) |
1169 | icvdata = &edesc->link_tbl[edesc->src_nents + | 1235 | icvdata = &edesc->link_tbl[edesc->src_nents + |
1170 | edesc->dst_nents + 2]; | 1236 | edesc->dst_nents + 2]; |
1171 | else | 1237 | else |
1172 | icvdata = &edesc->link_tbl[0]; | 1238 | icvdata = &edesc->link_tbl[0]; |
1173 | 1239 | ||
1174 | sg = sg_last(req->src, edesc->src_nents ? : 1); | 1240 | sg = sg_last(req->src, edesc->src_nents ? : 1); |
1175 | 1241 | ||
1176 | memcpy(icvdata, (char *)sg_virt(sg) + sg->length - ctx->authsize, | 1242 | memcpy(icvdata, (char *)sg_virt(sg) + sg->length - ctx->authsize, |
1177 | ctx->authsize); | 1243 | ctx->authsize); |
1178 | 1244 | ||
1179 | return ipsec_esp(edesc, req, NULL, 0, ipsec_esp_decrypt_swauth_done); | 1245 | return ipsec_esp(edesc, req, NULL, 0, ipsec_esp_decrypt_swauth_done); |
1180 | } | ||
1181 | } | 1246 | } |
1182 | 1247 | ||
1183 | static int aead_authenc_givencrypt( | 1248 | static int aead_givencrypt(struct aead_givcrypt_request *req) |
1184 | struct aead_givcrypt_request *req) | ||
1185 | { | 1249 | { |
1186 | struct aead_request *areq = &req->areq; | 1250 | struct aead_request *areq = &req->areq; |
1187 | struct crypto_aead *authenc = crypto_aead_reqtfm(areq); | 1251 | struct crypto_aead *authenc = crypto_aead_reqtfm(areq); |
1188 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); | 1252 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); |
1189 | struct ipsec_esp_edesc *edesc; | 1253 | struct talitos_edesc *edesc; |
1190 | 1254 | ||
1191 | /* allocate extended descriptor */ | 1255 | /* allocate extended descriptor */ |
1192 | edesc = ipsec_esp_edesc_alloc(areq, 0); | 1256 | edesc = aead_edesc_alloc(areq, 0); |
1193 | if (IS_ERR(edesc)) | 1257 | if (IS_ERR(edesc)) |
1194 | return PTR_ERR(edesc); | 1258 | return PTR_ERR(edesc); |
1195 | 1259 | ||
@@ -1204,31 +1268,228 @@ static int aead_authenc_givencrypt( | |||
1204 | ipsec_esp_encrypt_done); | 1268 | ipsec_esp_encrypt_done); |
1205 | } | 1269 | } |
1206 | 1270 | ||
1271 | static int ablkcipher_setkey(struct crypto_ablkcipher *cipher, | ||
1272 | const u8 *key, unsigned int keylen) | ||
1273 | { | ||
1274 | struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher); | ||
1275 | struct ablkcipher_alg *alg = crypto_ablkcipher_alg(cipher); | ||
1276 | |||
1277 | if (keylen > TALITOS_MAX_KEY_SIZE) | ||
1278 | goto badkey; | ||
1279 | |||
1280 | if (keylen < alg->min_keysize || keylen > alg->max_keysize) | ||
1281 | goto badkey; | ||
1282 | |||
1283 | memcpy(&ctx->key, key, keylen); | ||
1284 | ctx->keylen = keylen; | ||
1285 | |||
1286 | return 0; | ||
1287 | |||
1288 | badkey: | ||
1289 | crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN); | ||
1290 | return -EINVAL; | ||
1291 | } | ||
1292 | |||
1293 | static void common_nonsnoop_unmap(struct device *dev, | ||
1294 | struct talitos_edesc *edesc, | ||
1295 | struct ablkcipher_request *areq) | ||
1296 | { | ||
1297 | unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE); | ||
1298 | unmap_single_talitos_ptr(dev, &edesc->desc.ptr[2], DMA_TO_DEVICE); | ||
1299 | unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE); | ||
1300 | |||
1301 | talitos_sg_unmap(dev, edesc, areq->src, areq->dst); | ||
1302 | |||
1303 | if (edesc->dma_len) | ||
1304 | dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len, | ||
1305 | DMA_BIDIRECTIONAL); | ||
1306 | } | ||
1307 | |||
1308 | static void ablkcipher_done(struct device *dev, | ||
1309 | struct talitos_desc *desc, void *context, | ||
1310 | int err) | ||
1311 | { | ||
1312 | struct ablkcipher_request *areq = context; | ||
1313 | struct talitos_edesc *edesc; | ||
1314 | |||
1315 | edesc = container_of(desc, struct talitos_edesc, desc); | ||
1316 | |||
1317 | common_nonsnoop_unmap(dev, edesc, areq); | ||
1318 | |||
1319 | kfree(edesc); | ||
1320 | |||
1321 | areq->base.complete(&areq->base, err); | ||
1322 | } | ||
1323 | |||
1324 | static int common_nonsnoop(struct talitos_edesc *edesc, | ||
1325 | struct ablkcipher_request *areq, | ||
1326 | u8 *giv, | ||
1327 | void (*callback) (struct device *dev, | ||
1328 | struct talitos_desc *desc, | ||
1329 | void *context, int error)) | ||
1330 | { | ||
1331 | struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq); | ||
1332 | struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher); | ||
1333 | struct device *dev = ctx->dev; | ||
1334 | struct talitos_desc *desc = &edesc->desc; | ||
1335 | unsigned int cryptlen = areq->nbytes; | ||
1336 | unsigned int ivsize; | ||
1337 | int sg_count, ret; | ||
1338 | |||
1339 | /* first DWORD empty */ | ||
1340 | desc->ptr[0].len = 0; | ||
1341 | desc->ptr[0].ptr = 0; | ||
1342 | desc->ptr[0].j_extent = 0; | ||
1343 | |||
1344 | /* cipher iv */ | ||
1345 | ivsize = crypto_ablkcipher_ivsize(cipher); | ||
1346 | map_single_talitos_ptr(dev, &desc->ptr[1], ivsize, giv ?: areq->info, 0, | ||
1347 | DMA_TO_DEVICE); | ||
1348 | |||
1349 | /* cipher key */ | ||
1350 | map_single_talitos_ptr(dev, &desc->ptr[2], ctx->keylen, | ||
1351 | (char *)&ctx->key, 0, DMA_TO_DEVICE); | ||
1352 | |||
1353 | /* | ||
1354 | * cipher in | ||
1355 | */ | ||
1356 | desc->ptr[3].len = cpu_to_be16(cryptlen); | ||
1357 | desc->ptr[3].j_extent = 0; | ||
1358 | |||
1359 | sg_count = talitos_map_sg(dev, areq->src, edesc->src_nents ? : 1, | ||
1360 | (areq->src == areq->dst) ? DMA_BIDIRECTIONAL | ||
1361 | : DMA_TO_DEVICE, | ||
1362 | edesc->src_is_chained); | ||
1363 | |||
1364 | if (sg_count == 1) { | ||
1365 | desc->ptr[3].ptr = cpu_to_be32(sg_dma_address(areq->src)); | ||
1366 | } else { | ||
1367 | sg_count = sg_to_link_tbl(areq->src, sg_count, cryptlen, | ||
1368 | &edesc->link_tbl[0]); | ||
1369 | if (sg_count > 1) { | ||
1370 | desc->ptr[3].j_extent |= DESC_PTR_LNKTBL_JUMP; | ||
1371 | desc->ptr[3].ptr = cpu_to_be32(edesc->dma_link_tbl); | ||
1372 | dma_sync_single_for_device(dev, edesc->dma_link_tbl, | ||
1373 | edesc->dma_len, | ||
1374 | DMA_BIDIRECTIONAL); | ||
1375 | } else { | ||
1376 | /* Only one segment now, so no link tbl needed */ | ||
1377 | desc->ptr[3].ptr = cpu_to_be32(sg_dma_address(areq-> | ||
1378 | src)); | ||
1379 | } | ||
1380 | } | ||
1381 | |||
1382 | /* cipher out */ | ||
1383 | desc->ptr[4].len = cpu_to_be16(cryptlen); | ||
1384 | desc->ptr[4].j_extent = 0; | ||
1385 | |||
1386 | if (areq->src != areq->dst) | ||
1387 | sg_count = talitos_map_sg(dev, areq->dst, | ||
1388 | edesc->dst_nents ? : 1, | ||
1389 | DMA_FROM_DEVICE, | ||
1390 | edesc->dst_is_chained); | ||
1391 | |||
1392 | if (sg_count == 1) { | ||
1393 | desc->ptr[4].ptr = cpu_to_be32(sg_dma_address(areq->dst)); | ||
1394 | } else { | ||
1395 | struct talitos_ptr *link_tbl_ptr = | ||
1396 | &edesc->link_tbl[edesc->src_nents + 1]; | ||
1397 | |||
1398 | desc->ptr[4].j_extent |= DESC_PTR_LNKTBL_JUMP; | ||
1399 | desc->ptr[4].ptr = cpu_to_be32((struct talitos_ptr *) | ||
1400 | edesc->dma_link_tbl + | ||
1401 | edesc->src_nents + 1); | ||
1402 | sg_count = sg_to_link_tbl(areq->dst, sg_count, cryptlen, | ||
1403 | link_tbl_ptr); | ||
1404 | dma_sync_single_for_device(ctx->dev, edesc->dma_link_tbl, | ||
1405 | edesc->dma_len, DMA_BIDIRECTIONAL); | ||
1406 | } | ||
1407 | |||
1408 | /* iv out */ | ||
1409 | map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv, 0, | ||
1410 | DMA_FROM_DEVICE); | ||
1411 | |||
1412 | /* last DWORD empty */ | ||
1413 | desc->ptr[6].len = 0; | ||
1414 | desc->ptr[6].ptr = 0; | ||
1415 | desc->ptr[6].j_extent = 0; | ||
1416 | |||
1417 | ret = talitos_submit(dev, desc, callback, areq); | ||
1418 | if (ret != -EINPROGRESS) { | ||
1419 | common_nonsnoop_unmap(dev, edesc, areq); | ||
1420 | kfree(edesc); | ||
1421 | } | ||
1422 | return ret; | ||
1423 | } | ||
1424 | |||
1425 | static struct talitos_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request * | ||
1426 | areq) | ||
1427 | { | ||
1428 | struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq); | ||
1429 | struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher); | ||
1430 | |||
1431 | return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst, areq->nbytes, | ||
1432 | 0, 0, areq->base.flags); | ||
1433 | } | ||
1434 | |||
1435 | static int ablkcipher_encrypt(struct ablkcipher_request *areq) | ||
1436 | { | ||
1437 | struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq); | ||
1438 | struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher); | ||
1439 | struct talitos_edesc *edesc; | ||
1440 | |||
1441 | /* allocate extended descriptor */ | ||
1442 | edesc = ablkcipher_edesc_alloc(areq); | ||
1443 | if (IS_ERR(edesc)) | ||
1444 | return PTR_ERR(edesc); | ||
1445 | |||
1446 | /* set encrypt */ | ||
1447 | edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT; | ||
1448 | |||
1449 | return common_nonsnoop(edesc, areq, NULL, ablkcipher_done); | ||
1450 | } | ||
1451 | |||
1452 | static int ablkcipher_decrypt(struct ablkcipher_request *areq) | ||
1453 | { | ||
1454 | struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq); | ||
1455 | struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher); | ||
1456 | struct talitos_edesc *edesc; | ||
1457 | |||
1458 | /* allocate extended descriptor */ | ||
1459 | edesc = ablkcipher_edesc_alloc(areq); | ||
1460 | if (IS_ERR(edesc)) | ||
1461 | return PTR_ERR(edesc); | ||
1462 | |||
1463 | edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND; | ||
1464 | |||
1465 | return common_nonsnoop(edesc, areq, NULL, ablkcipher_done); | ||
1466 | } | ||
1467 | |||
1207 | struct talitos_alg_template { | 1468 | struct talitos_alg_template { |
1208 | char name[CRYPTO_MAX_ALG_NAME]; | 1469 | struct crypto_alg alg; |
1209 | char driver_name[CRYPTO_MAX_ALG_NAME]; | ||
1210 | unsigned int blocksize; | ||
1211 | struct aead_alg aead; | ||
1212 | struct device *dev; | ||
1213 | __be32 desc_hdr_template; | 1470 | __be32 desc_hdr_template; |
1214 | }; | 1471 | }; |
1215 | 1472 | ||
1216 | static struct talitos_alg_template driver_algs[] = { | 1473 | static struct talitos_alg_template driver_algs[] = { |
1217 | /* single-pass ipsec_esp descriptor */ | 1474 | /* AEAD algorithms. These use a single-pass ipsec_esp descriptor */ |
1218 | { | 1475 | { |
1219 | .name = "authenc(hmac(sha1),cbc(aes))", | 1476 | .alg = { |
1220 | .driver_name = "authenc-hmac-sha1-cbc-aes-talitos", | 1477 | .cra_name = "authenc(hmac(sha1),cbc(aes))", |
1221 | .blocksize = AES_BLOCK_SIZE, | 1478 | .cra_driver_name = "authenc-hmac-sha1-cbc-aes-talitos", |
1222 | .aead = { | 1479 | .cra_blocksize = AES_BLOCK_SIZE, |
1223 | .setkey = aead_authenc_setkey, | 1480 | .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC, |
1224 | .setauthsize = aead_authenc_setauthsize, | 1481 | .cra_type = &crypto_aead_type, |
1225 | .encrypt = aead_authenc_encrypt, | 1482 | .cra_aead = { |
1226 | .decrypt = aead_authenc_decrypt, | 1483 | .setkey = aead_setkey, |
1227 | .givencrypt = aead_authenc_givencrypt, | 1484 | .setauthsize = aead_setauthsize, |
1228 | .geniv = "<built-in>", | 1485 | .encrypt = aead_encrypt, |
1229 | .ivsize = AES_BLOCK_SIZE, | 1486 | .decrypt = aead_decrypt, |
1230 | .maxauthsize = SHA1_DIGEST_SIZE, | 1487 | .givencrypt = aead_givencrypt, |
1231 | }, | 1488 | .geniv = "<built-in>", |
1489 | .ivsize = AES_BLOCK_SIZE, | ||
1490 | .maxauthsize = SHA1_DIGEST_SIZE, | ||
1491 | } | ||
1492 | }, | ||
1232 | .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP | | 1493 | .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP | |
1233 | DESC_HDR_SEL0_AESU | | 1494 | DESC_HDR_SEL0_AESU | |
1234 | DESC_HDR_MODE0_AESU_CBC | | 1495 | DESC_HDR_MODE0_AESU_CBC | |
@@ -1238,19 +1499,23 @@ static struct talitos_alg_template driver_algs[] = { | |||
1238 | DESC_HDR_MODE1_MDEU_SHA1_HMAC, | 1499 | DESC_HDR_MODE1_MDEU_SHA1_HMAC, |
1239 | }, | 1500 | }, |
1240 | { | 1501 | { |
1241 | .name = "authenc(hmac(sha1),cbc(des3_ede))", | 1502 | .alg = { |
1242 | .driver_name = "authenc-hmac-sha1-cbc-3des-talitos", | 1503 | .cra_name = "authenc(hmac(sha1),cbc(des3_ede))", |
1243 | .blocksize = DES3_EDE_BLOCK_SIZE, | 1504 | .cra_driver_name = "authenc-hmac-sha1-cbc-3des-talitos", |
1244 | .aead = { | 1505 | .cra_blocksize = DES3_EDE_BLOCK_SIZE, |
1245 | .setkey = aead_authenc_setkey, | 1506 | .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC, |
1246 | .setauthsize = aead_authenc_setauthsize, | 1507 | .cra_type = &crypto_aead_type, |
1247 | .encrypt = aead_authenc_encrypt, | 1508 | .cra_aead = { |
1248 | .decrypt = aead_authenc_decrypt, | 1509 | .setkey = aead_setkey, |
1249 | .givencrypt = aead_authenc_givencrypt, | 1510 | .setauthsize = aead_setauthsize, |
1250 | .geniv = "<built-in>", | 1511 | .encrypt = aead_encrypt, |
1251 | .ivsize = DES3_EDE_BLOCK_SIZE, | 1512 | .decrypt = aead_decrypt, |
1252 | .maxauthsize = SHA1_DIGEST_SIZE, | 1513 | .givencrypt = aead_givencrypt, |
1253 | }, | 1514 | .geniv = "<built-in>", |
1515 | .ivsize = DES3_EDE_BLOCK_SIZE, | ||
1516 | .maxauthsize = SHA1_DIGEST_SIZE, | ||
1517 | } | ||
1518 | }, | ||
1254 | .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP | | 1519 | .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP | |
1255 | DESC_HDR_SEL0_DEU | | 1520 | DESC_HDR_SEL0_DEU | |
1256 | DESC_HDR_MODE0_DEU_CBC | | 1521 | DESC_HDR_MODE0_DEU_CBC | |
@@ -1261,19 +1526,23 @@ static struct talitos_alg_template driver_algs[] = { | |||
1261 | DESC_HDR_MODE1_MDEU_SHA1_HMAC, | 1526 | DESC_HDR_MODE1_MDEU_SHA1_HMAC, |
1262 | }, | 1527 | }, |
1263 | { | 1528 | { |
1264 | .name = "authenc(hmac(sha256),cbc(aes))", | 1529 | .alg = { |
1265 | .driver_name = "authenc-hmac-sha256-cbc-aes-talitos", | 1530 | .cra_name = "authenc(hmac(sha256),cbc(aes))", |
1266 | .blocksize = AES_BLOCK_SIZE, | 1531 | .cra_driver_name = "authenc-hmac-sha256-cbc-aes-talitos", |
1267 | .aead = { | 1532 | .cra_blocksize = AES_BLOCK_SIZE, |
1268 | .setkey = aead_authenc_setkey, | 1533 | .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC, |
1269 | .setauthsize = aead_authenc_setauthsize, | 1534 | .cra_type = &crypto_aead_type, |
1270 | .encrypt = aead_authenc_encrypt, | 1535 | .cra_aead = { |
1271 | .decrypt = aead_authenc_decrypt, | 1536 | .setkey = aead_setkey, |
1272 | .givencrypt = aead_authenc_givencrypt, | 1537 | .setauthsize = aead_setauthsize, |
1273 | .geniv = "<built-in>", | 1538 | .encrypt = aead_encrypt, |
1274 | .ivsize = AES_BLOCK_SIZE, | 1539 | .decrypt = aead_decrypt, |
1275 | .maxauthsize = SHA256_DIGEST_SIZE, | 1540 | .givencrypt = aead_givencrypt, |
1276 | }, | 1541 | .geniv = "<built-in>", |
1542 | .ivsize = AES_BLOCK_SIZE, | ||
1543 | .maxauthsize = SHA256_DIGEST_SIZE, | ||
1544 | } | ||
1545 | }, | ||
1277 | .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP | | 1546 | .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP | |
1278 | DESC_HDR_SEL0_AESU | | 1547 | DESC_HDR_SEL0_AESU | |
1279 | DESC_HDR_MODE0_AESU_CBC | | 1548 | DESC_HDR_MODE0_AESU_CBC | |
@@ -1283,19 +1552,23 @@ static struct talitos_alg_template driver_algs[] = { | |||
1283 | DESC_HDR_MODE1_MDEU_SHA256_HMAC, | 1552 | DESC_HDR_MODE1_MDEU_SHA256_HMAC, |
1284 | }, | 1553 | }, |
1285 | { | 1554 | { |
1286 | .name = "authenc(hmac(sha256),cbc(des3_ede))", | 1555 | .alg = { |
1287 | .driver_name = "authenc-hmac-sha256-cbc-3des-talitos", | 1556 | .cra_name = "authenc(hmac(sha256),cbc(des3_ede))", |
1288 | .blocksize = DES3_EDE_BLOCK_SIZE, | 1557 | .cra_driver_name = "authenc-hmac-sha256-cbc-3des-talitos", |
1289 | .aead = { | 1558 | .cra_blocksize = DES3_EDE_BLOCK_SIZE, |
1290 | .setkey = aead_authenc_setkey, | 1559 | .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC, |
1291 | .setauthsize = aead_authenc_setauthsize, | 1560 | .cra_type = &crypto_aead_type, |
1292 | .encrypt = aead_authenc_encrypt, | 1561 | .cra_aead = { |
1293 | .decrypt = aead_authenc_decrypt, | 1562 | .setkey = aead_setkey, |
1294 | .givencrypt = aead_authenc_givencrypt, | 1563 | .setauthsize = aead_setauthsize, |
1295 | .geniv = "<built-in>", | 1564 | .encrypt = aead_encrypt, |
1296 | .ivsize = DES3_EDE_BLOCK_SIZE, | 1565 | .decrypt = aead_decrypt, |
1297 | .maxauthsize = SHA256_DIGEST_SIZE, | 1566 | .givencrypt = aead_givencrypt, |
1298 | }, | 1567 | .geniv = "<built-in>", |
1568 | .ivsize = DES3_EDE_BLOCK_SIZE, | ||
1569 | .maxauthsize = SHA256_DIGEST_SIZE, | ||
1570 | } | ||
1571 | }, | ||
1299 | .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP | | 1572 | .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP | |
1300 | DESC_HDR_SEL0_DEU | | 1573 | DESC_HDR_SEL0_DEU | |
1301 | DESC_HDR_MODE0_DEU_CBC | | 1574 | DESC_HDR_MODE0_DEU_CBC | |
@@ -1306,19 +1579,23 @@ static struct talitos_alg_template driver_algs[] = { | |||
1306 | DESC_HDR_MODE1_MDEU_SHA256_HMAC, | 1579 | DESC_HDR_MODE1_MDEU_SHA256_HMAC, |
1307 | }, | 1580 | }, |
1308 | { | 1581 | { |
1309 | .name = "authenc(hmac(md5),cbc(aes))", | 1582 | .alg = { |
1310 | .driver_name = "authenc-hmac-md5-cbc-aes-talitos", | 1583 | .cra_name = "authenc(hmac(md5),cbc(aes))", |
1311 | .blocksize = AES_BLOCK_SIZE, | 1584 | .cra_driver_name = "authenc-hmac-md5-cbc-aes-talitos", |
1312 | .aead = { | 1585 | .cra_blocksize = AES_BLOCK_SIZE, |
1313 | .setkey = aead_authenc_setkey, | 1586 | .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC, |
1314 | .setauthsize = aead_authenc_setauthsize, | 1587 | .cra_type = &crypto_aead_type, |
1315 | .encrypt = aead_authenc_encrypt, | 1588 | .cra_aead = { |
1316 | .decrypt = aead_authenc_decrypt, | 1589 | .setkey = aead_setkey, |
1317 | .givencrypt = aead_authenc_givencrypt, | 1590 | .setauthsize = aead_setauthsize, |
1318 | .geniv = "<built-in>", | 1591 | .encrypt = aead_encrypt, |
1319 | .ivsize = AES_BLOCK_SIZE, | 1592 | .decrypt = aead_decrypt, |
1320 | .maxauthsize = MD5_DIGEST_SIZE, | 1593 | .givencrypt = aead_givencrypt, |
1321 | }, | 1594 | .geniv = "<built-in>", |
1595 | .ivsize = AES_BLOCK_SIZE, | ||
1596 | .maxauthsize = MD5_DIGEST_SIZE, | ||
1597 | } | ||
1598 | }, | ||
1322 | .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP | | 1599 | .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP | |
1323 | DESC_HDR_SEL0_AESU | | 1600 | DESC_HDR_SEL0_AESU | |
1324 | DESC_HDR_MODE0_AESU_CBC | | 1601 | DESC_HDR_MODE0_AESU_CBC | |
@@ -1328,19 +1605,23 @@ static struct talitos_alg_template driver_algs[] = { | |||
1328 | DESC_HDR_MODE1_MDEU_MD5_HMAC, | 1605 | DESC_HDR_MODE1_MDEU_MD5_HMAC, |
1329 | }, | 1606 | }, |
1330 | { | 1607 | { |
1331 | .name = "authenc(hmac(md5),cbc(des3_ede))", | 1608 | .alg = { |
1332 | .driver_name = "authenc-hmac-md5-cbc-3des-talitos", | 1609 | .cra_name = "authenc(hmac(md5),cbc(des3_ede))", |
1333 | .blocksize = DES3_EDE_BLOCK_SIZE, | 1610 | .cra_driver_name = "authenc-hmac-md5-cbc-3des-talitos", |
1334 | .aead = { | 1611 | .cra_blocksize = DES3_EDE_BLOCK_SIZE, |
1335 | .setkey = aead_authenc_setkey, | 1612 | .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC, |
1336 | .setauthsize = aead_authenc_setauthsize, | 1613 | .cra_type = &crypto_aead_type, |
1337 | .encrypt = aead_authenc_encrypt, | 1614 | .cra_aead = { |
1338 | .decrypt = aead_authenc_decrypt, | 1615 | .setkey = aead_setkey, |
1339 | .givencrypt = aead_authenc_givencrypt, | 1616 | .setauthsize = aead_setauthsize, |
1340 | .geniv = "<built-in>", | 1617 | .encrypt = aead_encrypt, |
1341 | .ivsize = DES3_EDE_BLOCK_SIZE, | 1618 | .decrypt = aead_decrypt, |
1342 | .maxauthsize = MD5_DIGEST_SIZE, | 1619 | .givencrypt = aead_givencrypt, |
1343 | }, | 1620 | .geniv = "<built-in>", |
1621 | .ivsize = DES3_EDE_BLOCK_SIZE, | ||
1622 | .maxauthsize = MD5_DIGEST_SIZE, | ||
1623 | } | ||
1624 | }, | ||
1344 | .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP | | 1625 | .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP | |
1345 | DESC_HDR_SEL0_DEU | | 1626 | DESC_HDR_SEL0_DEU | |
1346 | DESC_HDR_MODE0_DEU_CBC | | 1627 | DESC_HDR_MODE0_DEU_CBC | |
@@ -1349,6 +1630,52 @@ static struct talitos_alg_template driver_algs[] = { | |||
1349 | DESC_HDR_MODE1_MDEU_INIT | | 1630 | DESC_HDR_MODE1_MDEU_INIT | |
1350 | DESC_HDR_MODE1_MDEU_PAD | | 1631 | DESC_HDR_MODE1_MDEU_PAD | |
1351 | DESC_HDR_MODE1_MDEU_MD5_HMAC, | 1632 | DESC_HDR_MODE1_MDEU_MD5_HMAC, |
1633 | }, | ||
1634 | /* ABLKCIPHER algorithms. */ | ||
1635 | { | ||
1636 | .alg = { | ||
1637 | .cra_name = "cbc(aes)", | ||
1638 | .cra_driver_name = "cbc-aes-talitos", | ||
1639 | .cra_blocksize = AES_BLOCK_SIZE, | ||
1640 | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | | ||
1641 | CRYPTO_ALG_ASYNC, | ||
1642 | .cra_type = &crypto_ablkcipher_type, | ||
1643 | .cra_ablkcipher = { | ||
1644 | .setkey = ablkcipher_setkey, | ||
1645 | .encrypt = ablkcipher_encrypt, | ||
1646 | .decrypt = ablkcipher_decrypt, | ||
1647 | .geniv = "eseqiv", | ||
1648 | .min_keysize = AES_MIN_KEY_SIZE, | ||
1649 | .max_keysize = AES_MAX_KEY_SIZE, | ||
1650 | .ivsize = AES_BLOCK_SIZE, | ||
1651 | } | ||
1652 | }, | ||
1653 | .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU | | ||
1654 | DESC_HDR_SEL0_AESU | | ||
1655 | DESC_HDR_MODE0_AESU_CBC, | ||
1656 | }, | ||
1657 | { | ||
1658 | .alg = { | ||
1659 | .cra_name = "cbc(des3_ede)", | ||
1660 | .cra_driver_name = "cbc-3des-talitos", | ||
1661 | .cra_blocksize = DES3_EDE_BLOCK_SIZE, | ||
1662 | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | | ||
1663 | CRYPTO_ALG_ASYNC, | ||
1664 | .cra_type = &crypto_ablkcipher_type, | ||
1665 | .cra_ablkcipher = { | ||
1666 | .setkey = ablkcipher_setkey, | ||
1667 | .encrypt = ablkcipher_encrypt, | ||
1668 | .decrypt = ablkcipher_decrypt, | ||
1669 | .geniv = "eseqiv", | ||
1670 | .min_keysize = DES3_EDE_KEY_SIZE, | ||
1671 | .max_keysize = DES3_EDE_KEY_SIZE, | ||
1672 | .ivsize = DES3_EDE_BLOCK_SIZE, | ||
1673 | } | ||
1674 | }, | ||
1675 | .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU | | ||
1676 | DESC_HDR_SEL0_DEU | | ||
1677 | DESC_HDR_MODE0_DEU_CBC | | ||
1678 | DESC_HDR_MODE0_DEU_3DES, | ||
1352 | } | 1679 | } |
1353 | }; | 1680 | }; |
1354 | 1681 | ||
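With the driver_algs[] table now carrying full crypto_alg templates, the two new ABLKCIPHER entries ("cbc-aes-talitos" and "cbc-3des-talitos") become reachable through the generic kernel API. A hedged sketch of how a caller would exercise the new "cbc(aes)" registration; nothing here is talitos-specific, the example_* names are hypothetical, and error handling is kept short (len must be a multiple of AES_BLOCK_SIZE for CBC):

#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/completion.h>
#include <linux/err.h>

struct example_result {
	struct completion completion;
	int err;
};

static void example_cb(struct crypto_async_request *req, int err)
{
	struct example_result *res = req->data;

	if (err == -EINPROGRESS)	/* backlogged request has started; keep waiting */
		return;
	res->err = err;
	complete(&res->completion);
}

static int example_cbc_aes_encrypt(u8 *buf, unsigned int len,
				   const u8 *key, unsigned int keylen, u8 *iv)
{
	struct crypto_ablkcipher *tfm;
	struct ablkcipher_request *req;
	struct example_result res;
	struct scatterlist sg;
	int ret;

	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_ablkcipher_setkey(tfm, key, keylen);
	if (ret)
		goto out_tfm;

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_tfm;
	}

	init_completion(&res.completion);
	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					example_cb, &res);
	sg_init_one(&sg, buf, len);
	ablkcipher_request_set_crypt(req, &sg, &sg, len, iv);

	ret = crypto_ablkcipher_encrypt(req);
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		/* hardware completes asynchronously via the done callback */
		wait_for_completion(&res.completion);
		ret = res.err;
	}

	ablkcipher_request_free(req);
out_tfm:
	crypto_free_ablkcipher(tfm);
	return ret;
}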
@@ -1362,12 +1689,14 @@ struct talitos_crypto_alg { | |||
1362 | static int talitos_cra_init(struct crypto_tfm *tfm) | 1689 | static int talitos_cra_init(struct crypto_tfm *tfm) |
1363 | { | 1690 | { |
1364 | struct crypto_alg *alg = tfm->__crt_alg; | 1691 | struct crypto_alg *alg = tfm->__crt_alg; |
1365 | struct talitos_crypto_alg *talitos_alg = | 1692 | struct talitos_crypto_alg *talitos_alg; |
1366 | container_of(alg, struct talitos_crypto_alg, crypto_alg); | ||
1367 | struct talitos_ctx *ctx = crypto_tfm_ctx(tfm); | 1693 | struct talitos_ctx *ctx = crypto_tfm_ctx(tfm); |
1368 | 1694 | ||
1695 | talitos_alg = container_of(alg, struct talitos_crypto_alg, crypto_alg); | ||
1696 | |||
1369 | /* update context with ptr to dev */ | 1697 | /* update context with ptr to dev */ |
1370 | ctx->dev = talitos_alg->dev; | 1698 | ctx->dev = talitos_alg->dev; |
1699 | |||
1371 | /* copy descriptor header template value */ | 1700 | /* copy descriptor header template value */ |
1372 | ctx->desc_hdr_template = talitos_alg->desc_hdr_template; | 1701 | ctx->desc_hdr_template = talitos_alg->desc_hdr_template; |
1373 | 1702 | ||
@@ -1453,19 +1782,13 @@ static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev, | |||
1453 | return ERR_PTR(-ENOMEM); | 1782 | return ERR_PTR(-ENOMEM); |
1454 | 1783 | ||
1455 | alg = &t_alg->crypto_alg; | 1784 | alg = &t_alg->crypto_alg; |
1785 | *alg = template->alg; | ||
1456 | 1786 | ||
1457 | snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name); | ||
1458 | snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s", | ||
1459 | template->driver_name); | ||
1460 | alg->cra_module = THIS_MODULE; | 1787 | alg->cra_module = THIS_MODULE; |
1461 | alg->cra_init = talitos_cra_init; | 1788 | alg->cra_init = talitos_cra_init; |
1462 | alg->cra_priority = TALITOS_CRA_PRIORITY; | 1789 | alg->cra_priority = TALITOS_CRA_PRIORITY; |
1463 | alg->cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC; | ||
1464 | alg->cra_blocksize = template->blocksize; | ||
1465 | alg->cra_alignmask = 0; | 1790 | alg->cra_alignmask = 0; |
1466 | alg->cra_type = &crypto_aead_type; | ||
1467 | alg->cra_ctxsize = sizeof(struct talitos_ctx); | 1791 | alg->cra_ctxsize = sizeof(struct talitos_ctx); |
1468 | alg->cra_u.aead = template->aead; | ||
1469 | 1792 | ||
1470 | t_alg->desc_hdr_template = template->desc_hdr_template; | 1793 | t_alg->desc_hdr_template = template->desc_hdr_template; |
1471 | t_alg->dev = dev; | 1794 | t_alg->dev = dev; |