author     Linus Torvalds <torvalds@linux-foundation.org>  2013-11-23 19:18:25 -0500
committer  Linus Torvalds <torvalds@linux-foundation.org>  2013-11-23 19:18:25 -0500
commit     26b265cd29dde56bf0901c421eabc7ae815f38c4
tree       83a5418c96ccde8522bda6614063b665fe5e0ec9  /crypto
parent     2e7babfa892a55588467ef03b545002e32f31528
parent     f262f0f5cad0c9eca61d1d383e3b67b57dcbe5ea
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto update from Herbert Xu:
- Made the x86 ablk_helper generic so it can be shared with ARM
- Phased out chainiv in favour of eseqiv (affects IPsec)
- Fixed aes-cbc IV corruption on s390
- Added constant-time crypto_memneq, which replaces memcmp in security-sensitive comparisons
- Fixed aes-ctr in omap-aes
- Added OMAP3 ROM RNG support
- Added PRNG support for MSM SoCs
- Added and used the Job Ring API in caam
- Misc fixes
[ NOTE! This pull request was sent within the merge window, but Herbert
has some questionable email sending setup that makes him public enemy
#1 as far as gmail is concerned. So most of his emails seem to be
trapped by gmail as spam, resulting in me not seeing them. - Linus ]
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (49 commits)
crypto: s390 - Fix aes-cbc IV corruption
crypto: omap-aes - Fix CTR mode counter length
crypto: omap-sham - Add missing modalias
padata: make the sequence counter an atomic_t
crypto: caam - Modify the interface layers to use JR API's
crypto: caam - Add API's to allocate/free Job Rings
crypto: caam - Add Platform driver for Job Ring
hwrng: msm - Add PRNG support for MSM SoC's
ARM: DT: msm: Add Qualcomm's PRNG driver binding document
crypto: skcipher - Use eseqiv even on UP machines
crypto: talitos - Simplify key parsing
crypto: picoxcell - Simplify and harden key parsing
crypto: ixp4xx - Simplify and harden key parsing
crypto: authencesn - Simplify key parsing
crypto: authenc - Export key parsing helper function
crypto: mv_cesa: remove deprecated IRQF_DISABLED
hwrng: OMAP3 ROM Random Number Generator support
crypto: sha256_ssse3 - also test for BMI2
crypto: mv_cesa - Remove redundant of_match_ptr
crypto: sahara - Remove redundant of_match_ptr
...
Diffstat (limited to 'crypto')
-rw-r--r--   crypto/Kconfig                 23
-rw-r--r--   crypto/Makefile                 8
-rw-r--r--   crypto/ablk_helper.c          150
-rw-r--r--   crypto/ablkcipher.c            21
-rw-r--r--   crypto/ansi_cprng.c             4
-rw-r--r--   crypto/asymmetric_keys/rsa.c    5
-rw-r--r--   crypto/authenc.c               54
-rw-r--r--   crypto/authencesn.c            34
-rw-r--r--   crypto/ccm.c                    4
-rw-r--r--   crypto/gcm.c                    2
-rw-r--r--   crypto/memneq.c               138
11 files changed, 356 insertions(+), 87 deletions(-)
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 4ae5734fb473..7bcb70d216e1 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -174,9 +174,8 @@ config CRYPTO_TEST
         help
           Quick & dirty crypto test module.
 
-config CRYPTO_ABLK_HELPER_X86
+config CRYPTO_ABLK_HELPER
         tristate
-        depends on X86
         select CRYPTO_CRYPTD
 
 config CRYPTO_GLUE_HELPER_X86
@@ -695,7 +694,7 @@ config CRYPTO_AES_NI_INTEL
         select CRYPTO_AES_X86_64 if 64BIT
         select CRYPTO_AES_586 if !64BIT
         select CRYPTO_CRYPTD
-        select CRYPTO_ABLK_HELPER_X86
+        select CRYPTO_ABLK_HELPER
         select CRYPTO_ALGAPI
         select CRYPTO_GLUE_HELPER_X86 if 64BIT
         select CRYPTO_LRW
@@ -895,7 +894,7 @@ config CRYPTO_CAMELLIA_AESNI_AVX_X86_64
         depends on CRYPTO
         select CRYPTO_ALGAPI
         select CRYPTO_CRYPTD
-        select CRYPTO_ABLK_HELPER_X86
+        select CRYPTO_ABLK_HELPER
         select CRYPTO_GLUE_HELPER_X86
         select CRYPTO_CAMELLIA_X86_64
         select CRYPTO_LRW
@@ -917,7 +916,7 @@ config CRYPTO_CAMELLIA_AESNI_AVX2_X86_64
         depends on CRYPTO
         select CRYPTO_ALGAPI
         select CRYPTO_CRYPTD
-        select CRYPTO_ABLK_HELPER_X86
+        select CRYPTO_ABLK_HELPER
         select CRYPTO_GLUE_HELPER_X86
         select CRYPTO_CAMELLIA_X86_64
         select CRYPTO_CAMELLIA_AESNI_AVX_X86_64
@@ -969,7 +968,7 @@ config CRYPTO_CAST5_AVX_X86_64
         depends on X86 && 64BIT
         select CRYPTO_ALGAPI
         select CRYPTO_CRYPTD
-        select CRYPTO_ABLK_HELPER_X86
+        select CRYPTO_ABLK_HELPER
         select CRYPTO_CAST_COMMON
         select CRYPTO_CAST5
         help
@@ -992,7 +991,7 @@ config CRYPTO_CAST6_AVX_X86_64
         depends on X86 && 64BIT
         select CRYPTO_ALGAPI
         select CRYPTO_CRYPTD
-        select CRYPTO_ABLK_HELPER_X86
+        select CRYPTO_ABLK_HELPER
         select CRYPTO_GLUE_HELPER_X86
         select CRYPTO_CAST_COMMON
         select CRYPTO_CAST6
@@ -1110,7 +1109,7 @@ config CRYPTO_SERPENT_SSE2_X86_64
         depends on X86 && 64BIT
         select CRYPTO_ALGAPI
         select CRYPTO_CRYPTD
-        select CRYPTO_ABLK_HELPER_X86
+        select CRYPTO_ABLK_HELPER
         select CRYPTO_GLUE_HELPER_X86
         select CRYPTO_SERPENT
         select CRYPTO_LRW
@@ -1132,7 +1131,7 @@ config CRYPTO_SERPENT_SSE2_586
         depends on X86 && !64BIT
         select CRYPTO_ALGAPI
         select CRYPTO_CRYPTD
-        select CRYPTO_ABLK_HELPER_X86
+        select CRYPTO_ABLK_HELPER
         select CRYPTO_GLUE_HELPER_X86
         select CRYPTO_SERPENT
         select CRYPTO_LRW
@@ -1154,7 +1153,7 @@ config CRYPTO_SERPENT_AVX_X86_64
         depends on X86 && 64BIT
         select CRYPTO_ALGAPI
         select CRYPTO_CRYPTD
-        select CRYPTO_ABLK_HELPER_X86
+        select CRYPTO_ABLK_HELPER
         select CRYPTO_GLUE_HELPER_X86
         select CRYPTO_SERPENT
         select CRYPTO_LRW
@@ -1176,7 +1175,7 @@ config CRYPTO_SERPENT_AVX2_X86_64
         depends on X86 && 64BIT
         select CRYPTO_ALGAPI
         select CRYPTO_CRYPTD
-        select CRYPTO_ABLK_HELPER_X86
+        select CRYPTO_ABLK_HELPER
         select CRYPTO_GLUE_HELPER_X86
         select CRYPTO_SERPENT
         select CRYPTO_SERPENT_AVX_X86_64
@@ -1292,7 +1291,7 @@ config CRYPTO_TWOFISH_AVX_X86_64
         depends on X86 && 64BIT
         select CRYPTO_ALGAPI
         select CRYPTO_CRYPTD
-        select CRYPTO_ABLK_HELPER_X86
+        select CRYPTO_ABLK_HELPER
         select CRYPTO_GLUE_HELPER_X86
         select CRYPTO_TWOFISH_COMMON
         select CRYPTO_TWOFISH_X86_64
diff --git a/crypto/Makefile b/crypto/Makefile
index b3a7e807e08b..989c510da8cc 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -2,8 +2,13 @@
 # Cryptographic API
 #
 
+# memneq MUST be built with -Os or -O0 to prevent early-return optimizations
+# that will defeat memneq's actual purpose to prevent timing attacks.
+CFLAGS_REMOVE_memneq.o := -O1 -O2 -O3
+CFLAGS_memneq.o := -Os
+
 obj-$(CONFIG_CRYPTO) += crypto.o
-crypto-y := api.o cipher.o compress.o
+crypto-y := api.o cipher.o compress.o memneq.o
 
 obj-$(CONFIG_CRYPTO_WORKQUEUE) += crypto_wq.o
 
@@ -105,3 +110,4 @@ obj-$(CONFIG_XOR_BLOCKS) += xor.o
 obj-$(CONFIG_ASYNC_CORE) += async_tx/
 obj-$(CONFIG_ASYMMETRIC_KEY_TYPE) += asymmetric_keys/
 obj-$(CONFIG_CRYPTO_HASH_INFO) += hash_info.o
+obj-$(CONFIG_CRYPTO_ABLK_HELPER) += ablk_helper.o
diff --git a/crypto/ablk_helper.c b/crypto/ablk_helper.c
new file mode 100644
index 000000000000..ffe7278d4bd8
--- /dev/null
+++ b/crypto/ablk_helper.c
@@ -0,0 +1,150 @@
+/*
+ * Shared async block cipher helpers
+ *
+ * Copyright (c) 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
+ *
+ * Based on aesni-intel_glue.c by:
+ *  Copyright (C) 2008, Intel Corp.
+ *    Author: Huang Ying <ying.huang@intel.com>
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
+ * USA
+ *
+ */
+
+#include <linux/kernel.h>
+#include <linux/crypto.h>
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/hardirq.h>
+#include <crypto/algapi.h>
+#include <crypto/cryptd.h>
+#include <crypto/ablk_helper.h>
+#include <asm/simd.h>
+
+int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
+                 unsigned int key_len)
+{
+        struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+        struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
+        int err;
+
+        crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+        crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
+                                    & CRYPTO_TFM_REQ_MASK);
+        err = crypto_ablkcipher_setkey(child, key, key_len);
+        crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
+                                    & CRYPTO_TFM_RES_MASK);
+        return err;
+}
+EXPORT_SYMBOL_GPL(ablk_set_key);
+
+int __ablk_encrypt(struct ablkcipher_request *req)
+{
+        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
+        struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+        struct blkcipher_desc desc;
+
+        desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
+        desc.info = req->info;
+        desc.flags = 0;
+
+        return crypto_blkcipher_crt(desc.tfm)->encrypt(
+                &desc, req->dst, req->src, req->nbytes);
+}
+EXPORT_SYMBOL_GPL(__ablk_encrypt);
+
+int ablk_encrypt(struct ablkcipher_request *req)
+{
+        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
+        struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+
+        if (!may_use_simd()) {
+                struct ablkcipher_request *cryptd_req =
+                        ablkcipher_request_ctx(req);
+
+                *cryptd_req = *req;
+                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
+
+                return crypto_ablkcipher_encrypt(cryptd_req);
+        } else {
+                return __ablk_encrypt(req);
+        }
+}
+EXPORT_SYMBOL_GPL(ablk_encrypt);
+
+int ablk_decrypt(struct ablkcipher_request *req)
+{
+        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
+        struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+
+        if (!may_use_simd()) {
+                struct ablkcipher_request *cryptd_req =
+                        ablkcipher_request_ctx(req);
+
+                *cryptd_req = *req;
+                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
+
+                return crypto_ablkcipher_decrypt(cryptd_req);
+        } else {
+                struct blkcipher_desc desc;
+
+                desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
+                desc.info = req->info;
+                desc.flags = 0;
+
+                return crypto_blkcipher_crt(desc.tfm)->decrypt(
+                        &desc, req->dst, req->src, req->nbytes);
+        }
+}
+EXPORT_SYMBOL_GPL(ablk_decrypt);
+
+void ablk_exit(struct crypto_tfm *tfm)
+{
+        struct async_helper_ctx *ctx = crypto_tfm_ctx(tfm);
+
+        cryptd_free_ablkcipher(ctx->cryptd_tfm);
+}
+EXPORT_SYMBOL_GPL(ablk_exit);
+
+int ablk_init_common(struct crypto_tfm *tfm, const char *drv_name)
+{
+        struct async_helper_ctx *ctx = crypto_tfm_ctx(tfm);
+        struct cryptd_ablkcipher *cryptd_tfm;
+
+        cryptd_tfm = cryptd_alloc_ablkcipher(drv_name, 0, 0);
+        if (IS_ERR(cryptd_tfm))
+                return PTR_ERR(cryptd_tfm);
+
+        ctx->cryptd_tfm = cryptd_tfm;
+        tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
+                crypto_ablkcipher_reqsize(&cryptd_tfm->base);
+
+        return 0;
+}
+EXPORT_SYMBOL_GPL(ablk_init_common);
+
+int ablk_init(struct crypto_tfm *tfm)
+{
+        char drv_name[CRYPTO_MAX_ALG_NAME];
+
+        snprintf(drv_name, sizeof(drv_name), "__driver-%s",
+                 crypto_tfm_alg_driver_name(tfm));
+
+        return ablk_init_common(tfm, drv_name);
+}
+EXPORT_SYMBOL_GPL(ablk_init);
+
+MODULE_LICENSE("GPL");
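
For context, an arch SIMD cipher module is expected to wire these helpers into its algorithm definition so that requests fall back to cryptd whenever SIMD registers cannot be used. The following is only a minimal sketch of that wiring under stated assumptions: example_ablk_alg, "cbc-aes-example", the priority and the key/IV sizes are illustrative placeholders, not taken from this series.

#include <linux/module.h>
#include <linux/crypto.h>
#include <crypto/ablk_helper.h>

/* Hypothetical async "cbc(aes)" wrapper around an internal SIMD driver. */
static struct crypto_alg example_ablk_alg = {
        .cra_name               = "cbc(aes)",          /* placeholder */
        .cra_driver_name        = "cbc-aes-example",   /* placeholder */
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 16,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,   /* allocates the cryptd fallback */
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = 16,
                        .max_keysize    = 32,
                        .ivsize         = 16,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt, /* defers to cryptd if !may_use_simd() */
                        .decrypt        = ablk_decrypt,
                },
        },
};

Registration would then go through crypto_register_alg(), and ablk_init() asks cryptd for the matching "__driver-cbc-aes-example" internal implementation, mirroring how the existing x86 glue code already uses these helpers.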
diff --git a/crypto/ablkcipher.c b/crypto/ablkcipher.c
index 7d4a8d28277e..40886c489903 100644
--- a/crypto/ablkcipher.c
+++ b/crypto/ablkcipher.c
@@ -16,9 +16,7 @@
 #include <crypto/internal/skcipher.h>
 #include <linux/cpumask.h>
 #include <linux/err.h>
-#include <linux/init.h>
 #include <linux/kernel.h>
-#include <linux/module.h>
 #include <linux/rtnetlink.h>
 #include <linux/sched.h>
 #include <linux/slab.h>
@@ -30,8 +28,6 @@
 
 #include "internal.h"
 
-static const char *skcipher_default_geniv __read_mostly;
-
 struct ablkcipher_buffer {
         struct list_head entry;
         struct scatter_walk dst;
@@ -527,8 +523,7 @@ const char *crypto_default_geniv(const struct crypto_alg *alg)
             alg->cra_blocksize)
                 return "chainiv";
 
-        return alg->cra_flags & CRYPTO_ALG_ASYNC ?
-               "eseqiv" : skcipher_default_geniv;
+        return "eseqiv";
 }
 
 static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
@@ -709,17 +704,3 @@ err:
         return ERR_PTR(err);
 }
 EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher);
-
-static int __init skcipher_module_init(void)
-{
-        skcipher_default_geniv = num_possible_cpus() > 1 ?
-                                 "eseqiv" : "chainiv";
-        return 0;
-}
-
-static void skcipher_module_exit(void)
-{
-}
-
-module_init(skcipher_module_init);
-module_exit(skcipher_module_exit);
diff --git a/crypto/ansi_cprng.c b/crypto/ansi_cprng.c
index c0bb3778f1ae..666f1962a160 100644
--- a/crypto/ansi_cprng.c
+++ b/crypto/ansi_cprng.c
@@ -230,11 +230,11 @@ remainder:
          */
         if (byte_count < DEFAULT_BLK_SZ) {
 empty_rbuf:
-                for (; ctx->rand_data_valid < DEFAULT_BLK_SZ;
-                        ctx->rand_data_valid++) {
+                while (ctx->rand_data_valid < DEFAULT_BLK_SZ) {
                         *ptr = ctx->rand_data[ctx->rand_data_valid];
                         ptr++;
                         byte_count--;
+                        ctx->rand_data_valid++;
                         if (byte_count == 0)
                                 goto done;
                 }
diff --git a/crypto/asymmetric_keys/rsa.c b/crypto/asymmetric_keys/rsa.c
index 90a17f59ba28..459cf97a75e2 100644
--- a/crypto/asymmetric_keys/rsa.c
+++ b/crypto/asymmetric_keys/rsa.c
@@ -13,6 +13,7 @@
 #include <linux/module.h>
 #include <linux/kernel.h>
 #include <linux/slab.h>
+#include <crypto/algapi.h>
 #include "public_key.h"
 
 MODULE_LICENSE("GPL");
@@ -189,12 +190,12 @@ static int RSA_verify(const u8 *H, const u8 *EM, size_t k, size_t hash_size,
                 }
         }
 
-        if (memcmp(asn1_template, EM + T_offset, asn1_size) != 0) {
+        if (crypto_memneq(asn1_template, EM + T_offset, asn1_size) != 0) {
                 kleave(" = -EBADMSG [EM[T] ASN.1 mismatch]");
                 return -EBADMSG;
         }
 
-        if (memcmp(H, EM + T_offset + asn1_size, hash_size) != 0) {
+        if (crypto_memneq(H, EM + T_offset + asn1_size, hash_size) != 0) {
                 kleave(" = -EKEYREJECTED [EM[T] hash mismatch]");
                 return -EKEYREJECTED;
         }
diff --git a/crypto/authenc.c b/crypto/authenc.c
index ffce19de05cf..1875e7026e8f 100644
--- a/crypto/authenc.c
+++ b/crypto/authenc.c
@@ -52,40 +52,52 @@ static void authenc_request_complete(struct aead_request *req, int err)
                 aead_request_complete(req, err);
 }
 
-static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
+int crypto_authenc_extractkeys(struct crypto_authenc_keys *keys, const u8 *key,
                                  unsigned int keylen)
 {
-        unsigned int authkeylen;
-        unsigned int enckeylen;
-        struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
-        struct crypto_ahash *auth = ctx->auth;
-        struct crypto_ablkcipher *enc = ctx->enc;
-        struct rtattr *rta = (void *)key;
+        struct rtattr *rta = (struct rtattr *)key;
         struct crypto_authenc_key_param *param;
-        int err = -EINVAL;
 
         if (!RTA_OK(rta, keylen))
-                goto badkey;
+                return -EINVAL;
         if (rta->rta_type != CRYPTO_AUTHENC_KEYA_PARAM)
-                goto badkey;
+                return -EINVAL;
         if (RTA_PAYLOAD(rta) < sizeof(*param))
-                goto badkey;
+                return -EINVAL;
 
         param = RTA_DATA(rta);
-        enckeylen = be32_to_cpu(param->enckeylen);
+        keys->enckeylen = be32_to_cpu(param->enckeylen);
 
         key += RTA_ALIGN(rta->rta_len);
         keylen -= RTA_ALIGN(rta->rta_len);
 
-        if (keylen < enckeylen)
-                goto badkey;
+        if (keylen < keys->enckeylen)
+                return -EINVAL;
 
-        authkeylen = keylen - enckeylen;
+        keys->authkeylen = keylen - keys->enckeylen;
+        keys->authkey = key;
+        keys->enckey = key + keys->authkeylen;
+
+        return 0;
+}
+EXPORT_SYMBOL_GPL(crypto_authenc_extractkeys);
+
+static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
+                                 unsigned int keylen)
+{
+        struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
+        struct crypto_ahash *auth = ctx->auth;
+        struct crypto_ablkcipher *enc = ctx->enc;
+        struct crypto_authenc_keys keys;
+        int err = -EINVAL;
+
+        if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
+                goto badkey;
 
         crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
         crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc) &
                                CRYPTO_TFM_REQ_MASK);
-        err = crypto_ahash_setkey(auth, key, authkeylen);
+        err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen);
         crypto_aead_set_flags(authenc, crypto_ahash_get_flags(auth) &
                               CRYPTO_TFM_RES_MASK);
 
@@ -95,7 +107,7 @@ static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
         crypto_ablkcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
         crypto_ablkcipher_set_flags(enc, crypto_aead_get_flags(authenc) &
                                     CRYPTO_TFM_REQ_MASK);
-        err = crypto_ablkcipher_setkey(enc, key + authkeylen, enckeylen);
+        err = crypto_ablkcipher_setkey(enc, keys.enckey, keys.enckeylen);
         crypto_aead_set_flags(authenc, crypto_ablkcipher_get_flags(enc) &
                               CRYPTO_TFM_RES_MASK);
 
@@ -188,7 +200,7 @@ static void authenc_verify_ahash_update_done(struct crypto_async_request *areq,
         scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
                                  authsize, 0);
 
-        err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
+        err = crypto_memneq(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
         if (err)
                 goto out;
 
@@ -227,7 +239,7 @@ static void authenc_verify_ahash_done(struct crypto_async_request *areq,
         scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
                                  authsize, 0);
 
-        err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
+        err = crypto_memneq(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
         if (err)
                 goto out;
 
@@ -462,7 +474,7 @@ static int crypto_authenc_verify(struct aead_request *req,
         ihash = ohash + authsize;
         scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
                                  authsize, 0);
-        return memcmp(ihash, ohash, authsize) ? -EBADMSG : 0;
+        return crypto_memneq(ihash, ohash, authsize) ? -EBADMSG : 0;
 }
 
 static int crypto_authenc_iverify(struct aead_request *req, u8 *iv,
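
The newly exported crypto_authenc_extractkeys() is what the "Simplify key parsing" driver commits in this pull build on: the rtattr layout of authenc() keys is parsed in exactly one place, and drivers only deal with the pointers and lengths in struct crypto_authenc_keys. A minimal driver-side sketch is shown below; the mydrv_* names, key-size limits, context layout and includes are illustrative assumptions, not part of this patch.

#include <linux/crypto.h>
#include <linux/string.h>
#include <crypto/aead.h>
#include <crypto/authenc.h>

#define MYDRV_MAX_AUTH_KEY_SIZE 64      /* assumed hardware limits */
#define MYDRV_MAX_ENC_KEY_SIZE  32

struct mydrv_ctx {
        u8 authkey[MYDRV_MAX_AUTH_KEY_SIZE];
        u8 enckey[MYDRV_MAX_ENC_KEY_SIZE];
        unsigned int authkeylen;
        unsigned int enckeylen;
};

static int mydrv_aead_setkey(struct crypto_aead *aead, const u8 *key,
                             unsigned int keylen)
{
        struct mydrv_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_authenc_keys keys;

        /* One call replaces the open-coded rtattr parsing removed above. */
        if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
                goto badkey;

        if (keys.authkeylen > sizeof(ctx->authkey) ||
            keys.enckeylen > sizeof(ctx->enckey))
                goto badkey;

        memcpy(ctx->authkey, keys.authkey, keys.authkeylen);
        memcpy(ctx->enckey, keys.enckey, keys.enckeylen);
        ctx->authkeylen = keys.authkeylen;
        ctx->enckeylen = keys.enckeylen;
        return 0;

badkey:
        crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
        return -EINVAL;
}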
diff --git a/crypto/authencesn.c b/crypto/authencesn.c
index ab53762fc309..4be0dd4373a9 100644
--- a/crypto/authencesn.c
+++ b/crypto/authencesn.c
@@ -59,37 +59,19 @@ static void authenc_esn_request_complete(struct aead_request *req, int err)
 static int crypto_authenc_esn_setkey(struct crypto_aead *authenc_esn, const u8 *key,
                                      unsigned int keylen)
 {
-        unsigned int authkeylen;
-        unsigned int enckeylen;
         struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
         struct crypto_ahash *auth = ctx->auth;
         struct crypto_ablkcipher *enc = ctx->enc;
-        struct rtattr *rta = (void *)key;
-        struct crypto_authenc_key_param *param;
+        struct crypto_authenc_keys keys;
         int err = -EINVAL;
 
-        if (!RTA_OK(rta, keylen))
+        if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
                 goto badkey;
-        if (rta->rta_type != CRYPTO_AUTHENC_KEYA_PARAM)
-                goto badkey;
-        if (RTA_PAYLOAD(rta) < sizeof(*param))
-                goto badkey;
-
-        param = RTA_DATA(rta);
-        enckeylen = be32_to_cpu(param->enckeylen);
-
-        key += RTA_ALIGN(rta->rta_len);
-        keylen -= RTA_ALIGN(rta->rta_len);
-
-        if (keylen < enckeylen)
-                goto badkey;
-
-        authkeylen = keylen - enckeylen;
 
         crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
         crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc_esn) &
                                CRYPTO_TFM_REQ_MASK);
-        err = crypto_ahash_setkey(auth, key, authkeylen);
+        err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen);
         crypto_aead_set_flags(authenc_esn, crypto_ahash_get_flags(auth) &
                               CRYPTO_TFM_RES_MASK);
 
@@ -99,7 +81,7 @@ static int crypto_authenc_esn_setkey(struct crypto_aead *authenc_esn, const u8 *
         crypto_ablkcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
         crypto_ablkcipher_set_flags(enc, crypto_aead_get_flags(authenc_esn) &
                                     CRYPTO_TFM_REQ_MASK);
-        err = crypto_ablkcipher_setkey(enc, key + authkeylen, enckeylen);
+        err = crypto_ablkcipher_setkey(enc, keys.enckey, keys.enckeylen);
         crypto_aead_set_flags(authenc_esn, crypto_ablkcipher_get_flags(enc) &
                               CRYPTO_TFM_RES_MASK);
 
@@ -247,7 +229,7 @@ static void authenc_esn_verify_ahash_update_done(struct crypto_async_request *ar
         scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
                                  authsize, 0);
 
-        err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
+        err = crypto_memneq(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
         if (err)
                 goto out;
 
@@ -296,7 +278,7 @@ static void authenc_esn_verify_ahash_update_done2(struct crypto_async_request *a
         scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
                                  authsize, 0);
 
-        err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
+        err = crypto_memneq(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
         if (err)
                 goto out;
 
@@ -336,7 +318,7 @@ static void authenc_esn_verify_ahash_done(struct crypto_async_request *areq,
         scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
                                  authsize, 0);
 
-        err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
+        err = crypto_memneq(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
         if (err)
                 goto out;
 
@@ -568,7 +550,7 @@ static int crypto_authenc_esn_verify(struct aead_request *req)
         ihash = ohash + authsize;
         scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
                                  authsize, 0);
-        return memcmp(ihash, ohash, authsize) ? -EBADMSG : 0;
+        return crypto_memneq(ihash, ohash, authsize) ? -EBADMSG : 0;
 }
 
 static int crypto_authenc_esn_iverify(struct aead_request *req, u8 *iv,
diff --git a/crypto/ccm.c b/crypto/ccm.c
index 499c91717d93..3e05499d183a 100644
--- a/crypto/ccm.c
+++ b/crypto/ccm.c
@@ -363,7 +363,7 @@ static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
 
         if (!err) {
                 err = crypto_ccm_auth(req, req->dst, cryptlen);
-                if (!err && memcmp(pctx->auth_tag, pctx->odata, authsize))
+                if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
                         err = -EBADMSG;
         }
         aead_request_complete(req, err);
@@ -422,7 +422,7 @@ static int crypto_ccm_decrypt(struct aead_request *req)
                 return err;
 
         /* verify */
-        if (memcmp(authtag, odata, authsize))
+        if (crypto_memneq(authtag, odata, authsize))
                 return -EBADMSG;
 
         return err;
diff --git a/crypto/gcm.c b/crypto/gcm.c
index 43e1fb05ea54..b4f017939004 100644
--- a/crypto/gcm.c
+++ b/crypto/gcm.c
@@ -582,7 +582,7 @@ static int crypto_gcm_verify(struct aead_request *req,
 
         crypto_xor(auth_tag, iauth_tag, 16);
         scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
-        return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
+        return crypto_memneq(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
 }
 
 static void gcm_decrypt_done(struct crypto_async_request *areq, int err)
diff --git a/crypto/memneq.c b/crypto/memneq.c
new file mode 100644
index 000000000000..cd0162221c14
--- /dev/null
+++ b/crypto/memneq.c
@@ -0,0 +1,138 @@
+/*
+ * Constant-time equality testing of memory regions.
+ *
+ * Authors:
+ *
+ *   James Yonan <james@openvpn.net>
+ *   Daniel Borkmann <dborkman@redhat.com>
+ *
+ * This file is provided under a dual BSD/GPLv2 license.  When using or
+ * redistributing this file, you may do so under either license.
+ *
+ * GPL LICENSE SUMMARY
+ *
+ * Copyright(c) 2013 OpenVPN Technologies, Inc. All rights reserved.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of version 2 of the GNU General Public License as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ * The full GNU General Public License is included in this distribution
+ * in the file called LICENSE.GPL.
+ *
+ * BSD LICENSE
+ *
+ * Copyright(c) 2013 OpenVPN Technologies, Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ *   * Redistributions of source code must retain the above copyright
+ *     notice, this list of conditions and the following disclaimer.
+ *   * Redistributions in binary form must reproduce the above copyright
+ *     notice, this list of conditions and the following disclaimer in
+ *     the documentation and/or other materials provided with the
+ *     distribution.
+ *   * Neither the name of OpenVPN Technologies nor the names of its
+ *     contributors may be used to endorse or promote products derived
+ *     from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <crypto/algapi.h>
+
+#ifndef __HAVE_ARCH_CRYPTO_MEMNEQ
+
+/* Generic path for arbitrary size */
+static inline unsigned long
+__crypto_memneq_generic(const void *a, const void *b, size_t size)
+{
+        unsigned long neq = 0;
+
+#if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)
+        while (size >= sizeof(unsigned long)) {
+                neq |= *(unsigned long *)a ^ *(unsigned long *)b;
+                a += sizeof(unsigned long);
+                b += sizeof(unsigned long);
+                size -= sizeof(unsigned long);
+        }
+#endif /* CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS */
+        while (size > 0) {
+                neq |= *(unsigned char *)a ^ *(unsigned char *)b;
+                a += 1;
+                b += 1;
+                size -= 1;
+        }
+        return neq;
+}
+
+/* Loop-free fast-path for frequently used 16-byte size */
+static inline unsigned long __crypto_memneq_16(const void *a, const void *b)
+{
+#ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
+        if (sizeof(unsigned long) == 8)
+                return ((*(unsigned long *)(a)   ^ *(unsigned long *)(b))
+                        | (*(unsigned long *)(a+8) ^ *(unsigned long *)(b+8)));
+        else if (sizeof(unsigned int) == 4)
+                return ((*(unsigned int *)(a)    ^ *(unsigned int *)(b))
+                        | (*(unsigned int *)(a+4)  ^ *(unsigned int *)(b+4))
+                        | (*(unsigned int *)(a+8)  ^ *(unsigned int *)(b+8))
+                        | (*(unsigned int *)(a+12) ^ *(unsigned int *)(b+12)));
+        else
+#endif /* CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS */
+                return ((*(unsigned char *)(a)    ^ *(unsigned char *)(b))
+                        | (*(unsigned char *)(a+1)  ^ *(unsigned char *)(b+1))
+                        | (*(unsigned char *)(a+2)  ^ *(unsigned char *)(b+2))
+                        | (*(unsigned char *)(a+3)  ^ *(unsigned char *)(b+3))
+                        | (*(unsigned char *)(a+4)  ^ *(unsigned char *)(b+4))
+                        | (*(unsigned char *)(a+5)  ^ *(unsigned char *)(b+5))
+                        | (*(unsigned char *)(a+6)  ^ *(unsigned char *)(b+6))
+                        | (*(unsigned char *)(a+7)  ^ *(unsigned char *)(b+7))
+                        | (*(unsigned char *)(a+8)  ^ *(unsigned char *)(b+8))
+                        | (*(unsigned char *)(a+9)  ^ *(unsigned char *)(b+9))
+                        | (*(unsigned char *)(a+10) ^ *(unsigned char *)(b+10))
+                        | (*(unsigned char *)(a+11) ^ *(unsigned char *)(b+11))
+                        | (*(unsigned char *)(a+12) ^ *(unsigned char *)(b+12))
+                        | (*(unsigned char *)(a+13) ^ *(unsigned char *)(b+13))
+                        | (*(unsigned char *)(a+14) ^ *(unsigned char *)(b+14))
+                        | (*(unsigned char *)(a+15) ^ *(unsigned char *)(b+15)));
+}
+
+/* Compare two areas of memory without leaking timing information,
+ * and with special optimizations for common sizes.  Users should
+ * not call this function directly, but should instead use
+ * crypto_memneq defined in crypto/algapi.h.
+ */
+noinline unsigned long __crypto_memneq(const void *a, const void *b,
+                                       size_t size)
+{
+        switch (size) {
+        case 16:
+                return __crypto_memneq_16(a, b);
+        default:
+                return __crypto_memneq_generic(a, b, size);
+        }
+}
+EXPORT_SYMBOL(__crypto_memneq);
+
+#endif /* __HAVE_ARCH_CRYPTO_MEMNEQ */
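
All of the call-site conversions above follow the same pattern. As a reminder of the intended usage: crypto_memneq() returns zero when the two regions are equal and non-zero otherwise, and unlike memcmp() it does not bail out at the first differing byte. The helper below is purely illustrative (example_check_tag is not part of this patch).

#include <linux/errno.h>
#include <linux/types.h>
#include <crypto/algapi.h>      /* declares crypto_memneq() */

/* Constant-time authentication-tag check: every byte is examined regardless
 * of where a mismatch occurs, so the comparison's run time does not reveal
 * how much of the tag matched. */
static int example_check_tag(const u8 *computed, const u8 *received,
                             unsigned int authsize)
{
        return crypto_memneq(computed, received, authsize) ? -EBADMSG : 0;
}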