-rw-r--r--   arch/x86/crypto/Makefile                      3
-rw-r--r--   arch/x86/crypto/ghash-clmulni-intel_asm.S   157
-rw-r--r--   arch/x86/crypto/ghash-clmulni-intel_glue.c  333
-rw-r--r--   arch/x86/include/asm/cpufeature.h             1
-rw-r--r--   crypto/Kconfig                                8
-rw-r--r--   crypto/cryptd.c                               7
-rw-r--r--   include/crypto/cryptd.h                       1
7 files changed, 510 insertions, 0 deletions
diff --git a/arch/x86/crypto/Makefile b/arch/x86/crypto/Makefile
index cfb0010fa940..1a58ad89fdf7 100644
--- a/arch/x86/crypto/Makefile
+++ b/arch/x86/crypto/Makefile
@@ -12,6 +12,7 @@ obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o
 obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o
 obj-$(CONFIG_CRYPTO_SALSA20_X86_64) += salsa20-x86_64.o
 obj-$(CONFIG_CRYPTO_AES_NI_INTEL) += aesni-intel.o
+obj-$(CONFIG_CRYPTO_GHASH_CLMUL_NI_INTEL) += ghash-clmulni-intel.o
 
 obj-$(CONFIG_CRYPTO_CRC32C_INTEL) += crc32c-intel.o
 
@@ -24,3 +25,5 @@ twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_glue.o
 salsa20-x86_64-y := salsa20-x86_64-asm_64.o salsa20_glue.o
 
 aesni-intel-y := aesni-intel_asm.o aesni-intel_glue.o
+
+ghash-clmulni-intel-y := ghash-clmulni-intel_asm.o ghash-clmulni-intel_glue.o
diff --git a/arch/x86/crypto/ghash-clmulni-intel_asm.S b/arch/x86/crypto/ghash-clmulni-intel_asm.S
new file mode 100644
index 000000000000..b9e787a511da
--- /dev/null
+++ b/arch/x86/crypto/ghash-clmulni-intel_asm.S
@@ -0,0 +1,157 @@
+/*
+ * Accelerated GHASH implementation with Intel PCLMULQDQ-NI
+ * instructions. This file contains accelerated part of ghash
+ * implementation. More information about PCLMULQDQ can be found at:
+ *
+ * http://software.intel.com/en-us/articles/carry-less-multiplication-and-its-usage-for-computing-the-gcm-mode/
+ *
+ * Copyright (c) 2009 Intel Corp.
+ *   Author: Huang Ying <ying.huang@intel.com>
+ *           Vinodh Gopal
+ *           Erdinc Ozturk
+ *           Deniz Karakoyunlu
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 as published
+ * by the Free Software Foundation.
+ */
+
+#include <linux/linkage.h>
+
+.align 16
+.Lbswap_mask:
+	.octa 0x000102030405060708090a0b0c0d0e0f
+.Lpoly:
+	.octa 0xc2000000000000000000000000000001
+.Ltwo_one:
+	.octa 0x00000001000000000000000000000001
+
+#define DATA	%xmm0
+#define SHASH	%xmm1
+#define T1	%xmm2
+#define T2	%xmm3
+#define T3	%xmm4
+#define BSWAP	%xmm5
+#define IN1	%xmm6
+
+.text
+
+/*
+ * __clmul_gf128mul_ble:	internal ABI
+ * input:
+ *	DATA:	operand1
+ *	SHASH:	operand2, hash_key << 1 mod poly
+ * output:
+ *	DATA:	operand1 * operand2 mod poly
+ * changed:
+ *	T1
+ *	T2
+ *	T3
+ */
+__clmul_gf128mul_ble:
+	movaps DATA, T1
+	pshufd $0b01001110, DATA, T2
+	pshufd $0b01001110, SHASH, T3
+	pxor DATA, T2
+	pxor SHASH, T3
+
+	# pclmulqdq $0x00, SHASH, DATA	# DATA = a0 * b0
+	.byte 0x66, 0x0f, 0x3a, 0x44, 0xc1, 0x00
+	# pclmulqdq $0x11, SHASH, T1	# T1 = a1 * b1
+	.byte 0x66, 0x0f, 0x3a, 0x44, 0xd1, 0x11
+	# pclmulqdq $0x00, T3, T2	# T2 = (a1 + a0) * (b1 + b0)
+	.byte 0x66, 0x0f, 0x3a, 0x44, 0xdc, 0x00
+	pxor DATA, T2
+	pxor T1, T2			# T2 = a0 * b1 + a1 * b0
+
+	movaps T2, T3
+	pslldq $8, T3
+	psrldq $8, T2
+	pxor T3, DATA
+	pxor T2, T1			# <T1:DATA> is result of
+					# carry-less multiplication
+
+	# first phase of the reduction
+	movaps DATA, T3
+	psllq $1, T3
+	pxor DATA, T3
+	psllq $5, T3
+	pxor DATA, T3
+	psllq $57, T3
+	movaps T3, T2
+	pslldq $8, T2
+	psrldq $8, T3
+	pxor T2, DATA
+	pxor T3, T1
+
+	# second phase of the reduction
+	movaps DATA, T2
+	psrlq $5, T2
+	pxor DATA, T2
+	psrlq $1, T2
+	pxor DATA, T2
+	psrlq $1, T2
+	pxor T2, T1
+	pxor T1, DATA
+	ret
+
+/* void clmul_ghash_mul(char *dst, const be128 *shash) */
+ENTRY(clmul_ghash_mul)
+	movups (%rdi), DATA
+	movups (%rsi), SHASH
+	movaps .Lbswap_mask, BSWAP
+	pshufb BSWAP, DATA
+	call __clmul_gf128mul_ble
+	pshufb BSWAP, DATA
+	movups DATA, (%rdi)
+	ret
+
+/*
+ * void clmul_ghash_update(char *dst, const char *src, unsigned int srclen,
+ *			   const be128 *shash);
+ */
+ENTRY(clmul_ghash_update)
+	cmp $16, %rdx
+	jb .Lupdate_just_ret	# check length
+	movaps .Lbswap_mask, BSWAP
+	movups (%rdi), DATA
+	movups (%rcx), SHASH
+	pshufb BSWAP, DATA
+.align 4
+.Lupdate_loop:
+	movups (%rsi), IN1
+	pshufb BSWAP, IN1
+	pxor IN1, DATA
+	call __clmul_gf128mul_ble
+	sub $16, %rdx
+	add $16, %rsi
+	cmp $16, %rdx
+	jge .Lupdate_loop
+	pshufb BSWAP, DATA
+	movups DATA, (%rdi)
+.Lupdate_just_ret:
+	ret
+
+/*
+ * void clmul_ghash_setkey(be128 *shash, const u8 *key);
+ *
+ * Calculate hash_key << 1 mod poly
+ */
+ENTRY(clmul_ghash_setkey)
+	movaps .Lbswap_mask, BSWAP
+	movups (%rsi), %xmm0
+	pshufb BSWAP, %xmm0
+	movaps %xmm0, %xmm1
+	psllq $1, %xmm0
+	psrlq $63, %xmm1
+	movaps %xmm1, %xmm2
+	pslldq $8, %xmm1
+	psrldq $8, %xmm2
+	por %xmm1, %xmm0
+	# reduction
+	pshufd $0b00100100, %xmm2, %xmm1
+	pcmpeqd .Ltwo_one, %xmm1
+	pand .Lpoly, %xmm1
+	pxor %xmm1, %xmm0
+	movups %xmm0, (%rdi)
+	ret
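The three commented pclmulqdq instructions in __clmul_gf128mul_ble implement a Karatsuba-style carry-less multiplication: each 128-bit operand is split into 64-bit halves, three 64x64 carry-less products are formed (a0*b0, a1*b1 and (a0+a1)*(b0+b1), with addition being XOR in GF(2)), and the middle term a0*b1+a1*b0 is recovered by XORing the first two products into the third. The standalone C sketch below (illustration only, not part of the patch; struct u128, clmul64() and clmul128() are hypothetical names, with clmul64() standing in bit-by-bit for the PCLMULQDQ instruction) shows the same three-multiply structure, ignoring the bit-reflected byte order and the polynomial reduction that the assembly performs afterwards:

	#include <stdint.h>

	struct u128 { uint64_t lo, hi; };

	/* carry-less (XOR) 64x64 -> 128-bit multiply, bit by bit */
	static struct u128 clmul64(uint64_t a, uint64_t b)
	{
		struct u128 r = { 0, 0 };
		int i;

		for (i = 0; i < 64; i++) {
			if (b & ((uint64_t)1 << i)) {
				r.lo ^= a << i;
				r.hi ^= i ? a >> (64 - i) : 0;
			}
		}
		return r;
	}

	/* 128x128 -> 256-bit carry-less product via Karatsuba (3 multiplies) */
	static void clmul128(struct u128 a, struct u128 b,
			     struct u128 *lo, struct u128 *hi)
	{
		struct u128 p0 = clmul64(a.lo, b.lo);	/* a0 * b0 */
		struct u128 p1 = clmul64(a.hi, b.hi);	/* a1 * b1 */
		struct u128 pm = clmul64(a.lo ^ a.hi, b.lo ^ b.hi);

		/* middle term = (a0+a1)*(b0+b1) + a0*b0 + a1*b1 = a0*b1 + a1*b0 */
		pm.lo ^= p0.lo ^ p1.lo;
		pm.hi ^= p0.hi ^ p1.hi;

		/* assemble the 256-bit result: p1*x^128 + pm*x^64 + p0 */
		lo->lo = p0.lo;
		lo->hi = p0.hi ^ pm.lo;
		hi->lo = p1.lo ^ pm.hi;
		hi->hi = p1.hi;
	}

In the assembly, the two "phases of the reduction" that follow then fold this 256-bit product back to 128 bits modulo the GHASH field polynomial x^128 + x^7 + x^2 + x + 1 (the .Lpoly constant holds its bit-reflected form), so DATA leaves the routine as operand1 * operand2 mod poly, exactly as the ABI comment states.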
diff --git a/arch/x86/crypto/ghash-clmulni-intel_glue.c b/arch/x86/crypto/ghash-clmulni-intel_glue.c
new file mode 100644
index 000000000000..65d409644d72
--- /dev/null
+++ b/arch/x86/crypto/ghash-clmulni-intel_glue.c
@@ -0,0 +1,333 @@
+/*
+ * Accelerated GHASH implementation with Intel PCLMULQDQ-NI
+ * instructions. This file contains glue code.
+ *
+ * Copyright (c) 2009 Intel Corp.
+ *   Author: Huang Ying <ying.huang@intel.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 as published
+ * by the Free Software Foundation.
+ */
+
+#include <linux/module.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/crypto.h>
+#include <crypto/algapi.h>
+#include <crypto/cryptd.h>
+#include <crypto/gf128mul.h>
+#include <crypto/internal/hash.h>
+#include <asm/i387.h>
+
+#define GHASH_BLOCK_SIZE	16
+#define GHASH_DIGEST_SIZE	16
+
+void clmul_ghash_mul(char *dst, const be128 *shash);
+
+void clmul_ghash_update(char *dst, const char *src, unsigned int srclen,
+			const be128 *shash);
+
+void clmul_ghash_setkey(be128 *shash, const u8 *key);
+
+struct ghash_async_ctx {
+	struct cryptd_ahash *cryptd_tfm;
+};
+
+struct ghash_ctx {
+	be128 shash;
+};
+
+struct ghash_desc_ctx {
+	u8 buffer[GHASH_BLOCK_SIZE];
+	u32 bytes;
+};
+
+static int ghash_init(struct shash_desc *desc)
+{
+	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
+
+	memset(dctx, 0, sizeof(*dctx));
+
+	return 0;
+}
+
+static int ghash_setkey(struct crypto_shash *tfm,
+			const u8 *key, unsigned int keylen)
+{
+	struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
+
+	if (keylen != GHASH_BLOCK_SIZE) {
+		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		return -EINVAL;
+	}
+
+	clmul_ghash_setkey(&ctx->shash, key);
+
+	return 0;
+}
+
+static int ghash_update(struct shash_desc *desc,
+			const u8 *src, unsigned int srclen)
+{
+	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
+	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
+	u8 *dst = dctx->buffer;
+
+	kernel_fpu_begin();
+	if (dctx->bytes) {
+		int n = min(srclen, dctx->bytes);
+		u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
+
+		dctx->bytes -= n;
+		srclen -= n;
+
+		while (n--)
+			*pos++ ^= *src++;
+
+		if (!dctx->bytes)
+			clmul_ghash_mul(dst, &ctx->shash);
+	}
+
+	clmul_ghash_update(dst, src, srclen, &ctx->shash);
+	kernel_fpu_end();
+
+	if (srclen & 0xf) {
+		src += srclen - (srclen & 0xf);
+		srclen &= 0xf;
+		dctx->bytes = GHASH_BLOCK_SIZE - srclen;
+		while (srclen--)
+			*dst++ ^= *src++;
+	}
+
+	return 0;
+}
+
+static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
+{
+	u8 *dst = dctx->buffer;
+
+	if (dctx->bytes) {
+		u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
+
+		while (dctx->bytes--)
+			*tmp++ ^= 0;
+
+		kernel_fpu_begin();
+		clmul_ghash_mul(dst, &ctx->shash);
+		kernel_fpu_end();
+	}
+
+	dctx->bytes = 0;
+}
+
+static int ghash_final(struct shash_desc *desc, u8 *dst)
+{
+	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
+	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
+	u8 *buf = dctx->buffer;
+
+	ghash_flush(ctx, dctx);
+	memcpy(dst, buf, GHASH_BLOCK_SIZE);
+
+	return 0;
+}
+
+static struct shash_alg ghash_alg = {
+	.digestsize	= GHASH_DIGEST_SIZE,
+	.init		= ghash_init,
+	.update		= ghash_update,
+	.final		= ghash_final,
+	.setkey		= ghash_setkey,
+	.descsize	= sizeof(struct ghash_desc_ctx),
+	.base		= {
+		.cra_name		= "__ghash",
+		.cra_driver_name	= "__ghash-pclmulqdqni",
+		.cra_priority		= 0,
+		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
+		.cra_blocksize		= GHASH_BLOCK_SIZE,
+		.cra_ctxsize		= sizeof(struct ghash_ctx),
+		.cra_module		= THIS_MODULE,
+		.cra_list		= LIST_HEAD_INIT(ghash_alg.base.cra_list),
+	},
+};
+
+static int ghash_async_init(struct ahash_request *req)
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
+	struct ahash_request *cryptd_req = ahash_request_ctx(req);
+	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
+
+	if (irq_fpu_usable()) {
+		memcpy(cryptd_req, req, sizeof(*req));
+		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
+		return crypto_ahash_init(cryptd_req);
+	} else {
+		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
+		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);
+
+		desc->tfm = child;
+		desc->flags = req->base.flags;
+		return crypto_shash_init(desc);
+	}
+}
+
+static int ghash_async_update(struct ahash_request *req)
+{
+	struct ahash_request *cryptd_req = ahash_request_ctx(req);
+
+	if (irq_fpu_usable()) {
+		struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+		struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
+		struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
+
+		memcpy(cryptd_req, req, sizeof(*req));
+		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
+		return crypto_ahash_update(cryptd_req);
+	} else {
+		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
+		return shash_ahash_update(req, desc);
+	}
+}
+
+static int ghash_async_final(struct ahash_request *req)
+{
+	struct ahash_request *cryptd_req = ahash_request_ctx(req);
+
+	if (irq_fpu_usable()) {
+		struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+		struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
+		struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
+
+		memcpy(cryptd_req, req, sizeof(*req));
+		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
+		return crypto_ahash_final(cryptd_req);
+	} else {
+		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
+		return crypto_shash_final(desc, req->result);
+	}
+}
+
+static int ghash_async_digest(struct ahash_request *req)
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
+	struct ahash_request *cryptd_req = ahash_request_ctx(req);
+	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
+
+	if (irq_fpu_usable()) {
+		memcpy(cryptd_req, req, sizeof(*req));
+		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
+		return crypto_ahash_digest(cryptd_req);
+	} else {
+		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
+		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);
+
+		desc->tfm = child;
+		desc->flags = req->base.flags;
+		return shash_ahash_digest(req, desc);
+	}
+}
+
+static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
+			      unsigned int keylen)
+{
+	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
+	struct crypto_ahash *child = &ctx->cryptd_tfm->base;
+	int err;
+
+	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
+			       & CRYPTO_TFM_REQ_MASK);
+	err = crypto_ahash_setkey(child, key, keylen);
+	crypto_ahash_set_flags(tfm, crypto_ahash_get_flags(child)
+			       & CRYPTO_TFM_RES_MASK);
+
+	return err;
+}
+
+static int ghash_async_init_tfm(struct crypto_tfm *tfm)
+{
+	struct cryptd_ahash *cryptd_tfm;
+	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni", 0, 0);
+	if (IS_ERR(cryptd_tfm))
+		return PTR_ERR(cryptd_tfm);
+	ctx->cryptd_tfm = cryptd_tfm;
+	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
+				 sizeof(struct ahash_request) +
+				 crypto_ahash_reqsize(&cryptd_tfm->base));
+
+	return 0;
+}
+
+static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
+{
+	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	cryptd_free_ahash(ctx->cryptd_tfm);
+}
+
+static struct ahash_alg ghash_async_alg = {
+	.init		= ghash_async_init,
+	.update		= ghash_async_update,
+	.final		= ghash_async_final,
+	.setkey		= ghash_async_setkey,
+	.digest		= ghash_async_digest,
+	.halg = {
+		.digestsize	= GHASH_DIGEST_SIZE,
+		.base = {
+			.cra_name		= "ghash",
+			.cra_driver_name	= "ghash-clmulni",
+			.cra_priority		= 400,
+			.cra_flags		= CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
+			.cra_blocksize		= GHASH_BLOCK_SIZE,
+			.cra_type		= &crypto_ahash_type,
+			.cra_module		= THIS_MODULE,
+			.cra_list		= LIST_HEAD_INIT(ghash_async_alg.halg.base.cra_list),
+			.cra_init		= ghash_async_init_tfm,
+			.cra_exit		= ghash_async_exit_tfm,
+		},
+	},
+};
+
+static int __init ghash_pclmulqdqni_mod_init(void)
+{
+	int err;
+
+	if (!cpu_has_pclmulqdq) {
+		printk(KERN_INFO "Intel PCLMULQDQ-NI instructions are not"
+		       " detected.\n");
+		return -ENODEV;
+	}
+
+	err = crypto_register_shash(&ghash_alg);
+	if (err)
+		goto err_out;
+	err = crypto_register_ahash(&ghash_async_alg);
+	if (err)
+		goto err_shash;
+
+	return 0;
+
+err_shash:
+	crypto_unregister_shash(&ghash_alg);
+err_out:
+	return err;
+}
+
+static void __exit ghash_pclmulqdqni_mod_exit(void)
+{
+	crypto_unregister_ahash(&ghash_async_alg);
+	crypto_unregister_shash(&ghash_alg);
+}
+
+module_init(ghash_pclmulqdqni_mod_init);
+module_exit(ghash_pclmulqdqni_mod_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("GHASH Message Digest Algorithm, "
+		   "accelerated by PCLMULQDQ-NI");
+MODULE_ALIAS("ghash");
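The glue code registers two algorithms: an internal synchronous "__ghash" shash that touches the FPU directly, and the user-visible asynchronous "ghash" ahash (priority 400) that routes through cryptd whenever the FPU is not usable in the current context, which is what GCM and other ahash-based callers select. As a rough usage sketch (not part of the patch; the function name, fixed 16-byte key and abbreviated error handling are illustrative, and a plain shash lookup for "ghash" may resolve to a different GHASH provider than this driver, since the accelerated path registered here is exposed through the ahash interface), a one-shot digest over a linear buffer could be computed like this:

	#include <crypto/hash.h>
	#include <linux/err.h>
	#include <linux/slab.h>

	static int ghash_digest_example(const u8 *key, const u8 *data,
					unsigned int len, u8 *out)
	{
		struct crypto_shash *tfm;
		struct shash_desc *desc;
		int err;

		/* ask the crypto API for whichever GHASH shash it prefers */
		tfm = crypto_alloc_shash("ghash", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		/* GHASH takes a 16-byte hash key H */
		err = crypto_shash_setkey(tfm, key, 16);
		if (err)
			goto out_free_tfm;

		/* descriptor size depends on the selected implementation */
		desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
			       GFP_KERNEL);
		if (!desc) {
			err = -ENOMEM;
			goto out_free_tfm;
		}
		desc->tfm = tfm;
		desc->flags = 0;

		/* init + update + final over the whole buffer */
		err = crypto_shash_digest(desc, data, len, out);

		kfree(desc);
	out_free_tfm:
		crypto_free_shash(tfm);
		return err;
	}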
diff --git a/arch/x86/include/asm/cpufeature.h b/arch/x86/include/asm/cpufeature.h
index 9cfc88b97742..613700f27a4a 100644
--- a/arch/x86/include/asm/cpufeature.h
+++ b/arch/x86/include/asm/cpufeature.h
@@ -248,6 +248,7 @@ extern const char * const x86_power_flags[32];
 #define cpu_has_x2apic		boot_cpu_has(X86_FEATURE_X2APIC)
 #define cpu_has_xsave		boot_cpu_has(X86_FEATURE_XSAVE)
 #define cpu_has_hypervisor	boot_cpu_has(X86_FEATURE_HYPERVISOR)
+#define cpu_has_pclmulqdq	boot_cpu_has(X86_FEATURE_PCLMULQDQ)
 
 #if defined(CONFIG_X86_INVLPG) || defined(CONFIG_X86_64)
 # define cpu_has_invlpg		1
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 26b5dd0cb564..fd6871102b60 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -440,6 +440,14 @@ config CRYPTO_WP512
 	  See also:
 	  <http://planeta.terra.com.br/informatica/paulobarreto/WhirlpoolPage.html>
 
+config CRYPTO_GHASH_CLMUL_NI_INTEL
+	tristate "GHASH digest algorithm (CLMUL-NI accelerated)"
+	select CRYPTO_SHASH
+	select CRYPTO_CRYPTD
+	help
+	  GHASH is a message digest algorithm for GCM (Galois/Counter Mode).
+	  The implementation is accelerated by Intel's CLMUL-NI (PCLMULQDQ)
+	  instructions.
+
 comment "Ciphers"
 
 config CRYPTO_AES
diff --git a/crypto/cryptd.c b/crypto/cryptd.c
index 35335825a4ef..f8ae0d94a647 100644
--- a/crypto/cryptd.c
+++ b/crypto/cryptd.c
@@ -711,6 +711,13 @@ struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
 }
 EXPORT_SYMBOL_GPL(cryptd_ahash_child);
 
+struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
+{
+	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
+	return &rctx->desc;
+}
+EXPORT_SYMBOL_GPL(cryptd_shash_desc);
+
 void cryptd_free_ahash(struct cryptd_ahash *tfm)
 {
 	crypto_free_ahash(&tfm->base);
diff --git a/include/crypto/cryptd.h b/include/crypto/cryptd.h
index 2f65a6e8ea4d..1c96b255017c 100644
--- a/include/crypto/cryptd.h
+++ b/include/crypto/cryptd.h
@@ -39,6 +39,7 @@ static inline struct cryptd_ahash *__cryptd_ahash_cast(
 struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
 					u32 type, u32 mask);
 struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
+struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
 void cryptd_free_ahash(struct cryptd_ahash *tfm);
 
 #endif