author     Huang Ying <ying.huang@intel.com>          2009-08-06 01:34:26 -0400
committer  Herbert Xu <herbert@gondor.apana.org.au>   2009-08-06 01:34:26 -0400
commit     9382d97af586a47dad312765e35c61aa7ad7fcdd (patch)
tree       2866deac562a3858b1aa686461c1c3ce16c46287 /crypto
parent     2cdc6899a88e2b9c6cb82ebd547bf58932d534df (diff)
crypto: gcm - Use GHASH digest algorithm
Remove the dedicated GHASH implementation in GCM, and use the GHASH
digest algorithm instead. This lets GCM pick up a hardware-accelerated
GHASH implementation automatically whenever one is available.

The ahash interface is used instead of shash, because some
hardware-accelerated GHASH implementations need an asynchronous
interface.

Signed-off-by: Huang Ying <ying.huang@intel.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
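[Editor's note] The point about the asynchronous interface is easiest to see from a caller's side. Below is a minimal, hypothetical sketch (not part of the patch) that pushes one 16-byte block through the standalone "ghash" ahash: allocate the transform, set the 16-byte hash key, submit the request, and wait on a completion if the driver completes it asynchronously. This is the same callback/completion pattern gcm.c itself uses; the helper names ghash_one_block, ghash_wait and ghash_done are made up for illustration, while the crypto API calls are the standard kernel ahash interface also used in the patch.

/*
 * Minimal sketch (not from the patch): hash one 16-byte block with the
 * "ghash" ahash, waiting on a completion if the driver is asynchronous.
 * Illustrative only; error paths are kept to a minimum.
 */
#include <crypto/hash.h>
#include <linux/completion.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

struct ghash_wait {
	struct completion done;
	int err;
};

/* Completion callback: async drivers call this when the request finishes. */
static void ghash_done(struct crypto_async_request *req, int err)
{
	struct ghash_wait *w = req->data;

	if (err == -EINPROGRESS)	/* request left the backlog, not done yet */
		return;
	w->err = err;
	complete(&w->done);
}

static int ghash_one_block(const u8 key[16], const u8 block[16], u8 out[16])
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct scatterlist sg;
	struct ghash_wait w = { .err = 0 };
	int err;

	/* Picks up an accelerated ghash driver automatically if one exists. */
	tfm = crypto_alloc_ahash("ghash", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_ahash_setkey(tfm, key, 16);
	if (err)
		goto out_free_tfm;

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	init_completion(&w.done);
	sg_init_one(&sg, block, 16);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   ghash_done, &w);
	ahash_request_set_crypt(req, &sg, out, 16);

	/* May return -EINPROGRESS/-EBUSY from an async (hardware) driver. */
	err = crypto_ahash_digest(req);
	if (err == -EINPROGRESS || err == -EBUSY) {
		wait_for_completion(&w.done);
		err = w.err;
	}

	ahash_request_free(req);
out_free_tfm:
	crypto_free_ahash(tfm);
	return err;
}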
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/Kconfig |   2
-rw-r--r--  crypto/gcm.c   | 580
2 files changed, 408 insertions, 174 deletions
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 0ae170e01051..5105cf15834f 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -157,7 +157,7 @@ config CRYPTO_GCM
 	tristate "GCM/GMAC support"
 	select CRYPTO_CTR
 	select CRYPTO_AEAD
-	select CRYPTO_GF128MUL
+	select CRYPTO_GHASH
 	help
 	  Support for Galois/Counter Mode (GCM) and Galois Message
 	  Authentication Code (GMAC). Required for IPSec.
diff --git a/crypto/gcm.c b/crypto/gcm.c
index e70afd0c73dd..5fc3292483ef 100644
--- a/crypto/gcm.c
+++ b/crypto/gcm.c
@@ -11,7 +11,10 @@
 #include <crypto/gf128mul.h>
 #include <crypto/internal/aead.h>
 #include <crypto/internal/skcipher.h>
+#include <crypto/internal/hash.h>
 #include <crypto/scatterwalk.h>
+#include <crypto/hash.h>
+#include "internal.h"
 #include <linux/completion.h>
 #include <linux/err.h>
 #include <linux/init.h>
@@ -21,11 +24,12 @@
 
 struct gcm_instance_ctx {
 	struct crypto_skcipher_spawn ctr;
+	struct crypto_ahash_spawn ghash;
 };
 
 struct crypto_gcm_ctx {
 	struct crypto_ablkcipher *ctr;
-	struct gf128mul_4k *gf128;
+	struct crypto_ahash *ghash;
 };
 
 struct crypto_rfc4106_ctx {
@@ -34,10 +38,9 @@ struct crypto_rfc4106_ctx {
 };
 
 struct crypto_gcm_ghash_ctx {
-	u32 bytes;
-	u32 flags;
-	struct gf128mul_4k *gf128;
-	u8 buffer[16];
+	unsigned int cryptlen;
+	struct scatterlist *src;
+	crypto_completion_t complete;
 };
 
 struct crypto_gcm_req_priv_ctx {
@@ -45,8 +48,11 @@ struct crypto_gcm_req_priv_ctx {
 	u8 iauth_tag[16];
 	struct scatterlist src[2];
 	struct scatterlist dst[2];
-	struct crypto_gcm_ghash_ctx ghash;
-	struct ablkcipher_request abreq;
+	struct crypto_gcm_ghash_ctx ghash_ctx;
+	union {
+		struct ahash_request ahreq;
+		struct ablkcipher_request abreq;
+	} u;
 };
 
 struct crypto_gcm_setkey_result {
@@ -54,6 +60,8 @@ struct crypto_gcm_setkey_result {
 	struct completion completion;
 };
 
+static void *gcm_zeroes;
+
 static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
 	struct aead_request *req)
 {
@@ -62,113 +70,6 @@ static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
 	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
 }
 
-static void crypto_gcm_ghash_init(struct crypto_gcm_ghash_ctx *ctx, u32 flags,
-				  struct gf128mul_4k *gf128)
-{
-	ctx->bytes = 0;
-	ctx->flags = flags;
-	ctx->gf128 = gf128;
-	memset(ctx->buffer, 0, 16);
-}
-
-static void crypto_gcm_ghash_update(struct crypto_gcm_ghash_ctx *ctx,
-				    const u8 *src, unsigned int srclen)
-{
-	u8 *dst = ctx->buffer;
-
-	if (ctx->bytes) {
-		int n = min(srclen, ctx->bytes);
-		u8 *pos = dst + (16 - ctx->bytes);
-
-		ctx->bytes -= n;
-		srclen -= n;
-
-		while (n--)
-			*pos++ ^= *src++;
-
-		if (!ctx->bytes)
-			gf128mul_4k_lle((be128 *)dst, ctx->gf128);
-	}
-
-	while (srclen >= 16) {
-		crypto_xor(dst, src, 16);
-		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
-		src += 16;
-		srclen -= 16;
-	}
-
-	if (srclen) {
-		ctx->bytes = 16 - srclen;
-		while (srclen--)
-			*dst++ ^= *src++;
-	}
-}
-
-static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
-				       struct scatterlist *sg, int len)
-{
-	struct scatter_walk walk;
-	u8 *src;
-	int n;
-
-	if (!len)
-		return;
-
-	scatterwalk_start(&walk, sg);
-
-	while (len) {
-		n = scatterwalk_clamp(&walk, len);
-
-		if (!n) {
-			scatterwalk_start(&walk, scatterwalk_sg_next(walk.sg));
-			n = scatterwalk_clamp(&walk, len);
-		}
-
-		src = scatterwalk_map(&walk, 0);
-
-		crypto_gcm_ghash_update(ctx, src, n);
-		len -= n;
-
-		scatterwalk_unmap(src, 0);
-		scatterwalk_advance(&walk, n);
-		scatterwalk_done(&walk, 0, len);
-		if (len)
-			crypto_yield(ctx->flags);
-	}
-}
-
-static void crypto_gcm_ghash_flush(struct crypto_gcm_ghash_ctx *ctx)
-{
-	u8 *dst = ctx->buffer;
-
-	if (ctx->bytes) {
-		u8 *tmp = dst + (16 - ctx->bytes);
-
-		while (ctx->bytes--)
-			*tmp++ ^= 0;
-
-		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
-	}
-
-	ctx->bytes = 0;
-}
-
-static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
-				       unsigned int authlen,
-				       unsigned int cryptlen, u8 *dst)
-{
-	u8 *buf = ctx->buffer;
-	u128 lengths;
-
-	lengths.a = cpu_to_be64(authlen * 8);
-	lengths.b = cpu_to_be64(cryptlen * 8);
-
-	crypto_gcm_ghash_flush(ctx);
-	crypto_xor(buf, (u8 *)&lengths, 16);
-	gf128mul_4k_lle((be128 *)buf, ctx->gf128);
-	crypto_xor(dst, buf, 16);
-}
-
 static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
 {
 	struct crypto_gcm_setkey_result *result = req->data;
@@ -184,6 +85,7 @@ static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
 			     unsigned int keylen)
 {
 	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
+	struct crypto_ahash *ghash = ctx->ghash;
 	struct crypto_ablkcipher *ctr = ctx->ctr;
 	struct {
 		be128 hash;
@@ -233,13 +135,12 @@ static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
 	if (err)
 		goto out;
 
-	if (ctx->gf128 != NULL)
-		gf128mul_free_4k(ctx->gf128);
-
-	ctx->gf128 = gf128mul_init_4k_lle(&data->hash);
-
-	if (ctx->gf128 == NULL)
-		err = -ENOMEM;
+	crypto_ahash_clear_flags(ghash, CRYPTO_TFM_REQ_MASK);
+	crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) &
+			       CRYPTO_TFM_REQ_MASK);
+	err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128));
+	crypto_aead_set_flags(aead, crypto_ahash_get_flags(ghash) &
+			      CRYPTO_TFM_RES_MASK);
 
 out:
 	kfree(data);
@@ -272,8 +173,6 @@ static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
 	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
-	u32 flags = req->base.tfm->crt_flags;
-	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
 	struct scatterlist *dst;
 	__be32 counter = cpu_to_be32(1);
 
@@ -296,108 +195,398 @@ static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
 	ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
 				     cryptlen + sizeof(pctx->auth_tag),
 				     req->iv);
+}
+
+static inline unsigned int gcm_remain(unsigned int len)
+{
+	len &= 0xfU;
+	return len ? 16 - len : 0;
+}
+
+static void gcm_hash_len_done(struct crypto_async_request *areq, int err);
+static void gcm_hash_final_done(struct crypto_async_request *areq, int err);
 
-	crypto_gcm_ghash_init(ghash, flags, ctx->gf128);
+static int gcm_hash_update(struct aead_request *req,
+			   struct crypto_gcm_req_priv_ctx *pctx,
+			   crypto_completion_t complete,
+			   struct scatterlist *src,
+			   unsigned int len)
+{
+	struct ahash_request *ahreq = &pctx->u.ahreq;
 
-	crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
-	crypto_gcm_ghash_flush(ghash);
+	ahash_request_set_callback(ahreq, aead_request_flags(req),
+				   complete, req);
+	ahash_request_set_crypt(ahreq, src, NULL, len);
+
+	return crypto_ahash_update(ahreq);
 }
 
-static int crypto_gcm_hash(struct aead_request *req)
+static int gcm_hash_remain(struct aead_request *req,
+			   struct crypto_gcm_req_priv_ctx *pctx,
+			   unsigned int remain,
+			   crypto_completion_t complete)
 {
-	struct crypto_aead *aead = crypto_aead_reqtfm(req);
+	struct ahash_request *ahreq = &pctx->u.ahreq;
+
+	ahash_request_set_callback(ahreq, aead_request_flags(req),
+				   complete, req);
+	sg_init_one(pctx->src, gcm_zeroes, remain);
+	ahash_request_set_crypt(ahreq, pctx->src, NULL, remain);
+
+	return crypto_ahash_update(ahreq);
+}
+
+static int gcm_hash_len(struct aead_request *req,
+			struct crypto_gcm_req_priv_ctx *pctx)
+{
+	struct ahash_request *ahreq = &pctx->u.ahreq;
+	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
+	u128 lengths;
+
+	lengths.a = cpu_to_be64(req->assoclen * 8);
+	lengths.b = cpu_to_be64(gctx->cryptlen * 8);
+	memcpy(pctx->iauth_tag, &lengths, 16);
+	sg_init_one(pctx->src, pctx->iauth_tag, 16);
+	ahash_request_set_callback(ahreq, aead_request_flags(req),
+				   gcm_hash_len_done, req);
+	ahash_request_set_crypt(ahreq, pctx->src,
+				NULL, sizeof(lengths));
+
+	return crypto_ahash_update(ahreq);
+}
+
+static int gcm_hash_final(struct aead_request *req,
+			  struct crypto_gcm_req_priv_ctx *pctx)
+{
+	struct ahash_request *ahreq = &pctx->u.ahreq;
+
+	ahash_request_set_callback(ahreq, aead_request_flags(req),
+				   gcm_hash_final_done, req);
+	ahash_request_set_crypt(ahreq, NULL, pctx->iauth_tag, 0);
+
+	return crypto_ahash_final(ahreq);
+}
+
+static void gcm_hash_final_done(struct crypto_async_request *areq,
+				int err)
+{
+	struct aead_request *req = areq->data;
 	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
-	u8 *auth_tag = pctx->auth_tag;
-	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
+	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
+
+	if (!err)
+		crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16);
 
-	crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen);
-	crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
-				   auth_tag);
+	gctx->complete(areq, err);
+}
+
+static void gcm_hash_len_done(struct crypto_async_request *areq,
+			      int err)
+{
+	struct aead_request *req = areq->data;
+	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+
+	if (!err) {
+		err = gcm_hash_final(req, pctx);
+		if (err == -EINPROGRESS || err == -EBUSY)
+			return;
+	}
+
+	gcm_hash_final_done(areq, err);
+}
+
+static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq,
+				       int err)
+{
+	struct aead_request *req = areq->data;
+	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+
+	if (!err) {
+		err = gcm_hash_len(req, pctx);
+		if (err == -EINPROGRESS || err == -EBUSY)
+			return;
+	}
+
+	gcm_hash_len_done(areq, err);
+}
+
+static void gcm_hash_crypt_done(struct crypto_async_request *areq,
+				int err)
+{
+	struct aead_request *req = areq->data;
+	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
+	unsigned int remain;
+
+	if (!err) {
+		remain = gcm_remain(gctx->cryptlen);
+		BUG_ON(!remain);
+		err = gcm_hash_remain(req, pctx, remain,
+				      gcm_hash_crypt_remain_done);
+		if (err == -EINPROGRESS || err == -EBUSY)
+			return;
+	}
+
+	gcm_hash_crypt_remain_done(areq, err);
+}
+
+static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq,
+				       int err)
+{
+	struct aead_request *req = areq->data;
+	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
+	crypto_completion_t complete;
+	unsigned int remain = 0;
+
+	if (!err && gctx->cryptlen) {
+		remain = gcm_remain(gctx->cryptlen);
+		complete = remain ? gcm_hash_crypt_done :
+			gcm_hash_crypt_remain_done;
+		err = gcm_hash_update(req, pctx, complete,
+				      gctx->src, gctx->cryptlen);
+		if (err == -EINPROGRESS || err == -EBUSY)
+			return;
+	}
+
+	if (remain)
+		gcm_hash_crypt_done(areq, err);
+	else
+		gcm_hash_crypt_remain_done(areq, err);
+}
+
+static void gcm_hash_assoc_done(struct crypto_async_request *areq,
+				int err)
+{
+	struct aead_request *req = areq->data;
+	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+	unsigned int remain;
+
+	if (!err) {
+		remain = gcm_remain(req->assoclen);
+		BUG_ON(!remain);
+		err = gcm_hash_remain(req, pctx, remain,
+				      gcm_hash_assoc_remain_done);
+		if (err == -EINPROGRESS || err == -EBUSY)
+			return;
+	}
+
+	gcm_hash_assoc_remain_done(areq, err);
+}
+
+static void gcm_hash_init_done(struct crypto_async_request *areq,
+			       int err)
+{
+	struct aead_request *req = areq->data;
+	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+	crypto_completion_t complete;
+	unsigned int remain = 0;
+
+	if (!err && req->assoclen) {
+		remain = gcm_remain(req->assoclen);
+		complete = remain ? gcm_hash_assoc_done :
+			gcm_hash_assoc_remain_done;
+		err = gcm_hash_update(req, pctx, complete,
+				      req->assoc, req->assoclen);
+		if (err == -EINPROGRESS || err == -EBUSY)
+			return;
+	}
+
+	if (remain)
+		gcm_hash_assoc_done(areq, err);
+	else
+		gcm_hash_assoc_remain_done(areq, err);
+}
+
+static int gcm_hash(struct aead_request *req,
+		    struct crypto_gcm_req_priv_ctx *pctx)
+{
+	struct ahash_request *ahreq = &pctx->u.ahreq;
+	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
+	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
+	unsigned int remain;
+	crypto_completion_t complete;
+	int err;
+
+	ahash_request_set_tfm(ahreq, ctx->ghash);
+
+	ahash_request_set_callback(ahreq, aead_request_flags(req),
+				   gcm_hash_init_done, req);
+	err = crypto_ahash_init(ahreq);
+	if (err)
+		return err;
+	remain = gcm_remain(req->assoclen);
+	complete = remain ? gcm_hash_assoc_done : gcm_hash_assoc_remain_done;
+	err = gcm_hash_update(req, pctx, complete, req->assoc, req->assoclen);
+	if (err)
+		return err;
+	if (remain) {
+		err = gcm_hash_remain(req, pctx, remain,
+				      gcm_hash_assoc_remain_done);
+		if (err)
+			return err;
+	}
+	remain = gcm_remain(gctx->cryptlen);
+	complete = remain ? gcm_hash_crypt_done : gcm_hash_crypt_remain_done;
+	err = gcm_hash_update(req, pctx, complete, gctx->src, gctx->cryptlen);
+	if (err)
+		return err;
+	if (remain) {
+		err = gcm_hash_remain(req, pctx, remain,
+				      gcm_hash_crypt_remain_done);
+		if (err)
+			return err;
+	}
+	err = gcm_hash_len(req, pctx);
+	if (err)
+		return err;
+	err = gcm_hash_final(req, pctx);
+	if (err)
+		return err;
+
+	return 0;
+}
+
+static void gcm_enc_copy_hash(struct aead_request *req,
+			      struct crypto_gcm_req_priv_ctx *pctx)
+{
+	struct crypto_aead *aead = crypto_aead_reqtfm(req);
+	u8 *auth_tag = pctx->auth_tag;
 
 	scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
 				 crypto_aead_authsize(aead), 1);
-	return 0;
 }
 
-static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
+static void gcm_enc_hash_done(struct crypto_async_request *areq,
+			      int err)
 {
 	struct aead_request *req = areq->data;
+	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
 
 	if (!err)
-		err = crypto_gcm_hash(req);
+		gcm_enc_copy_hash(req, pctx);
 
 	aead_request_complete(req, err);
 }
 
+static void gcm_encrypt_done(struct crypto_async_request *areq,
+			     int err)
+{
+	struct aead_request *req = areq->data;
+	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+
+	if (!err) {
+		err = gcm_hash(req, pctx);
+		if (err == -EINPROGRESS || err == -EBUSY)
+			return;
+	}
+
+	gcm_enc_hash_done(areq, err);
+}
+
 static int crypto_gcm_encrypt(struct aead_request *req)
 {
 	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
-	struct ablkcipher_request *abreq = &pctx->abreq;
+	struct ablkcipher_request *abreq = &pctx->u.abreq;
+	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
 	int err;
 
 	crypto_gcm_init_crypt(abreq, req, req->cryptlen);
 	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
-					crypto_gcm_encrypt_done, req);
+					gcm_encrypt_done, req);
+
+	gctx->src = req->dst;
+	gctx->cryptlen = req->cryptlen;
+	gctx->complete = gcm_enc_hash_done;
 
 	err = crypto_ablkcipher_encrypt(abreq);
 	if (err)
 		return err;
 
-	return crypto_gcm_hash(req);
+	err = gcm_hash(req, pctx);
+	if (err)
+		return err;
+
+	crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16);
+	gcm_enc_copy_hash(req, pctx);
+
+	return 0;
 }
 
-static int crypto_gcm_verify(struct aead_request *req)
+static int crypto_gcm_verify(struct aead_request *req,
+			     struct crypto_gcm_req_priv_ctx *pctx)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
-	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
 	u8 *auth_tag = pctx->auth_tag;
 	u8 *iauth_tag = pctx->iauth_tag;
 	unsigned int authsize = crypto_aead_authsize(aead);
 	unsigned int cryptlen = req->cryptlen - authsize;
 
-	crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);
-
-	authsize = crypto_aead_authsize(aead);
+	crypto_xor(auth_tag, iauth_tag, 16);
 	scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
 	return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
 }
 
-static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
+static void gcm_decrypt_done(struct crypto_async_request *areq, int err)
 {
 	struct aead_request *req = areq->data;
+	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
 
 	if (!err)
-		err = crypto_gcm_verify(req);
+		err = crypto_gcm_verify(req, pctx);
 
 	aead_request_complete(req, err);
 }
 
+static void gcm_dec_hash_done(struct crypto_async_request *areq, int err)
+{
+	struct aead_request *req = areq->data;
+	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+	struct ablkcipher_request *abreq = &pctx->u.abreq;
+	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
+
+	if (!err) {
+		ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+						gcm_decrypt_done, req);
+		crypto_gcm_init_crypt(abreq, req, gctx->cryptlen);
+		err = crypto_ablkcipher_decrypt(abreq);
+		if (err == -EINPROGRESS || err == -EBUSY)
+			return;
+	}
+
+	gcm_decrypt_done(areq, err);
+}
+
 static int crypto_gcm_decrypt(struct aead_request *req)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
-	struct ablkcipher_request *abreq = &pctx->abreq;
-	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
-	unsigned int cryptlen = req->cryptlen;
+	struct ablkcipher_request *abreq = &pctx->u.abreq;
+	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
 	unsigned int authsize = crypto_aead_authsize(aead);
+	unsigned int cryptlen = req->cryptlen;
 	int err;
 
 	if (cryptlen < authsize)
 		return -EINVAL;
 	cryptlen -= authsize;
 
-	crypto_gcm_init_crypt(abreq, req, cryptlen);
-	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
-					crypto_gcm_decrypt_done, req);
+	gctx->src = req->src;
+	gctx->cryptlen = cryptlen;
+	gctx->complete = gcm_dec_hash_done;
 
-	crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);
+	err = gcm_hash(req, pctx);
+	if (err)
+		return err;
 
+	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+					gcm_decrypt_done, req);
+	crypto_gcm_init_crypt(abreq, req, cryptlen);
 	err = crypto_ablkcipher_decrypt(abreq);
 	if (err)
 		return err;
 
-	return crypto_gcm_verify(req);
+	return crypto_gcm_verify(req, pctx);
 }
 
 static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
@@ -406,43 +595,56 @@ static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
 	struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
 	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
 	struct crypto_ablkcipher *ctr;
+	struct crypto_ahash *ghash;
 	unsigned long align;
 	int err;
 
+	ghash = crypto_spawn_ahash(&ictx->ghash);
+	if (IS_ERR(ghash))
+		return PTR_ERR(ghash);
+
 	ctr = crypto_spawn_skcipher(&ictx->ctr);
 	err = PTR_ERR(ctr);
 	if (IS_ERR(ctr))
-		return err;
+		goto err_free_hash;
 
 	ctx->ctr = ctr;
-	ctx->gf128 = NULL;
+	ctx->ghash = ghash;
 
 	align = crypto_tfm_alg_alignmask(tfm);
 	align &= ~(crypto_tfm_ctx_alignment() - 1);
 	tfm->crt_aead.reqsize = align +
-		sizeof(struct crypto_gcm_req_priv_ctx) +
-		crypto_ablkcipher_reqsize(ctr);
+		offsetof(struct crypto_gcm_req_priv_ctx, u) +
+		max(sizeof(struct ablkcipher_request) +
+		    crypto_ablkcipher_reqsize(ctr),
+		    sizeof(struct ahash_request) +
+		    crypto_ahash_reqsize(ghash));
 
 	return 0;
+
+err_free_hash:
+	crypto_free_ahash(ghash);
+	return err;
 }
 
 static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
 {
 	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	if (ctx->gf128 != NULL)
-		gf128mul_free_4k(ctx->gf128);
-
+	crypto_free_ahash(ctx->ghash);
 	crypto_free_ablkcipher(ctx->ctr);
 }
 
 static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
 						       const char *full_name,
-						       const char *ctr_name)
+						       const char *ctr_name,
+						       const char *ghash_name)
 {
 	struct crypto_attr_type *algt;
 	struct crypto_instance *inst;
 	struct crypto_alg *ctr;
+	struct crypto_alg *ghash_alg;
+	struct ahash_alg *ghash_ahash_alg;
 	struct gcm_instance_ctx *ctx;
 	int err;
 
@@ -454,17 +656,31 @@ static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
 	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
 		return ERR_PTR(-EINVAL);
 
+	ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type,
+				    CRYPTO_ALG_TYPE_HASH,
+				    CRYPTO_ALG_TYPE_AHASH_MASK);
+	err = PTR_ERR(ghash_alg);
+	if (IS_ERR(ghash_alg))
+		return ERR_PTR(err);
+
+	err = -ENOMEM;
 	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
 	if (!inst)
-		return ERR_PTR(-ENOMEM);
+		goto out_put_ghash;
 
 	ctx = crypto_instance_ctx(inst);
+	ghash_ahash_alg = container_of(ghash_alg, struct ahash_alg, halg.base);
+	err = crypto_init_ahash_spawn(&ctx->ghash, &ghash_ahash_alg->halg,
+				      inst);
+	if (err)
+		goto err_free_inst;
+
 	crypto_set_skcipher_spawn(&ctx->ctr, inst);
 	err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
 				   crypto_requires_sync(algt->type,
 							algt->mask));
 	if (err)
-		goto err_free_inst;
+		goto err_drop_ghash;
 
 	ctr = crypto_skcipher_spawn_alg(&ctx->ctr);
 
@@ -479,7 +695,8 @@ static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
 
 	err = -ENAMETOOLONG;
 	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
-		     "gcm_base(%s)", ctr->cra_driver_name) >=
+		     "gcm_base(%s,%s)", ctr->cra_driver_name,
+		     ghash_alg->cra_driver_name) >=
 	    CRYPTO_MAX_ALG_NAME)
 		goto out_put_ctr;
 
@@ -502,12 +719,16 @@ static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
 	inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;
 
 out:
+	crypto_mod_put(ghash_alg);
 	return inst;
 
 out_put_ctr:
 	crypto_drop_skcipher(&ctx->ctr);
+err_drop_ghash:
+	crypto_drop_ahash(&ctx->ghash);
 err_free_inst:
 	kfree(inst);
+out_put_ghash:
 	inst = ERR_PTR(err);
 	goto out;
 }
@@ -532,7 +753,7 @@ static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
 	    CRYPTO_MAX_ALG_NAME)
 		return ERR_PTR(-ENAMETOOLONG);
 
-	return crypto_gcm_alloc_common(tb, full_name, ctr_name);
+	return crypto_gcm_alloc_common(tb, full_name, ctr_name, "ghash");
 }
 
 static void crypto_gcm_free(struct crypto_instance *inst)
@@ -540,6 +761,7 @@ static void crypto_gcm_free(struct crypto_instance *inst)
 	struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);
 
 	crypto_drop_skcipher(&ctx->ctr);
+	crypto_drop_ahash(&ctx->ghash);
 	kfree(inst);
 }
 
@@ -554,6 +776,7 @@ static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb)
 {
 	int err;
 	const char *ctr_name;
+	const char *ghash_name;
 	char full_name[CRYPTO_MAX_ALG_NAME];
 
 	ctr_name = crypto_attr_alg_name(tb[1]);
@@ -561,11 +784,16 @@ static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb)
 	if (IS_ERR(ctr_name))
 		return ERR_PTR(err);
 
-	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s)",
-		     ctr_name) >= CRYPTO_MAX_ALG_NAME)
+	ghash_name = crypto_attr_alg_name(tb[2]);
+	err = PTR_ERR(ghash_name);
+	if (IS_ERR(ghash_name))
+		return ERR_PTR(err);
+
+	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s,%s)",
+		     ctr_name, ghash_name) >= CRYPTO_MAX_ALG_NAME)
 		return ERR_PTR(-ENAMETOOLONG);
 
-	return crypto_gcm_alloc_common(tb, full_name, ctr_name);
+	return crypto_gcm_alloc_common(tb, full_name, ctr_name, ghash_name);
 }
 
 static struct crypto_template crypto_gcm_base_tmpl = {
@@ -784,6 +1012,10 @@ static int __init crypto_gcm_module_init(void)
 {
 	int err;
 
+	gcm_zeroes = kzalloc(16, GFP_KERNEL);
+	if (!gcm_zeroes)
+		return -ENOMEM;
+
 	err = crypto_register_template(&crypto_gcm_base_tmpl);
 	if (err)
 		goto out;
@@ -796,18 +1028,20 @@ static int __init crypto_gcm_module_init(void)
 	if (err)
 		goto out_undo_gcm;
 
-out:
-	return err;
+	return 0;
 
 out_undo_gcm:
 	crypto_unregister_template(&crypto_gcm_tmpl);
 out_undo_base:
 	crypto_unregister_template(&crypto_gcm_base_tmpl);
-	goto out;
+out:
+	kfree(gcm_zeroes);
+	return err;
 }
 
 static void __exit crypto_gcm_module_exit(void)
 {
+	kfree(gcm_zeroes);
 	crypto_unregister_template(&crypto_rfc4106_tmpl);
 	crypto_unregister_template(&crypto_gcm_tmpl);
 	crypto_unregister_template(&crypto_gcm_base_tmpl);