author    Herbert Xu <herbert@gondor.apana.org.au>  2015-05-21 03:11:13 -0400
committer Herbert Xu <herbert@gondor.apana.org.au>  2015-05-21 23:25:56 -0400
commit    856e3f4092cfd9ea6d6564e73f5bce5a0ac3cae3 (patch)
tree      8e8c87713974a5332957d16758413a7fa5258d4c /crypto/seqiv.c
parent    74412fd5d71b6eda0beb302aa467da000f0d530c (diff)
crypto: seqiv - Add support for new AEAD interface
This patch converts the seqiv IV generator to work with the new AEAD interface where IV generators are just normal AEAD algorithms.

Full backwards compatibility is paramount at this point since no users have yet switched over to the new interface. Nor can they switch to the new interface until IV generation is fully supported by it.

So this means we are adding two versions of seqiv alongside the existing one. The first one is the one that will be used when the underlying AEAD algorithm has switched over to the new AEAD interface. The second one handles the current case where the underlying AEAD algorithm still uses the old interface. Both versions export themselves through the new AEAD interface.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
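For orientation before reading the diff: both new entry points derive each IV the way the existing seqiv code does, by XOR-ing a per-transform random salt (filled in on first use via crypto_rng_get_bytes()) with the request's sequence number. Below is a minimal, standalone userspace C sketch of that construction; the function name is hypothetical and the code is an illustration, not part of the patch.

#include <stdint.h>
#include <string.h>

/* Hypothetical sketch of seqiv's IV derivation: zero-pad the IV,
 * store the 64-bit sequence number big-endian in its tail, then
 * XOR in the per-transform salt (the crypto_xor() step in the patch). */
static void seqiv_make_iv(uint8_t *iv, const uint8_t *salt,
			  uint64_t seq, unsigned int ivsize)
{
	unsigned int i;

	memset(iv, 0, ivsize);
	for (i = 0; i < 8 && i < ivsize; i++)
		iv[ivsize - 1 - i] = (uint8_t)(seq >> (8 * i));

	for (i = 0; i < ivsize; i++)
		iv[i] ^= salt[i];
}

Because the sequence number never repeats within a key's lifetime, each request gets a unique IV without a per-request RNG call.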
Diffstat (limited to 'crypto/seqiv.c')
-rw-r--r--  crypto/seqiv.c | 386
1 file changed, 376 insertions(+), 10 deletions(-)
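The two encrypt paths in the diff below also hand the generated IV to the child algorithm differently. The new-interface path (seqiv_aead_encrypt) writes the IV into the destination buffer right behind the associated data and then folds it into the AD length (aead_request_set_ad(subreq, req->assoclen + ivsize, 0)), while the compat path (seqiv_aead_encrypt_compat) keeps the AD length at req->assoclen and passes the IV to the child separately. Here is a flat-buffer sketch of the new-style destination layout, using a hypothetical helper and assuming contiguous memory instead of scatterlists:

#include <stdint.h>
#include <string.h>

/* Hypothetical illustration of the dst layout produced by the
 * new-style encrypt path:
 *   [ assoclen bytes AD ][ ivsize bytes IV ][ ciphertext + tag ]
 * In the patch, the IV copy is done by scatterwalk_map_and_copy()
 * at offset req->assoclen, and AD + IV are authenticated together. */
static void seqiv_layout_dst(uint8_t *dst,
			     const uint8_t *ad, size_t assoclen,
			     const uint8_t *iv, size_t ivsize,
			     const uint8_t *ctext, size_t clen)
{
	memcpy(dst, ad, assoclen);                    /* associated data */
	memcpy(dst + assoclen, iv, ivsize);           /* generated IV */
	memcpy(dst + assoclen + ivsize, ctext, clen); /* ciphertext + tag */
}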
diff --git a/crypto/seqiv.c b/crypto/seqiv.c
index 5bbf2e9e3ce5..27dbab8a80a9 100644
--- a/crypto/seqiv.c
+++ b/crypto/seqiv.c
@@ -15,7 +15,9 @@
 
 #include <crypto/internal/aead.h>
 #include <crypto/internal/skcipher.h>
+#include <crypto/null.h>
 #include <crypto/rng.h>
+#include <crypto/scatterwalk.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
@@ -29,6 +31,29 @@ struct seqiv_ctx {
 	u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
 };
 
+struct seqiv_aead_ctx {
+	struct crypto_aead *child;
+	spinlock_t lock;
+	struct crypto_blkcipher *null;
+	u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
+};
+
+static int seqiv_aead_setkey(struct crypto_aead *tfm,
+			     const u8 *key, unsigned int keylen)
+{
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(tfm);
+
+	return crypto_aead_setkey(ctx->child, key, keylen);
+}
+
+static int seqiv_aead_setauthsize(struct crypto_aead *tfm,
+				  unsigned int authsize)
+{
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(tfm);
+
+	return crypto_aead_setauthsize(ctx->child, authsize);
+}
+
 static void seqiv_complete2(struct skcipher_givcrypt_request *req, int err)
 {
 	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
@@ -81,6 +106,33 @@ static void seqiv_aead_complete(struct crypto_async_request *base, int err)
 	aead_givcrypt_complete(req, err);
 }
 
+static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err)
+{
+	struct aead_request *subreq = aead_request_ctx(req);
+	struct crypto_aead *geniv;
+
+	if (err == -EINPROGRESS)
+		return;
+
+	if (err)
+		goto out;
+
+	geniv = crypto_aead_reqtfm(req);
+	memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv));
+
+out:
+	kzfree(subreq->iv);
+}
+
+static void seqiv_aead_encrypt_complete(struct crypto_async_request *base,
+					int err)
+{
+	struct aead_request *req = base->data;
+
+	seqiv_aead_encrypt_complete2(req, err);
+	aead_request_complete(req, err);
+}
+
 static void seqiv_geniv(struct seqiv_ctx *ctx, u8 *info, u64 seq,
 			unsigned int ivsize)
 {
@@ -186,6 +238,171 @@ static int seqiv_aead_givencrypt(struct aead_givcrypt_request *req)
 	return err;
 }
 
+static int seqiv_aead_encrypt_compat(struct aead_request *req)
+{
+	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	struct aead_request *subreq = aead_request_ctx(req);
+	crypto_completion_t compl;
+	void *data;
+	u8 *info;
+	unsigned int ivsize;
+	int err;
+
+	aead_request_set_tfm(subreq, ctx->child);
+
+	compl = req->base.complete;
+	data = req->base.data;
+	info = req->iv;
+
+	ivsize = crypto_aead_ivsize(geniv);
+
+	if (unlikely(!IS_ALIGNED((unsigned long)info,
+				 crypto_aead_alignmask(geniv) + 1))) {
+		info = kmalloc(ivsize, req->base.flags &
+				       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
+								  GFP_ATOMIC);
+		if (!info)
+			return -ENOMEM;
+
+		memcpy(info, req->iv, ivsize);
+		compl = seqiv_aead_encrypt_complete;
+		data = req;
+	}
+
+	aead_request_set_callback(subreq, req->base.flags, compl, data);
+	aead_request_set_crypt(subreq, req->src, req->dst,
+			       req->cryptlen - ivsize, info);
+	aead_request_set_ad(subreq, req->assoclen, ivsize);
+
+	crypto_xor(info, ctx->salt, ivsize);
+	scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);
+
+	err = crypto_aead_encrypt(subreq);
+	if (unlikely(info != req->iv))
+		seqiv_aead_encrypt_complete2(req, err);
+	return err;
+}
+
+static int seqiv_aead_encrypt(struct aead_request *req)
+{
+	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	struct aead_request *subreq = aead_request_ctx(req);
+	crypto_completion_t compl;
+	void *data;
+	u8 *info;
+	unsigned int ivsize;
+	int err;
+
+	aead_request_set_tfm(subreq, ctx->child);
+
+	compl = req->base.complete;
+	data = req->base.data;
+	info = req->iv;
+
+	ivsize = crypto_aead_ivsize(geniv);
+
+	if (req->src != req->dst) {
+		struct scatterlist src[2];
+		struct scatterlist dst[2];
+		struct blkcipher_desc desc = {
+			.tfm = ctx->null,
+		};
+
+		err = crypto_blkcipher_encrypt(
+			&desc,
+			scatterwalk_ffwd(dst, req->dst,
+					 req->assoclen + ivsize),
+			scatterwalk_ffwd(src, req->src,
+					 req->assoclen + ivsize),
+			req->cryptlen - ivsize);
+		if (err)
+			return err;
+	}
+
+	if (unlikely(!IS_ALIGNED((unsigned long)info,
+				 crypto_aead_alignmask(geniv) + 1))) {
+		info = kmalloc(ivsize, req->base.flags &
+				       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
+								  GFP_ATOMIC);
+		if (!info)
+			return -ENOMEM;
+
+		memcpy(info, req->iv, ivsize);
+		compl = seqiv_aead_encrypt_complete;
+		data = req;
+	}
+
+	aead_request_set_callback(subreq, req->base.flags, compl, data);
+	aead_request_set_crypt(subreq, req->dst, req->dst,
+			       req->cryptlen - ivsize, info);
+	aead_request_set_ad(subreq, req->assoclen + ivsize, 0);
+
+	crypto_xor(info, ctx->salt, ivsize);
+	scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);
+
+	err = crypto_aead_encrypt(subreq);
+	if (unlikely(info != req->iv))
+		seqiv_aead_encrypt_complete2(req, err);
+	return err;
+}
+
+static int seqiv_aead_decrypt_compat(struct aead_request *req)
+{
+	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	struct aead_request *subreq = aead_request_ctx(req);
+	crypto_completion_t compl;
+	void *data;
+	unsigned int ivsize;
+
+	aead_request_set_tfm(subreq, ctx->child);
+
+	compl = req->base.complete;
+	data = req->base.data;
+
+	ivsize = crypto_aead_ivsize(geniv);
+
+	aead_request_set_callback(subreq, req->base.flags, compl, data);
+	aead_request_set_crypt(subreq, req->src, req->dst,
+			       req->cryptlen - ivsize, req->iv);
+	aead_request_set_ad(subreq, req->assoclen, ivsize);
+
+	scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);
+
+	return crypto_aead_decrypt(subreq);
+}
+
+static int seqiv_aead_decrypt(struct aead_request *req)
+{
+	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	struct aead_request *subreq = aead_request_ctx(req);
+	crypto_completion_t compl;
+	void *data;
+	unsigned int ivsize;
+
+	aead_request_set_tfm(subreq, ctx->child);
+
+	compl = req->base.complete;
+	data = req->base.data;
+
+	ivsize = crypto_aead_ivsize(geniv);
+
+	aead_request_set_callback(subreq, req->base.flags, compl, data);
+	aead_request_set_crypt(subreq, req->src, req->dst,
+			       req->cryptlen - ivsize, req->iv);
+	aead_request_set_ad(subreq, req->assoclen + ivsize, 0);
+
+	scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);
+	if (req->src != req->dst)
+		scatterwalk_map_and_copy(req->iv, req->dst,
+					 req->assoclen, ivsize, 1);
+
+	return crypto_aead_decrypt(subreq);
+}
+
 static int seqiv_givencrypt_first(struct skcipher_givcrypt_request *req)
 {
 	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
@@ -232,6 +449,52 @@ unlock:
 	return seqiv_aead_givencrypt(req);
 }
 
+static int seqiv_aead_encrypt_compat_first(struct aead_request *req)
+{
+	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	int err = 0;
+
+	spin_lock_bh(&ctx->lock);
+	if (geniv->encrypt != seqiv_aead_encrypt_compat_first)
+		goto unlock;
+
+	geniv->encrypt = seqiv_aead_encrypt_compat;
+	err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
+				   crypto_aead_ivsize(geniv));
+
+unlock:
+	spin_unlock_bh(&ctx->lock);
+
+	if (err)
+		return err;
+
+	return seqiv_aead_encrypt_compat(req);
+}
+
+static int seqiv_aead_encrypt_first(struct aead_request *req)
+{
+	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	int err = 0;
+
+	spin_lock_bh(&ctx->lock);
+	if (geniv->encrypt != seqiv_aead_encrypt_first)
+		goto unlock;
+
+	geniv->encrypt = seqiv_aead_encrypt;
+	err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
+				   crypto_aead_ivsize(geniv));
+
+unlock:
+	spin_unlock_bh(&ctx->lock);
+
+	if (err)
+		return err;
+
+	return seqiv_aead_encrypt(req);
+}
+
 static int seqiv_init(struct crypto_tfm *tfm)
 {
 	struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
@@ -244,7 +507,7 @@ static int seqiv_init(struct crypto_tfm *tfm)
 	return skcipher_geniv_init(tfm);
 }
 
-static int seqiv_aead_init(struct crypto_tfm *tfm)
+static int seqiv_old_aead_init(struct crypto_tfm *tfm)
 {
 	struct crypto_aead *geniv = __crypto_aead_cast(tfm);
 	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
@@ -257,6 +520,69 @@ static int seqiv_aead_init(struct crypto_tfm *tfm)
 	return aead_geniv_init(tfm);
 }
 
+static int seqiv_aead_compat_init(struct crypto_tfm *tfm)
+{
+	struct crypto_aead *geniv = __crypto_aead_cast(tfm);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	int err;
+
+	spin_lock_init(&ctx->lock);
+
+	crypto_aead_set_reqsize(geniv, sizeof(struct aead_request));
+
+	err = aead_geniv_init(tfm);
+
+	ctx->child = geniv->child;
+	geniv->child = geniv;
+
+	return err;
+}
+
+static int seqiv_aead_init(struct crypto_tfm *tfm)
+{
+	struct crypto_aead *geniv = __crypto_aead_cast(tfm);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	int err;
+
+	spin_lock_init(&ctx->lock);
+
+	crypto_aead_set_reqsize(geniv, sizeof(struct aead_request));
+
+	ctx->null = crypto_get_default_null_skcipher();
+	err = PTR_ERR(ctx->null);
+	if (IS_ERR(ctx->null))
+		goto out;
+
+	err = aead_geniv_init(tfm);
+	if (err)
+		goto drop_null;
+
+	ctx->child = geniv->child;
+	geniv->child = geniv;
+
+out:
+	return err;
+
+drop_null:
+	crypto_put_default_null_skcipher();
+	goto out;
+}
+
+static void seqiv_aead_compat_exit(struct crypto_tfm *tfm)
+{
+	struct seqiv_aead_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_aead(ctx->child);
+}
+
+static void seqiv_aead_exit(struct crypto_tfm *tfm)
+{
+	struct seqiv_aead_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_aead(ctx->child);
+	crypto_put_default_null_skcipher();
+}
+
 static struct crypto_template seqiv_tmpl;
 
 static struct crypto_instance *seqiv_ablkcipher_alloc(struct rtattr **tb)
@@ -280,35 +606,76 @@ static struct crypto_instance *seqiv_ablkcipher_alloc(struct rtattr **tb)
 	inst->alg.cra_exit = skcipher_geniv_exit;
 
 	inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
+	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);
 
 out:
 	return inst;
 }
 
+static struct crypto_instance *seqiv_old_aead_alloc(struct aead_instance *aead)
+{
+	struct crypto_instance *inst = aead_crypto_instance(aead);
+
+	if (inst->alg.cra_aead.ivsize < sizeof(u64)) {
+		aead_geniv_free(aead);
+		return ERR_PTR(-EINVAL);
+	}
+
+	inst->alg.cra_aead.givencrypt = seqiv_aead_givencrypt_first;
+
+	inst->alg.cra_init = seqiv_old_aead_init;
+	inst->alg.cra_exit = aead_geniv_exit;
+
+	inst->alg.cra_ctxsize = inst->alg.cra_aead.ivsize;
+	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);
+
+	return inst;
+}
+
 static struct crypto_instance *seqiv_aead_alloc(struct rtattr **tb)
 {
-	struct crypto_instance *inst;
+	struct aead_instance *inst;
+	struct crypto_aead_spawn *spawn;
+	struct aead_alg *alg;
 
 	inst = aead_geniv_alloc(&seqiv_tmpl, tb, 0, 0);
 
 	if (IS_ERR(inst))
 		goto out;
 
-	if (inst->alg.cra_aead.ivsize < sizeof(u64)) {
+	if (inst->alg.base.cra_aead.encrypt)
+		return seqiv_old_aead_alloc(inst);
+
+	if (inst->alg.ivsize < sizeof(u64)) {
 		aead_geniv_free(inst);
 		inst = ERR_PTR(-EINVAL);
 		goto out;
 	}
 
-	inst->alg.cra_aead.givencrypt = seqiv_aead_givencrypt_first;
+	spawn = aead_instance_ctx(inst);
+	alg = crypto_spawn_aead_alg(spawn);
 
-	inst->alg.cra_init = seqiv_aead_init;
-	inst->alg.cra_exit = aead_geniv_exit;
+	inst->alg.setkey = seqiv_aead_setkey;
+	inst->alg.setauthsize = seqiv_aead_setauthsize;
+	inst->alg.encrypt = seqiv_aead_encrypt_first;
+	inst->alg.decrypt = seqiv_aead_decrypt;
 
-	inst->alg.cra_ctxsize = inst->alg.cra_aead.ivsize;
+	inst->alg.base.cra_init = seqiv_aead_init;
+	inst->alg.base.cra_exit = seqiv_aead_exit;
+
+	inst->alg.base.cra_ctxsize = sizeof(struct seqiv_aead_ctx);
+	inst->alg.base.cra_ctxsize += inst->alg.base.cra_aead.ivsize;
+
+	if (alg->base.cra_aead.encrypt) {
+		inst->alg.encrypt = seqiv_aead_encrypt_compat_first;
+		inst->alg.decrypt = seqiv_aead_decrypt_compat;
+
+		inst->alg.base.cra_init = seqiv_aead_compat_init;
+		inst->alg.base.cra_exit = seqiv_aead_compat_exit;
+	}
 
 out:
-	return inst;
+	return aead_crypto_instance(inst);
 }
 
 static struct crypto_instance *seqiv_alloc(struct rtattr **tb)
@@ -334,7 +701,6 @@ static struct crypto_instance *seqiv_alloc(struct rtattr **tb)
 		goto put_rng;
 
 	inst->alg.cra_alignmask |= __alignof__(u32) - 1;
-	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);
 
 out:
 	return inst;
@@ -349,7 +715,7 @@ static void seqiv_free(struct crypto_instance *inst)
 	if ((inst->alg.cra_flags ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
 		skcipher_geniv_free(inst);
 	else
-		aead_geniv_free(inst);
+		aead_geniv_free(aead_instance(inst));
 	crypto_put_default_rng();
 }