-rw-r--r--  crypto/Kconfig                   |   1
-rw-r--r--  crypto/aead.c                    | 100
-rw-r--r--  crypto/seqiv.c                   | 386
-rw-r--r--  include/crypto/internal/aead.h   |   7
4 files changed, 443 insertions, 51 deletions
diff --git a/crypto/Kconfig b/crypto/Kconfig
index eba55b42f3e2..657bb82acd51 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -221,6 +221,7 @@ config CRYPTO_SEQIV
 	tristate "Sequence Number IV Generator"
 	select CRYPTO_AEAD
 	select CRYPTO_BLKCIPHER
+	select CRYPTO_NULL
 	select CRYPTO_RNG
 	help
 	  This IV generator generates an IV based on a sequence number by
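
For reference, not part of the change itself: seqiv builds each IV by XORing a per-transform random salt into the 64-bit sequence number carried in the request IV, and the new AEAD code paths added below use the default null skcipher to copy src to dst, which is why CRYPTO_NULL is now selected. A minimal sketch of the IV derivation, assuming crypto_xor()'s usual dst ^= src behaviour; the helper name is made up purely for illustration:

/*
 * Illustrative sketch only -- mirrors the crypto_xor(info, ctx->salt, ivsize)
 * calls added in crypto/seqiv.c further down.
 */
#include <linux/types.h>
#include <crypto/algapi.h>	/* crypto_xor() */

static void seqiv_iv_sketch(u8 *iv, const u8 *salt, unsigned int ivsize)
{
	/* iv arrives holding the sequence number; salt is random, per tfm */
	crypto_xor(iv, salt, ivsize);	/* iv ^= salt */
}
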
diff --git a/crypto/aead.c b/crypto/aead.c
index d231e2837bfd..5fa992ac219c 100644
--- a/crypto/aead.c
+++ b/crypto/aead.c
@@ -378,15 +378,16 @@ static int crypto_grab_nivaead(struct crypto_aead_spawn *spawn,
 	return crypto_grab_spawn(&spawn->base, name, type, mask);
 }
 
-struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
-					 struct rtattr **tb, u32 type,
-					 u32 mask)
+struct aead_instance *aead_geniv_alloc(struct crypto_template *tmpl,
+				       struct rtattr **tb, u32 type, u32 mask)
 {
 	const char *name;
 	struct crypto_aead_spawn *spawn;
 	struct crypto_attr_type *algt;
-	struct crypto_instance *inst;
-	struct crypto_alg *alg;
+	struct aead_instance *inst;
+	struct aead_alg *alg;
+	unsigned int ivsize;
+	unsigned int maxauthsize;
 	int err;
 
 	algt = crypto_get_attr_type(tb);
@@ -405,20 +406,28 @@ struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
 	if (!inst)
 		return ERR_PTR(-ENOMEM);
 
-	spawn = crypto_instance_ctx(inst);
+	spawn = aead_instance_ctx(inst);
 
 	/* Ignore async algorithms if necessary. */
 	mask |= crypto_requires_sync(algt->type, algt->mask);
 
-	crypto_set_aead_spawn(spawn, inst);
+	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
 	err = crypto_grab_nivaead(spawn, name, type, mask);
 	if (err)
 		goto err_free_inst;
 
-	alg = crypto_aead_spawn_alg(spawn);
+	alg = crypto_spawn_aead_alg(spawn);
+
+	if (alg->base.cra_aead.encrypt) {
+		ivsize = alg->base.cra_aead.ivsize;
+		maxauthsize = alg->base.cra_aead.maxauthsize;
+	} else {
+		ivsize = alg->ivsize;
+		maxauthsize = alg->maxauthsize;
+	}
 
 	err = -EINVAL;
-	if (!alg->cra_aead.ivsize)
+	if (!ivsize)
 		goto err_drop_alg;
 
 	/*
@@ -427,39 +436,56 @@ struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
 	 * template name and double-check the IV generator.
 	 */
 	if (algt->mask & CRYPTO_ALG_GENIV) {
-		if (strcmp(tmpl->name, alg->cra_aead.geniv))
+		if (!alg->base.cra_aead.encrypt)
+			goto err_drop_alg;
+		if (strcmp(tmpl->name, alg->base.cra_aead.geniv))
 			goto err_drop_alg;
 
-		memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
-		memcpy(inst->alg.cra_driver_name, alg->cra_driver_name,
+		memcpy(inst->alg.base.cra_name, alg->base.cra_name,
 		       CRYPTO_MAX_ALG_NAME);
-	} else {
-		err = -ENAMETOOLONG;
-		if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
-			     "%s(%s)", tmpl->name, alg->cra_name) >=
-		    CRYPTO_MAX_ALG_NAME)
-			goto err_drop_alg;
-		if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
-			     "%s(%s)", tmpl->name, alg->cra_driver_name) >=
-		    CRYPTO_MAX_ALG_NAME)
-			goto err_drop_alg;
+		memcpy(inst->alg.base.cra_driver_name,
+		       alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME);
+
+		inst->alg.base.cra_flags = CRYPTO_ALG_TYPE_AEAD |
+					   CRYPTO_ALG_GENIV;
+		inst->alg.base.cra_flags |= alg->base.cra_flags &
+					    CRYPTO_ALG_ASYNC;
+		inst->alg.base.cra_priority = alg->base.cra_priority;
+		inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
+		inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
+		inst->alg.base.cra_type = &crypto_aead_type;
+
+		inst->alg.base.cra_aead.ivsize = ivsize;
+		inst->alg.base.cra_aead.maxauthsize = maxauthsize;
+
+		inst->alg.base.cra_aead.setkey = alg->base.cra_aead.setkey;
+		inst->alg.base.cra_aead.setauthsize =
+			alg->base.cra_aead.setauthsize;
+		inst->alg.base.cra_aead.encrypt = alg->base.cra_aead.encrypt;
+		inst->alg.base.cra_aead.decrypt = alg->base.cra_aead.decrypt;
+
+		goto out;
 	}
 
-	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV;
-	inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
-	inst->alg.cra_priority = alg->cra_priority;
-	inst->alg.cra_blocksize = alg->cra_blocksize;
-	inst->alg.cra_alignmask = alg->cra_alignmask;
-	inst->alg.cra_type = &crypto_aead_type;
+	err = -ENAMETOOLONG;
+	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
+		     "%s(%s)", tmpl->name, alg->base.cra_name) >=
+	    CRYPTO_MAX_ALG_NAME)
+		goto err_drop_alg;
+	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+		     "%s(%s)", tmpl->name, alg->base.cra_driver_name) >=
+	    CRYPTO_MAX_ALG_NAME)
+		goto err_drop_alg;
 
-	inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize;
-	inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
-	inst->alg.cra_aead.geniv = alg->cra_aead.geniv;
+	inst->alg.base.cra_flags = CRYPTO_ALG_TYPE_AEAD;
+	inst->alg.base.cra_flags |= alg->base.cra_flags & CRYPTO_ALG_ASYNC;
+	inst->alg.base.cra_priority = alg->base.cra_priority;
+	inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
+	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
+	inst->alg.base.cra_type = &crypto_new_aead_type;
 
-	inst->alg.cra_aead.setkey = alg->cra_aead.setkey;
-	inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize;
-	inst->alg.cra_aead.encrypt = alg->cra_aead.encrypt;
-	inst->alg.cra_aead.decrypt = alg->cra_aead.decrypt;
+	inst->alg.ivsize = ivsize;
+	inst->alg.maxauthsize = maxauthsize;
 
 out:
 	return inst;
@@ -473,9 +499,9 @@ err_free_inst:
 }
 EXPORT_SYMBOL_GPL(aead_geniv_alloc);
 
-void aead_geniv_free(struct crypto_instance *inst)
+void aead_geniv_free(struct aead_instance *inst)
 {
-	crypto_drop_aead(crypto_instance_ctx(inst));
+	crypto_drop_aead(aead_instance_ctx(inst));
 	kfree(inst);
 }
 EXPORT_SYMBOL_GPL(aead_geniv_free);
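
For illustration: with the updated prototypes, a generator template's alloc routine now receives and returns struct aead_instance rather than struct crypto_instance. A minimal sketch, modelled on seqiv_aead_alloc() later in this patch; the template and function names are hypothetical:

#include <crypto/internal/aead.h>
#include <linux/err.h>

static struct crypto_instance *example_geniv_alloc(struct crypto_template *tmpl,
						   struct rtattr **tb)
{
	struct aead_instance *inst;

	inst = aead_geniv_alloc(tmpl, tb, 0, 0);	/* new return type */
	if (IS_ERR(inst))
		return ERR_CAST(inst);

	/* fill in inst->alg.{setkey,setauthsize,encrypt,decrypt} here */

	return aead_crypto_instance(inst);
}
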
diff --git a/crypto/seqiv.c b/crypto/seqiv.c
index 5bbf2e9e3ce5..27dbab8a80a9 100644
--- a/crypto/seqiv.c
+++ b/crypto/seqiv.c
@@ -15,7 +15,9 @@
 
 #include <crypto/internal/aead.h>
 #include <crypto/internal/skcipher.h>
+#include <crypto/null.h>
 #include <crypto/rng.h>
+#include <crypto/scatterwalk.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
@@ -29,6 +31,29 @@ struct seqiv_ctx {
 	u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
 };
 
+struct seqiv_aead_ctx {
+	struct crypto_aead *child;
+	spinlock_t lock;
+	struct crypto_blkcipher *null;
+	u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
+};
+
+static int seqiv_aead_setkey(struct crypto_aead *tfm,
+			     const u8 *key, unsigned int keylen)
+{
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(tfm);
+
+	return crypto_aead_setkey(ctx->child, key, keylen);
+}
+
+static int seqiv_aead_setauthsize(struct crypto_aead *tfm,
+				  unsigned int authsize)
+{
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(tfm);
+
+	return crypto_aead_setauthsize(ctx->child, authsize);
+}
+
 static void seqiv_complete2(struct skcipher_givcrypt_request *req, int err)
 {
 	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
@@ -81,6 +106,33 @@ static void seqiv_aead_complete(struct crypto_async_request *base, int err)
 	aead_givcrypt_complete(req, err);
 }
 
+static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err)
+{
+	struct aead_request *subreq = aead_request_ctx(req);
+	struct crypto_aead *geniv;
+
+	if (err == -EINPROGRESS)
+		return;
+
+	if (err)
+		goto out;
+
+	geniv = crypto_aead_reqtfm(req);
+	memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv));
+
+out:
+	kzfree(subreq->iv);
+}
+
+static void seqiv_aead_encrypt_complete(struct crypto_async_request *base,
+					int err)
+{
+	struct aead_request *req = base->data;
+
+	seqiv_aead_encrypt_complete2(req, err);
+	aead_request_complete(req, err);
+}
+
 static void seqiv_geniv(struct seqiv_ctx *ctx, u8 *info, u64 seq,
 			unsigned int ivsize)
 {
@@ -186,6 +238,171 @@ static int seqiv_aead_givencrypt(struct aead_givcrypt_request *req)
 	return err;
 }
 
+static int seqiv_aead_encrypt_compat(struct aead_request *req)
+{
+	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	struct aead_request *subreq = aead_request_ctx(req);
+	crypto_completion_t compl;
+	void *data;
+	u8 *info;
+	unsigned int ivsize;
+	int err;
+
+	aead_request_set_tfm(subreq, ctx->child);
+
+	compl = req->base.complete;
+	data = req->base.data;
+	info = req->iv;
+
+	ivsize = crypto_aead_ivsize(geniv);
+
+	if (unlikely(!IS_ALIGNED((unsigned long)info,
+				 crypto_aead_alignmask(geniv) + 1))) {
+		info = kmalloc(ivsize, req->base.flags &
+				       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
+								  GFP_ATOMIC);
+		if (!info)
+			return -ENOMEM;
+
+		memcpy(info, req->iv, ivsize);
+		compl = seqiv_aead_encrypt_complete;
+		data = req;
+	}
+
+	aead_request_set_callback(subreq, req->base.flags, compl, data);
+	aead_request_set_crypt(subreq, req->src, req->dst,
+			       req->cryptlen - ivsize, info);
+	aead_request_set_ad(subreq, req->assoclen, ivsize);
+
+	crypto_xor(info, ctx->salt, ivsize);
+	scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);
+
+	err = crypto_aead_encrypt(subreq);
+	if (unlikely(info != req->iv))
+		seqiv_aead_encrypt_complete2(req, err);
+	return err;
+}
+
+static int seqiv_aead_encrypt(struct aead_request *req)
+{
+	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	struct aead_request *subreq = aead_request_ctx(req);
+	crypto_completion_t compl;
+	void *data;
+	u8 *info;
+	unsigned int ivsize;
+	int err;
+
+	aead_request_set_tfm(subreq, ctx->child);
+
+	compl = req->base.complete;
+	data = req->base.data;
+	info = req->iv;
+
+	ivsize = crypto_aead_ivsize(geniv);
+
+	if (req->src != req->dst) {
+		struct scatterlist src[2];
+		struct scatterlist dst[2];
+		struct blkcipher_desc desc = {
+			.tfm = ctx->null,
+		};
+
+		err = crypto_blkcipher_encrypt(
+			&desc,
+			scatterwalk_ffwd(dst, req->dst,
+					 req->assoclen + ivsize),
+			scatterwalk_ffwd(src, req->src,
+					 req->assoclen + ivsize),
+			req->cryptlen - ivsize);
+		if (err)
+			return err;
+	}
+
+	if (unlikely(!IS_ALIGNED((unsigned long)info,
+				 crypto_aead_alignmask(geniv) + 1))) {
+		info = kmalloc(ivsize, req->base.flags &
+				       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
+								  GFP_ATOMIC);
+		if (!info)
+			return -ENOMEM;
+
+		memcpy(info, req->iv, ivsize);
+		compl = seqiv_aead_encrypt_complete;
+		data = req;
+	}
+
+	aead_request_set_callback(subreq, req->base.flags, compl, data);
+	aead_request_set_crypt(subreq, req->dst, req->dst,
+			       req->cryptlen - ivsize, info);
+	aead_request_set_ad(subreq, req->assoclen + ivsize, 0);
+
+	crypto_xor(info, ctx->salt, ivsize);
+	scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);
+
+	err = crypto_aead_encrypt(subreq);
+	if (unlikely(info != req->iv))
+		seqiv_aead_encrypt_complete2(req, err);
+	return err;
+}
+
+static int seqiv_aead_decrypt_compat(struct aead_request *req)
+{
+	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	struct aead_request *subreq = aead_request_ctx(req);
+	crypto_completion_t compl;
+	void *data;
+	unsigned int ivsize;
+
+	aead_request_set_tfm(subreq, ctx->child);
+
+	compl = req->base.complete;
+	data = req->base.data;
+
+	ivsize = crypto_aead_ivsize(geniv);
+
+	aead_request_set_callback(subreq, req->base.flags, compl, data);
+	aead_request_set_crypt(subreq, req->src, req->dst,
+			       req->cryptlen - ivsize, req->iv);
+	aead_request_set_ad(subreq, req->assoclen, ivsize);
+
+	scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);
+
+	return crypto_aead_decrypt(subreq);
+}
+
+static int seqiv_aead_decrypt(struct aead_request *req)
+{
+	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	struct aead_request *subreq = aead_request_ctx(req);
+	crypto_completion_t compl;
+	void *data;
+	unsigned int ivsize;
+
+	aead_request_set_tfm(subreq, ctx->child);
+
+	compl = req->base.complete;
+	data = req->base.data;
+
+	ivsize = crypto_aead_ivsize(geniv);
+
+	aead_request_set_callback(subreq, req->base.flags, compl, data);
+	aead_request_set_crypt(subreq, req->src, req->dst,
+			       req->cryptlen - ivsize, req->iv);
+	aead_request_set_ad(subreq, req->assoclen + ivsize, 0);
+
+	scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);
+	if (req->src != req->dst)
+		scatterwalk_map_and_copy(req->iv, req->dst,
+					 req->assoclen, ivsize, 1);
+
+	return crypto_aead_decrypt(subreq);
+}
+
 static int seqiv_givencrypt_first(struct skcipher_givcrypt_request *req)
 {
 	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
@@ -232,6 +449,52 @@ unlock:
 	return seqiv_aead_givencrypt(req);
 }
 
+static int seqiv_aead_encrypt_compat_first(struct aead_request *req)
+{
+	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	int err = 0;
+
+	spin_lock_bh(&ctx->lock);
+	if (geniv->encrypt != seqiv_aead_encrypt_compat_first)
+		goto unlock;
+
+	geniv->encrypt = seqiv_aead_encrypt_compat;
+	err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
+				   crypto_aead_ivsize(geniv));
+
+unlock:
+	spin_unlock_bh(&ctx->lock);
+
+	if (err)
+		return err;
+
+	return seqiv_aead_encrypt_compat(req);
+}
+
+static int seqiv_aead_encrypt_first(struct aead_request *req)
+{
+	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	int err = 0;
+
+	spin_lock_bh(&ctx->lock);
+	if (geniv->encrypt != seqiv_aead_encrypt_first)
+		goto unlock;
+
+	geniv->encrypt = seqiv_aead_encrypt;
+	err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
+				   crypto_aead_ivsize(geniv));
+
+unlock:
+	spin_unlock_bh(&ctx->lock);
+
+	if (err)
+		return err;
+
+	return seqiv_aead_encrypt(req);
+}
+
 static int seqiv_init(struct crypto_tfm *tfm)
 {
 	struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
@@ -244,7 +507,7 @@ static int seqiv_init(struct crypto_tfm *tfm)
 	return skcipher_geniv_init(tfm);
 }
 
-static int seqiv_aead_init(struct crypto_tfm *tfm)
+static int seqiv_old_aead_init(struct crypto_tfm *tfm)
 {
 	struct crypto_aead *geniv = __crypto_aead_cast(tfm);
 	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
@@ -257,6 +520,69 @@ static int seqiv_aead_init(struct crypto_tfm *tfm)
 	return aead_geniv_init(tfm);
 }
 
+static int seqiv_aead_compat_init(struct crypto_tfm *tfm)
+{
+	struct crypto_aead *geniv = __crypto_aead_cast(tfm);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	int err;
+
+	spin_lock_init(&ctx->lock);
+
+	crypto_aead_set_reqsize(geniv, sizeof(struct aead_request));
+
+	err = aead_geniv_init(tfm);
+
+	ctx->child = geniv->child;
+	geniv->child = geniv;
+
+	return err;
+}
+
+static int seqiv_aead_init(struct crypto_tfm *tfm)
+{
+	struct crypto_aead *geniv = __crypto_aead_cast(tfm);
+	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
+	int err;
+
+	spin_lock_init(&ctx->lock);
+
+	crypto_aead_set_reqsize(geniv, sizeof(struct aead_request));
+
+	ctx->null = crypto_get_default_null_skcipher();
+	err = PTR_ERR(ctx->null);
+	if (IS_ERR(ctx->null))
+		goto out;
+
+	err = aead_geniv_init(tfm);
+	if (err)
+		goto drop_null;
+
+	ctx->child = geniv->child;
+	geniv->child = geniv;
+
+out:
+	return err;
+
+drop_null:
+	crypto_put_default_null_skcipher();
+	goto out;
+}
+
+static void seqiv_aead_compat_exit(struct crypto_tfm *tfm)
+{
+	struct seqiv_aead_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_aead(ctx->child);
+}
+
+static void seqiv_aead_exit(struct crypto_tfm *tfm)
+{
+	struct seqiv_aead_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_aead(ctx->child);
+	crypto_put_default_null_skcipher();
+}
+
 static struct crypto_template seqiv_tmpl;
 
 static struct crypto_instance *seqiv_ablkcipher_alloc(struct rtattr **tb)
@@ -280,35 +606,76 @@ static struct crypto_instance *seqiv_ablkcipher_alloc(struct rtattr **tb)
 	inst->alg.cra_exit = skcipher_geniv_exit;
 
 	inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
+	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);
 
 out:
 	return inst;
 }
 
+static struct crypto_instance *seqiv_old_aead_alloc(struct aead_instance *aead)
+{
+	struct crypto_instance *inst = aead_crypto_instance(aead);
+
+	if (inst->alg.cra_aead.ivsize < sizeof(u64)) {
+		aead_geniv_free(aead);
+		return ERR_PTR(-EINVAL);
+	}
+
+	inst->alg.cra_aead.givencrypt = seqiv_aead_givencrypt_first;
+
+	inst->alg.cra_init = seqiv_old_aead_init;
+	inst->alg.cra_exit = aead_geniv_exit;
+
+	inst->alg.cra_ctxsize = inst->alg.cra_aead.ivsize;
+	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);
+
+	return inst;
+}
+
 static struct crypto_instance *seqiv_aead_alloc(struct rtattr **tb)
 {
-	struct crypto_instance *inst;
+	struct aead_instance *inst;
+	struct crypto_aead_spawn *spawn;
+	struct aead_alg *alg;
 
 	inst = aead_geniv_alloc(&seqiv_tmpl, tb, 0, 0);
 
 	if (IS_ERR(inst))
 		goto out;
 
-	if (inst->alg.cra_aead.ivsize < sizeof(u64)) {
+	if (inst->alg.base.cra_aead.encrypt)
+		return seqiv_old_aead_alloc(inst);
+
+	if (inst->alg.ivsize < sizeof(u64)) {
 		aead_geniv_free(inst);
 		inst = ERR_PTR(-EINVAL);
 		goto out;
 	}
 
-	inst->alg.cra_aead.givencrypt = seqiv_aead_givencrypt_first;
+	spawn = aead_instance_ctx(inst);
+	alg = crypto_spawn_aead_alg(spawn);
 
-	inst->alg.cra_init = seqiv_aead_init;
-	inst->alg.cra_exit = aead_geniv_exit;
+	inst->alg.setkey = seqiv_aead_setkey;
+	inst->alg.setauthsize = seqiv_aead_setauthsize;
+	inst->alg.encrypt = seqiv_aead_encrypt_first;
+	inst->alg.decrypt = seqiv_aead_decrypt;
 
-	inst->alg.cra_ctxsize = inst->alg.cra_aead.ivsize;
+	inst->alg.base.cra_init = seqiv_aead_init;
+	inst->alg.base.cra_exit = seqiv_aead_exit;
+
+	inst->alg.base.cra_ctxsize = sizeof(struct seqiv_aead_ctx);
+	inst->alg.base.cra_ctxsize += inst->alg.base.cra_aead.ivsize;
+
+	if (alg->base.cra_aead.encrypt) {
+		inst->alg.encrypt = seqiv_aead_encrypt_compat_first;
+		inst->alg.decrypt = seqiv_aead_decrypt_compat;
+
+		inst->alg.base.cra_init = seqiv_aead_compat_init;
+		inst->alg.base.cra_exit = seqiv_aead_compat_exit;
+	}
 
 out:
-	return inst;
+	return aead_crypto_instance(inst);
 }
 
 static struct crypto_instance *seqiv_alloc(struct rtattr **tb)
@@ -334,7 +701,6 @@ static struct crypto_instance *seqiv_alloc(struct rtattr **tb)
 		goto put_rng;
 
 	inst->alg.cra_alignmask |= __alignof__(u32) - 1;
-	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);
 
 out:
 	return inst;
@@ -349,7 +715,7 @@ static void seqiv_free(struct crypto_instance *inst)
 	if ((inst->alg.cra_flags ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
 		skcipher_geniv_free(inst);
 	else
-		aead_geniv_free(inst);
+		aead_geniv_free(aead_instance(inst));
 	crypto_put_default_rng();
 }
 
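
Worth noting about the two new encrypt paths above: in both, the generated IV (the per-tfm salt XORed into the sequence number) is written into the destination immediately after the associated data via scatterwalk_map_and_copy(). They differ only in how the child request is set up -- the compat path drives an old-style child on req->src with the IV accounted for separately, while the new-style path first copies src to dst through the null skcipher and then runs the child in place with the IV folded into the associated data. The relevant calls, copied from the functions above:

	/* compat path (old-style child) */
	aead_request_set_crypt(subreq, req->src, req->dst,
			       req->cryptlen - ivsize, info);
	aead_request_set_ad(subreq, req->assoclen, ivsize);

	/* new-style child: operate in place, AD extended over the IV */
	aead_request_set_crypt(subreq, req->dst, req->dst,
			       req->cryptlen - ivsize, info);
	aead_request_set_ad(subreq, req->assoclen + ivsize, 0);
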
diff --git a/include/crypto/internal/aead.h b/include/crypto/internal/aead.h
index 6cd31519c4f6..08f2ca6c020e 100644
--- a/include/crypto/internal/aead.h
+++ b/include/crypto/internal/aead.h
@@ -117,10 +117,9 @@ static inline struct crypto_aead *crypto_spawn_aead(
 	return crypto_spawn_tfm2(&spawn->base);
 }
 
-struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
-					 struct rtattr **tb, u32 type,
-					 u32 mask);
-void aead_geniv_free(struct crypto_instance *inst);
+struct aead_instance *aead_geniv_alloc(struct crypto_template *tmpl,
+				       struct rtattr **tb, u32 type, u32 mask);
+void aead_geniv_free(struct aead_instance *inst);
 int aead_geniv_init(struct crypto_tfm *tfm);
 void aead_geniv_exit(struct crypto_tfm *tfm);
 