Diffstat (limited to 'crypto/cryptd.c')
 -rw-r--r--  crypto/cryptd.c | 321 ++++++++++++++++++++++++++++++++-----------------
 1 file changed, 209 insertions(+), 112 deletions(-)
diff --git a/crypto/cryptd.c b/crypto/cryptd.c
index ae5fa99d5d3..35335825a4e 100644
--- a/crypto/cryptd.c
+++ b/crypto/cryptd.c
@@ -39,6 +39,11 @@ struct cryptd_instance_ctx {
 	struct cryptd_queue *queue;
 };
 
+struct hashd_instance_ctx {
+	struct crypto_shash_spawn spawn;
+	struct cryptd_queue *queue;
+};
+
 struct cryptd_blkcipher_ctx {
 	struct crypto_blkcipher *child;
 };
@@ -48,11 +53,12 @@ struct cryptd_blkcipher_request_ctx {
 };
 
 struct cryptd_hash_ctx {
-	struct crypto_hash *child;
+	struct crypto_shash *child;
 };
 
 struct cryptd_hash_request_ctx {
 	crypto_completion_t complete;
+	struct shash_desc desc;
 };
 
 static void cryptd_queue_worker(struct work_struct *work);
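
The new `desc` member is what lets each queued request carry its own synchronous hash state across the workqueue. `struct shash_desc` is a variable-length object: the algorithm's private state (`__ctx`) sits in memory immediately after the struct, which is why `desc` must stay the last member and why `cryptd_hash_init_tfm()` below reserves `crypto_shash_descsize()` extra bytes of request context. A rough sketch of the resulting layout (a sketch for orientation, not kernel source):

    /*
     * Request context of a cryptd hash instance after this patch:
     *
     *   +------------------------------+ <- ahash_request_ctx(req)
     *   | crypto_completion_t complete |
     *   +------------------------------+
     *   | struct shash_desc desc       |   .tfm, .flags
     *   +------------------------------+
     *   | desc.__ctx[]                 |   crypto_shash_descsize(hash)
     *   | per-algorithm digest state   |   bytes, reserved via reqsize
     *   +------------------------------+
     */
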
@@ -249,32 +255,24 @@ static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
 	crypto_free_blkcipher(ctx->child);
 }
 
-static struct crypto_instance *cryptd_alloc_instance(struct crypto_alg *alg,
-					     struct cryptd_queue *queue)
+static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
+				   unsigned int tail)
 {
+	char *p;
 	struct crypto_instance *inst;
-	struct cryptd_instance_ctx *ctx;
 	int err;
 
-	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
-	if (!inst) {
-		inst = ERR_PTR(-ENOMEM);
-		goto out;
-	}
+	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
+	if (!p)
+		return ERR_PTR(-ENOMEM);
+
+	inst = (void *)(p + head);
 
 	err = -ENAMETOOLONG;
 	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
 		     "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
 		goto out_free_inst;
 
-	ctx = crypto_instance_ctx(inst);
-	err = crypto_init_spawn(&ctx->spawn, alg, inst,
-				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
-	if (err)
-		goto out_free_inst;
-
-	ctx->queue = queue;
-
 	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
 
 	inst->alg.cra_priority = alg->cra_priority + 50;
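
The `head`/`tail` split is the point of this rewrite: `head` reserves space in front of `struct crypto_instance` (needed so an `ahash_instance` can wrap it, as the hash hunks below do), `tail` holds the instance context behind it, and only `p`, the true start of the allocation, may ever reach `kfree()`. Roughly, as a sketch under the assumption that the ahash path passes `ahash_instance_headroom()` as `head`:

    /*
     * p                    inst = (void *)(p + head)
     * |<------ head ------>|<-- sizeof(*inst) -->|<----- tail ----->|
     * +--------------------+---------------------+------------------+
     * | wrapper header     | struct              | instance ctx     |
     * | (ahash_instance    | crypto_instance     | (spawn + queue)  |
     * |  fields, if any)   |                     |                  |
     * +--------------------+---------------------+------------------+
     *
     * blkcipher: head == 0, so kfree(inst) frees the whole block.
     * ahash: head == ahash_instance_headroom(), so the free path must
     * kfree(ahash_instance(inst)), i.e. the original p, instead.
     */
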
@@ -282,29 +280,41 @@ static struct crypto_instance *cryptd_alloc_instance(struct crypto_alg *alg,
 	inst->alg.cra_alignmask = alg->cra_alignmask;
 
 out:
-	return inst;
+	return p;
 
 out_free_inst:
-	kfree(inst);
-	inst = ERR_PTR(err);
+	kfree(p);
+	p = ERR_PTR(err);
 	goto out;
 }
 
-static struct crypto_instance *cryptd_alloc_blkcipher(
-	struct rtattr **tb, struct cryptd_queue *queue)
+static int cryptd_create_blkcipher(struct crypto_template *tmpl,
+				   struct rtattr **tb,
+				   struct cryptd_queue *queue)
 {
+	struct cryptd_instance_ctx *ctx;
 	struct crypto_instance *inst;
 	struct crypto_alg *alg;
+	int err;
 
 	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
 				  CRYPTO_ALG_TYPE_MASK);
 	if (IS_ERR(alg))
-		return ERR_CAST(alg);
+		return PTR_ERR(alg);
 
-	inst = cryptd_alloc_instance(alg, queue);
+	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
+	err = PTR_ERR(inst);
 	if (IS_ERR(inst))
 		goto out_put_alg;
 
+	ctx = crypto_instance_ctx(inst);
+	ctx->queue = queue;
+
+	err = crypto_init_spawn(&ctx->spawn, alg, inst,
+				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
+	if (err)
+		goto out_free_inst;
+
 	inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
 	inst->alg.cra_type = &crypto_ablkcipher_type;
 
@@ -323,26 +333,34 @@ static struct crypto_instance *cryptd_alloc_blkcipher(
 	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
 	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;
 
+	err = crypto_register_instance(tmpl, inst);
+	if (err) {
+		crypto_drop_spawn(&ctx->spawn);
+out_free_inst:
+		kfree(inst);
+	}
+
 out_put_alg:
 	crypto_mod_put(alg);
-	return inst;
+	return err;
 }
 
 static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
 {
 	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
-	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
-	struct crypto_spawn *spawn = &ictx->spawn;
+	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
+	struct crypto_shash_spawn *spawn = &ictx->spawn;
 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
-	struct crypto_hash *cipher;
+	struct crypto_shash *hash;
 
-	cipher = crypto_spawn_hash(spawn);
-	if (IS_ERR(cipher))
-		return PTR_ERR(cipher);
+	hash = crypto_spawn_shash(spawn);
+	if (IS_ERR(hash))
+		return PTR_ERR(hash);
 
-	ctx->child = cipher;
-	tfm->crt_ahash.reqsize =
-		sizeof(struct cryptd_hash_request_ctx);
+	ctx->child = hash;
+	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
+				 sizeof(struct cryptd_hash_request_ctx) +
+				 crypto_shash_descsize(hash));
 	return 0;
 }
 
@@ -350,22 +368,22 @@ static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
 {
 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	crypto_free_hash(ctx->child);
+	crypto_free_shash(ctx->child);
 }
 
 static int cryptd_hash_setkey(struct crypto_ahash *parent,
 				const u8 *key, unsigned int keylen)
 {
 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
-	struct crypto_hash *child = ctx->child;
+	struct crypto_shash *child = ctx->child;
 	int err;
 
-	crypto_hash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
-	crypto_hash_set_flags(child, crypto_ahash_get_flags(parent) &
+	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
 			       CRYPTO_TFM_REQ_MASK);
-	err = crypto_hash_setkey(child, key, keylen);
-	crypto_ahash_set_flags(parent, crypto_hash_get_flags(child) &
+	err = crypto_shash_setkey(child, key, keylen);
+	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
 			       CRYPTO_TFM_RES_MASK);
 	return err;
 }
 
@@ -385,21 +403,19 @@ static int cryptd_hash_enqueue(struct ahash_request *req,
 
 static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
 {
 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
-	struct crypto_hash *child = ctx->child;
+	struct crypto_shash *child = ctx->child;
 	struct ahash_request *req = ahash_request_cast(req_async);
-	struct cryptd_hash_request_ctx *rctx;
-	struct hash_desc desc;
-
-	rctx = ahash_request_ctx(req);
+	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
+	struct shash_desc *desc = &rctx->desc;
 
 	if (unlikely(err == -EINPROGRESS))
 		goto out;
 
-	desc.tfm = child;
-	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+	desc->tfm = child;
+	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
 
-	err = crypto_hash_crt(child)->init(&desc);
+	err = crypto_shash_init(desc);
 
 	req->base.complete = rctx->complete;
 
@@ -416,23 +432,15 @@ static int cryptd_hash_init_enqueue(struct ahash_request *req)
 
 static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
 {
-	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
-	struct crypto_hash *child = ctx->child;
-	struct ahash_request *req = ahash_request_cast(req_async);
+	struct ahash_request *req = ahash_request_cast(req_async);
 	struct cryptd_hash_request_ctx *rctx;
-	struct hash_desc desc;
 
 	rctx = ahash_request_ctx(req);
 
 	if (unlikely(err == -EINPROGRESS))
 		goto out;
 
-	desc.tfm = child;
-	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
-
-	err = crypto_hash_crt(child)->update(&desc,
-						req->src,
-						req->nbytes);
+	err = shash_ahash_update(req, &rctx->desc);
 
 	req->base.complete = rctx->complete;
 
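
`shash_ahash_update()` is the helper from crypto/shash.c that bridges the two interfaces: it walks the ahash request's scatterlist and feeds each mapped segment to the synchronous algorithm. Its core is essentially the following (reproduced here in simplified form):

    int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
    {
            struct crypto_hash_walk walk;
            int nbytes;

            /* Map each scatterlist segment in turn and hash it. */
            for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
                 nbytes = crypto_hash_walk_done(&walk, nbytes))
                    nbytes = crypto_shash_update(desc, walk.data, nbytes);

            return nbytes;
    }

`shash_ahash_finup()` and `shash_ahash_digest()`, used in the hunks below, follow the same walk-and-feed pattern, finishing with `crypto_shash_final()` on the last segment.
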
@@ -449,21 +457,13 @@ static int cryptd_hash_update_enqueue(struct ahash_request *req)
 
 static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
 {
-	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
-	struct crypto_hash *child = ctx->child;
-	struct ahash_request *req = ahash_request_cast(req_async);
-	struct cryptd_hash_request_ctx *rctx;
-	struct hash_desc desc;
-
-	rctx = ahash_request_ctx(req);
+	struct ahash_request *req = ahash_request_cast(req_async);
+	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
 
 	if (unlikely(err == -EINPROGRESS))
 		goto out;
 
-	desc.tfm = child;
-	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
-
-	err = crypto_hash_crt(child)->final(&desc, req->result);
+	err = crypto_shash_final(&rctx->desc, req->result);
 
 	req->base.complete = rctx->complete;
 
@@ -478,26 +478,44 @@ static int cryptd_hash_final_enqueue(struct ahash_request *req)
 	return cryptd_hash_enqueue(req, cryptd_hash_final);
 }
 
-static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
+static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
 {
-	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
-	struct crypto_hash *child = ctx->child;
-	struct ahash_request *req = ahash_request_cast(req_async);
-	struct cryptd_hash_request_ctx *rctx;
-	struct hash_desc desc;
+	struct ahash_request *req = ahash_request_cast(req_async);
+	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
 
-	rctx = ahash_request_ctx(req);
+	if (unlikely(err == -EINPROGRESS))
+		goto out;
+
+	err = shash_ahash_finup(req, &rctx->desc);
+
+	req->base.complete = rctx->complete;
+
+out:
+	local_bh_disable();
+	rctx->complete(&req->base, err);
+	local_bh_enable();
+}
+
+static int cryptd_hash_finup_enqueue(struct ahash_request *req)
+{
+	return cryptd_hash_enqueue(req, cryptd_hash_finup);
+}
+
+static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
+{
+	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
+	struct crypto_shash *child = ctx->child;
+	struct ahash_request *req = ahash_request_cast(req_async);
+	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
+	struct shash_desc *desc = &rctx->desc;
 
 	if (unlikely(err == -EINPROGRESS))
 		goto out;
 
-	desc.tfm = child;
-	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+	desc->tfm = child;
+	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
 
-	err = crypto_hash_crt(child)->digest(&desc,
-						req->src,
-						req->nbytes,
-						req->result);
+	err = shash_ahash_digest(req, desc);
 
 	req->base.complete = rctx->complete;
 
@@ -512,64 +530,108 @@ static int cryptd_hash_digest_enqueue(struct ahash_request *req)
 	return cryptd_hash_enqueue(req, cryptd_hash_digest);
 }
 
-static struct crypto_instance *cryptd_alloc_hash(
-	struct rtattr **tb, struct cryptd_queue *queue)
+static int cryptd_hash_export(struct ahash_request *req, void *out)
 {
-	struct crypto_instance *inst;
+	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
+
+	return crypto_shash_export(&rctx->desc, out);
+}
+
+static int cryptd_hash_import(struct ahash_request *req, const void *in)
+{
+	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
+
+	return crypto_shash_import(&rctx->desc, in);
+}
+
+static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
+			      struct cryptd_queue *queue)
+{
+	struct hashd_instance_ctx *ctx;
+	struct ahash_instance *inst;
+	struct shash_alg *salg;
 	struct crypto_alg *alg;
+	int err;
 
-	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_HASH,
-				  CRYPTO_ALG_TYPE_HASH_MASK);
-	if (IS_ERR(alg))
-		return ERR_PTR(PTR_ERR(alg));
+	salg = shash_attr_alg(tb[1], 0, 0);
+	if (IS_ERR(salg))
+		return PTR_ERR(salg);
 
-	inst = cryptd_alloc_instance(alg, queue);
+	alg = &salg->base;
+	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
+				     sizeof(*ctx));
+	err = PTR_ERR(inst);
 	if (IS_ERR(inst))
 		goto out_put_alg;
 
-	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC;
-	inst->alg.cra_type = &crypto_ahash_type;
+	ctx = ahash_instance_ctx(inst);
+	ctx->queue = queue;
 
-	inst->alg.cra_ahash.digestsize = alg->cra_hash.digestsize;
-	inst->alg.cra_ctxsize = sizeof(struct cryptd_hash_ctx);
+	err = crypto_init_shash_spawn(&ctx->spawn, salg,
+				      ahash_crypto_instance(inst));
+	if (err)
+		goto out_free_inst;
 
-	inst->alg.cra_init = cryptd_hash_init_tfm;
-	inst->alg.cra_exit = cryptd_hash_exit_tfm;
+	inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC;
+
+	inst->alg.halg.digestsize = salg->digestsize;
+	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);
 
-	inst->alg.cra_ahash.init = cryptd_hash_init_enqueue;
-	inst->alg.cra_ahash.update = cryptd_hash_update_enqueue;
-	inst->alg.cra_ahash.final = cryptd_hash_final_enqueue;
-	inst->alg.cra_ahash.setkey = cryptd_hash_setkey;
-	inst->alg.cra_ahash.digest = cryptd_hash_digest_enqueue;
+	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
+	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;
+
+	inst->alg.init = cryptd_hash_init_enqueue;
+	inst->alg.update = cryptd_hash_update_enqueue;
+	inst->alg.final = cryptd_hash_final_enqueue;
+	inst->alg.finup = cryptd_hash_finup_enqueue;
+	inst->alg.export = cryptd_hash_export;
+	inst->alg.import = cryptd_hash_import;
+	inst->alg.setkey = cryptd_hash_setkey;
+	inst->alg.digest = cryptd_hash_digest_enqueue;
+
+	err = ahash_register_instance(tmpl, inst);
+	if (err) {
+		crypto_drop_shash(&ctx->spawn);
+out_free_inst:
+		kfree(inst);
+	}
 
 out_put_alg:
 	crypto_mod_put(alg);
-	return inst;
+	return err;
 }
 
 static struct cryptd_queue queue;
 
-static struct crypto_instance *cryptd_alloc(struct rtattr **tb)
+static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
 {
 	struct crypto_attr_type *algt;
 
 	algt = crypto_get_attr_type(tb);
 	if (IS_ERR(algt))
-		return ERR_CAST(algt);
+		return PTR_ERR(algt);
 
 	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
 	case CRYPTO_ALG_TYPE_BLKCIPHER:
-		return cryptd_alloc_blkcipher(tb, &queue);
+		return cryptd_create_blkcipher(tmpl, tb, &queue);
 	case CRYPTO_ALG_TYPE_DIGEST:
-		return cryptd_alloc_hash(tb, &queue);
+		return cryptd_create_hash(tmpl, tb, &queue);
 	}
 
-	return ERR_PTR(-EINVAL);
+	return -EINVAL;
 }
 
 static void cryptd_free(struct crypto_instance *inst)
 {
 	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
+	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
+
+	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
+	case CRYPTO_ALG_TYPE_AHASH:
+		crypto_drop_shash(&hctx->spawn);
+		kfree(ahash_instance(inst));
+		return;
+	}
 
 	crypto_drop_spawn(&ctx->spawn);
 	kfree(inst);
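
Nothing beyond template registration is needed to reach the new code: once `cryptd_create_hash()` is wired into `cryptd_create()`, a templated name resolves through the normal algorithm lookup. A hedged example (assuming a sha256 shash is registered):

    /* Hypothetical caller: wrap a software hash in the cryptd queue. */
    struct crypto_ahash *tfm;

    tfm = crypto_alloc_ahash("cryptd(sha256)", 0, 0);
    if (IS_ERR(tfm))
            return PTR_ERR(tfm);

    /*
     * The instance carries CRYPTO_ALG_ASYNC: init/update/final/digest
     * are queued to the cryptd workqueue and complete through the
     * request's completion callback.
     */
    crypto_free_ahash(tfm);
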
@@ -577,7 +639,7 @@ static void cryptd_free(struct crypto_instance *inst)
 
 static struct crypto_template cryptd_tmpl = {
 	.name = "cryptd",
-	.alloc = cryptd_alloc,
+	.create = cryptd_create,
 	.free = cryptd_free,
 	.module = THIS_MODULE,
 };
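
The `.alloc` to `.create` switch tracks the newer template interface: instead of returning an instance for the crypto core to register, `create()` is handed the template and registers the instance itself, returning an errno. The relevant part of `struct crypto_template` (simplified from include/crypto/algapi.h of this era; field order approximate):

    struct crypto_template {
            struct list_head list;
            struct hlist_head instances;
            struct module *module;

            /* legacy hook: return an instance, the core registers it */
            struct crypto_instance *(*alloc)(struct rtattr **tb);
            void (*free)(struct crypto_instance *inst);
            /* newer hook: register the instance, return an errno */
            int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

            char name[CRYPTO_MAX_ALG_NAME];
    };
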
@@ -620,6 +682,41 @@ void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
 }
 EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
 
+struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
+					u32 type, u32 mask)
+{
+	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
+	struct crypto_ahash *tfm;
+
+	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
+		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
+		return ERR_PTR(-EINVAL);
+	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
+	if (IS_ERR(tfm))
+		return ERR_CAST(tfm);
+	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
+		crypto_free_ahash(tfm);
+		return ERR_PTR(-EINVAL);
+	}
+
+	return __cryptd_ahash_cast(tfm);
+}
+EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
+
+struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
+{
+	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
+
+	return ctx->child;
+}
+EXPORT_SYMBOL_GPL(cryptd_ahash_child);
+
+void cryptd_free_ahash(struct cryptd_ahash *tfm)
+{
+	crypto_free_ahash(&tfm->base);
+}
+EXPORT_SYMBOL_GPL(cryptd_free_ahash);
+
 static int __init cryptd_init(void)
 {
 	int err;
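
The three new exports give users a type-checked handle on the wrapper plus direct access to the synchronous child, so a caller already running in a context where the shash may execute can hash inline instead of round-tripping through the queue. A minimal usage sketch (hypothetical caller, error handling trimmed):

    #include <crypto/cryptd.h>

    static int example_get_cryptd_sha1(void)
    {
            struct cryptd_ahash *ctfm;
            struct crypto_shash *child;

            /* Instantiates "cryptd(sha1)" and verifies the result
             * really came from this module. */
            ctfm = cryptd_alloc_ahash("sha1", 0, 0);
            if (IS_ERR(ctfm))
                    return PTR_ERR(ctfm);

            /* Direct handle on the synchronous algorithm doing the work. */
            child = cryptd_ahash_child(ctfm);
            pr_info("cryptd child: %s\n",
                    crypto_tfm_alg_driver_name(crypto_shash_tfm(child)));

            cryptd_free_ahash(ctfm);
            return 0;
    }
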