path: root/crypto/cryptd.c
author    Herbert Xu <herbert@gondor.apana.org.au>  2009-07-12 09:38:59 -0400
committer Herbert Xu <herbert@gondor.apana.org.au>  2009-07-14 00:58:10 -0400
commit    46309d8938122dff2fe59bf163307989cd22ea4a (patch)
tree      7a2987245bd2a0c90ea234663c2488cfa9e0be4d /crypto/cryptd.c
parent    7eddf95ec5440d60f10963f453e27f82f394044e (diff)
crypto: cryptd - Use shash algorithms
This patch changes cryptd to use shash algorithms instead of the legacy hash interface.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
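For context: a shash algorithm is driven through a caller-allocated
struct shash_desc, followed in memory by crypto_shash_descsize() bytes
of operational state. That is why the patch below embeds a shash_desc
in the request context and grows crt_ahash.reqsize by the descsize. A
minimal sketch of that calling pattern, using illustrative example_*
names that are not part of this patch:

	#include <crypto/hash.h>

	/*
	 * Per-request state for an async wrapper around a sync shash.
	 * The shash_desc should be the last member: the tfm's descsize
	 * bytes of hash state are stored directly after it.
	 */
	struct example_request_ctx {
		crypto_completion_t complete;
		struct shash_desc desc;
	};

	static int example_digest(struct crypto_shash *child,
				  struct example_request_ctx *rctx,
				  const u8 *data, unsigned int len, u8 *out)
	{
		struct shash_desc *desc = &rctx->desc;

		desc->tfm = child;
		desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

		/* One-shot digest: init + update + final in one call. */
		return crypto_shash_digest(desc, data, len, out);
	}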
Diffstat (limited to 'crypto/cryptd.c')
-rw-r--r--  crypto/cryptd.c  164
1 files changed, 84 insertions, 80 deletions
diff --git a/crypto/cryptd.c b/crypto/cryptd.c
index ae5fa99d5d36..ef5720cf1216 100644
--- a/crypto/cryptd.c
+++ b/crypto/cryptd.c
@@ -39,6 +39,11 @@ struct cryptd_instance_ctx {
 	struct cryptd_queue *queue;
 };
 
+struct hashd_instance_ctx {
+	struct crypto_shash_spawn spawn;
+	struct cryptd_queue *queue;
+};
+
 struct cryptd_blkcipher_ctx {
 	struct crypto_blkcipher *child;
 };
@@ -48,11 +53,12 @@ struct cryptd_blkcipher_request_ctx {
 };
 
 struct cryptd_hash_ctx {
-	struct crypto_hash *child;
+	struct crypto_shash *child;
 };
 
 struct cryptd_hash_request_ctx {
 	crypto_completion_t complete;
+	struct shash_desc desc;
 };
 
 static void cryptd_queue_worker(struct work_struct *work);
@@ -250,13 +256,12 @@ static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
 }
 
 static struct crypto_instance *cryptd_alloc_instance(struct crypto_alg *alg,
-						     struct cryptd_queue *queue)
+						     unsigned int tail)
 {
 	struct crypto_instance *inst;
-	struct cryptd_instance_ctx *ctx;
 	int err;
 
-	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
+	inst = kzalloc(sizeof(*inst) + tail, GFP_KERNEL);
 	if (!inst) {
 		inst = ERR_PTR(-ENOMEM);
 		goto out;
@@ -267,14 +272,6 @@ static struct crypto_instance *cryptd_alloc_instance(struct crypto_alg *alg,
267 "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) 272 "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
268 goto out_free_inst; 273 goto out_free_inst;
269 274
270 ctx = crypto_instance_ctx(inst);
271 err = crypto_init_spawn(&ctx->spawn, alg, inst,
272 CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
273 if (err)
274 goto out_free_inst;
275
276 ctx->queue = queue;
277
278 memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME); 275 memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
279 276
280 inst->alg.cra_priority = alg->cra_priority + 50; 277 inst->alg.cra_priority = alg->cra_priority + 50;
@@ -293,18 +290,28 @@ out_free_inst:
 static struct crypto_instance *cryptd_alloc_blkcipher(
 	struct rtattr **tb, struct cryptd_queue *queue)
 {
+	struct cryptd_instance_ctx *ctx;
 	struct crypto_instance *inst;
 	struct crypto_alg *alg;
+	int err;
 
 	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
 				  CRYPTO_ALG_TYPE_MASK);
 	if (IS_ERR(alg))
 		return ERR_CAST(alg);
 
-	inst = cryptd_alloc_instance(alg, queue);
+	inst = cryptd_alloc_instance(alg, sizeof(*ctx));
 	if (IS_ERR(inst))
 		goto out_put_alg;
 
+	ctx = crypto_instance_ctx(inst);
+	ctx->queue = queue;
+
+	err = crypto_init_spawn(&ctx->spawn, alg, inst,
+				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
+	if (err)
+		goto out_free_inst;
+
 	inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
 	inst->alg.cra_type = &crypto_ablkcipher_type;
 
@@ -326,23 +333,28 @@ static struct crypto_instance *cryptd_alloc_blkcipher(
 out_put_alg:
 	crypto_mod_put(alg);
 	return inst;
+
+out_free_inst:
+	kfree(inst);
+	inst = ERR_PTR(err);
+	goto out_put_alg;
 }
 
 static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
 {
 	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
-	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
-	struct crypto_spawn *spawn = &ictx->spawn;
+	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
+	struct crypto_shash_spawn *spawn = &ictx->spawn;
 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
-	struct crypto_hash *cipher;
+	struct crypto_shash *hash;
 
-	cipher = crypto_spawn_hash(spawn);
-	if (IS_ERR(cipher))
-		return PTR_ERR(cipher);
+	hash = crypto_spawn_shash(spawn);
+	if (IS_ERR(hash))
+		return PTR_ERR(hash);
 
-	ctx->child = cipher;
-	tfm->crt_ahash.reqsize =
-		sizeof(struct cryptd_hash_request_ctx);
+	ctx->child = hash;
+	tfm->crt_ahash.reqsize = sizeof(struct cryptd_hash_request_ctx) +
+				 crypto_shash_descsize(hash);
 	return 0;
 }
 
@@ -350,22 +362,22 @@ static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
 {
 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	crypto_free_hash(ctx->child);
+	crypto_free_shash(ctx->child);
 }
 
 static int cryptd_hash_setkey(struct crypto_ahash *parent,
 			      const u8 *key, unsigned int keylen)
 {
 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
-	struct crypto_hash *child = ctx->child;
+	struct crypto_shash *child = ctx->child;
 	int err;
 
-	crypto_hash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
-	crypto_hash_set_flags(child, crypto_ahash_get_flags(parent) &
-			      CRYPTO_TFM_REQ_MASK);
-	err = crypto_hash_setkey(child, key, keylen);
-	crypto_ahash_set_flags(parent, crypto_hash_get_flags(child) &
-			       CRYPTO_TFM_RES_MASK);
+	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
+			       CRYPTO_TFM_REQ_MASK);
+	err = crypto_shash_setkey(child, key, keylen);
+	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
+			       CRYPTO_TFM_RES_MASK);
 	return err;
 }
 
@@ -385,21 +397,19 @@ static int cryptd_hash_enqueue(struct ahash_request *req,
 
 static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
 {
 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
-	struct crypto_hash *child = ctx->child;
+	struct crypto_shash *child = ctx->child;
 	struct ahash_request *req = ahash_request_cast(req_async);
-	struct cryptd_hash_request_ctx *rctx;
-	struct hash_desc desc;
-
-	rctx = ahash_request_ctx(req);
+	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
+	struct shash_desc *desc = &rctx->desc;
 
 	if (unlikely(err == -EINPROGRESS))
 		goto out;
 
-	desc.tfm = child;
-	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+	desc->tfm = child;
+	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
 
-	err = crypto_hash_crt(child)->init(&desc);
+	err = crypto_shash_init(desc);
 
 	req->base.complete = rctx->complete;
 
@@ -416,23 +426,15 @@ static int cryptd_hash_init_enqueue(struct ahash_request *req)
 
 static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
 {
-	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
-	struct crypto_hash *child = ctx->child;
-	struct ahash_request *req = ahash_request_cast(req_async);
+	struct ahash_request *req = ahash_request_cast(req_async);
 	struct cryptd_hash_request_ctx *rctx;
-	struct hash_desc desc;
 
 	rctx = ahash_request_ctx(req);
 
 	if (unlikely(err == -EINPROGRESS))
 		goto out;
 
-	desc.tfm = child;
-	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
-
-	err = crypto_hash_crt(child)->update(&desc,
-						req->src,
-						req->nbytes);
+	err = shash_ahash_update(req, &rctx->desc);
 
 	req->base.complete = rctx->complete;
 
@@ -449,21 +451,13 @@ static int cryptd_hash_update_enqueue(struct ahash_request *req)
 
 static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
 {
-	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
-	struct crypto_hash *child = ctx->child;
-	struct ahash_request *req = ahash_request_cast(req_async);
-	struct cryptd_hash_request_ctx *rctx;
-	struct hash_desc desc;
-
-	rctx = ahash_request_ctx(req);
+	struct ahash_request *req = ahash_request_cast(req_async);
+	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
 
 	if (unlikely(err == -EINPROGRESS))
 		goto out;
 
-	desc.tfm = child;
-	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
-
-	err = crypto_hash_crt(child)->final(&desc, req->result);
+	err = crypto_shash_final(&rctx->desc, req->result);
 
 	req->base.complete = rctx->complete;
 
@@ -480,24 +474,19 @@ static int cryptd_hash_final_enqueue(struct ahash_request *req)
 
 static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
 {
 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
-	struct crypto_hash *child = ctx->child;
+	struct crypto_shash *child = ctx->child;
 	struct ahash_request *req = ahash_request_cast(req_async);
-	struct cryptd_hash_request_ctx *rctx;
-	struct hash_desc desc;
-
-	rctx = ahash_request_ctx(req);
+	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
+	struct shash_desc *desc = &rctx->desc;
 
 	if (unlikely(err == -EINPROGRESS))
 		goto out;
 
-	desc.tfm = child;
-	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+	desc->tfm = child;
+	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
 
-	err = crypto_hash_crt(child)->digest(&desc,
-						req->src,
-						req->nbytes,
-						req->result);
+	err = shash_ahash_digest(req, desc);
 
 	req->base.complete = rctx->complete;
 
@@ -515,22 +504,32 @@ static int cryptd_hash_digest_enqueue(struct ahash_request *req)
 static struct crypto_instance *cryptd_alloc_hash(
 	struct rtattr **tb, struct cryptd_queue *queue)
 {
+	struct hashd_instance_ctx *ctx;
 	struct crypto_instance *inst;
+	struct shash_alg *salg;
 	struct crypto_alg *alg;
+	int err;
 
-	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_HASH,
-				  CRYPTO_ALG_TYPE_HASH_MASK);
-	if (IS_ERR(alg))
-		return ERR_PTR(PTR_ERR(alg));
+	salg = shash_attr_alg(tb[1], 0, 0);
+	if (IS_ERR(salg))
+		return ERR_CAST(salg);
 
-	inst = cryptd_alloc_instance(alg, queue);
+	alg = &salg->base;
+	inst = cryptd_alloc_instance(alg, sizeof(*ctx));
 	if (IS_ERR(inst))
 		goto out_put_alg;
 
+	ctx = crypto_instance_ctx(inst);
+	ctx->queue = queue;
+
+	err = crypto_init_shash_spawn(&ctx->spawn, salg, inst);
+	if (err)
+		goto out_free_inst;
+
 	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC;
 	inst->alg.cra_type = &crypto_ahash_type;
 
-	inst->alg.cra_ahash.digestsize = alg->cra_hash.digestsize;
+	inst->alg.cra_ahash.digestsize = salg->digestsize;
 	inst->alg.cra_ctxsize = sizeof(struct cryptd_hash_ctx);
 
 	inst->alg.cra_init = cryptd_hash_init_tfm;
@@ -545,6 +544,11 @@ static struct crypto_instance *cryptd_alloc_hash(
 out_put_alg:
 	crypto_mod_put(alg);
 	return inst;
+
+out_free_inst:
+	kfree(inst);
+	inst = ERR_PTR(err);
+	goto out_put_alg;
 }
 
 static struct cryptd_queue queue;
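
Usage note: after this change a caller still obtains the asynchronous
wrapper by name through the ahash API; cryptd instantiates around the
named synchronous shash and runs it from its workqueue. A hedged sketch
(the algorithm name is illustrative, not mandated by this patch):

	#include <linux/err.h>
	#include <crypto/hash.h>

	static int example_get_async_sha1(void)
	{
		struct crypto_ahash *tfm;

		/* Ask the crypto API for the cryptd-wrapped driver. */
		tfm = crypto_alloc_ahash("cryptd(sha1-generic)", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		/* ... build and submit ahash_requests here; completions
		 * run asynchronously from cryptd's workqueue ... */

		crypto_free_ahash(tfm);
		return 0;
	}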