author	Linus Torvalds <torvalds@linux-foundation.org>	2010-08-04 18:23:14 -0400
committer	Linus Torvalds <torvalds@linux-foundation.org>	2010-08-04 18:23:14 -0400
commit	b7c8e55db7141dcbb9d5305a3260fa0ed62a1bcc (patch)
tree	59fbd52d8e80e5a83d9747961d28aaf4d400613a /drivers/crypto/n2_core.c
parent	ffd386a9a8273dcfa61705d0b349eebc7525ef87 (diff)
parent	4015d9a865e3bcc42d88bedc8ce1551000bab664 (diff)
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (39 commits)
  random: Reorder struct entropy_store to remove padding on 64bits
  padata: update API documentation
  padata: Remove padata_get_cpumask
  crypto: pcrypt - Update pcrypt cpumask according to the padata cpumask notifier
  crypto: pcrypt - Rename pcrypt_instance
  padata: Pass the padata cpumasks to the cpumask_change_notifier chain
  padata: Rearrange set_cpumask functions
  padata: Rename padata_alloc functions
  crypto: pcrypt - Dont calulate a callback cpu on empty callback cpumask
  padata: Check for valid cpumasks
  padata: Allocate cpumask dependend recources in any case
  padata: Fix cpu index counting
  crypto: geode_aes - Convert pci_table entries to PCI_VDEVICE (if PCI_ANY_ID is used)
  pcrypt: Added sysfs interface to pcrypt
  padata: Added sysfs primitives to padata subsystem
  padata: Make two separate cpumasks
  padata: update documentation
  padata: simplify serialization mechanism
  padata: make padata_do_parallel to return zero on success
  padata: Handle empty padata cpumasks
  ...
Diffstat (limited to 'drivers/crypto/n2_core.c')
-rw-r--r--	drivers/crypto/n2_core.c	415
1 file changed, 296 insertions(+), 119 deletions(-)
diff --git a/drivers/crypto/n2_core.c b/drivers/crypto/n2_core.c
index 23163fda503..b99c38f23d6 100644
--- a/drivers/crypto/n2_core.c
+++ b/drivers/crypto/n2_core.c
@@ -239,21 +239,57 @@ static inline bool n2_should_run_async(struct spu_queue *qp, int this_len)
 }
 #endif
 
-struct n2_base_ctx {
-	struct list_head list;
+struct n2_ahash_alg {
+	struct list_head entry;
+	const char *hash_zero;
+	const u32 *hash_init;
+	u8 hw_op_hashsz;
+	u8 digest_size;
+	u8 auth_type;
+	u8 hmac_type;
+	struct ahash_alg alg;
 };
 
-static void n2_base_ctx_init(struct n2_base_ctx *ctx)
+static inline struct n2_ahash_alg *n2_ahash_alg(struct crypto_tfm *tfm)
 {
-	INIT_LIST_HEAD(&ctx->list);
+	struct crypto_alg *alg = tfm->__crt_alg;
+	struct ahash_alg *ahash_alg;
+
+	ahash_alg = container_of(alg, struct ahash_alg, halg.base);
+
+	return container_of(ahash_alg, struct n2_ahash_alg, alg);
 }
 
-struct n2_hash_ctx {
-	struct n2_base_ctx base;
+struct n2_hmac_alg {
+	const char *child_alg;
+	struct n2_ahash_alg derived;
+};
+
+static inline struct n2_hmac_alg *n2_hmac_alg(struct crypto_tfm *tfm)
+{
+	struct crypto_alg *alg = tfm->__crt_alg;
+	struct ahash_alg *ahash_alg;
+
+	ahash_alg = container_of(alg, struct ahash_alg, halg.base);
+
+	return container_of(ahash_alg, struct n2_hmac_alg, derived.alg);
+}
 
+struct n2_hash_ctx {
 	struct crypto_ahash *fallback_tfm;
 };
 
+#define N2_HASH_KEY_MAX 32 /* HW limit for all HMAC requests */
+
+struct n2_hmac_ctx {
+	struct n2_hash_ctx base;
+
+	struct crypto_shash *child_shash;
+
+	int hash_key_len;
+	unsigned char hash_key[N2_HASH_KEY_MAX];
+};
+
 struct n2_hash_req_ctx {
 	union {
 		struct md5_state md5;
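
The two inline helpers added above are the usual crypto-core downcast idiom: the core only hands drivers a struct crypto_tfm, whose __crt_alg points at the crypto_alg embedded (as halg.base) inside struct ahash_alg, which is in turn embedded inside the driver-private struct n2_ahash_alg, so two container_of() steps recover the driver data. A minimal, self-contained sketch of that pattern — illustrative stand-in types, not the driver's own code:

#include <stddef.h>
#include <stdio.h>

/* Stand-ins for the kernel structs; only the nesting matters here. */
struct base_alg   { const char *name; };
struct ahash_like { struct base_alg base; };     /* ~ ahash_alg.halg.base */
struct n2_like    { int hw_op_hashsz; struct ahash_like alg; };

/* container_of(): recover the enclosing struct from a member pointer. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

int main(void)
{
	struct n2_like drv = { .hw_op_hashsz = 20, .alg.base.name = "sha1" };
	struct base_alg *b = &drv.alg.base;  /* what the core hands back */

	/* Same two-step downcast as n2_ahash_alg(): base -> ahash -> driver. */
	struct ahash_like *a = container_of(b, struct ahash_like, base);
	struct n2_like *n2 = container_of(a, struct n2_like, alg);

	printf("%s: hw_op_hashsz=%d\n", b->name, n2->hw_op_hashsz);
	return 0;
}
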
@@ -261,9 +297,6 @@ struct n2_hash_req_ctx {
 		struct sha256_state sha256;
 	} u;
 
-	unsigned char hash_key[64];
-	unsigned char keyed_zero_hash[32];
-
 	struct ahash_request fallback_req;
 };
 
@@ -356,6 +389,94 @@ static void n2_hash_cra_exit(struct crypto_tfm *tfm)
 	crypto_free_ahash(ctx->fallback_tfm);
 }
 
+static int n2_hmac_cra_init(struct crypto_tfm *tfm)
+{
+	const char *fallback_driver_name = tfm->__crt_alg->cra_name;
+	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
+	struct n2_hmac_ctx *ctx = crypto_ahash_ctx(ahash);
+	struct n2_hmac_alg *n2alg = n2_hmac_alg(tfm);
+	struct crypto_ahash *fallback_tfm;
+	struct crypto_shash *child_shash;
+	int err;
+
+	fallback_tfm = crypto_alloc_ahash(fallback_driver_name, 0,
+					  CRYPTO_ALG_NEED_FALLBACK);
+	if (IS_ERR(fallback_tfm)) {
+		pr_warning("Fallback driver '%s' could not be loaded!\n",
+			   fallback_driver_name);
+		err = PTR_ERR(fallback_tfm);
+		goto out;
+	}
+
+	child_shash = crypto_alloc_shash(n2alg->child_alg, 0, 0);
+	if (IS_ERR(child_shash)) {
+		pr_warning("Child shash '%s' could not be loaded!\n",
+			   n2alg->child_alg);
+		err = PTR_ERR(child_shash);
+		goto out_free_fallback;
+	}
+
+	crypto_ahash_set_reqsize(ahash, (sizeof(struct n2_hash_req_ctx) +
+					 crypto_ahash_reqsize(fallback_tfm)));
+
+	ctx->child_shash = child_shash;
+	ctx->base.fallback_tfm = fallback_tfm;
+	return 0;
+
+out_free_fallback:
+	crypto_free_ahash(fallback_tfm);
+
+out:
+	return err;
+}
+
+static void n2_hmac_cra_exit(struct crypto_tfm *tfm)
+{
+	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
+	struct n2_hmac_ctx *ctx = crypto_ahash_ctx(ahash);
+
+	crypto_free_ahash(ctx->base.fallback_tfm);
+	crypto_free_shash(ctx->child_shash);
+}
+
+static int n2_hmac_async_setkey(struct crypto_ahash *tfm, const u8 *key,
+				unsigned int keylen)
+{
+	struct n2_hmac_ctx *ctx = crypto_ahash_ctx(tfm);
+	struct crypto_shash *child_shash = ctx->child_shash;
+	struct crypto_ahash *fallback_tfm;
+	struct {
+		struct shash_desc shash;
+		char ctx[crypto_shash_descsize(child_shash)];
+	} desc;
+	int err, bs, ds;
+
+	fallback_tfm = ctx->base.fallback_tfm;
+	err = crypto_ahash_setkey(fallback_tfm, key, keylen);
+	if (err)
+		return err;
+
+	desc.shash.tfm = child_shash;
+	desc.shash.flags = crypto_ahash_get_flags(tfm) &
+		CRYPTO_TFM_REQ_MAY_SLEEP;
+
+	bs = crypto_shash_blocksize(child_shash);
+	ds = crypto_shash_digestsize(child_shash);
+	BUG_ON(ds > N2_HASH_KEY_MAX);
+	if (keylen > bs) {
+		err = crypto_shash_digest(&desc.shash, key, keylen,
+					  ctx->hash_key);
+		if (err)
+			return err;
+		keylen = ds;
+	} else if (keylen <= N2_HASH_KEY_MAX)
+		memcpy(ctx->hash_key, key, keylen);
+
+	ctx->hash_key_len = keylen;
+
+	return err;
+}
+
 static unsigned long wait_for_tail(struct spu_queue *qp)
 {
 	unsigned long head, hv_ret;
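
n2_hmac_async_setkey() applies the standard HMAC key rule from RFC 2104: a key longer than the underlying hash's block size is first digested down to digest-size bytes, and anything that still does not fit the hardware's 32-byte key limit is left over-long so the digest path below punts to the software fallback. A rough standalone sketch of that normalization, assuming a placeholder hash() in place of crypto_shash_digest():

#include <string.h>

#define KEY_MAX 32 /* mirrors N2_HASH_KEY_MAX from the hunk above */

/*
 * Placeholder for crypto_shash_digest(): XOR-folds the input so the
 * example compiles and runs; a real caller would use the child hash.
 */
static int hash(const unsigned char *in, unsigned int len,
		unsigned char *out, unsigned int digest_size)
{
	unsigned int i;

	memset(out, 0, digest_size);
	for (i = 0; i < len; i++)
		out[i % digest_size] ^= in[i];
	return 0;
}

/*
 * RFC 2104 key normalization, mirroring n2_hmac_async_setkey(): a key
 * longer than the block size is replaced by its digest (digest_size must
 * fit in KEY_MAX, as the BUG_ON above asserts); a short key is copied
 * as-is.  A key that is <= block size but > KEY_MAX is left over-long
 * on purpose -- the digest path then falls back to software.
 */
static int normalize_hmac_key(const unsigned char *key, unsigned int keylen,
			      unsigned int block_size,
			      unsigned int digest_size,
			      unsigned char out[KEY_MAX],
			      unsigned int *outlen)
{
	int err;

	if (keylen > block_size) {
		err = hash(key, keylen, out, digest_size);
		if (err)
			return err;
		keylen = digest_size;
	} else if (keylen <= KEY_MAX) {
		memcpy(out, key, keylen);
	}

	*outlen = keylen;
	return 0;
}

int main(void)
{
	unsigned char out[KEY_MAX];
	unsigned int outlen;
	const unsigned char key[100] = { 1, 2, 3 };

	/* 100-byte key, SHA-1-like geometry: 64-byte block, 20-byte digest. */
	normalize_hmac_key(key, sizeof(key), 64, 20, out, &outlen);
	return outlen == 20 ? 0 : 1;
}
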
@@ -385,12 +506,12 @@ static unsigned long submit_and_wait_for_tail(struct spu_queue *qp,
 	return hv_ret;
 }
 
-static int n2_hash_async_digest(struct ahash_request *req,
+static int n2_do_async_digest(struct ahash_request *req,
 				unsigned int auth_type, unsigned int digest_size,
-				unsigned int result_size, void *hash_loc)
+				unsigned int result_size, void *hash_loc,
+				unsigned long auth_key, unsigned int auth_key_len)
 {
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-	struct n2_hash_ctx *ctx = crypto_ahash_ctx(tfm);
 	struct cwq_initial_entry *ent;
 	struct crypto_hash_walk walk;
 	struct spu_queue *qp;
@@ -403,6 +524,7 @@ static int n2_hash_async_digest(struct ahash_request *req,
 	 */
 	if (unlikely(req->nbytes > (1 << 16))) {
 		struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
+		struct n2_hash_ctx *ctx = crypto_ahash_ctx(tfm);
 
 		ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
 		rctx->fallback_req.base.flags =
@@ -414,8 +536,6 @@ static int n2_hash_async_digest(struct ahash_request *req,
 		return crypto_ahash_digest(&rctx->fallback_req);
 	}
 
-	n2_base_ctx_init(&ctx->base);
-
 	nbytes = crypto_hash_walk_first(req, &walk);
 
 	cpu = get_cpu();
@@ -430,13 +550,13 @@ static int n2_hash_async_digest(struct ahash_request *req,
 	 */
 	ent = qp->q + qp->tail;
 
-	ent->control = control_word_base(nbytes, 0, 0,
+	ent->control = control_word_base(nbytes, auth_key_len, 0,
 					 auth_type, digest_size,
 					 false, true, false, false,
 					 OPCODE_INPLACE_BIT |
 					 OPCODE_AUTH_MAC);
 	ent->src_addr = __pa(walk.data);
-	ent->auth_key_addr = 0UL;
+	ent->auth_key_addr = auth_key;
 	ent->auth_iv_addr = __pa(hash_loc);
 	ent->final_auth_state_addr = 0UL;
 	ent->enc_key_addr = 0UL;
@@ -475,114 +595,55 @@ out:
 	return err;
 }
 
-static int n2_md5_async_digest(struct ahash_request *req)
+static int n2_hash_async_digest(struct ahash_request *req)
 {
+	struct n2_ahash_alg *n2alg = n2_ahash_alg(req->base.tfm);
 	struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
-	struct md5_state *m = &rctx->u.md5;
+	int ds;
 
+	ds = n2alg->digest_size;
 	if (unlikely(req->nbytes == 0)) {
-		static const char md5_zero[MD5_DIGEST_SIZE] = {
-			0xd4, 0x1d, 0x8c, 0xd9, 0x8f, 0x00, 0xb2, 0x04,
-			0xe9, 0x80, 0x09, 0x98, 0xec, 0xf8, 0x42, 0x7e,
-		};
-
-		memcpy(req->result, md5_zero, MD5_DIGEST_SIZE);
+		memcpy(req->result, n2alg->hash_zero, ds);
 		return 0;
 	}
-	m->hash[0] = cpu_to_le32(0x67452301);
-	m->hash[1] = cpu_to_le32(0xefcdab89);
-	m->hash[2] = cpu_to_le32(0x98badcfe);
-	m->hash[3] = cpu_to_le32(0x10325476);
+	memcpy(&rctx->u, n2alg->hash_init, n2alg->hw_op_hashsz);
 
-	return n2_hash_async_digest(req, AUTH_TYPE_MD5,
-				    MD5_DIGEST_SIZE, MD5_DIGEST_SIZE,
-				    m->hash);
+	return n2_do_async_digest(req, n2alg->auth_type,
+				  n2alg->hw_op_hashsz, ds,
+				  &rctx->u, 0UL, 0);
 }
 
-static int n2_sha1_async_digest(struct ahash_request *req)
+static int n2_hmac_async_digest(struct ahash_request *req)
 {
+	struct n2_hmac_alg *n2alg = n2_hmac_alg(req->base.tfm);
 	struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
-	struct sha1_state *s = &rctx->u.sha1;
-
-	if (unlikely(req->nbytes == 0)) {
-		static const char sha1_zero[SHA1_DIGEST_SIZE] = {
-			0xda, 0x39, 0xa3, 0xee, 0x5e, 0x6b, 0x4b, 0x0d, 0x32,
-			0x55, 0xbf, 0xef, 0x95, 0x60, 0x18, 0x90, 0xaf, 0xd8,
-			0x07, 0x09
-		};
-
-		memcpy(req->result, sha1_zero, SHA1_DIGEST_SIZE);
-		return 0;
-	}
-	s->state[0] = SHA1_H0;
-	s->state[1] = SHA1_H1;
-	s->state[2] = SHA1_H2;
-	s->state[3] = SHA1_H3;
-	s->state[4] = SHA1_H4;
-
-	return n2_hash_async_digest(req, AUTH_TYPE_SHA1,
-				    SHA1_DIGEST_SIZE, SHA1_DIGEST_SIZE,
-				    s->state);
-}
-
-static int n2_sha256_async_digest(struct ahash_request *req)
-{
-	struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
-	struct sha256_state *s = &rctx->u.sha256;
-
-	if (req->nbytes == 0) {
-		static const char sha256_zero[SHA256_DIGEST_SIZE] = {
-			0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14, 0x9a,
-			0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24, 0x27, 0xae,
-			0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c, 0xa4, 0x95, 0x99,
-			0x1b, 0x78, 0x52, 0xb8, 0x55
-		};
-
-		memcpy(req->result, sha256_zero, SHA256_DIGEST_SIZE);
-		return 0;
-	}
-	s->state[0] = SHA256_H0;
-	s->state[1] = SHA256_H1;
-	s->state[2] = SHA256_H2;
-	s->state[3] = SHA256_H3;
-	s->state[4] = SHA256_H4;
-	s->state[5] = SHA256_H5;
-	s->state[6] = SHA256_H6;
-	s->state[7] = SHA256_H7;
-
-	return n2_hash_async_digest(req, AUTH_TYPE_SHA256,
-				    SHA256_DIGEST_SIZE, SHA256_DIGEST_SIZE,
-				    s->state);
-}
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	struct n2_hmac_ctx *ctx = crypto_ahash_ctx(tfm);
+	int ds;
 
-static int n2_sha224_async_digest(struct ahash_request *req)
-{
-	struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
-	struct sha256_state *s = &rctx->u.sha256;
+	ds = n2alg->derived.digest_size;
+	if (unlikely(req->nbytes == 0) ||
+	    unlikely(ctx->hash_key_len > N2_HASH_KEY_MAX)) {
+		struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
+		struct n2_hash_ctx *ctx = crypto_ahash_ctx(tfm);
 
-	if (req->nbytes == 0) {
-		static const char sha224_zero[SHA224_DIGEST_SIZE] = {
-			0xd1, 0x4a, 0x02, 0x8c, 0x2a, 0x3a, 0x2b, 0xc9, 0x47,
-			0x61, 0x02, 0xbb, 0x28, 0x82, 0x34, 0xc4, 0x15, 0xa2,
-			0xb0, 0x1f, 0x82, 0x8e, 0xa6, 0x2a, 0xc5, 0xb3, 0xe4,
-			0x2f
-		};
+		ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
+		rctx->fallback_req.base.flags =
+			req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
+		rctx->fallback_req.nbytes = req->nbytes;
+		rctx->fallback_req.src = req->src;
+		rctx->fallback_req.result = req->result;
 
-		memcpy(req->result, sha224_zero, SHA224_DIGEST_SIZE);
-		return 0;
+		return crypto_ahash_digest(&rctx->fallback_req);
 	}
-	s->state[0] = SHA224_H0;
-	s->state[1] = SHA224_H1;
-	s->state[2] = SHA224_H2;
-	s->state[3] = SHA224_H3;
-	s->state[4] = SHA224_H4;
-	s->state[5] = SHA224_H5;
-	s->state[6] = SHA224_H6;
-	s->state[7] = SHA224_H7;
+	memcpy(&rctx->u, n2alg->derived.hash_init,
+	       n2alg->derived.hw_op_hashsz);
 
-	return n2_hash_async_digest(req, AUTH_TYPE_SHA256,
-				    SHA256_DIGEST_SIZE, SHA224_DIGEST_SIZE,
-				    s->state);
+	return n2_do_async_digest(req, n2alg->derived.hmac_type,
+				  n2alg->derived.hw_op_hashsz, ds,
+				  &rctx->u,
+				  __pa(&ctx->hash_key),
+				  ctx->hash_key_len);
 }
 
 struct n2_cipher_context {
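
The net effect of this hunk is that four near-identical per-algorithm digest functions collapse into one table-driven n2_hash_async_digest(): a zero-length request is answered from a precomputed empty-message digest, and the initial chaining values are copied from per-algorithm template data instead of being open-coded. A compact sketch of that dispatch shape, reusing the sha256_zero/sha256_init values that appear later in this patch (stand-in types, no real hardware submission):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Per-algorithm template, mirroring the data members of n2_ahash_alg. */
struct hash_tmpl {
	const char *name;
	const unsigned char *hash_zero; /* digest of the empty message */
	const uint32_t *hash_init;      /* standard initial chaining values */
	uint8_t hw_op_hashsz;
	uint8_t digest_size;
};

/* Values copied from the sha256_zero/sha256_init tables in this patch. */
static const unsigned char sha256_zero[32] = {
	0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14, 0x9a,
	0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24, 0x27, 0xae,
	0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c, 0xa4, 0x95, 0x99,
	0x1b, 0x78, 0x52, 0xb8, 0x55
};
static const uint32_t sha256_init[8] = {
	0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
	0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19
};
static const struct hash_tmpl sha256_tmpl = {
	.name = "sha256", .hash_zero = sha256_zero, .hash_init = sha256_init,
	.hw_op_hashsz = 32, .digest_size = 32,
};

/* One generic entry point instead of one digest function per algorithm. */
static int do_digest(const struct hash_tmpl *t, const void *data,
		     size_t nbytes, unsigned char *result, void *state)
{
	(void)data; /* the sketch stops short of submitting any data */

	if (nbytes == 0) {
		/* No input: the answer is the precomputed empty digest. */
		memcpy(result, t->hash_zero, t->digest_size);
		return 0;
	}
	/* Seed the operation with the algorithm's initial state... */
	memcpy(state, t->hash_init, t->hw_op_hashsz);
	/* ...and this is where the CWQ submission would happen. */
	return 0;
}

int main(void)
{
	unsigned char result[32], state[32];

	do_digest(&sha256_tmpl, NULL, 0, result, state);
	printf("empty-message digest starts 0x%02x%02x\n",
	       result[0], result[1]);
	return 0;
}
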
@@ -1209,35 +1270,92 @@ static LIST_HEAD(cipher_algs);
 
 struct n2_hash_tmpl {
 	const char *name;
-	int (*digest)(struct ahash_request *req);
+	const char *hash_zero;
+	const u32 *hash_init;
+	u8 hw_op_hashsz;
 	u8 digest_size;
 	u8 block_size;
+	u8 auth_type;
+	u8 hmac_type;
+};
+
+static const char md5_zero[MD5_DIGEST_SIZE] = {
+	0xd4, 0x1d, 0x8c, 0xd9, 0x8f, 0x00, 0xb2, 0x04,
+	0xe9, 0x80, 0x09, 0x98, 0xec, 0xf8, 0x42, 0x7e,
+};
+static const u32 md5_init[MD5_HASH_WORDS] = {
+	cpu_to_le32(0x67452301),
+	cpu_to_le32(0xefcdab89),
+	cpu_to_le32(0x98badcfe),
+	cpu_to_le32(0x10325476),
+};
+static const char sha1_zero[SHA1_DIGEST_SIZE] = {
+	0xda, 0x39, 0xa3, 0xee, 0x5e, 0x6b, 0x4b, 0x0d, 0x32,
+	0x55, 0xbf, 0xef, 0x95, 0x60, 0x18, 0x90, 0xaf, 0xd8,
+	0x07, 0x09
 };
+static const u32 sha1_init[SHA1_DIGEST_SIZE / 4] = {
+	SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4,
+};
+static const char sha256_zero[SHA256_DIGEST_SIZE] = {
+	0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14, 0x9a,
+	0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24, 0x27, 0xae,
+	0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c, 0xa4, 0x95, 0x99,
+	0x1b, 0x78, 0x52, 0xb8, 0x55
+};
+static const u32 sha256_init[SHA256_DIGEST_SIZE / 4] = {
+	SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
+	SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7,
+};
+static const char sha224_zero[SHA224_DIGEST_SIZE] = {
+	0xd1, 0x4a, 0x02, 0x8c, 0x2a, 0x3a, 0x2b, 0xc9, 0x47,
+	0x61, 0x02, 0xbb, 0x28, 0x82, 0x34, 0xc4, 0x15, 0xa2,
+	0xb0, 0x1f, 0x82, 0x8e, 0xa6, 0x2a, 0xc5, 0xb3, 0xe4,
+	0x2f
+};
+static const u32 sha224_init[SHA256_DIGEST_SIZE / 4] = {
+	SHA224_H0, SHA224_H1, SHA224_H2, SHA224_H3,
+	SHA224_H4, SHA224_H5, SHA224_H6, SHA224_H7,
+};
+
 static const struct n2_hash_tmpl hash_tmpls[] = {
 	{ .name = "md5",
-	  .digest = n2_md5_async_digest,
+	  .hash_zero = md5_zero,
+	  .hash_init = md5_init,
+	  .auth_type = AUTH_TYPE_MD5,
+	  .hmac_type = AUTH_TYPE_HMAC_MD5,
+	  .hw_op_hashsz = MD5_DIGEST_SIZE,
 	  .digest_size = MD5_DIGEST_SIZE,
 	  .block_size = MD5_HMAC_BLOCK_SIZE },
 	{ .name = "sha1",
-	  .digest = n2_sha1_async_digest,
+	  .hash_zero = sha1_zero,
+	  .hash_init = sha1_init,
+	  .auth_type = AUTH_TYPE_SHA1,
+	  .hmac_type = AUTH_TYPE_HMAC_SHA1,
+	  .hw_op_hashsz = SHA1_DIGEST_SIZE,
 	  .digest_size = SHA1_DIGEST_SIZE,
 	  .block_size = SHA1_BLOCK_SIZE },
 	{ .name = "sha256",
-	  .digest = n2_sha256_async_digest,
+	  .hash_zero = sha256_zero,
+	  .hash_init = sha256_init,
+	  .auth_type = AUTH_TYPE_SHA256,
+	  .hmac_type = AUTH_TYPE_HMAC_SHA256,
+	  .hw_op_hashsz = SHA256_DIGEST_SIZE,
 	  .digest_size = SHA256_DIGEST_SIZE,
 	  .block_size = SHA256_BLOCK_SIZE },
 	{ .name = "sha224",
-	  .digest = n2_sha224_async_digest,
+	  .hash_zero = sha224_zero,
+	  .hash_init = sha224_init,
+	  .auth_type = AUTH_TYPE_SHA256,
+	  .hmac_type = AUTH_TYPE_RESERVED,
+	  .hw_op_hashsz = SHA256_DIGEST_SIZE,
 	  .digest_size = SHA224_DIGEST_SIZE,
 	  .block_size = SHA224_BLOCK_SIZE },
 };
 #define NUM_HASH_TMPLS ARRAY_SIZE(hash_tmpls)
 
-struct n2_ahash_alg {
-	struct list_head entry;
-	struct ahash_alg alg;
-};
 static LIST_HEAD(ahash_algs);
+static LIST_HEAD(hmac_algs);
 
 static int algs_registered;
 
@@ -1245,12 +1363,18 @@ static void __n2_unregister_algs(void)
 {
 	struct n2_cipher_alg *cipher, *cipher_tmp;
 	struct n2_ahash_alg *alg, *alg_tmp;
+	struct n2_hmac_alg *hmac, *hmac_tmp;
 
 	list_for_each_entry_safe(cipher, cipher_tmp, &cipher_algs, entry) {
 		crypto_unregister_alg(&cipher->alg);
 		list_del(&cipher->entry);
 		kfree(cipher);
 	}
+	list_for_each_entry_safe(hmac, hmac_tmp, &hmac_algs, derived.entry) {
+		crypto_unregister_ahash(&hmac->derived.alg);
+		list_del(&hmac->derived.entry);
+		kfree(hmac);
+	}
 	list_for_each_entry_safe(alg, alg_tmp, &ahash_algs, entry) {
 		crypto_unregister_ahash(&alg->alg);
 		list_del(&alg->entry);
@@ -1290,8 +1414,49 @@ static int __devinit __n2_register_one_cipher(const struct n2_cipher_tmpl *tmpl)
 	list_add(&p->entry, &cipher_algs);
 	err = crypto_register_alg(alg);
 	if (err) {
+		pr_err("%s alg registration failed\n", alg->cra_name);
 		list_del(&p->entry);
 		kfree(p);
+	} else {
+		pr_info("%s alg registered\n", alg->cra_name);
+	}
+	return err;
+}
+
+static int __devinit __n2_register_one_hmac(struct n2_ahash_alg *n2ahash)
+{
+	struct n2_hmac_alg *p = kzalloc(sizeof(*p), GFP_KERNEL);
+	struct ahash_alg *ahash;
+	struct crypto_alg *base;
+	int err;
+
+	if (!p)
+		return -ENOMEM;
+
+	p->child_alg = n2ahash->alg.halg.base.cra_name;
+	memcpy(&p->derived, n2ahash, sizeof(struct n2_ahash_alg));
+	INIT_LIST_HEAD(&p->derived.entry);
+
+	ahash = &p->derived.alg;
+	ahash->digest = n2_hmac_async_digest;
+	ahash->setkey = n2_hmac_async_setkey;
+
+	base = &ahash->halg.base;
+	snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME, "hmac(%s)", p->child_alg);
+	snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME, "hmac-%s-n2", p->child_alg);
+
+	base->cra_ctxsize = sizeof(struct n2_hmac_ctx);
+	base->cra_init = n2_hmac_cra_init;
+	base->cra_exit = n2_hmac_cra_exit;
+
+	list_add(&p->derived.entry, &hmac_algs);
+	err = crypto_register_ahash(ahash);
+	if (err) {
+		pr_err("%s alg registration failed\n", base->cra_name);
+		list_del(&p->derived.entry);
+		kfree(p);
+	} else {
+		pr_info("%s alg registered\n", base->cra_name);
 	}
 	return err;
 }
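
__n2_register_one_hmac() derives each HMAC algorithm from the ahash that was just registered: it copies the whole descriptor, overrides only the digest/setkey entry points and context hooks, and rewrites the names to the template forms "hmac(<child>)" and "hmac-<child>-n2". A small sketch of that clone-and-override pattern with cut-down stand-in descriptors (CRYPTO_MAX_ALG_NAME is 64 in kernels of this era):

#include <stdio.h>
#include <string.h>

#define ALG_NAME_MAX 64 /* stands in for CRYPTO_MAX_ALG_NAME */

/* Cut-down stand-ins for the crypto_alg/ahash_alg descriptors. */
struct alg_desc {
	char name[ALG_NAME_MAX];
	char driver_name[ALG_NAME_MAX];
	int (*digest)(void);
	int (*setkey)(void);
};

static int base_digest(void) { return 0; }
static int hmac_digest(void) { return 0; }
static int hmac_setkey(void) { return 0; }

/*
 * Same derivation pattern as __n2_register_one_hmac(): copy the base
 * descriptor wholesale, then override only the names and entry points.
 * snprintf() guarantees NUL-termination and silently truncates; all
 * in-tree child names fit comfortably in 64 bytes.
 */
static void derive_hmac(const struct alg_desc *base, struct alg_desc *hmac)
{
	memcpy(hmac, base, sizeof(*hmac));
	snprintf(hmac->name, ALG_NAME_MAX, "hmac(%s)", base->name);
	snprintf(hmac->driver_name, ALG_NAME_MAX, "hmac-%s-n2", base->name);
	hmac->digest = hmac_digest;
	hmac->setkey = hmac_setkey;
}

int main(void)
{
	struct alg_desc sha1 = { "sha1", "sha1-n2", base_digest, NULL };
	struct alg_desc hmac_sha1;

	derive_hmac(&sha1, &hmac_sha1);
	printf("%s / %s\n", hmac_sha1.name, hmac_sha1.driver_name);
	return 0;
}
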
@@ -1307,12 +1472,19 @@ static int __devinit __n2_register_one_ahash(const struct n2_hash_tmpl *tmpl)
 	if (!p)
 		return -ENOMEM;
 
+	p->hash_zero = tmpl->hash_zero;
+	p->hash_init = tmpl->hash_init;
+	p->auth_type = tmpl->auth_type;
+	p->hmac_type = tmpl->hmac_type;
+	p->hw_op_hashsz = tmpl->hw_op_hashsz;
+	p->digest_size = tmpl->digest_size;
+
 	ahash = &p->alg;
 	ahash->init = n2_hash_async_init;
 	ahash->update = n2_hash_async_update;
 	ahash->final = n2_hash_async_final;
 	ahash->finup = n2_hash_async_finup;
-	ahash->digest = tmpl->digest;
+	ahash->digest = n2_hash_async_digest;
 
 	halg = &ahash->halg;
 	halg->digestsize = tmpl->digest_size;
@@ -1331,9 +1503,14 @@ static int __devinit __n2_register_one_ahash(const struct n2_hash_tmpl *tmpl)
 	list_add(&p->entry, &ahash_algs);
 	err = crypto_register_ahash(ahash);
 	if (err) {
+		pr_err("%s alg registration failed\n", base->cra_name);
 		list_del(&p->entry);
 		kfree(p);
+	} else {
+		pr_info("%s alg registered\n", base->cra_name);
 	}
+	if (!err && p->hmac_type != AUTH_TYPE_RESERVED)
+		err = __n2_register_one_hmac(p);
 	return err;
 }
 
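
For consumers nothing changes: once these instances are registered, any kernel code asking the crypto API for "hmac(sha1)" gets the highest-priority implementation, which on Niagara2 hardware would be the new "hmac-sha1-n2". A hedged sketch of a kernel-side caller of roughly this vintage (standard ahash calls; error handling trimmed, and the async -EINPROGRESS case noted in a comment rather than handled):

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

/* Compute hmac(sha1) over a linear buffer; the crypto core picks the
 * highest-priority provider, e.g. the N2 hardware instance if present. */
static int demo_hmac_sha1(const u8 *key, unsigned int keylen,
			  void *data, unsigned int len, u8 *out)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_ahash("hmac(sha1)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_ahash_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, data, len);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP, NULL, NULL);
	ahash_request_set_crypt(req, &sg, out, len);

	/* Simplification: an async driver may return -EINPROGRESS here,
	 * in which case a real caller waits for the completion callback. */
	err = crypto_ahash_digest(req);

	ahash_request_free(req);
out_free_tfm:
	crypto_free_ahash(tfm);
	return err;
}
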