author    David S. Miller <davem@davemloft.net>  2010-05-23 01:53:09 -0400
committer David S. Miller <davem@davemloft.net>  2010-05-25 20:37:17 -0400
commit    dc4ccfd15d4fc7a91ddf222bc5eed5cc4bcf10e6 (patch)
tree      087e672d4a6caac9af433c0e9729612029cf8451 /drivers/crypto
parent    3a2c034697558602a72e51897c6d3665bc515927 (diff)
n2_crypto: Add HMAC support.
One note is that, unlike with non-HMAC hashes, we can't support
hmac(sha224) using the HMAC_SHA256 opcode.

Signed-off-by: David S. Miller <davem@davemloft.net>
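For readers unfamiliar with the kernel crypto API, the sketch below shows how a kernel-side caller could exercise one of the "hmac(<alg>)" ahash instances this patch registers (for example "hmac(sha1)", backed here by the "hmac-sha1-n2" driver). It is illustrative only and not part of the patch: demo_hmac_sha1() is a hypothetical helper, and handling of the asynchronous -EINPROGRESS/-EBUSY return codes is elided.

/*
 * Illustrative sketch (not part of this patch): computing an HMAC over a
 * buffer with the generic "hmac(sha1)" ahash, which this driver now backs
 * in hardware on Niagara2.  demo_hmac_sha1() is a hypothetical helper;
 * async completion handling (-EINPROGRESS/-EBUSY) is omitted for brevity.
 */
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/gfp.h>
#include <linux/scatterlist.h>

static int demo_hmac_sha1(const u8 *key, unsigned int keylen,
			  const void *data, unsigned int len, u8 *out)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_ahash("hmac(sha1)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_ahash_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* data must be a linearly mapped (e.g. kmalloc'ed) buffer. */
	sg_init_one(&sg, data, len);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP, NULL, NULL);
	ahash_request_set_crypt(req, &sg, out, len);

	/* A real caller must handle -EINPROGRESS/-EBUSY via the callback. */
	err = crypto_ahash_digest(req);

	ahash_request_free(req);
out_free_tfm:
	crypto_free_ahash(tfm);
	return err;
}

Note that requests with a zero-length payload, or with a key longer than N2_HASH_KEY_MAX, are routed by n2_hmac_async_digest() to the software fallback, so a caller sees the same behavior either way.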
Diffstat (limited to 'drivers/crypto')
-rw-r--r--	drivers/crypto/n2_core.c | 210
1 file changed, 206 insertions(+), 4 deletions(-)
diff --git a/drivers/crypto/n2_core.c b/drivers/crypto/n2_core.c
index d01a2afda6e9..b99c38f23d61 100644
--- a/drivers/crypto/n2_core.c
+++ b/drivers/crypto/n2_core.c
@@ -246,6 +246,7 @@ struct n2_ahash_alg {
 	u8			hw_op_hashsz;
 	u8			digest_size;
 	u8			auth_type;
+	u8			hmac_type;
 	struct ahash_alg	alg;
 };
 
@@ -259,10 +260,36 @@ static inline struct n2_ahash_alg *n2_ahash_alg(struct crypto_tfm *tfm)
 	return container_of(ahash_alg, struct n2_ahash_alg, alg);
 }
 
+struct n2_hmac_alg {
+	const char		*child_alg;
+	struct n2_ahash_alg	derived;
+};
+
+static inline struct n2_hmac_alg *n2_hmac_alg(struct crypto_tfm *tfm)
+{
+	struct crypto_alg *alg = tfm->__crt_alg;
+	struct ahash_alg *ahash_alg;
+
+	ahash_alg = container_of(alg, struct ahash_alg, halg.base);
+
+	return container_of(ahash_alg, struct n2_hmac_alg, derived.alg);
+}
+
 struct n2_hash_ctx {
 	struct crypto_ahash		*fallback_tfm;
 };
 
+#define N2_HASH_KEY_MAX		32 /* HW limit for all HMAC requests */
+
+struct n2_hmac_ctx {
+	struct n2_hash_ctx		base;
+
+	struct crypto_shash		*child_shash;
+
+	int				hash_key_len;
+	unsigned char			hash_key[N2_HASH_KEY_MAX];
+};
+
 struct n2_hash_req_ctx {
 	union {
 		struct md5_state	md5;
@@ -362,6 +389,94 @@ static void n2_hash_cra_exit(struct crypto_tfm *tfm)
 	crypto_free_ahash(ctx->fallback_tfm);
 }
 
+static int n2_hmac_cra_init(struct crypto_tfm *tfm)
+{
+	const char *fallback_driver_name = tfm->__crt_alg->cra_name;
+	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
+	struct n2_hmac_ctx *ctx = crypto_ahash_ctx(ahash);
+	struct n2_hmac_alg *n2alg = n2_hmac_alg(tfm);
+	struct crypto_ahash *fallback_tfm;
+	struct crypto_shash *child_shash;
+	int err;
+
+	fallback_tfm = crypto_alloc_ahash(fallback_driver_name, 0,
+					  CRYPTO_ALG_NEED_FALLBACK);
+	if (IS_ERR(fallback_tfm)) {
+		pr_warning("Fallback driver '%s' could not be loaded!\n",
+			   fallback_driver_name);
+		err = PTR_ERR(fallback_tfm);
+		goto out;
+	}
+
+	child_shash = crypto_alloc_shash(n2alg->child_alg, 0, 0);
+	if (IS_ERR(child_shash)) {
+		pr_warning("Child shash '%s' could not be loaded!\n",
+			   n2alg->child_alg);
+		err = PTR_ERR(child_shash);
+		goto out_free_fallback;
+	}
+
+	crypto_ahash_set_reqsize(ahash, (sizeof(struct n2_hash_req_ctx) +
+					 crypto_ahash_reqsize(fallback_tfm)));
+
+	ctx->child_shash = child_shash;
+	ctx->base.fallback_tfm = fallback_tfm;
+	return 0;
+
+out_free_fallback:
+	crypto_free_ahash(fallback_tfm);
+
+out:
+	return err;
+}
+
+static void n2_hmac_cra_exit(struct crypto_tfm *tfm)
+{
+	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
+	struct n2_hmac_ctx *ctx = crypto_ahash_ctx(ahash);
+
+	crypto_free_ahash(ctx->base.fallback_tfm);
+	crypto_free_shash(ctx->child_shash);
+}
+
+static int n2_hmac_async_setkey(struct crypto_ahash *tfm, const u8 *key,
+				unsigned int keylen)
+{
+	struct n2_hmac_ctx *ctx = crypto_ahash_ctx(tfm);
+	struct crypto_shash *child_shash = ctx->child_shash;
+	struct crypto_ahash *fallback_tfm;
+	struct {
+		struct shash_desc shash;
+		char ctx[crypto_shash_descsize(child_shash)];
+	} desc;
+	int err, bs, ds;
+
+	fallback_tfm = ctx->base.fallback_tfm;
+	err = crypto_ahash_setkey(fallback_tfm, key, keylen);
+	if (err)
+		return err;
+
+	desc.shash.tfm = child_shash;
+	desc.shash.flags = crypto_ahash_get_flags(tfm) &
+		CRYPTO_TFM_REQ_MAY_SLEEP;
+
+	bs = crypto_shash_blocksize(child_shash);
+	ds = crypto_shash_digestsize(child_shash);
+	BUG_ON(ds > N2_HASH_KEY_MAX);
+	if (keylen > bs) {
+		err = crypto_shash_digest(&desc.shash, key, keylen,
+					  ctx->hash_key);
+		if (err)
+			return err;
+		keylen = ds;
+	} else if (keylen <= N2_HASH_KEY_MAX)
+		memcpy(ctx->hash_key, key, keylen);
+
+	ctx->hash_key_len = keylen;
+
+	return err;
+}
+
 static unsigned long wait_for_tail(struct spu_queue *qp)
 {
 	unsigned long head, hv_ret;
@@ -393,7 +508,8 @@ static unsigned long submit_and_wait_for_tail(struct spu_queue *qp,
 
 static int n2_do_async_digest(struct ahash_request *req,
 			      unsigned int auth_type, unsigned int digest_size,
-			      unsigned int result_size, void *hash_loc)
+			      unsigned int result_size, void *hash_loc,
+			      unsigned long auth_key, unsigned int auth_key_len)
 {
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 	struct cwq_initial_entry *ent;
@@ -434,13 +550,13 @@ static int n2_do_async_digest(struct ahash_request *req,
 	 */
 	ent = qp->q + qp->tail;
 
-	ent->control = control_word_base(nbytes, 0, 0,
+	ent->control = control_word_base(nbytes, auth_key_len, 0,
 					 auth_type, digest_size,
 					 false, true, false, false,
 					 OPCODE_INPLACE_BIT |
 					 OPCODE_AUTH_MAC);
 	ent->src_addr = __pa(walk.data);
-	ent->auth_key_addr = 0UL;
+	ent->auth_key_addr = auth_key;
 	ent->auth_iv_addr = __pa(hash_loc);
 	ent->final_auth_state_addr = 0UL;
 	ent->enc_key_addr = 0UL;
@@ -494,7 +610,40 @@ static int n2_hash_async_digest(struct ahash_request *req)
 
 	return n2_do_async_digest(req, n2alg->auth_type,
 				  n2alg->hw_op_hashsz, ds,
-				  &rctx->u);
+				  &rctx->u, 0UL, 0);
+}
+
+static int n2_hmac_async_digest(struct ahash_request *req)
+{
+	struct n2_hmac_alg *n2alg = n2_hmac_alg(req->base.tfm);
+	struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	struct n2_hmac_ctx *ctx = crypto_ahash_ctx(tfm);
+	int ds;
+
+	ds = n2alg->derived.digest_size;
+	if (unlikely(req->nbytes == 0) ||
+	    unlikely(ctx->hash_key_len > N2_HASH_KEY_MAX)) {
+		struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
+		struct n2_hash_ctx *ctx = crypto_ahash_ctx(tfm);
+
+		ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
+		rctx->fallback_req.base.flags =
+			req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
+		rctx->fallback_req.nbytes = req->nbytes;
+		rctx->fallback_req.src = req->src;
+		rctx->fallback_req.result = req->result;
+
+		return crypto_ahash_digest(&rctx->fallback_req);
+	}
+	memcpy(&rctx->u, n2alg->derived.hash_init,
+	       n2alg->derived.hw_op_hashsz);
+
+	return n2_do_async_digest(req, n2alg->derived.hmac_type,
+				  n2alg->derived.hw_op_hashsz, ds,
+				  &rctx->u,
+				  __pa(&ctx->hash_key),
+				  ctx->hash_key_len);
 }
 
 struct n2_cipher_context {
@@ -1127,6 +1276,7 @@ struct n2_hash_tmpl {
 	u8		digest_size;
 	u8		block_size;
 	u8		auth_type;
+	u8		hmac_type;
 };
 
 static const char md5_zero[MD5_DIGEST_SIZE] = {
@@ -1173,6 +1323,7 @@ static const struct n2_hash_tmpl hash_tmpls[] = {
 	  .hash_zero	= md5_zero,
 	  .hash_init	= md5_init,
 	  .auth_type	= AUTH_TYPE_MD5,
+	  .hmac_type	= AUTH_TYPE_HMAC_MD5,
 	  .hw_op_hashsz	= MD5_DIGEST_SIZE,
 	  .digest_size	= MD5_DIGEST_SIZE,
 	  .block_size	= MD5_HMAC_BLOCK_SIZE },
@@ -1180,6 +1331,7 @@ static const struct n2_hash_tmpl hash_tmpls[] = {
 	  .hash_zero	= sha1_zero,
 	  .hash_init	= sha1_init,
 	  .auth_type	= AUTH_TYPE_SHA1,
+	  .hmac_type	= AUTH_TYPE_HMAC_SHA1,
 	  .hw_op_hashsz	= SHA1_DIGEST_SIZE,
 	  .digest_size	= SHA1_DIGEST_SIZE,
 	  .block_size	= SHA1_BLOCK_SIZE },
@@ -1187,6 +1339,7 @@ static const struct n2_hash_tmpl hash_tmpls[] = {
 	  .hash_zero	= sha256_zero,
 	  .hash_init	= sha256_init,
 	  .auth_type	= AUTH_TYPE_SHA256,
+	  .hmac_type	= AUTH_TYPE_HMAC_SHA256,
 	  .hw_op_hashsz	= SHA256_DIGEST_SIZE,
 	  .digest_size	= SHA256_DIGEST_SIZE,
 	  .block_size	= SHA256_BLOCK_SIZE },
@@ -1194,6 +1347,7 @@ static const struct n2_hash_tmpl hash_tmpls[] = {
 	  .hash_zero	= sha224_zero,
 	  .hash_init	= sha224_init,
 	  .auth_type	= AUTH_TYPE_SHA256,
+	  .hmac_type	= AUTH_TYPE_RESERVED,
 	  .hw_op_hashsz	= SHA256_DIGEST_SIZE,
 	  .digest_size	= SHA224_DIGEST_SIZE,
 	  .block_size	= SHA224_BLOCK_SIZE },
@@ -1201,6 +1355,7 @@ static const struct n2_hash_tmpl hash_tmpls[] = {
 #define NUM_HASH_TMPLS ARRAY_SIZE(hash_tmpls)
 
 static LIST_HEAD(ahash_algs);
+static LIST_HEAD(hmac_algs);
 
 static int algs_registered;
 
@@ -1208,12 +1363,18 @@ static void __n2_unregister_algs(void)
 {
 	struct n2_cipher_alg *cipher, *cipher_tmp;
 	struct n2_ahash_alg *alg, *alg_tmp;
+	struct n2_hmac_alg *hmac, *hmac_tmp;
 
 	list_for_each_entry_safe(cipher, cipher_tmp, &cipher_algs, entry) {
 		crypto_unregister_alg(&cipher->alg);
 		list_del(&cipher->entry);
 		kfree(cipher);
 	}
+	list_for_each_entry_safe(hmac, hmac_tmp, &hmac_algs, derived.entry) {
+		crypto_unregister_ahash(&hmac->derived.alg);
+		list_del(&hmac->derived.entry);
+		kfree(hmac);
+	}
 	list_for_each_entry_safe(alg, alg_tmp, &ahash_algs, entry) {
 		crypto_unregister_ahash(&alg->alg);
 		list_del(&alg->entry);
@@ -1262,6 +1423,44 @@ static int __devinit __n2_register_one_cipher(const struct n2_cipher_tmpl *tmpl)
 	return err;
 }
 
+static int __devinit __n2_register_one_hmac(struct n2_ahash_alg *n2ahash)
+{
+	struct n2_hmac_alg *p = kzalloc(sizeof(*p), GFP_KERNEL);
+	struct ahash_alg *ahash;
+	struct crypto_alg *base;
+	int err;
+
+	if (!p)
+		return -ENOMEM;
+
+	p->child_alg = n2ahash->alg.halg.base.cra_name;
+	memcpy(&p->derived, n2ahash, sizeof(struct n2_ahash_alg));
+	INIT_LIST_HEAD(&p->derived.entry);
+
+	ahash = &p->derived.alg;
+	ahash->digest = n2_hmac_async_digest;
+	ahash->setkey = n2_hmac_async_setkey;
+
+	base = &ahash->halg.base;
+	snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME, "hmac(%s)", p->child_alg);
+	snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME, "hmac-%s-n2", p->child_alg);
+
+	base->cra_ctxsize = sizeof(struct n2_hmac_ctx);
+	base->cra_init = n2_hmac_cra_init;
+	base->cra_exit = n2_hmac_cra_exit;
+
+	list_add(&p->derived.entry, &hmac_algs);
+	err = crypto_register_ahash(ahash);
+	if (err) {
+		pr_err("%s alg registration failed\n", base->cra_name);
+		list_del(&p->derived.entry);
+		kfree(p);
+	} else {
+		pr_info("%s alg registered\n", base->cra_name);
+	}
+	return err;
+}
+
 static int __devinit __n2_register_one_ahash(const struct n2_hash_tmpl *tmpl)
 {
 	struct n2_ahash_alg *p = kzalloc(sizeof(*p), GFP_KERNEL);
@@ -1276,6 +1475,7 @@ static int __devinit __n2_register_one_ahash(const struct n2_hash_tmpl *tmpl)
 	p->hash_zero = tmpl->hash_zero;
 	p->hash_init = tmpl->hash_init;
 	p->auth_type = tmpl->auth_type;
+	p->hmac_type = tmpl->hmac_type;
 	p->hw_op_hashsz = tmpl->hw_op_hashsz;
 	p->digest_size = tmpl->digest_size;
 
@@ -1309,6 +1509,8 @@ static int __devinit __n2_register_one_ahash(const struct n2_hash_tmpl *tmpl)
 	} else {
 		pr_info("%s alg registered\n", base->cra_name);
 	}
+	if (!err && p->hmac_type != AUTH_TYPE_RESERVED)
+		err = __n2_register_one_hmac(p);
 	return err;
 }
 