-rw-r--r--  drivers/crypto/chelsio/Kconfig          1
-rw-r--r--  drivers/crypto/chelsio/chcr_algo.c   1482
-rw-r--r--  drivers/crypto/chelsio/chcr_algo.h     16
-rw-r--r--  drivers/crypto/chelsio/chcr_core.c      8
-rw-r--r--  drivers/crypto/chelsio/chcr_core.h      2
-rw-r--r--  drivers/crypto/chelsio/chcr_crypto.h   90
6 files changed, 1558 insertions(+), 41 deletions(-)
diff --git a/drivers/crypto/chelsio/Kconfig b/drivers/crypto/chelsio/Kconfig
index 4ce67fb9a880..3e104f5aa0c2 100644
--- a/drivers/crypto/chelsio/Kconfig
+++ b/drivers/crypto/chelsio/Kconfig
@@ -4,6 +4,7 @@ config CRYPTO_DEV_CHELSIO
 	select CRYPTO_SHA1
 	select CRYPTO_SHA256
 	select CRYPTO_SHA512
+	select CRYPTO_AUTHENC
 	---help---
 	  The Chelsio Crypto Co-processor driver for T6 adapters.
 
diff --git a/drivers/crypto/chelsio/chcr_algo.c b/drivers/crypto/chelsio/chcr_algo.c
index 8d677c7e3d61..e73b9809591d 100644
--- a/drivers/crypto/chelsio/chcr_algo.c
+++ b/drivers/crypto/chelsio/chcr_algo.c
@@ -54,6 +54,12 @@
 #include <crypto/algapi.h>
 #include <crypto/hash.h>
 #include <crypto/sha.h>
+#include <crypto/authenc.h>
+#include <crypto/internal/aead.h>
+#include <crypto/null.h>
+#include <crypto/internal/skcipher.h>
+#include <crypto/aead.h>
+#include <crypto/scatterwalk.h>
 #include <crypto/internal/hash.h>
 
 #include "t4fw_api.h"
@@ -62,6 +68,11 @@
 #include "chcr_algo.h"
 #include "chcr_crypto.h"
 
+static inline struct chcr_aead_ctx *AEAD_CTX(struct chcr_context *ctx)
+{
+	return ctx->crypto_ctx->aeadctx;
+}
+
 static inline struct ablk_ctx *ABLK_CTX(struct chcr_context *ctx)
 {
 	return ctx->crypto_ctx->ablkctx;
@@ -72,6 +83,16 @@ static inline struct hmac_ctx *HMAC_CTX(struct chcr_context *ctx)
 	return ctx->crypto_ctx->hmacctx;
 }
 
+static inline struct chcr_gcm_ctx *GCM_CTX(struct chcr_aead_ctx *gctx)
+{
+	return gctx->ctx->gcm;
+}
+
+static inline struct chcr_authenc_ctx *AUTHENC_CTX(struct chcr_aead_ctx *gctx)
+{
+	return gctx->ctx->authenc;
+}
+
 static inline struct uld_ctx *ULD_CTX(struct chcr_context *ctx)
 {
 	return ctx->dev->u_ctx;
@@ -94,12 +115,37 @@ static inline unsigned int sgl_len(unsigned int n)
 	return (3 * n) / 2 + (n & 1) + 2;
 }
 
+static void chcr_verify_tag(struct aead_request *req, u8 *input, int *err)
+{
+	u8 temp[SHA512_DIGEST_SIZE];
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+	int authsize = crypto_aead_authsize(tfm);
+	struct cpl_fw6_pld *fw6_pld;
+	int cmp = 0;
+
+	fw6_pld = (struct cpl_fw6_pld *)input;
+	if ((get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) ||
+	    (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_GCM)) {
+		cmp = memcmp(&fw6_pld->data[2], (fw6_pld + 1), authsize);
+	} else {
+		sg_pcopy_to_buffer(req->src, sg_nents(req->src), temp,
+				   authsize, req->assoclen +
+				   req->cryptlen - authsize);
+		cmp = memcmp(temp, (fw6_pld + 1), authsize);
+	}
+	if (cmp)
+		*err = -EBADMSG;
+	else
+		*err = 0;
+}
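
The software fallback locates the expected tag at the very end of the source scatterlist, i.e. at offset assoclen + cryptlen - authsize, and memcmp's it against the tag the hardware returned after the CPL_FW6_PLD header. A minimal userspace sketch of that check (illustrative names, not driver API):

    #include <stdio.h>
    #include <string.h>

    /* On decrypt, the expected tag is the last authsize bytes of
     * assoc||ciphertext||tag; -1 stands in for -EBADMSG here.
     */
    static int sw_verify_tag(const unsigned char *buf, size_t assoclen,
                             size_t cryptlen, const unsigned char *hw_tag,
                             size_t authsize)
    {
            const unsigned char *expected =
                    buf + assoclen + cryptlen - authsize;

            return memcmp(expected, hw_tag, authsize) ? -1 : 0;
    }

    int main(void)
    {
            unsigned char buf[] = "aadCIPHERTEXTtag!";
            unsigned char tag[] = "tag!";

            /* assoclen = 3, cryptlen = 14 (10 bytes ciphertext + 4 tag) */
            printf("%d\n", sw_verify_tag(buf, 3, 14, tag, 4)); /* prints 0 */
            return 0;
    }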
+
 /*
  * chcr_handle_resp - Unmap the DMA buffers associated with the request
  * @req: crypto request
  */
 int chcr_handle_resp(struct crypto_async_request *req, unsigned char *input,
-		     int error_status)
+		     int err)
 {
 	struct crypto_tfm *tfm = req->tfm;
 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
@@ -109,11 +155,27 @@ int chcr_handle_resp(struct crypto_async_request *req, unsigned char *input,
 	unsigned int digestsize, updated_digestsize;
 
 	switch (tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
+	case CRYPTO_ALG_TYPE_AEAD:
+		ctx_req.req.aead_req = (struct aead_request *)req;
+		ctx_req.ctx.reqctx = aead_request_ctx(ctx_req.req.aead_req);
+		dma_unmap_sg(&u_ctx->lldi.pdev->dev, ctx_req.req.aead_req->dst,
+			     ctx_req.ctx.reqctx->dst_nents, DMA_FROM_DEVICE);
+		if (ctx_req.ctx.reqctx->skb) {
+			kfree_skb(ctx_req.ctx.reqctx->skb);
+			ctx_req.ctx.reqctx->skb = NULL;
+		}
+		if (ctx_req.ctx.reqctx->verify == VERIFY_SW) {
+			chcr_verify_tag(ctx_req.req.aead_req, input,
+					&err);
+			ctx_req.ctx.reqctx->verify = VERIFY_HW;
+		}
+		break;
+
 	case CRYPTO_ALG_TYPE_BLKCIPHER:
 		ctx_req.req.ablk_req = (struct ablkcipher_request *)req;
 		ctx_req.ctx.ablk_ctx =
 			ablkcipher_request_ctx(ctx_req.req.ablk_req);
-		if (!error_status) {
+		if (!err) {
 			fw6_pld = (struct cpl_fw6_pld *)input;
 			memcpy(ctx_req.req.ablk_req->info, &fw6_pld->data[2],
 			       AES_BLOCK_SIZE);
@@ -154,7 +216,7 @@ int chcr_handle_resp(struct crypto_async_request *req, unsigned char *input,
 		}
 		break;
 	}
-	return 0;
+	return err;
 }
 
 /*
@@ -380,6 +442,14 @@ static inline int map_writesg_phys_cpl(struct device *dev,
 	return 0;
 }
 
+static inline int get_aead_subtype(struct crypto_aead *aead)
+{
+	struct aead_alg *alg = crypto_aead_alg(aead);
+	struct chcr_alg_template *chcr_crypto_alg =
+		container_of(alg, struct chcr_alg_template, alg.aead);
+	return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
+}
+
 static inline int get_cryptoalg_subtype(struct crypto_tfm *tfm)
 {
 	struct crypto_alg *alg = tfm->__crt_alg;
@@ -447,7 +517,8 @@ static inline void create_wreq(struct chcr_context *ctx,
 			       struct chcr_wr *chcr_req,
 			       void *req, struct sk_buff *skb,
 			       int kctx_len, int hash_sz,
-			       unsigned int phys_dsgl)
+			       int is_iv,
+			       unsigned int sc_len)
 {
 	struct uld_ctx *u_ctx = ULD_CTX(ctx);
 	int iv_loc = IV_DSGL;
@@ -472,7 +543,7 @@ static inline void create_wreq(struct chcr_context *ctx,
 	chcr_req->wreq.cookie = cpu_to_be64((uintptr_t)req);
 	chcr_req->wreq.rx_chid_to_rx_q_id =
 		FILL_WR_RX_Q_ID(ctx->dev->tx_channel_id, qid,
-				(hash_sz) ? IV_NOP : iv_loc);
+				is_iv ? iv_loc : IV_NOP);
 
 	chcr_req->ulptx.cmd_dest = FILL_ULPTX_CMD_DEST(ctx->dev->tx_channel_id);
 	chcr_req->ulptx.len = htonl((DIV_ROUND_UP((calc_tx_flits_ofld(skb) * 8),
@@ -481,10 +552,7 @@ static inline void create_wreq(struct chcr_context *ctx,
 	chcr_req->sc_imm.cmd_more = FILL_CMD_MORE(immdatalen);
 	chcr_req->sc_imm.len = cpu_to_be32(sizeof(struct cpl_tx_sec_pdu) +
 				   sizeof(chcr_req->key_ctx) +
-				   kctx_len +
-				   ((hash_sz) ? DUMMY_BYTES :
-				    (sizeof(struct cpl_rx_phys_dsgl) +
-				     phys_dsgl)) + immdatalen);
+				   kctx_len + sc_len + immdatalen);
 }
 
 /**
@@ -582,7 +650,8 @@ static struct sk_buff
 	memcpy(reqctx->iv, req->info, ivsize);
 	write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
 	write_sg_to_skb(skb, &frags, req->src, req->nbytes);
-	create_wreq(ctx, chcr_req, req, skb, kctx_len, 0, phys_dsgl);
+	create_wreq(ctx, chcr_req, req, skb, kctx_len, 0, 1,
+		    sizeof(struct cpl_rx_phys_dsgl) + phys_dsgl);
 	reqctx->skb = skb;
 	skb_get(skb);
 	return skb;
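
With this refactor, create_wreq() no longer infers the trailer size from hash_sz: each caller passes sc_len explicitly. The cipher path above passes sizeof(struct cpl_rx_phys_dsgl) + phys_dsgl, the hash path in the next hunk passes DUMMY_BYTES, and the new AEAD paths pass their destination DSGL size. A standalone sketch of the resulting immediate-length arithmetic (the struct sizes are placeholder numbers so this compiles outside the driver):

    #include <stdio.h>

    static unsigned int sc_imm_len(unsigned int tx_sec_pdu_sz,
                                   unsigned int key_ctx_hdr_sz,
                                   unsigned int kctx_len,
                                   unsigned int sc_len,
                                   unsigned int immdatalen)
    {
            return tx_sec_pdu_sz + key_ctx_hdr_sz + kctx_len + sc_len +
                   immdatalen;
    }

    int main(void)
    {
            /* cipher/AEAD callers: sc_len = dsgl header + dsgl body */
            printf("cipher: %u\n", sc_imm_len(32, 16, 32, 16 + 64, 0));
            /* hash callers: sc_len = DUMMY_BYTES */
            printf("hash:   %u\n", sc_imm_len(32, 16, 64, 16, 0));
            return 0;
    }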
@@ -769,7 +838,7 @@ static inline void chcr_free_shash(struct crypto_shash *base_hash)
  * @req - Cipher req base
  */
 static struct sk_buff *create_hash_wr(struct ahash_request *req,
-				      struct hash_wr_param *param)
+				       struct hash_wr_param *param)
 {
 	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
@@ -840,8 +909,8 @@ static struct sk_buff *create_hash_wr(struct ahash_request *req,
 	if (param->sg_len != 0)
 		write_sg_to_skb(skb, &frags, req->src, param->sg_len);
 
-	create_wreq(ctx, chcr_req, req, skb, kctx_len, hash_size_in_response,
-		    0);
+	create_wreq(ctx, chcr_req, req, skb, kctx_len, hash_size_in_response, 0,
+		    DUMMY_BYTES);
 	req_ctx->skb = skb;
 	skb_get(skb);
 	return skb;
@@ -1249,6 +1318,1167 @@ static void chcr_hmac_cra_exit(struct crypto_tfm *tfm)
 	}
 }
 
+static int chcr_copy_assoc(struct aead_request *req,
+			   struct chcr_aead_ctx *ctx)
+{
+	SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);
+
+	skcipher_request_set_tfm(skreq, ctx->null);
+	skcipher_request_set_callback(skreq, aead_request_flags(req),
+				      NULL, NULL);
+	skcipher_request_set_crypt(skreq, req->src, req->dst, req->assoclen,
+				   NULL);
+
+	return crypto_skcipher_encrypt(skreq);
+}
+
+static unsigned char get_hmac(unsigned int authsize)
+{
+	switch (authsize) {
+	case ICV_8:
+		return CHCR_SCMD_HMAC_CTRL_PL1;
+	case ICV_10:
+		return CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
+	case ICV_12:
+		return CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
+	}
+	return CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
+}
+
+static struct sk_buff *create_authenc_wr(struct aead_request *req,
+					 unsigned short qid,
+					 int size,
+					 unsigned short op_type)
+{
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+	struct chcr_context *ctx = crypto_aead_ctx(tfm);
+	struct uld_ctx *u_ctx = ULD_CTX(ctx);
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
+	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
+	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
+	struct sk_buff *skb = NULL;
+	struct chcr_wr *chcr_req;
+	struct cpl_rx_phys_dsgl *phys_cpl;
+	struct phys_sge_parm sg_param;
+	struct scatterlist *src, *dst;
+	struct scatterlist src_sg[2], dst_sg[2];
+	unsigned int frags = 0, transhdr_len;
+	unsigned int ivsize = crypto_aead_ivsize(tfm), dst_size = 0;
+	unsigned int kctx_len = 0;
+	unsigned short stop_offset = 0;
+	unsigned int assoclen = req->assoclen;
+	unsigned int authsize = crypto_aead_authsize(tfm);
+	int err = 0;
+	int null = 0;
+	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
+		GFP_ATOMIC;
+
+	if (aeadctx->enckey_len == 0 || (req->cryptlen == 0))
+		goto err;
+
+	if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
+		goto err;
+
+	if (sg_nents_for_len(req->src, req->assoclen + req->cryptlen) < 0)
+		goto err;
+	src = scatterwalk_ffwd(src_sg, req->src, req->assoclen);
+	dst = src;
+	if (req->src != req->dst) {
+		err = chcr_copy_assoc(req, aeadctx);
+		if (err)
+			return ERR_PTR(err);
+		dst = scatterwalk_ffwd(dst_sg, req->dst, req->assoclen);
+	}
+	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_NULL) {
+		null = 1;
+		assoclen = 0;
+	}
+	reqctx->dst_nents = sg_nents_for_len(dst, req->cryptlen +
+					     (op_type ? -authsize : authsize));
+	if (reqctx->dst_nents <= 0) {
+		pr_err("AUTHENC:Invalid Destination sg entries\n");
+		goto err;
+	}
+	dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
+	kctx_len = (ntohl(KEY_CONTEXT_CTX_LEN_V(aeadctx->key_ctx_hdr)) << 4)
+		- sizeof(chcr_req->key_ctx);
+	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
+	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
+	if (!skb)
+		goto err;
+
+	/* LLD is going to write the sge hdr. */
+	skb_reserve(skb, sizeof(struct sge_opaque_hdr));
+
+	/* Write WR */
+	chcr_req = (struct chcr_wr *) __skb_put(skb, transhdr_len);
+	memset(chcr_req, 0, transhdr_len);
+
+	stop_offset = (op_type == CHCR_ENCRYPT_OP) ? 0 : authsize;
+
+	/*
+	 * Input order is AAD, IV and Payload, where the IV should be
+	 * included as part of the authdata. All other fields should be
+	 * filled according to the hardware spec.
+	 */
+	chcr_req->sec_cpl.op_ivinsrtofst =
+		FILL_SEC_CPL_OP_IVINSR(ctx->dev->tx_channel_id, 2,
+				       (ivsize ? (assoclen + 1) : 0));
+	chcr_req->sec_cpl.pldlen = htonl(assoclen + ivsize + req->cryptlen);
+	chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
+					assoclen ? 1 : 0, assoclen,
+					assoclen + ivsize + 1,
+					(stop_offset & 0x1F0) >> 4);
+	chcr_req->sec_cpl.cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(
+					stop_offset & 0xF,
+					null ? 0 : assoclen + ivsize + 1,
+					stop_offset, stop_offset);
+	chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(op_type,
+					(op_type == CHCR_ENCRYPT_OP) ? 1 : 0,
+					CHCR_SCMD_CIPHER_MODE_AES_CBC,
+					actx->auth_mode, aeadctx->hmac_ctrl,
+					ivsize >> 1);
+	chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
+					0, 1, dst_size);
+
+	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
+	if (op_type == CHCR_ENCRYPT_OP)
+		memcpy(chcr_req->key_ctx.key, aeadctx->key,
+		       aeadctx->enckey_len);
+	else
+		memcpy(chcr_req->key_ctx.key, actx->dec_rrkey,
+		       aeadctx->enckey_len);
+
+	memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) <<
+					4), actx->h_iopad, kctx_len -
+				(DIV_ROUND_UP(aeadctx->enckey_len, 16) << 4));
+
+	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
+	sg_param.nents = reqctx->dst_nents;
+	sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
+	sg_param.qid = qid;
+	sg_param.align = 0;
+	if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, dst,
+				 &sg_param))
+		goto dstmap_fail;
+
+	skb_set_transport_header(skb, transhdr_len);
+
+	if (assoclen) {
+		/* AAD buffer in */
+		write_sg_to_skb(skb, &frags, req->src, assoclen);
+	}
+	write_buffer_to_skb(skb, &frags, req->iv, ivsize);
+	write_sg_to_skb(skb, &frags, src, req->cryptlen);
+	create_wreq(ctx, chcr_req, req, skb, kctx_len, size, 1,
+		    sizeof(struct cpl_rx_phys_dsgl) + dst_size);
+	reqctx->skb = skb;
+	skb_get(skb);
+
+	return skb;
+dstmap_fail:
+	/* ivmap_fail: */
+	kfree_skb(skb);
+err:
+	return ERR_PTR(-EINVAL);
+}
+
+static void aes_gcm_empty_pld_pad(struct scatterlist *sg,
+				  unsigned short offset)
+{
+	struct page *spage;
+	unsigned char *addr;
+
+	spage = sg_page(sg);
+	get_page(spage); /* so that it is not freed by NIC */
+#ifdef KMAP_ATOMIC_ARGS
+	addr = kmap_atomic(spage, KM_SOFTIRQ0);
+#else
+	addr = kmap_atomic(spage);
+#endif
+	memset(addr + sg->offset, 0, offset + 1);
+
+	kunmap_atomic(addr);
+}
+
+static int set_msg_len(u8 *block, unsigned int msglen, int csize)
+{
+	__be32 data;
+
+	memset(block, 0, csize);
+	block += csize;
+
+	if (csize >= 4)
+		csize = 4;
+	else if (msglen > (unsigned int)(1 << (8 * csize)))
+		return -EOVERFLOW;
+
+	data = cpu_to_be32(msglen);
+	memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
+
+	return 0;
+}
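
set_msg_len() big-endian-encodes the payload length into the trailing csize bytes of the CCM B0 block, failing when the length does not fit in the field. A userspace rendering of the same logic, with htonl standing in for cpu_to_be32 and -1 for -EOVERFLOW:

    #include <arpa/inet.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Write msglen big-endian into the last bytes of a csize-byte field. */
    static int demo_set_msg_len(uint8_t *block, unsigned int msglen, int csize)
    {
            uint32_t data;

            memset(block, 0, csize);
            block += csize;
            if (csize >= 4)
                    csize = 4;
            else if (msglen > (unsigned int)(1 << (8 * csize)))
                    return -1;
            data = htonl(msglen);
            memcpy(block - csize, (uint8_t *)&data + 4 - csize, csize);
            return 0;
    }

    int main(void)
    {
            uint8_t len_field[2];

            /* L = 2: a 2-byte length field, as in a B0 with iv[0] == 1 */
            demo_set_msg_len(len_field, 4096, sizeof(len_field));
            printf("%02x %02x\n", len_field[0], len_field[1]); /* 10 00 */
            return 0;
    }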
+
+static void generate_b0(struct aead_request *req,
+			struct chcr_aead_ctx *aeadctx,
+			unsigned short op_type)
+{
+	unsigned int l, lp, m;
+	int rc;
+	struct crypto_aead *aead = crypto_aead_reqtfm(req);
+	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
+	u8 *b0 = reqctx->scratch_pad;
+
+	m = crypto_aead_authsize(aead);
+
+	memcpy(b0, reqctx->iv, 16);
+
+	lp = b0[0];
+	l = lp + 1;
+
+	/* set m, bits 3-5 */
+	*b0 |= (8 * ((m - 2) / 2));
+
+	/* set adata, bit 6, if associated data is used */
+	if (req->assoclen)
+		*b0 |= 64;
+	rc = set_msg_len(b0 + 16 - l,
+			 (op_type == CHCR_DECRYPT_OP) ?
+			 req->cryptlen - m : req->cryptlen, l);
+}
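
The flags byte assembled here follows RFC 3610: bits 0-2 carry L' (already present in iv[0]), bits 3-5 carry (M - 2) / 2 for an M-byte tag, and bit 6 marks the presence of AAD. A standalone sketch of just that bit math:

    #include <stdint.h>
    #include <stdio.h>

    /* RFC 3610 B0 flags byte, mirroring generate_b0() above. */
    static uint8_t ccm_b0_flags(uint8_t l_prime, unsigned int tag_len,
                                int has_aad)
    {
            uint8_t flags = l_prime;        /* bits 0-2: L' */

            flags |= 8 * ((tag_len - 2) / 2); /* bits 3-5: (M - 2) / 2 */
            if (has_aad)
                    flags |= 64;            /* bit 6: adata */
            return flags;
    }

    int main(void)
    {
            /* L' = 3 (rfc4309), 16-byte tag, with AAD: 0x40 | 0x38 | 0x03 */
            printf("0x%02x\n", ccm_b0_flags(3, 16, 1)); /* prints 0x7b */
            return 0;
    }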
+
+static inline int crypto_ccm_check_iv(const u8 *iv)
+{
+	/* 2 <= L <= 8, so 1 <= L' <= 7. */
+	if (iv[0] < 1 || iv[0] > 7)
+		return -EINVAL;
+
+	return 0;
+}
+
+static int ccm_format_packet(struct aead_request *req,
+			     struct chcr_aead_ctx *aeadctx,
+			     unsigned int sub_type,
+			     unsigned short op_type)
+{
+	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
+	int rc = 0;
+
+	if (req->assoclen > T5_MAX_AAD_SIZE) {
+		pr_err("CCM: Unsupported AAD data. It should be < %d\n",
+		       T5_MAX_AAD_SIZE);
+		return -EINVAL;
+	}
+	if (sub_type == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
+		reqctx->iv[0] = 3;
+		memcpy(reqctx->iv + 1, &aeadctx->salt[0], 3);
+		memcpy(reqctx->iv + 4, req->iv, 8);
+		memset(reqctx->iv + 12, 0, 4);
+		*((unsigned short *)(reqctx->scratch_pad + 16)) =
+			htons(req->assoclen - 8);
+	} else {
+		memcpy(reqctx->iv, req->iv, 16);
+		*((unsigned short *)(reqctx->scratch_pad + 16)) =
+			htons(req->assoclen);
+	}
+	generate_b0(req, aeadctx, op_type);
+	/* zero the ctr value */
+	memset(reqctx->iv + 15 - reqctx->iv[0], 0, reqctx->iv[0] + 1);
+	return rc;
+}
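
For rfc4309 the 16-byte counter block is rebuilt from scratch: a flags byte with L' = 3, the 3-byte salt saved at setkey time, the caller's 8-byte IV, and a zeroed 32-bit counter; the 2-byte AAD length field stored at scratch_pad + 16 excludes the 8 IV bytes that the API counts in assoclen. A userspace sketch of the layout (illustrative names):

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* flags(L'=3) | salt[3] | iv[8] | 4 zero counter bytes */
    static void rfc4309_ctr_block(uint8_t out[16], const uint8_t salt[3],
                                  const uint8_t iv[8])
    {
            out[0] = 3;                /* L' = 3 -> 4-byte length field */
            memcpy(out + 1, salt, 3);  /* trailing 3 bytes of the key */
            memcpy(out + 4, iv, 8);    /* per-request IV */
            memset(out + 12, 0, 4);    /* counter, zeroed before B0 */
            /* the 2-byte AAD length prefix then stores assoclen - 8,
             * in network byte order
             */
    }

    int main(void)
    {
            uint8_t blk[16], salt[3] = {1, 2, 3}, iv[8] = {0};
            int i;

            rfc4309_ctr_block(blk, salt, iv);
            for (i = 0; i < 16; i++)
                    printf("%02x", blk[i]);
            printf("\n");
            return 0;
    }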
+
+static void fill_sec_cpl_for_aead(struct cpl_tx_sec_pdu *sec_cpl,
+				  unsigned int dst_size,
+				  struct aead_request *req,
+				  unsigned short op_type,
+				  struct chcr_context *chcrctx)
+{
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+	unsigned int ivsize = AES_BLOCK_SIZE;
+	unsigned int cipher_mode = CHCR_SCMD_CIPHER_MODE_AES_CCM;
+	unsigned int mac_mode = CHCR_SCMD_AUTH_MODE_CBCMAC;
+	unsigned int c_id = chcrctx->dev->tx_channel_id;
+	unsigned int ccm_xtra;
+	unsigned char tag_offset = 0, auth_offset = 0;
+	unsigned char hmac_ctrl = get_hmac(crypto_aead_authsize(tfm));
+	unsigned int assoclen;
+
+	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
+		assoclen = req->assoclen - 8;
+	else
+		assoclen = req->assoclen;
+	ccm_xtra = CCM_B0_SIZE +
+		((assoclen) ? CCM_AAD_FIELD_SIZE : 0);
+
+	auth_offset = req->cryptlen ?
+		(assoclen + ivsize + 1 + ccm_xtra) : 0;
+	if (op_type == CHCR_DECRYPT_OP) {
+		if (crypto_aead_authsize(tfm) != req->cryptlen)
+			tag_offset = crypto_aead_authsize(tfm);
+		else
+			auth_offset = 0;
+	}
+
+	sec_cpl->op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(c_id,
+					2, (ivsize ? (assoclen + 1) : 0) +
+					ccm_xtra);
+	sec_cpl->pldlen =
+		htonl(assoclen + ivsize + req->cryptlen + ccm_xtra);
+	/* For CCM, B0 is always present, so the AAD always starts at 1. */
+	sec_cpl->aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
+					1, assoclen + ccm_xtra, assoclen
+					+ ivsize + 1 + ccm_xtra, 0);
+
+	sec_cpl->cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(0,
+					auth_offset, tag_offset,
+					(op_type == CHCR_ENCRYPT_OP) ? 0 :
+					crypto_aead_authsize(tfm));
+	sec_cpl->seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(op_type,
+					(op_type == CHCR_ENCRYPT_OP) ? 0 : 1,
+					cipher_mode, mac_mode, hmac_ctrl,
+					ivsize >> 1);
+
+	sec_cpl->ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1, 0,
+					1, dst_size);
+}
+
+int aead_ccm_validate_input(unsigned short op_type,
+			    struct aead_request *req,
+			    struct chcr_aead_ctx *aeadctx,
+			    unsigned int sub_type)
+{
+	if (sub_type != CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
+		if (crypto_ccm_check_iv(req->iv)) {
+			pr_err("CCM: IV check fails\n");
+			return -EINVAL;
+		}
+	} else {
+		if (req->assoclen != 16 && req->assoclen != 20) {
+			pr_err("RFC4309: Invalid AAD length %d\n",
+			       req->assoclen);
+			return -EINVAL;
+		}
+	}
+	if (aeadctx->enckey_len == 0) {
+		pr_err("CCM: Encryption key not set\n");
+		return -EINVAL;
+	}
+	return 0;
+}
+
+unsigned int fill_aead_req_fields(struct sk_buff *skb,
+				  struct aead_request *req,
+				  struct scatterlist *src,
+				  unsigned int ivsize,
+				  struct chcr_aead_ctx *aeadctx)
+{
+	unsigned int frags = 0;
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
+
+	/* b0 and aad length (if available) */
+	write_buffer_to_skb(skb, &frags, reqctx->scratch_pad, CCM_B0_SIZE +
+				(req->assoclen ? CCM_AAD_FIELD_SIZE : 0));
+	if (req->assoclen) {
+		if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
+			write_sg_to_skb(skb, &frags, req->src,
+					req->assoclen - 8);
+		else
+			write_sg_to_skb(skb, &frags, req->src, req->assoclen);
+	}
+	write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
+	if (req->cryptlen)
+		write_sg_to_skb(skb, &frags, src, req->cryptlen);
+
+	return frags;
+}
+
+static struct sk_buff *create_aead_ccm_wr(struct aead_request *req,
+					  unsigned short qid,
+					  int size,
+					  unsigned short op_type)
+{
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+	struct chcr_context *ctx = crypto_aead_ctx(tfm);
+	struct uld_ctx *u_ctx = ULD_CTX(ctx);
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
+	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
+	struct sk_buff *skb = NULL;
+	struct chcr_wr *chcr_req;
+	struct cpl_rx_phys_dsgl *phys_cpl;
+	struct phys_sge_parm sg_param;
+	struct scatterlist *src, *dst;
+	struct scatterlist src_sg[2], dst_sg[2];
+	unsigned int frags = 0, transhdr_len, ivsize = AES_BLOCK_SIZE;
+	unsigned int dst_size = 0, kctx_len;
+	unsigned int sub_type;
+	unsigned int authsize = crypto_aead_authsize(tfm);
+	int err = 0;
+	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
+		GFP_ATOMIC;
+
+	if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
+		goto err;
+
+	if (sg_nents_for_len(req->src, req->assoclen + req->cryptlen) < 0)
+		goto err;
+	sub_type = get_aead_subtype(tfm);
+	src = scatterwalk_ffwd(src_sg, req->src, req->assoclen);
+	dst = src;
+	if (req->src != req->dst) {
+		err = chcr_copy_assoc(req, aeadctx);
+		if (err) {
+			pr_err("AAD copy to destination buffer fails\n");
+			return ERR_PTR(err);
+		}
+		dst = scatterwalk_ffwd(dst_sg, req->dst, req->assoclen);
+	}
+	reqctx->dst_nents = sg_nents_for_len(dst, req->cryptlen +
+					     (op_type ? -authsize : authsize));
+	if (reqctx->dst_nents <= 0) {
+		pr_err("CCM:Invalid Destination sg entries\n");
+		goto err;
+	}
+
+	if (aead_ccm_validate_input(op_type, req, aeadctx, sub_type))
+		goto err;
+
+	dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
+	kctx_len = ((DIV_ROUND_UP(aeadctx->enckey_len, 16)) << 4) * 2;
+	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
+	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
+
+	if (!skb)
+		goto err;
+
+	skb_reserve(skb, sizeof(struct sge_opaque_hdr));
+
+	chcr_req = (struct chcr_wr *) __skb_put(skb, transhdr_len);
+	memset(chcr_req, 0, transhdr_len);
+
+	fill_sec_cpl_for_aead(&chcr_req->sec_cpl, dst_size, req, op_type, ctx);
+
+	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
+	memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
+	memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) *
+					16), aeadctx->key, aeadctx->enckey_len);
+
+	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
+	if (ccm_format_packet(req, aeadctx, sub_type, op_type))
+		goto dstmap_fail;
+
+	sg_param.nents = reqctx->dst_nents;
+	sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
+	sg_param.qid = qid;
+	sg_param.align = 0;
+	if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, dst,
+				 &sg_param))
+		goto dstmap_fail;
+
+	skb_set_transport_header(skb, transhdr_len);
+	frags = fill_aead_req_fields(skb, req, src, ivsize, aeadctx);
+	create_wreq(ctx, chcr_req, req, skb, kctx_len, 0, 1,
+		    sizeof(struct cpl_rx_phys_dsgl) + dst_size);
+	reqctx->skb = skb;
+	skb_get(skb);
+	return skb;
+dstmap_fail:
+	kfree_skb(skb);
+	skb = NULL;
+err:
+	return ERR_PTR(-EINVAL);
+}
+
+static struct sk_buff *create_gcm_wr(struct aead_request *req,
+				     unsigned short qid,
+				     int size,
+				     unsigned short op_type)
+{
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+	struct chcr_context *ctx = crypto_aead_ctx(tfm);
+	struct uld_ctx *u_ctx = ULD_CTX(ctx);
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
+	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
+	struct sk_buff *skb = NULL;
+	struct chcr_wr *chcr_req;
+	struct cpl_rx_phys_dsgl *phys_cpl;
+	struct phys_sge_parm sg_param;
+	struct scatterlist *src, *dst;
+	struct scatterlist src_sg[2], dst_sg[2];
+	unsigned int frags = 0, transhdr_len;
+	unsigned int ivsize = AES_BLOCK_SIZE;
+	unsigned int dst_size = 0, kctx_len;
+	unsigned char tag_offset = 0;
+	unsigned int crypt_len = 0;
+	unsigned int authsize = crypto_aead_authsize(tfm);
+	unsigned char hmac_ctrl = get_hmac(authsize);
+	int err = 0;
+	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
+		GFP_ATOMIC;
+
+	/* validate key size */
+	if (aeadctx->enckey_len == 0)
+		goto err;
+
+	if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
+		goto err;
+
+	if (sg_nents_for_len(req->src, req->assoclen + req->cryptlen) < 0)
+		goto err;
+
+	src = scatterwalk_ffwd(src_sg, req->src, req->assoclen);
+	dst = src;
+	if (req->src != req->dst) {
+		err = chcr_copy_assoc(req, aeadctx);
+		if (err)
+			return ERR_PTR(err);
+		dst = scatterwalk_ffwd(dst_sg, req->dst, req->assoclen);
+	}
+
+	if (!req->cryptlen)
+		/* A null payload is not supported by the hardware;
+		 * software sends one cipher block instead.
+		 */
+		crypt_len = AES_BLOCK_SIZE;
+	else
+		crypt_len = req->cryptlen;
+	reqctx->dst_nents = sg_nents_for_len(dst, req->cryptlen +
+					     (op_type ? -authsize : authsize));
+	if (reqctx->dst_nents <= 0) {
+		pr_err("GCM:Invalid Destination sg entries\n");
+		goto err;
+	}
+
+	dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
+	kctx_len = ((DIV_ROUND_UP(aeadctx->enckey_len, 16)) << 4) +
+		AEAD_H_SIZE;
+	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
+	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
+	if (!skb)
+		goto err;
+
+	/* NIC driver is going to write the sge hdr. */
+	skb_reserve(skb, sizeof(struct sge_opaque_hdr));
+
+	chcr_req = (struct chcr_wr *)__skb_put(skb, transhdr_len);
+	memset(chcr_req, 0, transhdr_len);
+
+	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106)
+		req->assoclen -= 8;
+
+	tag_offset = (op_type == CHCR_ENCRYPT_OP) ? 0 : authsize;
+	chcr_req->sec_cpl.op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(
+					ctx->dev->tx_channel_id, 2, (ivsize ?
+					(req->assoclen + 1) : 0));
+	chcr_req->sec_cpl.pldlen = htonl(req->assoclen + ivsize + crypt_len);
+	chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
+					req->assoclen ? 1 : 0, req->assoclen,
+					req->assoclen + ivsize + 1, 0);
+	if (req->cryptlen) {
+		chcr_req->sec_cpl.cipherstop_lo_authinsert =
+			FILL_SEC_CPL_AUTHINSERT(0, req->assoclen + ivsize + 1,
+						tag_offset, tag_offset);
+		chcr_req->sec_cpl.seqno_numivs =
+			FILL_SEC_CPL_SCMD0_SEQNO(op_type, (op_type ==
+					CHCR_ENCRYPT_OP) ? 1 : 0,
+					CHCR_SCMD_CIPHER_MODE_AES_GCM,
+					CHCR_SCMD_AUTH_MODE_GHASH, hmac_ctrl,
+					ivsize >> 1);
+	} else {
+		chcr_req->sec_cpl.cipherstop_lo_authinsert =
+			FILL_SEC_CPL_AUTHINSERT(0, 0, 0, 0);
+		chcr_req->sec_cpl.seqno_numivs =
+			FILL_SEC_CPL_SCMD0_SEQNO(op_type,
+					(op_type == CHCR_ENCRYPT_OP) ?
+					1 : 0, CHCR_SCMD_CIPHER_MODE_AES_CBC,
+					0, 0, ivsize >> 1);
+	}
+	chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
+					0, 1, dst_size);
+	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
+	memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
+	memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) *
+				16), GCM_CTX(aeadctx)->ghash_h, AEAD_H_SIZE);
+
+	/* prepare a 16 byte iv: SALT | IV | 0x00000001 */
+	if (get_aead_subtype(tfm) ==
+	    CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) {
+		memcpy(reqctx->iv, aeadctx->salt, 4);
+		memcpy(reqctx->iv + 4, req->iv, 8);
+	} else {
+		memcpy(reqctx->iv, req->iv, 12);
+	}
+	*((unsigned int *)(reqctx->iv + 12)) = htonl(0x01);
+
+	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
+	sg_param.nents = reqctx->dst_nents;
+	sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
+	sg_param.qid = qid;
+	sg_param.align = 0;
+	if (map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl, dst,
+				 &sg_param))
+		goto dstmap_fail;
+
+	skb_set_transport_header(skb, transhdr_len);
+
+	write_sg_to_skb(skb, &frags, req->src, req->assoclen);
+
+	write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
+
+	if (req->cryptlen) {
+		write_sg_to_skb(skb, &frags, src, req->cryptlen);
+	} else {
+		aes_gcm_empty_pld_pad(req->dst, authsize - 1);
+		write_sg_to_skb(skb, &frags, dst, crypt_len);
+	}
+
+	create_wreq(ctx, chcr_req, req, skb, kctx_len, size, 1,
+		    sizeof(struct cpl_rx_phys_dsgl) + dst_size);
+	reqctx->skb = skb;
+	skb_get(skb);
+	return skb;
+
+dstmap_fail:
+	/* ivmap_fail: */
+	kfree_skb(skb);
+	skb = NULL;
+err:
+	return skb;
+}
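
The 16-byte counter block handed to the hardware is SALT | IV | 0x00000001 for rfc4106, or the caller's 12-byte IV followed by the same 32-bit constant for plain gcm(aes). A standalone sketch of that assembly (illustrative names, not driver code):

    #include <arpa/inet.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Build the 16-byte GCM counter block J0 as create_gcm_wr() does. */
    static void gcm_j0(uint8_t out[16], const uint8_t *salt,
                       const uint8_t *iv, int is_rfc4106)
    {
            uint32_t one = htonl(0x01);

            if (is_rfc4106) {
                    memcpy(out, salt, 4);   /* from the last 4 key bytes */
                    memcpy(out + 4, iv, 8); /* 8-byte per-request IV */
            } else {
                    memcpy(out, iv, 12);    /* 12-byte IV from the API */
            }
            memcpy(out + 12, &one, 4);      /* counter starts at 1 */
    }

    int main(void)
    {
            uint8_t j0[16], salt[4] = {0}, iv[12] = {0};
            int i;

            gcm_j0(j0, salt, iv, 0);
            for (i = 0; i < 16; i++)
                    printf("%02x", j0[i]);
            printf("\n"); /* ...00000001 */
            return 0;
    }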
+
+
+
+static int chcr_aead_cra_init(struct crypto_aead *tfm)
+{
+	struct chcr_context *ctx = crypto_aead_ctx(tfm);
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
+
+	crypto_aead_set_reqsize(tfm, sizeof(struct chcr_aead_reqctx));
+	aeadctx->null = crypto_get_default_null_skcipher();
+	if (IS_ERR(aeadctx->null))
+		return PTR_ERR(aeadctx->null);
+	return chcr_device_init(ctx);
+}
+
+static void chcr_aead_cra_exit(struct crypto_aead *tfm)
+{
+	crypto_put_default_null_skcipher();
+}
+
+static int chcr_authenc_null_setauthsize(struct crypto_aead *tfm,
+					 unsigned int authsize)
+{
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
+
+	aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NOP;
+	aeadctx->mayverify = VERIFY_HW;
+	return 0;
+}
+static int chcr_authenc_setauthsize(struct crypto_aead *tfm,
+				    unsigned int authsize)
+{
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
+	u32 maxauth = crypto_aead_maxauthsize(tfm);
+
+	/* SHA1 authsize in IPsec is 12 instead of 10, i.e. maxauthsize / 2
+	 * does not hold for SHA1, so the authsize == 12 check must come
+	 * before authsize == (maxauth >> 1).
+	 */
+	if (authsize == ICV_4) {
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
+		aeadctx->mayverify = VERIFY_HW;
+	} else if (authsize == ICV_6) {
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
+		aeadctx->mayverify = VERIFY_HW;
+	} else if (authsize == ICV_10) {
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
+		aeadctx->mayverify = VERIFY_HW;
+	} else if (authsize == ICV_12) {
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
+		aeadctx->mayverify = VERIFY_HW;
+	} else if (authsize == ICV_14) {
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
+		aeadctx->mayverify = VERIFY_HW;
+	} else if (authsize == (maxauth >> 1)) {
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
+		aeadctx->mayverify = VERIFY_HW;
+	} else if (authsize == maxauth) {
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
+		aeadctx->mayverify = VERIFY_HW;
+	} else {
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
+		aeadctx->mayverify = VERIFY_SW;
+	}
+	return 0;
+}
+
+
+static int chcr_gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
+{
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
+
+	switch (authsize) {
+	case ICV_4:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	case ICV_8:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	case ICV_12:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	case ICV_14:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	case ICV_16:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	case ICV_13:
+	case ICV_15:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
+		aeadctx->mayverify = VERIFY_SW;
+		break;
+	default:
+		crypto_tfm_set_flags((struct crypto_tfm *) tfm,
+				     CRYPTO_TFM_RES_BAD_KEY_LEN);
+		return -EINVAL;
+	}
+	return 0;
+}
+
+static int chcr_4106_4309_setauthsize(struct crypto_aead *tfm,
+				      unsigned int authsize)
+{
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
+
+	switch (authsize) {
+	case ICV_8:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	case ICV_12:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	case ICV_16:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	default:
+		crypto_tfm_set_flags((struct crypto_tfm *)tfm,
+				     CRYPTO_TFM_RES_BAD_KEY_LEN);
+		return -EINVAL;
+	}
+	return 0;
+}
+
+static int chcr_ccm_setauthsize(struct crypto_aead *tfm,
+				unsigned int authsize)
+{
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
+
+	switch (authsize) {
+	case ICV_4:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	case ICV_6:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	case ICV_8:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	case ICV_10:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	case ICV_12:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	case ICV_14:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	case ICV_16:
+		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
+		aeadctx->mayverify = VERIFY_HW;
+		break;
+	default:
+		crypto_tfm_set_flags((struct crypto_tfm *)tfm,
+				     CRYPTO_TFM_RES_BAD_KEY_LEN);
+		return -EINVAL;
+	}
+	return 0;
+}
+
+static int chcr_aead_ccm_setkey(struct crypto_aead *aead,
+				const u8 *key,
+				unsigned int keylen)
+{
+	struct chcr_context *ctx = crypto_aead_ctx(aead);
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
+	unsigned char ck_size, mk_size;
+	int key_ctx_size = 0;
+
+	memcpy(aeadctx->key, key, keylen);
+	aeadctx->enckey_len = keylen;
+	key_ctx_size = sizeof(struct _key_ctx) +
+		((DIV_ROUND_UP(keylen, 16)) << 4) * 2;
+	if (keylen == AES_KEYSIZE_128) {
+		mk_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
+		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
+	} else if (keylen == AES_KEYSIZE_192) {
+		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
+		mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_192;
+	} else if (keylen == AES_KEYSIZE_256) {
+		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
+		mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
+	} else {
+		crypto_tfm_set_flags((struct crypto_tfm *)aead,
+				     CRYPTO_TFM_RES_BAD_KEY_LEN);
+		aeadctx->enckey_len = 0;
+		return -EINVAL;
+	}
+	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, mk_size, 0, 0,
+						key_ctx_size >> 4);
+	return 0;
+}
+
+static int chcr_aead_rfc4309_setkey(struct crypto_aead *aead, const u8 *key,
+				    unsigned int keylen)
+{
+	struct chcr_context *ctx = crypto_aead_ctx(aead);
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
+
+	if (keylen < 3) {
+		crypto_tfm_set_flags((struct crypto_tfm *)aead,
+				     CRYPTO_TFM_RES_BAD_KEY_LEN);
+		aeadctx->enckey_len = 0;
+		return -EINVAL;
+	}
+	keylen -= 3;
+	memcpy(aeadctx->salt, key + keylen, 3);
+	return chcr_aead_ccm_setkey(aead, key, keylen);
+}
+
+static int chcr_gcm_setkey(struct crypto_aead *aead, const u8 *key,
+			   unsigned int keylen)
+{
+	struct chcr_context *ctx = crypto_aead_ctx(aead);
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
+	struct chcr_gcm_ctx *gctx = GCM_CTX(aeadctx);
+	struct blkcipher_desc h_desc;
+	struct scatterlist src[1];
+	unsigned int ck_size;
+	int ret = 0, key_ctx_size = 0;
+
+	if (get_aead_subtype(aead) ==
+	    CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) {
+		keylen -= 4;  /* nonce/salt is present in the last 4 bytes */
+		memcpy(aeadctx->salt, key + keylen, 4);
+	}
+	if (keylen == AES_KEYSIZE_128) {
+		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
+	} else if (keylen == AES_KEYSIZE_192) {
+		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
+	} else if (keylen == AES_KEYSIZE_256) {
+		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
+	} else {
+		crypto_tfm_set_flags((struct crypto_tfm *)aead,
+				     CRYPTO_TFM_RES_BAD_KEY_LEN);
+		aeadctx->enckey_len = 0;
+		pr_err("GCM: Invalid key length %d", keylen);
+		ret = -EINVAL;
+		goto out;
+	}
+
+	memcpy(aeadctx->key, key, keylen);
+	aeadctx->enckey_len = keylen;
+	key_ctx_size = sizeof(struct _key_ctx) +
+		((DIV_ROUND_UP(keylen, 16)) << 4) +
+		AEAD_H_SIZE;
+	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size,
+						CHCR_KEYCTX_MAC_KEY_SIZE_128,
+						0, 0,
+						key_ctx_size >> 4);
+	/* Calculate H = CIPH(K, 0 repeated 16 times) using a sync aes
+	 * blkcipher; it goes into the key context.
+	 */
+	h_desc.tfm = crypto_alloc_blkcipher("cbc(aes-generic)", 0, 0);
+	if (IS_ERR(h_desc.tfm)) {
+		aeadctx->enckey_len = 0;
+		ret = -ENOMEM;
+		goto out;
+	}
+	h_desc.flags = 0;
+	ret = crypto_blkcipher_setkey(h_desc.tfm, key, keylen);
+	if (ret) {
+		aeadctx->enckey_len = 0;
+		goto out1;
+	}
+	memset(gctx->ghash_h, 0, AEAD_H_SIZE);
+	sg_init_one(&src[0], gctx->ghash_h, AEAD_H_SIZE);
+	ret = crypto_blkcipher_encrypt(&h_desc, &src[0], &src[0], AEAD_H_SIZE);
+
+out1:
+	crypto_free_blkcipher(h_desc.tfm);
+out:
+	return ret;
+}
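
The final crypto_blkcipher_encrypt() produces the GHASH subkey H = AES_K(0^128); since only a single block of zeroes is encrypted with a zero IV, the CBC transform degenerates to a plain ECB encryption. A userspace equivalent using OpenSSL's low-level AES API (an assumption of this sketch, not something the driver uses; build with -lcrypto):

    #include <openssl/aes.h>
    #include <stdio.h>

    int main(void)
    {
            unsigned char key[16] = { 0 };  /* illustrative all-zero key */
            unsigned char h[16] = { 0 };    /* 16 zero bytes in, H out */
            AES_KEY enc;
            int i;

            AES_set_encrypt_key(key, 128, &enc);
            AES_encrypt(h, h, &enc);        /* H = AES_K(0^128) */
            for (i = 0; i < 16; i++)
                    printf("%02x", h[i]);
            /* 66e94bd4ef8a2c3b884cfa59ca342b2e for the zero key */
            printf("\n");
            return 0;
    }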
+
+static int chcr_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
+			       unsigned int keylen)
+{
+	struct chcr_context *ctx = crypto_aead_ctx(authenc);
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
+	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
+	/* it contains both the auth and cipher key */
+	struct crypto_authenc_keys keys;
+	unsigned int bs;
+	unsigned int max_authsize = crypto_aead_alg(authenc)->maxauthsize;
+	int err = 0, i, key_ctx_len = 0;
+	unsigned char ck_size = 0;
+	unsigned char pad[CHCR_HASH_MAX_BLOCK_SIZE_128] = { 0 };
+	struct crypto_shash *base_hash = NULL;
+	struct algo_param param;
+	int align;
+	u8 *o_ptr = NULL;
+
+	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) {
+		crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		goto out;
+	}
+
+	if (get_alg_config(&param, max_authsize)) {
+		pr_err("chcr : Unsupported digest size\n");
+		goto out;
+	}
+	if (keys.enckeylen == AES_KEYSIZE_128) {
+		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
+	} else if (keys.enckeylen == AES_KEYSIZE_192) {
+		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
+	} else if (keys.enckeylen == AES_KEYSIZE_256) {
+		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
+	} else {
+		pr_err("chcr : Unsupported cipher key\n");
+		goto out;
+	}
+
+	/* Copy only the encryption key. We use authkey to generate h(ipad)
+	 * and h(opad), so authkey is not needed again; authkeylen equals
+	 * the hash digest size.
+	 */
+	memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
+	aeadctx->enckey_len = keys.enckeylen;
+	get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
+			    aeadctx->enckey_len << 3);
+
+	base_hash = chcr_alloc_shash(max_authsize);
+	if (IS_ERR(base_hash)) {
+		pr_err("chcr : Base driver cannot be loaded\n");
+		goto out;
+	}
+	{
+		SHASH_DESC_ON_STACK(shash, base_hash);
+		shash->tfm = base_hash;
+		shash->flags = crypto_shash_get_flags(base_hash);
+		bs = crypto_shash_blocksize(base_hash);
+		align = KEYCTX_ALIGN_PAD(max_authsize);
+		o_ptr = actx->h_iopad + param.result_size + align;
+
+		if (keys.authkeylen > bs) {
+			err = crypto_shash_digest(shash, keys.authkey,
+						  keys.authkeylen,
+						  o_ptr);
+			if (err) {
+				pr_err("chcr : Base driver cannot be loaded\n");
+				goto out;
+			}
+			keys.authkeylen = max_authsize;
+		} else
+			memcpy(o_ptr, keys.authkey, keys.authkeylen);
+
+		/* Compute the ipad-digest */
+		memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
+		memcpy(pad, o_ptr, keys.authkeylen);
+		for (i = 0; i < bs >> 2; i++)
+			*((unsigned int *)pad + i) ^= IPAD_DATA;
+
+		if (chcr_compute_partial_hash(shash, pad, actx->h_iopad,
+					      max_authsize))
+			goto out;
+		/* Compute the opad-digest */
+		memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
+		memcpy(pad, o_ptr, keys.authkeylen);
+		for (i = 0; i < bs >> 2; i++)
+			*((unsigned int *)pad + i) ^= OPAD_DATA;
+
+		if (chcr_compute_partial_hash(shash, pad, o_ptr, max_authsize))
+			goto out;
+
+		/* convert the ipad and opad digest to network order */
+		chcr_change_order(actx->h_iopad, param.result_size);
+		chcr_change_order(o_ptr, param.result_size);
+		key_ctx_len = sizeof(struct _key_ctx) +
+			((DIV_ROUND_UP(keys.enckeylen, 16)) << 4) +
+			(param.result_size + align) * 2;
+		aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, param.mk_size,
+						0, 1, key_ctx_len >> 4);
+		actx->auth_mode = param.auth_mode;
+		chcr_free_shash(base_hash);
+
+		return 0;
+	}
+out:
+	aeadctx->enckey_len = 0;
+	if (base_hash)
+		chcr_free_shash(base_hash);
+	return -EINVAL;
+}
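
The setkey above precomputes HMAC's inner and outer partial hashes: the auth key is padded to the hash block size, XORed word-by-word with the repeated 0x36 (ipad) or 0x5c (opad) byte, and one block is hashed. A standalone sketch of the padding/XOR step (IPAD_WORD/OPAD_WORD mirror the driver's IPAD_DATA/OPAD_DATA constants):

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define IPAD_WORD 0x36363636U
    #define OPAD_WORD 0x5c5c5c5cU

    /* Pad the key to the block size, then XOR a 32-bit constant across
     * every word, as the ipad/opad loops above do.
     */
    static void hmac_pad(uint8_t *pad, const uint8_t *key, size_t keylen,
                         size_t bs, uint32_t xor_word)
    {
            size_t i;
            uint32_t w;

            memcpy(pad, key, keylen);
            memset(pad + keylen, 0, bs - keylen);
            for (i = 0; i < bs; i += 4) {
                    memcpy(&w, pad + i, 4);
                    w ^= xor_word;
                    memcpy(pad + i, &w, 4);
            }
    }

    int main(void)
    {
            uint8_t ipad[64], opad[64];
            const uint8_t key[] = "illustrative-key";

            hmac_pad(ipad, key, sizeof(key) - 1, sizeof(ipad), IPAD_WORD);
            hmac_pad(opad, key, sizeof(key) - 1, sizeof(opad), OPAD_WORD);
            printf("%02x %02x\n", ipad[0], opad[0]); /* 'i'^0x36, 'i'^0x5c */
            return 0;
    }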
+
+static int chcr_aead_digest_null_setkey(struct crypto_aead *authenc,
+					const u8 *key, unsigned int keylen)
+{
+	struct chcr_context *ctx = crypto_aead_ctx(authenc);
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
+	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
+	struct crypto_authenc_keys keys;
+
+	/* it contains both the auth and cipher key */
+	int key_ctx_len = 0;
+	unsigned char ck_size = 0;
+
+	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) {
+		crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		goto out;
+	}
+	if (keys.enckeylen == AES_KEYSIZE_128) {
+		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
+	} else if (keys.enckeylen == AES_KEYSIZE_192) {
+		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
+	} else if (keys.enckeylen == AES_KEYSIZE_256) {
+		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
+	} else {
+		pr_err("chcr : Unsupported cipher key\n");
+		goto out;
+	}
+	memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
+	aeadctx->enckey_len = keys.enckeylen;
+	get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
+			    aeadctx->enckey_len << 3);
+	key_ctx_len = sizeof(struct _key_ctx)
+		+ ((DIV_ROUND_UP(keys.enckeylen, 16)) << 4);
+
+	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY, 0,
+						0, key_ctx_len >> 4);
+	actx->auth_mode = CHCR_SCMD_AUTH_MODE_NOP;
+	return 0;
+out:
+	aeadctx->enckey_len = 0;
+	return -EINVAL;
+}
+static int chcr_aead_encrypt(struct aead_request *req)
+{
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
+
+	reqctx->verify = VERIFY_HW;
+
+	switch (get_aead_subtype(tfm)) {
+	case CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC:
+	case CRYPTO_ALG_SUB_TYPE_AEAD_NULL:
+		return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
+				    create_authenc_wr);
+	case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
+	case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
+		return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
+				    create_aead_ccm_wr);
+	default:
+		return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
+				    create_gcm_wr);
+	}
+}
+
+static int chcr_aead_decrypt(struct aead_request *req)
+{
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
+	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
+	int size;
+
+	if (aeadctx->mayverify == VERIFY_SW) {
+		size = crypto_aead_maxauthsize(tfm);
+		reqctx->verify = VERIFY_SW;
+	} else {
+		size = 0;
+		reqctx->verify = VERIFY_HW;
+	}
+
+	switch (get_aead_subtype(tfm)) {
+	case CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC:
+	case CRYPTO_ALG_SUB_TYPE_AEAD_NULL:
+		return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
+				    create_authenc_wr);
+	case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
+	case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
+		return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
+				    create_aead_ccm_wr);
+	default:
+		return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
+				    create_gcm_wr);
+	}
+}
+
+static int chcr_aead_op(struct aead_request *req,
+			unsigned short op_type,
+			int size,
+			create_wr_t create_wr_fn)
+{
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+	struct chcr_context *ctx = crypto_aead_ctx(tfm);
+	struct uld_ctx *u_ctx = ULD_CTX(ctx);
+	struct sk_buff *skb;
+
+	if (ctx && !ctx->dev) {
+		pr_err("chcr : %s : No crypto device.\n", __func__);
+		return -ENXIO;
+	}
+	if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
+				   ctx->tx_channel_id)) {
+		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
+			return -EBUSY;
+	}
+
+	/* Form a WR from req */
+	skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[ctx->tx_channel_id], size,
+			   op_type);
+
+	if (IS_ERR(skb) || skb == NULL) {
+		pr_err("chcr : %s : failed to form WR. No memory\n", __func__);
+		return PTR_ERR(skb);
+	}
+
+	skb->dev = u_ctx->lldi.ports[0];
+	set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_channel_id);
+	chcr_send_wr(skb);
+	return -EINPROGRESS;
+}
 static struct chcr_alg_template driver_algs[] = {
 	/* AES-CBC */
 	{
@@ -1256,7 +2486,7 @@ static struct chcr_alg_template driver_algs[] = {
 		.is_registered = 0,
 		.alg.crypto = {
 			.cra_name = "cbc(aes)",
-			.cra_driver_name = "cbc(aes-chcr)",
+			.cra_driver_name = "cbc-aes-chcr",
 			.cra_priority = CHCR_CRA_PRIORITY,
 			.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
 				CRYPTO_ALG_ASYNC,
@@ -1283,7 +2513,7 @@ static struct chcr_alg_template driver_algs[] = {
 		.is_registered = 0,
 		.alg.crypto = {
 			.cra_name = "xts(aes)",
-			.cra_driver_name = "xts(aes-chcr)",
+			.cra_driver_name = "xts-aes-chcr",
 			.cra_priority = CHCR_CRA_PRIORITY,
 			.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
 				CRYPTO_ALG_ASYNC,
@@ -1376,7 +2606,7 @@ static struct chcr_alg_template driver_algs[] = {
 		.halg.digestsize = SHA1_DIGEST_SIZE,
 		.halg.base = {
 			.cra_name = "hmac(sha1)",
-			.cra_driver_name = "hmac(sha1-chcr)",
+			.cra_driver_name = "hmac-sha1-chcr",
 			.cra_blocksize = SHA1_BLOCK_SIZE,
 		}
 	}
@@ -1388,7 +2618,7 @@ static struct chcr_alg_template driver_algs[] = {
 		.halg.digestsize = SHA224_DIGEST_SIZE,
 		.halg.base = {
 			.cra_name = "hmac(sha224)",
-			.cra_driver_name = "hmac(sha224-chcr)",
+			.cra_driver_name = "hmac-sha224-chcr",
 			.cra_blocksize = SHA224_BLOCK_SIZE,
 		}
 	}
@@ -1400,7 +2630,7 @@ static struct chcr_alg_template driver_algs[] = {
 		.halg.digestsize = SHA256_DIGEST_SIZE,
 		.halg.base = {
 			.cra_name = "hmac(sha256)",
-			.cra_driver_name = "hmac(sha256-chcr)",
+			.cra_driver_name = "hmac-sha256-chcr",
 			.cra_blocksize = SHA256_BLOCK_SIZE,
 		}
 	}
@@ -1412,7 +2642,7 @@ static struct chcr_alg_template driver_algs[] = {
 		.halg.digestsize = SHA384_DIGEST_SIZE,
 		.halg.base = {
 			.cra_name = "hmac(sha384)",
-			.cra_driver_name = "hmac(sha384-chcr)",
+			.cra_driver_name = "hmac-sha384-chcr",
 			.cra_blocksize = SHA384_BLOCK_SIZE,
 		}
 	}
@@ -1424,11 +2654,205 @@ static struct chcr_alg_template driver_algs[] = {
1424 .halg.digestsize = SHA512_DIGEST_SIZE, 2654 .halg.digestsize = SHA512_DIGEST_SIZE,
1425 .halg.base = { 2655 .halg.base = {
1426 .cra_name = "hmac(sha512)", 2656 .cra_name = "hmac(sha512)",
1427 .cra_driver_name = "hmac(sha512-chcr)", 2657 .cra_driver_name = "hmac-sha512-chcr",
1428 .cra_blocksize = SHA512_BLOCK_SIZE, 2658 .cra_blocksize = SHA512_BLOCK_SIZE,
1429 } 2659 }
1430 } 2660 }
1431 }, 2661 },
2662 /* Add AEAD Algorithms */
2663 {
2664 .type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_GCM,
2665 .is_registered = 0,
2666 .alg.aead = {
2667 .base = {
2668 .cra_name = "gcm(aes)",
2669 .cra_driver_name = "gcm-aes-chcr",
2670 .cra_blocksize = 1,
2671 .cra_ctxsize = sizeof(struct chcr_context) +
2672 sizeof(struct chcr_aead_ctx) +
2673 sizeof(struct chcr_gcm_ctx),
2674 },
2675 .ivsize = 12,
2676 .maxauthsize = GHASH_DIGEST_SIZE,
2677 .setkey = chcr_gcm_setkey,
2678 .setauthsize = chcr_gcm_setauthsize,
2679 }
2680 },
2681 {
2682 .type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106,
2683 .is_registered = 0,
2684 .alg.aead = {
2685 .base = {
2686 .cra_name = "rfc4106(gcm(aes))",
2687 .cra_driver_name = "rfc4106-gcm-aes-chcr",
2688 .cra_blocksize = 1,
2689 .cra_ctxsize = sizeof(struct chcr_context) +
2690 sizeof(struct chcr_aead_ctx) +
2691 sizeof(struct chcr_gcm_ctx),
2692
2693 },
2694 .ivsize = 8,
2695 .maxauthsize = GHASH_DIGEST_SIZE,
2696 .setkey = chcr_gcm_setkey,
2697 .setauthsize = chcr_4106_4309_setauthsize,
2698 }
2699 },
2700 {
2701 .type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_CCM,
2702 .is_registered = 0,
2703 .alg.aead = {
2704 .base = {
2705 .cra_name = "ccm(aes)",
2706 .cra_driver_name = "ccm-aes-chcr",
2707 .cra_blocksize = 1,
2708 .cra_ctxsize = sizeof(struct chcr_context) +
2709 sizeof(struct chcr_aead_ctx),
2710
2711 },
2712 .ivsize = AES_BLOCK_SIZE,
2713 .maxauthsize = GHASH_DIGEST_SIZE,
2714 .setkey = chcr_aead_ccm_setkey,
2715 .setauthsize = chcr_ccm_setauthsize,
2716 }
2717 },
2718 {
2719 .type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309,
2720 .is_registered = 0,
2721 .alg.aead = {
2722 .base = {
2723 .cra_name = "rfc4309(ccm(aes))",
2724 .cra_driver_name = "rfc4309-ccm-aes-chcr",
2725 .cra_blocksize = 1,
2726 .cra_ctxsize = sizeof(struct chcr_context) +
2727 sizeof(struct chcr_aead_ctx),
2728
2729 },
2730 .ivsize = 8,
2731 .maxauthsize = GHASH_DIGEST_SIZE,
2732 .setkey = chcr_aead_rfc4309_setkey,
2733 .setauthsize = chcr_4106_4309_setauthsize,
2734 }
2735 },
2736 {
2737 .type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
2738 .is_registered = 0,
2739 .alg.aead = {
2740 .base = {
2741 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2742 .cra_driver_name =
2743 "authenc-hmac-sha1-cbc-aes-chcr",
2744 .cra_blocksize = AES_BLOCK_SIZE,
2745 .cra_ctxsize = sizeof(struct chcr_context) +
2746 sizeof(struct chcr_aead_ctx) +
2747 sizeof(struct chcr_authenc_ctx),
2748
2749 },
2750 .ivsize = AES_BLOCK_SIZE,
2751 .maxauthsize = SHA1_DIGEST_SIZE,
2752 .setkey = chcr_authenc_setkey,
2753 .setauthsize = chcr_authenc_setauthsize,
2754 }
2755 },
2756 {
2757 .type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
2758 .is_registered = 0,
2759 .alg.aead = {
2760 .base = {
2761
2762 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2763 .cra_driver_name =
2764 "authenc-hmac-sha256-cbc-aes-chcr",
2765 .cra_blocksize = AES_BLOCK_SIZE,
2766 .cra_ctxsize = sizeof(struct chcr_context) +
2767 sizeof(struct chcr_aead_ctx) +
2768 sizeof(struct chcr_authenc_ctx),
2769
2770 },
2771 .ivsize = AES_BLOCK_SIZE,
2772 .maxauthsize = SHA256_DIGEST_SIZE,
2773 .setkey = chcr_authenc_setkey,
2774 .setauthsize = chcr_authenc_setauthsize,
2775 }
2776 },
2777 {
2778 .type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
2779 .is_registered = 0,
2780 .alg.aead = {
2781 .base = {
2782 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2783 .cra_driver_name =
2784 "authenc-hmac-sha224-cbc-aes-chcr",
2785 .cra_blocksize = AES_BLOCK_SIZE,
2786 .cra_ctxsize = sizeof(struct chcr_context) +
2787 sizeof(struct chcr_aead_ctx) +
2788 sizeof(struct chcr_authenc_ctx),
2789 },
2790 .ivsize = AES_BLOCK_SIZE,
2791 .maxauthsize = SHA224_DIGEST_SIZE,
2792 .setkey = chcr_authenc_setkey,
2793 .setauthsize = chcr_authenc_setauthsize,
2794 }
2795 },
2796 {
2797 .type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
2798 .is_registered = 0,
2799 .alg.aead = {
2800 .base = {
2801 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2802 .cra_driver_name =
2803 "authenc-hmac-sha384-cbc-aes-chcr",
2804 .cra_blocksize = AES_BLOCK_SIZE,
2805 .cra_ctxsize = sizeof(struct chcr_context) +
2806 sizeof(struct chcr_aead_ctx) +
2807 sizeof(struct chcr_authenc_ctx),
2808
2809 },
2810 .ivsize = AES_BLOCK_SIZE,
2811 .maxauthsize = SHA384_DIGEST_SIZE,
2812 .setkey = chcr_authenc_setkey,
2813 .setauthsize = chcr_authenc_setauthsize,
2814 }
2815 },
2816 {
2817 .type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
2818 .is_registered = 0,
2819 .alg.aead = {
2820 .base = {
2821 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2822 .cra_driver_name =
2823 "authenc-hmac-sha512-cbc-aes-chcr",
2824 .cra_blocksize = AES_BLOCK_SIZE,
2825 .cra_ctxsize = sizeof(struct chcr_context) +
2826 sizeof(struct chcr_aead_ctx) +
2827 sizeof(struct chcr_authenc_ctx),
2828
2829 },
2830 .ivsize = AES_BLOCK_SIZE,
2831 .maxauthsize = SHA512_DIGEST_SIZE,
2832 .setkey = chcr_authenc_setkey,
2833 .setauthsize = chcr_authenc_setauthsize,
2834 }
2835 },
2836 {
2837 .type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_NULL,
2838 .is_registered = 0,
2839 .alg.aead = {
2840 .base = {
2841 .cra_name = "authenc(digest_null,cbc(aes))",
2842 .cra_driver_name =
2843 "authenc-digest_null-cbc-aes-chcr",
2844 .cra_blocksize = AES_BLOCK_SIZE,
2845 .cra_ctxsize = sizeof(struct chcr_context) +
2846 sizeof(struct chcr_aead_ctx) +
2847 sizeof(struct chcr_authenc_ctx),
2848
2849 },
2850 .ivsize = AES_BLOCK_SIZE,
2851 .maxauthsize = 0,
2852 .setkey = chcr_aead_digest_null_setkey,
2853 .setauthsize = chcr_authenc_null_setauthsize,
2854 }
2855 },
1432}; 2856};
1433 2857
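Everything this table declares is reached through the generic kernel AEAD API once registered. Below is a minimal sketch of a consumer driving the sha1 authenc transform above — illustrative only; it assumes the crypto_wait_req()/DECLARE_CRYPTO_WAIT synchronous-wait helpers from later kernels, and the example_ name and buffer layout are this sketch's, not the driver's.

/* Illustrative caller, not part of this patch: exercises one of the
 * transforms registered above via the generic kernel AEAD API.  The
 * key must be RTA-packed (crypto_authenc_key_param encoding), and sg
 * must hold AAD || plaintext with room for the ICV appended.
 */
static int example_authenc_encrypt(const u8 *key, unsigned int keylen,
				   struct scatterlist *sg,
				   unsigned int assoclen,
				   unsigned int ptlen, u8 *iv)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_aead("authenc(hmac(sha1),cbc(aes))", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, keylen);
	if (err)
		goto out;
	err = crypto_aead_setauthsize(tfm, SHA1_DIGEST_SIZE);
	if (err)
		goto out;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out;
	}
	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);
	aead_request_set_ad(req, assoclen);
	aead_request_set_crypt(req, sg, sg, ptlen, iv);
	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);
	aead_request_free(req);
out:
	crypto_free_aead(tfm);
	return err;
}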
1434/* 2858/*
@@ -1446,6 +2870,11 @@ static int chcr_unregister_alg(void)
1446 crypto_unregister_alg( 2870 crypto_unregister_alg(
1447 &driver_algs[i].alg.crypto); 2871 &driver_algs[i].alg.crypto);
1448 break; 2872 break;
2873 case CRYPTO_ALG_TYPE_AEAD:
2874 if (driver_algs[i].is_registered)
2875 crypto_unregister_aead(
2876 &driver_algs[i].alg.aead);
2877 break;
1449 case CRYPTO_ALG_TYPE_AHASH: 2878 case CRYPTO_ALG_TYPE_AHASH:
1450 if (driver_algs[i].is_registered) 2879 if (driver_algs[i].is_registered)
1451 crypto_unregister_ahash( 2880 crypto_unregister_ahash(
@@ -1480,6 +2909,19 @@ static int chcr_register_alg(void)
1480 err = crypto_register_alg(&driver_algs[i].alg.crypto); 2909 err = crypto_register_alg(&driver_algs[i].alg.crypto);
1481 name = driver_algs[i].alg.crypto.cra_driver_name; 2910 name = driver_algs[i].alg.crypto.cra_driver_name;
1482 break; 2911 break;
2912 case CRYPTO_ALG_TYPE_AEAD:
2913 driver_algs[i].alg.aead.base.cra_priority =
2914 CHCR_CRA_PRIORITY;
2915 driver_algs[i].alg.aead.base.cra_flags =
2916 CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
2917 driver_algs[i].alg.aead.encrypt = chcr_aead_encrypt;
2918 driver_algs[i].alg.aead.decrypt = chcr_aead_decrypt;
2919 driver_algs[i].alg.aead.init = chcr_aead_cra_init;
2920 driver_algs[i].alg.aead.exit = chcr_aead_cra_exit;
2921 driver_algs[i].alg.aead.base.cra_module = THIS_MODULE;
2922 err = crypto_register_aead(&driver_algs[i].alg.aead);
2923 name = driver_algs[i].alg.aead.base.cra_driver_name;
2924 break;
1483 case CRYPTO_ALG_TYPE_AHASH: 2925 case CRYPTO_ALG_TYPE_AHASH:
1484 a_hash = &driver_algs[i].alg.hash; 2926 a_hash = &driver_algs[i].alg.hash;
1485 a_hash->update = chcr_ahash_update; 2927 a_hash->update = chcr_ahash_update;
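The registration branch above fills in the shared entry points (chcr_aead_encrypt/decrypt, chcr_aead_cra_init/exit) before handing each template to crypto_register_aead(), and the unregister hunk earlier walks the same table for teardown. Reduced to a standalone skeleton — the unwind-on-failure shape here is an assumption of this sketch, not copied from the patch:

/* Standalone skeleton of the register/unregister pairing above.
 * driver_algs[] and CHCR_CRA_PRIORITY are the driver's; the unwind
 * shape is this sketch's own.
 */
static int example_register_aeads(void)
{
	int err = 0, i;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		if ((driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) !=
		    CRYPTO_ALG_TYPE_AEAD)
			continue;
		driver_algs[i].alg.aead.base.cra_priority = CHCR_CRA_PRIORITY;
		driver_algs[i].alg.aead.base.cra_module = THIS_MODULE;
		err = crypto_register_aead(&driver_algs[i].alg.aead);
		if (err)
			goto unwind;
		driver_algs[i].is_registered = 1;
	}
	return 0;

unwind:
	while (--i >= 0) {
		if ((driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) ==
		    CRYPTO_ALG_TYPE_AEAD && driver_algs[i].is_registered) {
			crypto_unregister_aead(&driver_algs[i].alg.aead);
			driver_algs[i].is_registered = 0;
		}
	}
	return err;
}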
diff --git a/drivers/crypto/chelsio/chcr_algo.h b/drivers/crypto/chelsio/chcr_algo.h
index f2a59058b204..3c7c51f7bedf 100644
--- a/drivers/crypto/chelsio/chcr_algo.h
+++ b/drivers/crypto/chelsio/chcr_algo.h
@@ -258,13 +258,15 @@ enum {
258 * where they indicate the size of the integrity check value (ICV) 258 * where they indicate the size of the integrity check value (ICV)
259 */ 259 */
260enum { 260enum {
261 AES_CCM_ICV_4 = 4, 261 ICV_4 = 4,
262 AES_CCM_ICV_6 = 6, 262 ICV_6 = 6,
263 AES_CCM_ICV_8 = 8, 263 ICV_8 = 8,
264 AES_CCM_ICV_10 = 10, 264 ICV_10 = 10,
265 AES_CCM_ICV_12 = 12, 265 ICV_12 = 12,
266 AES_CCM_ICV_14 = 14, 266 ICV_13 = 13,
267 AES_CCM_ICV_16 = 16 267 ICV_14 = 14,
268 ICV_15 = 15,
269 ICV_16 = 16
268}; 270};
269 271
270struct hash_op_params { 272struct hash_op_params {
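The ICV enum loses its AES_CCM_ prefix and gains the odd sizes 13 and 15, presumably because the tag lengths are no longer CCM-specific. For CCM itself, RFC 3610 still only permits the even lengths 4 through 16, which is exactly the kind of check a setauthsize handler must enforce. A hypothetical version (the driver's chcr_ccm_setauthsize presumably also records the matching CHCR_SCMD_HMAC_CTRL_* value) could read:

/* Hypothetical CCM setauthsize: accept only the tag sizes RFC 3610
 * allows.  ICV_* come from the enum above.
 */
static int example_ccm_setauthsize(struct crypto_aead *tfm,
				   unsigned int authsize)
{
	switch (authsize) {
	case ICV_4:
	case ICV_6:
	case ICV_8:
	case ICV_10:
	case ICV_12:
	case ICV_14:
	case ICV_16:
		return 0;	/* valid CCM tag length */
	default:
		return -EINVAL;	/* odd or out-of-range ICV */
	}
}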
diff --git a/drivers/crypto/chelsio/chcr_core.c b/drivers/crypto/chelsio/chcr_core.c
index fb5f9bbfa09c..acde2de2c1e1 100644
--- a/drivers/crypto/chelsio/chcr_core.c
+++ b/drivers/crypto/chelsio/chcr_core.c
@@ -109,14 +109,12 @@ static int cpl_fw6_pld_handler(struct chcr_dev *dev,
109 if (ack_err_status) { 109 if (ack_err_status) {
110 if (CHK_MAC_ERR_BIT(ack_err_status) || 110 if (CHK_MAC_ERR_BIT(ack_err_status) ||
111 CHK_PAD_ERR_BIT(ack_err_status)) 111 CHK_PAD_ERR_BIT(ack_err_status))
112 error_status = -EINVAL; 112 error_status = -EBADMSG;
113 } 113 }
114 /* call completion callback with failure status */ 114 /* call completion callback with failure status */
115 if (req) { 115 if (req) {
116 if (!chcr_handle_resp(req, input, error_status)) 116 error_status = chcr_handle_resp(req, input, error_status);
117 req->complete(req, error_status); 117 req->complete(req, error_status);
118 else
119 return -EINVAL;
120 } else { 118 } else {
121 pr_err("Incorrect request address from the firmware\n"); 119 pr_err("Incorrect request address from the firmware\n");
122 return -EFAULT; 120 return -EFAULT;
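Switching the MAC/pad failure from -EINVAL to -EBADMSG is a semantic fix: -EBADMSG is the kernel crypto API's conventional "authentication failed" result for AEAD decryption, letting callers distinguish a forged or corrupted message from a malformed request. A hypothetical completion callback making that distinction:

/* Hypothetical async completion callback: -EBADMSG means the ICV
 * check failed (tampered or corrupted ciphertext); anything else is
 * an operational error.
 */
static void example_aead_done(struct crypto_async_request *base, int err)
{
	struct aead_request *req = container_of(base, struct aead_request,
						base);

	if (err == -EBADMSG)
		pr_debug("aead: ICV mismatch, dropping message\n");
	else if (err)
		pr_err("aead: request failed with %d\n", err);

	/* hand req and err back to the consumer here */
	(void)req;
}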
diff --git a/drivers/crypto/chelsio/chcr_core.h b/drivers/crypto/chelsio/chcr_core.h
index fc3cd77ad6d1..c7088a4e0a49 100644
--- a/drivers/crypto/chelsio/chcr_core.h
+++ b/drivers/crypto/chelsio/chcr_core.h
@@ -72,9 +72,7 @@ struct chcr_wr {
72}; 72};
73 73
74struct chcr_dev { 74struct chcr_dev {
 75 /* Request submitted to h/w and waiting for response. */
76 spinlock_t lock_chcr_dev; 75 spinlock_t lock_chcr_dev;
77 struct crypto_queue pending_queue;
78 struct uld_ctx *u_ctx; 76 struct uld_ctx *u_ctx;
79 unsigned char tx_channel_id; 77 unsigned char tx_channel_id;
80}; 78};
diff --git a/drivers/crypto/chelsio/chcr_crypto.h b/drivers/crypto/chelsio/chcr_crypto.h
index 40a5182fac90..d5af7d64a763 100644
--- a/drivers/crypto/chelsio/chcr_crypto.h
+++ b/drivers/crypto/chelsio/chcr_crypto.h
@@ -36,6 +36,14 @@
36#ifndef __CHCR_CRYPTO_H__ 36#ifndef __CHCR_CRYPTO_H__
37#define __CHCR_CRYPTO_H__ 37#define __CHCR_CRYPTO_H__
38 38
39#define GHASH_BLOCK_SIZE 16
40#define GHASH_DIGEST_SIZE 16
41
42#define CCM_B0_SIZE 16
43#define CCM_AAD_FIELD_SIZE 2
44#define T5_MAX_AAD_SIZE 512
45
46
 39/* Define the following if the h/w does not drop the AAD and IV data 47/* Define the following if the h/w does not drop the AAD and IV data
 40 * before handing back the processed data 48 * before handing back the processed data
 41 */ 49 */
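CCM_B0_SIZE and CCM_AAD_FIELD_SIZE describe the RFC 3610 framing that has to be staged before submission (the per-request scratch_pad added further down is the natural home for it): a 16-byte B0 block whose flags octet encodes the tag width and length-field width, optionally followed by a 2-byte big-endian AAD length. An illustrative formatter under simplifying assumptions:

/* Illustrative RFC 3610 framing, not the driver's code.  Assumes a
 * buffer of CCM_B0_SIZE + CCM_AAD_FIELD_SIZE bytes, that iv[] already
 * holds the flags octet (L - 1) and the nonce, that L == 2, and that
 * assoclen < 0xff00 so the 2-byte AAD encoding is valid.
 */
static void example_ccm_format_b0(u8 *buf, const u8 *iv,
				  unsigned int authsize,
				  unsigned int assoclen,
				  unsigned int cryptlen)
{
	memcpy(buf, iv, CCM_B0_SIZE);	/* B0 starts as flags || nonce || 0 */
	/* flags: Adata bit, then ((M - 2) / 2) << 3; (L - 1) is in iv[0] */
	buf[0] |= (assoclen ? 64 : 0) | (((authsize - 2) / 2) << 3);
	/* message length, big-endian, in the trailing L == 2 bytes */
	buf[CCM_B0_SIZE - 2] = (cryptlen >> 8) & 0xff;
	buf[CCM_B0_SIZE - 1] = cryptlen & 0xff;

	if (assoclen) {			/* 2-byte big-endian AAD length */
		buf[CCM_B0_SIZE]     = (assoclen >> 8) & 0xff;
		buf[CCM_B0_SIZE + 1] = assoclen & 0xff;
	}
}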
@@ -63,22 +71,36 @@
63#define CHCR_SCMD_AUTH_CTRL_AUTH_CIPHER 0 71#define CHCR_SCMD_AUTH_CTRL_AUTH_CIPHER 0
64#define CHCR_SCMD_AUTH_CTRL_CIPHER_AUTH 1 72#define CHCR_SCMD_AUTH_CTRL_CIPHER_AUTH 1
65 73
66#define CHCR_SCMD_CIPHER_MODE_NOP 0 74#define CHCR_SCMD_CIPHER_MODE_NOP 0
67#define CHCR_SCMD_CIPHER_MODE_AES_CBC 1 75#define CHCR_SCMD_CIPHER_MODE_AES_CBC 1
68#define CHCR_SCMD_CIPHER_MODE_GENERIC_AES 4 76#define CHCR_SCMD_CIPHER_MODE_AES_GCM 2
69#define CHCR_SCMD_CIPHER_MODE_AES_XTS 6 77#define CHCR_SCMD_CIPHER_MODE_AES_CTR 3
78#define CHCR_SCMD_CIPHER_MODE_GENERIC_AES 4
79#define CHCR_SCMD_CIPHER_MODE_AES_XTS 6
80#define CHCR_SCMD_CIPHER_MODE_AES_CCM 7
70 81
71#define CHCR_SCMD_AUTH_MODE_NOP 0 82#define CHCR_SCMD_AUTH_MODE_NOP 0
72#define CHCR_SCMD_AUTH_MODE_SHA1 1 83#define CHCR_SCMD_AUTH_MODE_SHA1 1
73#define CHCR_SCMD_AUTH_MODE_SHA224 2 84#define CHCR_SCMD_AUTH_MODE_SHA224 2
74#define CHCR_SCMD_AUTH_MODE_SHA256 3 85#define CHCR_SCMD_AUTH_MODE_SHA256 3
86#define CHCR_SCMD_AUTH_MODE_GHASH 4
75#define CHCR_SCMD_AUTH_MODE_SHA512_224 5 87#define CHCR_SCMD_AUTH_MODE_SHA512_224 5
76#define CHCR_SCMD_AUTH_MODE_SHA512_256 6 88#define CHCR_SCMD_AUTH_MODE_SHA512_256 6
77#define CHCR_SCMD_AUTH_MODE_SHA512_384 7 89#define CHCR_SCMD_AUTH_MODE_SHA512_384 7
78#define CHCR_SCMD_AUTH_MODE_SHA512_512 8 90#define CHCR_SCMD_AUTH_MODE_SHA512_512 8
91#define CHCR_SCMD_AUTH_MODE_CBCMAC 9
92#define CHCR_SCMD_AUTH_MODE_CMAC 10
79 93
80#define CHCR_SCMD_HMAC_CTRL_NOP 0 94#define CHCR_SCMD_HMAC_CTRL_NOP 0
81#define CHCR_SCMD_HMAC_CTRL_NO_TRUNC 1 95#define CHCR_SCMD_HMAC_CTRL_NO_TRUNC 1
96#define CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366 2
97#define CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT 3
98#define CHCR_SCMD_HMAC_CTRL_PL1 4
99#define CHCR_SCMD_HMAC_CTRL_PL2 5
100#define CHCR_SCMD_HMAC_CTRL_PL3 6
101#define CHCR_SCMD_HMAC_CTRL_DIV2 7
102#define VERIFY_HW 0
103#define VERIFY_SW 1
82 104
83#define CHCR_SCMD_IVGEN_CTRL_HW 0 105#define CHCR_SCMD_IVGEN_CTRL_HW 0
84#define CHCR_SCMD_IVGEN_CTRL_SW 1 106#define CHCR_SCMD_IVGEN_CTRL_SW 1
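The widened CHCR_SCMD_AUTH_MODE_* list is what lets a single authenc setkey/setauthsize path serve every hmac(shaN) variant registered earlier: the inner digest size selects the firmware auth mode. A hypothetical mapping helper (the driver's actual selection logic may differ):

/* Hypothetical digest-size -> firmware auth mode mapping for the
 * authenc(hmac(shaN),cbc(aes)) transforms.
 */
static unsigned char example_auth_mode(unsigned int digestsize)
{
	switch (digestsize) {
	case SHA1_DIGEST_SIZE:
		return CHCR_SCMD_AUTH_MODE_SHA1;
	case SHA224_DIGEST_SIZE:
		return CHCR_SCMD_AUTH_MODE_SHA224;
	case SHA256_DIGEST_SIZE:
		return CHCR_SCMD_AUTH_MODE_SHA256;
	case SHA384_DIGEST_SIZE:
		return CHCR_SCMD_AUTH_MODE_SHA512_384;
	case SHA512_DIGEST_SIZE:
		return CHCR_SCMD_AUTH_MODE_SHA512_512;
	default:
		return CHCR_SCMD_AUTH_MODE_NOP;	/* digest_null case */
	}
}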
@@ -106,12 +128,20 @@
106#define IV_IMMEDIATE 1 128#define IV_IMMEDIATE 1
107#define IV_DSGL 2 129#define IV_DSGL 2
108 130
131#define AEAD_H_SIZE 16
132
109#define CRYPTO_ALG_SUB_TYPE_MASK 0x0f000000 133#define CRYPTO_ALG_SUB_TYPE_MASK 0x0f000000
110#define CRYPTO_ALG_SUB_TYPE_HASH_HMAC 0x01000000 134#define CRYPTO_ALG_SUB_TYPE_HASH_HMAC 0x01000000
135#define CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106 0x02000000
136#define CRYPTO_ALG_SUB_TYPE_AEAD_GCM 0x03000000
137#define CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC 0x04000000
138#define CRYPTO_ALG_SUB_TYPE_AEAD_CCM 0x05000000
139#define CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309 0x06000000
140#define CRYPTO_ALG_SUB_TYPE_AEAD_NULL 0x07000000
141#define CRYPTO_ALG_SUB_TYPE_CTR 0x08000000
111#define CRYPTO_ALG_TYPE_HMAC (CRYPTO_ALG_TYPE_AHASH |\ 142#define CRYPTO_ALG_TYPE_HMAC (CRYPTO_ALG_TYPE_AHASH |\
112 CRYPTO_ALG_SUB_TYPE_HASH_HMAC) 143 CRYPTO_ALG_SUB_TYPE_HASH_HMAC)
113 144
114#define MAX_SALT 4
115#define MAX_SCRATCH_PAD_SIZE 32 145#define MAX_SCRATCH_PAD_SIZE 32
116 146
117#define CHCR_HASH_MAX_BLOCK_SIZE_64 64 147#define CHCR_HASH_MAX_BLOCK_SIZE_64 64
@@ -126,6 +156,42 @@ struct ablk_ctx {
126 unsigned char ciph_mode; 156 unsigned char ciph_mode;
127 u8 rrkey[AES_MAX_KEY_SIZE]; 157 u8 rrkey[AES_MAX_KEY_SIZE];
128}; 158};
159struct chcr_aead_reqctx {
160 struct sk_buff *skb;
161 short int dst_nents;
162 u16 verify;
163 u8 iv[CHCR_MAX_CRYPTO_IV_LEN];
164 unsigned char scratch_pad[MAX_SCRATCH_PAD_SIZE];
165};
166
167struct chcr_gcm_ctx {
168 u8 ghash_h[AEAD_H_SIZE];
169};
170
171struct chcr_authenc_ctx {
172 u8 dec_rrkey[AES_MAX_KEY_SIZE];
173 u8 h_iopad[2 * CHCR_HASH_MAX_DIGEST_SIZE];
174 unsigned char auth_mode;
175};
176
177struct __aead_ctx {
178 struct chcr_gcm_ctx gcm[0];
179 struct chcr_authenc_ctx authenc[0];
180};
181
182
183
184struct chcr_aead_ctx {
185 __be32 key_ctx_hdr;
186 unsigned int enckey_len;
187 struct crypto_skcipher *null;
188 u8 salt[MAX_SALT];
189 u8 key[CHCR_AES_MAX_KEY_LEN];
190 u16 hmac_ctrl;
191 u16 mayverify;
192 struct __aead_ctx ctx[0];
193};
194
129 195
130 196
131struct hmac_ctx { 197struct hmac_ctx {
@@ -137,6 +203,7 @@ struct hmac_ctx {
137struct __crypto_ctx { 203struct __crypto_ctx {
138 struct hmac_ctx hmacctx[0]; 204 struct hmac_ctx hmacctx[0];
139 struct ablk_ctx ablkctx[0]; 205 struct ablk_ctx ablkctx[0];
206 struct chcr_aead_ctx aeadctx[0];
140}; 207};
141 208
142struct chcr_context { 209struct chcr_context {
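The zero-length members of struct __aead_ctx make gcm[0] and authenc[0] overlay the same trailing storage rather than occupy separate fields; which sub-context is actually usable is decided by cra_ctxsize in the driver_algs entries above (the GCM/RFC4106 entries add sizeof(struct chcr_gcm_ctx), the authenc entries add sizeof(struct chcr_authenc_ctx), and the plain CCM entries add neither). An illustrative restatement of that sizing rule:

/* Illustrative arithmetic only: the trailing ctx[0] member has zero
 * size, so each transform reserves exactly the sub-context its
 * cra_ctxsize sum named.
 */
static size_t example_aead_ctxsize(size_t subctx_size)
{
	/* subctx_size is sizeof(struct chcr_gcm_ctx) for GCM entries,
	 * sizeof(struct chcr_authenc_ctx) for authenc entries, and 0
	 * for the plain CCM entries.
	 */
	return sizeof(struct chcr_context) +
	       sizeof(struct chcr_aead_ctx) + subctx_size;
}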
@@ -171,16 +238,19 @@ struct chcr_alg_template {
171 union { 238 union {
172 struct crypto_alg crypto; 239 struct crypto_alg crypto;
173 struct ahash_alg hash; 240 struct ahash_alg hash;
241 struct aead_alg aead;
174 } alg; 242 } alg;
175}; 243};
176 244
177struct chcr_req_ctx { 245struct chcr_req_ctx {
178 union { 246 union {
179 struct ahash_request *ahash_req; 247 struct ahash_request *ahash_req;
248 struct aead_request *aead_req;
180 struct ablkcipher_request *ablk_req; 249 struct ablkcipher_request *ablk_req;
181 } req; 250 } req;
182 union { 251 union {
183 struct chcr_ahash_req_ctx *ahash_ctx; 252 struct chcr_ahash_req_ctx *ahash_ctx;
253 struct chcr_aead_reqctx *reqctx;
184 struct chcr_blkcipher_req_ctx *ablk_ctx; 254 struct chcr_blkcipher_req_ctx *ablk_ctx;
185 } ctx; 255 } ctx;
186}; 256};
@@ -190,9 +260,15 @@ struct sge_opaque_hdr {
190 dma_addr_t addr[MAX_SKB_FRAGS + 1]; 260 dma_addr_t addr[MAX_SKB_FRAGS + 1];
191}; 261};
192 262
193typedef struct sk_buff *(*create_wr_t)(struct crypto_async_request *req, 263typedef struct sk_buff *(*create_wr_t)(struct aead_request *req,
194 struct chcr_context *ctx,
195 unsigned short qid, 264 unsigned short qid,
265 int size,
196 unsigned short op_type); 266 unsigned short op_type);
197 267
268static int chcr_aead_op(struct aead_request *req_base,
269 unsigned short op_type,
270 int size,
271 create_wr_t create_wr_fn);
272static inline int get_aead_subtype(struct crypto_aead *aead);
273
198#endif /* __CHCR_CRYPTO_H__ */ 274#endif /* __CHCR_CRYPTO_H__ */
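get_aead_subtype() is only declared here; its likely shape — recovering the template from the aead_alg embedded in it and masking out the sub-type bits — is sketched below. This is an assumption consistent with the CRYPTO_ALG_SUB_TYPE_MASK definition above, not a copy of the driver's body.

/* Sketch of the declared helper: walk from the live transform back to
 * its chcr_alg_template and extract the sub-type bits.
 */
static inline int get_aead_subtype(struct crypto_aead *aead)
{
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(alg, struct chcr_alg_template, alg.aead);

	return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}

chcr_aead_op() then pairs such a subtype-aware caller with a create_wr_t callback — any function matching the new (req, qid, size, op_type) signature — so the encrypt and decrypt paths can share one work-request submission routine.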