diff options
author | Lee Nipper <lee.nipper@gmail.com> | 2010-05-19 05:20:36 -0400 |
---|---|---|
committer | Herbert Xu <herbert@gondor.apana.org.au> | 2010-05-19 05:20:36 -0400 |
commit | 497f2e6b8b21407625a4fb34bc04b50eff098085 (patch) | |
tree | 80a85aa925491e21253b41b5587712e116e712c9 /drivers/crypto | |
parent | acbf7c627fb59dfea975f7aafeaba97921085061 (diff) |
crypto: talitos - add hash algorithms
Add the following algorithms to talitos:
md5,
sha1,
sha256,
sha384,
sha512.
These are all type ahash.
Signed-off-by: Lee Nipper <lee.nipper@gmail.com>
Acked-by: Kim Phillips <kim.phillips@freescale.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'drivers/crypto')
-rw-r--r-- | drivers/crypto/talitos.c | 544 | ||||
-rw-r--r-- | drivers/crypto/talitos.h | 8 |
2 files changed, 534 insertions, 18 deletions
diff --git a/drivers/crypto/talitos.c b/drivers/crypto/talitos.c index 4e8153a839a4..1b08a3951fb4 100644 --- a/drivers/crypto/talitos.c +++ b/drivers/crypto/talitos.c | |||
@@ -43,6 +43,7 @@ | |||
43 | #include <crypto/aes.h> | 43 | #include <crypto/aes.h> |
44 | #include <crypto/des.h> | 44 | #include <crypto/des.h> |
45 | #include <crypto/sha.h> | 45 | #include <crypto/sha.h> |
46 | #include <crypto/md5.h> | ||
46 | #include <crypto/aead.h> | 47 | #include <crypto/aead.h> |
47 | #include <crypto/authenc.h> | 48 | #include <crypto/authenc.h> |
48 | #include <crypto/skcipher.h> | 49 | #include <crypto/skcipher.h> |
@@ -67,6 +68,13 @@ struct talitos_ptr { | |||
67 | __be32 ptr; /* address */ | 68 | __be32 ptr; /* address */ |
68 | }; | 69 | }; |
69 | 70 | ||
71 | static const struct talitos_ptr zero_entry = { | ||
72 | .len = 0, | ||
73 | .j_extent = 0, | ||
74 | .eptr = 0, | ||
75 | .ptr = 0 | ||
76 | }; | ||
77 | |||
70 | /* descriptor */ | 78 | /* descriptor */ |
71 | struct talitos_desc { | 79 | struct talitos_desc { |
72 | __be32 hdr; /* header high bits */ | 80 | __be32 hdr; /* header high bits */ |
@@ -694,7 +702,7 @@ static void talitos_unregister_rng(struct device *dev) | |||
694 | #define TALITOS_MAX_KEY_SIZE 64 | 702 | #define TALITOS_MAX_KEY_SIZE 64 |
695 | #define TALITOS_MAX_IV_LENGTH 16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */ | 703 | #define TALITOS_MAX_IV_LENGTH 16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */ |
696 | 704 | ||
697 | #define MD5_DIGEST_SIZE 16 | 705 | #define MD5_BLOCK_SIZE 64 |
698 | 706 | ||
699 | struct talitos_ctx { | 707 | struct talitos_ctx { |
700 | struct device *dev; | 708 | struct device *dev; |
@@ -707,6 +715,22 @@ struct talitos_ctx { | |||
707 | unsigned int authsize; | 715 | unsigned int authsize; |
708 | }; | 716 | }; |
709 | 717 | ||
718 | #define HASH_MAX_BLOCK_SIZE SHA512_BLOCK_SIZE | ||
719 | #define TALITOS_MDEU_MAX_CONTEXT_SIZE TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512 | ||
720 | |||
721 | struct talitos_ahash_req_ctx { | ||
722 | u64 count; | ||
723 | u8 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE]; | ||
724 | unsigned int hw_context_size; | ||
725 | u8 buf[HASH_MAX_BLOCK_SIZE]; | ||
726 | u8 bufnext[HASH_MAX_BLOCK_SIZE]; | ||
727 | unsigned int first; | ||
728 | unsigned int last; | ||
729 | unsigned int to_hash_later; | ||
730 | struct scatterlist bufsl[2]; | ||
731 | struct scatterlist *psrc; | ||
732 | }; | ||
733 | |||
710 | static int aead_setauthsize(struct crypto_aead *authenc, | 734 | static int aead_setauthsize(struct crypto_aead *authenc, |
711 | unsigned int authsize) | 735 | unsigned int authsize) |
712 | { | 736 | { |
@@ -823,10 +847,14 @@ static void talitos_sg_unmap(struct device *dev, | |||
823 | else | 847 | else |
824 | dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); | 848 | dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); |
825 | 849 | ||
826 | if (edesc->dst_is_chained) | 850 | if (dst) { |
827 | talitos_unmap_sg_chain(dev, dst, DMA_FROM_DEVICE); | 851 | if (edesc->dst_is_chained) |
828 | else | 852 | talitos_unmap_sg_chain(dev, dst, |
829 | dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE); | 853 | DMA_FROM_DEVICE); |
854 | else | ||
855 | dma_unmap_sg(dev, dst, dst_nents, | ||
856 | DMA_FROM_DEVICE); | ||
857 | } | ||
830 | } else | 858 | } else |
831 | if (edesc->src_is_chained) | 859 | if (edesc->src_is_chained) |
832 | talitos_unmap_sg_chain(dev, src, DMA_BIDIRECTIONAL); | 860 | talitos_unmap_sg_chain(dev, src, DMA_BIDIRECTIONAL); |
@@ -1116,12 +1144,67 @@ static int sg_count(struct scatterlist *sg_list, int nbytes, int *chained) | |||
1116 | return sg_nents; | 1144 | return sg_nents; |
1117 | } | 1145 | } |
1118 | 1146 | ||
1147 | /** | ||
1148 | * sg_copy_end_to_buffer - Copy end data from SG list to a linear buffer | ||
1149 | * @sgl: The SG list | ||
1150 | * @nents: Number of SG entries | ||
1151 | * @buf: Where to copy to | ||
1152 | * @buflen: The number of bytes to copy | ||
1153 | * @skip: The number of bytes to skip before copying. | ||
1154 | * Note: skip + buflen should equal SG total size. | ||
1155 | * | ||
1156 | * Returns the number of copied bytes. | ||
1157 | * | ||
1158 | **/ | ||
1159 | static size_t sg_copy_end_to_buffer(struct scatterlist *sgl, unsigned int nents, | ||
1160 | void *buf, size_t buflen, unsigned int skip) | ||
1161 | { | ||
1162 | unsigned int offset = 0; | ||
1163 | unsigned int boffset = 0; | ||
1164 | struct sg_mapping_iter miter; | ||
1165 | unsigned long flags; | ||
1166 | unsigned int sg_flags = SG_MITER_ATOMIC; | ||
1167 | size_t total_buffer = buflen + skip; | ||
1168 | |||
1169 | sg_flags |= SG_MITER_FROM_SG; | ||
1170 | |||
1171 | sg_miter_start(&miter, sgl, nents, sg_flags); | ||
1172 | |||
1173 | local_irq_save(flags); | ||
1174 | |||
1175 | while (sg_miter_next(&miter) && offset < total_buffer) { | ||
1176 | unsigned int len; | ||
1177 | unsigned int ignore; | ||
1178 | |||
1179 | if ((offset + miter.length) > skip) { | ||
1180 | if (offset < skip) { | ||
1181 | /* Copy part of this segment */ | ||
1182 | ignore = skip - offset; | ||
1183 | len = miter.length - ignore; | ||
1184 | memcpy(buf + boffset, miter.addr + ignore, len); | ||
1185 | } else { | ||
1186 | /* Copy all of this segment */ | ||
1187 | len = miter.length; | ||
1188 | memcpy(buf + boffset, miter.addr, len); | ||
1189 | } | ||
1190 | boffset += len; | ||
1191 | } | ||
1192 | offset += miter.length; | ||
1193 | } | ||
1194 | |||
1195 | sg_miter_stop(&miter); | ||
1196 | |||
1197 | local_irq_restore(flags); | ||
1198 | return boffset; | ||
1199 | } | ||
1200 | |||
1119 | /* | 1201 | /* |
1120 | * allocate and map the extended descriptor | 1202 | * allocate and map the extended descriptor |
1121 | */ | 1203 | */ |
1122 | static struct talitos_edesc *talitos_edesc_alloc(struct device *dev, | 1204 | static struct talitos_edesc *talitos_edesc_alloc(struct device *dev, |
1123 | struct scatterlist *src, | 1205 | struct scatterlist *src, |
1124 | struct scatterlist *dst, | 1206 | struct scatterlist *dst, |
1207 | int hash_result, | ||
1125 | unsigned int cryptlen, | 1208 | unsigned int cryptlen, |
1126 | unsigned int authsize, | 1209 | unsigned int authsize, |
1127 | int icv_stashing, | 1210 | int icv_stashing, |
@@ -1141,11 +1224,16 @@ static struct talitos_edesc *talitos_edesc_alloc(struct device *dev, | |||
1141 | src_nents = sg_count(src, cryptlen + authsize, &src_chained); | 1224 | src_nents = sg_count(src, cryptlen + authsize, &src_chained); |
1142 | src_nents = (src_nents == 1) ? 0 : src_nents; | 1225 | src_nents = (src_nents == 1) ? 0 : src_nents; |
1143 | 1226 | ||
1144 | if (dst == src) { | 1227 | if (hash_result) { |
1145 | dst_nents = src_nents; | 1228 | dst_nents = 0; |
1146 | } else { | 1229 | } else { |
1147 | dst_nents = sg_count(dst, cryptlen + authsize, &dst_chained); | 1230 | if (dst == src) { |
1148 | dst_nents = (dst_nents == 1) ? 0 : dst_nents; | 1231 | dst_nents = src_nents; |
1232 | } else { | ||
1233 | dst_nents = sg_count(dst, cryptlen + authsize, | ||
1234 | &dst_chained); | ||
1235 | dst_nents = (dst_nents == 1) ? 0 : dst_nents; | ||
1236 | } | ||
1149 | } | 1237 | } |
1150 | 1238 | ||
1151 | /* | 1239 | /* |
@@ -1174,8 +1262,10 @@ static struct talitos_edesc *talitos_edesc_alloc(struct device *dev, | |||
1174 | edesc->src_is_chained = src_chained; | 1262 | edesc->src_is_chained = src_chained; |
1175 | edesc->dst_is_chained = dst_chained; | 1263 | edesc->dst_is_chained = dst_chained; |
1176 | edesc->dma_len = dma_len; | 1264 | edesc->dma_len = dma_len; |
1177 | edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0], | 1265 | if (dma_len) |
1178 | edesc->dma_len, DMA_BIDIRECTIONAL); | 1266 | edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0], |
1267 | edesc->dma_len, | ||
1268 | DMA_BIDIRECTIONAL); | ||
1179 | 1269 | ||
1180 | return edesc; | 1270 | return edesc; |
1181 | } | 1271 | } |
@@ -1186,7 +1276,7 @@ static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, | |||
1186 | struct crypto_aead *authenc = crypto_aead_reqtfm(areq); | 1276 | struct crypto_aead *authenc = crypto_aead_reqtfm(areq); |
1187 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); | 1277 | struct talitos_ctx *ctx = crypto_aead_ctx(authenc); |
1188 | 1278 | ||
1189 | return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst, | 1279 | return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst, 0, |
1190 | areq->cryptlen, ctx->authsize, icv_stashing, | 1280 | areq->cryptlen, ctx->authsize, icv_stashing, |
1191 | areq->base.flags); | 1281 | areq->base.flags); |
1192 | } | 1282 | } |
@@ -1443,8 +1533,8 @@ static struct talitos_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request * | |||
1443 | struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq); | 1533 | struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq); |
1444 | struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher); | 1534 | struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher); |
1445 | 1535 | ||
1446 | return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst, areq->nbytes, | 1536 | return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst, 0, |
1447 | 0, 0, areq->base.flags); | 1537 | areq->nbytes, 0, 0, areq->base.flags); |
1448 | } | 1538 | } |
1449 | 1539 | ||
1450 | static int ablkcipher_encrypt(struct ablkcipher_request *areq) | 1540 | static int ablkcipher_encrypt(struct ablkcipher_request *areq) |
@@ -1480,6 +1570,286 @@ static int ablkcipher_decrypt(struct ablkcipher_request *areq) | |||
1480 | return common_nonsnoop(edesc, areq, NULL, ablkcipher_done); | 1570 | return common_nonsnoop(edesc, areq, NULL, ablkcipher_done); |
1481 | } | 1571 | } |
1482 | 1572 | ||
1573 | static void common_nonsnoop_hash_unmap(struct device *dev, | ||
1574 | struct talitos_edesc *edesc, | ||
1575 | struct ahash_request *areq) | ||
1576 | { | ||
1577 | struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); | ||
1578 | |||
1579 | unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE); | ||
1580 | |||
1581 | /* When using hashctx-in, must unmap it. */ | ||
1582 | if (edesc->desc.ptr[1].len) | ||
1583 | unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], | ||
1584 | DMA_TO_DEVICE); | ||
1585 | |||
1586 | if (edesc->desc.ptr[2].len) | ||
1587 | unmap_single_talitos_ptr(dev, &edesc->desc.ptr[2], | ||
1588 | DMA_TO_DEVICE); | ||
1589 | |||
1590 | talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL); | ||
1591 | |||
1592 | if (edesc->dma_len) | ||
1593 | dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len, | ||
1594 | DMA_BIDIRECTIONAL); | ||
1595 | |||
1596 | } | ||
1597 | |||
1598 | static void ahash_done(struct device *dev, | ||
1599 | struct talitos_desc *desc, void *context, | ||
1600 | int err) | ||
1601 | { | ||
1602 | struct ahash_request *areq = context; | ||
1603 | struct talitos_edesc *edesc = | ||
1604 | container_of(desc, struct talitos_edesc, desc); | ||
1605 | struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); | ||
1606 | |||
1607 | if (!req_ctx->last && req_ctx->to_hash_later) { | ||
1608 | /* Position any partial block for next update/final/finup */ | ||
1609 | memcpy(req_ctx->buf, req_ctx->bufnext, req_ctx->to_hash_later); | ||
1610 | } | ||
1611 | common_nonsnoop_hash_unmap(dev, edesc, areq); | ||
1612 | |||
1613 | kfree(edesc); | ||
1614 | |||
1615 | areq->base.complete(&areq->base, err); | ||
1616 | } | ||
1617 | |||
1618 | static int common_nonsnoop_hash(struct talitos_edesc *edesc, | ||
1619 | struct ahash_request *areq, unsigned int length, | ||
1620 | void (*callback) (struct device *dev, | ||
1621 | struct talitos_desc *desc, | ||
1622 | void *context, int error)) | ||
1623 | { | ||
1624 | struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); | ||
1625 | struct talitos_ctx *ctx = crypto_ahash_ctx(tfm); | ||
1626 | struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); | ||
1627 | struct device *dev = ctx->dev; | ||
1628 | struct talitos_desc *desc = &edesc->desc; | ||
1629 | int sg_count, ret; | ||
1630 | |||
1631 | /* first DWORD empty */ | ||
1632 | desc->ptr[0] = zero_entry; | ||
1633 | |||
1634 | /* hash context in (if not first) */ | ||
1635 | if (!req_ctx->first) { | ||
1636 | map_single_talitos_ptr(dev, &desc->ptr[1], | ||
1637 | req_ctx->hw_context_size, | ||
1638 | (char *)req_ctx->hw_context, 0, | ||
1639 | DMA_TO_DEVICE); | ||
1640 | } else { | ||
1641 | desc->ptr[1] = zero_entry; | ||
1642 | /* Indicate next op is not the first. */ | ||
1643 | req_ctx->first = 0; | ||
1644 | } | ||
1645 | |||
1646 | /* HMAC key */ | ||
1647 | if (ctx->keylen) | ||
1648 | map_single_talitos_ptr(dev, &desc->ptr[2], ctx->keylen, | ||
1649 | (char *)&ctx->key, 0, DMA_TO_DEVICE); | ||
1650 | else | ||
1651 | desc->ptr[2] = zero_entry; | ||
1652 | |||
1653 | /* | ||
1654 | * data in | ||
1655 | */ | ||
1656 | desc->ptr[3].len = cpu_to_be16(length); | ||
1657 | desc->ptr[3].j_extent = 0; | ||
1658 | |||
1659 | sg_count = talitos_map_sg(dev, req_ctx->psrc, | ||
1660 | edesc->src_nents ? : 1, | ||
1661 | DMA_TO_DEVICE, | ||
1662 | edesc->src_is_chained); | ||
1663 | |||
1664 | if (sg_count == 1) { | ||
1665 | to_talitos_ptr(&desc->ptr[3], sg_dma_address(req_ctx->psrc)); | ||
1666 | } else { | ||
1667 | sg_count = sg_to_link_tbl(req_ctx->psrc, sg_count, length, | ||
1668 | &edesc->link_tbl[0]); | ||
1669 | if (sg_count > 1) { | ||
1670 | desc->ptr[3].j_extent |= DESC_PTR_LNKTBL_JUMP; | ||
1671 | to_talitos_ptr(&desc->ptr[3], edesc->dma_link_tbl); | ||
1672 | dma_sync_single_for_device(ctx->dev, | ||
1673 | edesc->dma_link_tbl, | ||
1674 | edesc->dma_len, | ||
1675 | DMA_BIDIRECTIONAL); | ||
1676 | } else { | ||
1677 | /* Only one segment now, so no link tbl needed */ | ||
1678 | to_talitos_ptr(&desc->ptr[3], | ||
1679 | sg_dma_address(req_ctx->psrc)); | ||
1680 | } | ||
1681 | } | ||
1682 | |||
1683 | /* fifth DWORD empty */ | ||
1684 | desc->ptr[4] = zero_entry; | ||
1685 | |||
1686 | /* hash/HMAC out -or- hash context out */ | ||
1687 | if (req_ctx->last) | ||
1688 | map_single_talitos_ptr(dev, &desc->ptr[5], | ||
1689 | crypto_ahash_digestsize(tfm), | ||
1690 | areq->result, 0, DMA_FROM_DEVICE); | ||
1691 | else | ||
1692 | map_single_talitos_ptr(dev, &desc->ptr[5], | ||
1693 | req_ctx->hw_context_size, | ||
1694 | req_ctx->hw_context, 0, DMA_FROM_DEVICE); | ||
1695 | |||
1696 | /* last DWORD empty */ | ||
1697 | desc->ptr[6] = zero_entry; | ||
1698 | |||
1699 | ret = talitos_submit(dev, desc, callback, areq); | ||
1700 | if (ret != -EINPROGRESS) { | ||
1701 | common_nonsnoop_hash_unmap(dev, edesc, areq); | ||
1702 | kfree(edesc); | ||
1703 | } | ||
1704 | return ret; | ||
1705 | } | ||
1706 | |||
1707 | static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq, | ||
1708 | unsigned int nbytes) | ||
1709 | { | ||
1710 | struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); | ||
1711 | struct talitos_ctx *ctx = crypto_ahash_ctx(tfm); | ||
1712 | struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); | ||
1713 | |||
1714 | return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, 1, | ||
1715 | nbytes, 0, 0, areq->base.flags); | ||
1716 | } | ||
1717 | |||
1718 | static int ahash_init(struct ahash_request *areq) | ||
1719 | { | ||
1720 | struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); | ||
1721 | struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); | ||
1722 | |||
1723 | /* Initialize the context */ | ||
1724 | req_ctx->count = 0; | ||
1725 | req_ctx->first = 1; /* first indicates h/w must init its context */ | ||
1726 | req_ctx->hw_context_size = | ||
1727 | (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE) | ||
1728 | ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256 | ||
1729 | : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512; | ||
1730 | |||
1731 | return 0; | ||
1732 | } | ||
1733 | |||
1734 | static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes) | ||
1735 | { | ||
1736 | struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); | ||
1737 | struct talitos_ctx *ctx = crypto_ahash_ctx(tfm); | ||
1738 | struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); | ||
1739 | struct talitos_edesc *edesc; | ||
1740 | unsigned int blocksize = | ||
1741 | crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm)); | ||
1742 | unsigned int nbytes_to_hash; | ||
1743 | unsigned int to_hash_later; | ||
1744 | unsigned int index; | ||
1745 | int chained; | ||
1746 | |||
1747 | index = req_ctx->count & (blocksize - 1); | ||
1748 | req_ctx->count += nbytes; | ||
1749 | |||
1750 | if (!req_ctx->last && (index + nbytes) < blocksize) { | ||
1751 | /* Buffer the partial block */ | ||
1752 | sg_copy_to_buffer(areq->src, | ||
1753 | sg_count(areq->src, nbytes, &chained), | ||
1754 | req_ctx->buf + index, nbytes); | ||
1755 | return 0; | ||
1756 | } | ||
1757 | |||
1758 | if (index) { | ||
1759 | /* partial block from previous update; chain it in. */ | ||
1760 | sg_init_table(req_ctx->bufsl, (nbytes) ? 2 : 1); | ||
1761 | sg_set_buf(req_ctx->bufsl, req_ctx->buf, index); | ||
1762 | if (nbytes) | ||
1763 | scatterwalk_sg_chain(req_ctx->bufsl, 2, | ||
1764 | areq->src); | ||
1765 | req_ctx->psrc = req_ctx->bufsl; | ||
1766 | } else { | ||
1767 | req_ctx->psrc = areq->src; | ||
1768 | } | ||
1769 | nbytes_to_hash = index + nbytes; | ||
1770 | if (!req_ctx->last) { | ||
1771 | to_hash_later = (nbytes_to_hash & (blocksize - 1)); | ||
1772 | if (to_hash_later) { | ||
1773 | int nents; | ||
1774 | /* Must copy to_hash_later bytes from the end | ||
1775 | * to bufnext (a partial block) for later. | ||
1776 | */ | ||
1777 | nents = sg_count(areq->src, nbytes, &chained); | ||
1778 | sg_copy_end_to_buffer(areq->src, nents, | ||
1779 | req_ctx->bufnext, | ||
1780 | to_hash_later, | ||
1781 | nbytes - to_hash_later); | ||
1782 | |||
1783 | /* Adjust count for what will be hashed now */ | ||
1784 | nbytes_to_hash -= to_hash_later; | ||
1785 | } | ||
1786 | req_ctx->to_hash_later = to_hash_later; | ||
1787 | } | ||
1788 | |||
1789 | /* allocate extended descriptor */ | ||
1790 | edesc = ahash_edesc_alloc(areq, nbytes_to_hash); | ||
1791 | if (IS_ERR(edesc)) | ||
1792 | return PTR_ERR(edesc); | ||
1793 | |||
1794 | edesc->desc.hdr = ctx->desc_hdr_template; | ||
1795 | |||
1796 | /* On last one, request SEC to pad; otherwise continue */ | ||
1797 | if (req_ctx->last) | ||
1798 | edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD; | ||
1799 | else | ||
1800 | edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT; | ||
1801 | |||
1802 | /* On first one, request SEC to INIT hash. */ | ||
1803 | if (req_ctx->first) | ||
1804 | edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT; | ||
1805 | |||
1806 | /* When the tfm context has a keylen, it's an HMAC. | ||
1807 | * A first or last (ie. not middle) descriptor must request HMAC. | ||
1808 | */ | ||
1809 | if (ctx->keylen && (req_ctx->first || req_ctx->last)) | ||
1810 | edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC; | ||
1811 | |||
1812 | return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, | ||
1813 | ahash_done); | ||
1814 | } | ||
1815 | |||
1816 | static int ahash_update(struct ahash_request *areq) | ||
1817 | { | ||
1818 | struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); | ||
1819 | |||
1820 | req_ctx->last = 0; | ||
1821 | |||
1822 | return ahash_process_req(areq, areq->nbytes); | ||
1823 | } | ||
1824 | |||
1825 | static int ahash_final(struct ahash_request *areq) | ||
1826 | { | ||
1827 | struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); | ||
1828 | |||
1829 | req_ctx->last = 1; | ||
1830 | |||
1831 | return ahash_process_req(areq, 0); | ||
1832 | } | ||
1833 | |||
1834 | static int ahash_finup(struct ahash_request *areq) | ||
1835 | { | ||
1836 | struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); | ||
1837 | |||
1838 | req_ctx->last = 1; | ||
1839 | |||
1840 | return ahash_process_req(areq, areq->nbytes); | ||
1841 | } | ||
1842 | |||
1843 | static int ahash_digest(struct ahash_request *areq) | ||
1844 | { | ||
1845 | struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); | ||
1846 | |||
1847 | ahash_init(areq); | ||
1848 | req_ctx->last = 1; | ||
1849 | |||
1850 | return ahash_process_req(areq, areq->nbytes); | ||
1851 | } | ||
1852 | |||
1483 | struct talitos_alg_template { | 1853 | struct talitos_alg_template { |
1484 | u32 type; | 1854 | u32 type; |
1485 | union { | 1855 | union { |
@@ -1695,7 +2065,113 @@ static struct talitos_alg_template driver_algs[] = { | |||
1695 | DESC_HDR_SEL0_DEU | | 2065 | DESC_HDR_SEL0_DEU | |
1696 | DESC_HDR_MODE0_DEU_CBC | | 2066 | DESC_HDR_MODE0_DEU_CBC | |
1697 | DESC_HDR_MODE0_DEU_3DES, | 2067 | DESC_HDR_MODE0_DEU_3DES, |
1698 | } | 2068 | }, |
2069 | /* AHASH algorithms. */ | ||
2070 | { .type = CRYPTO_ALG_TYPE_AHASH, | ||
2071 | .alg.hash = { | ||
2072 | .init = ahash_init, | ||
2073 | .update = ahash_update, | ||
2074 | .final = ahash_final, | ||
2075 | .finup = ahash_finup, | ||
2076 | .digest = ahash_digest, | ||
2077 | .halg.digestsize = MD5_DIGEST_SIZE, | ||
2078 | .halg.base = { | ||
2079 | .cra_name = "md5", | ||
2080 | .cra_driver_name = "md5-talitos", | ||
2081 | .cra_blocksize = MD5_BLOCK_SIZE, | ||
2082 | .cra_flags = CRYPTO_ALG_TYPE_AHASH | | ||
2083 | CRYPTO_ALG_ASYNC, | ||
2084 | .cra_type = &crypto_ahash_type | ||
2085 | } | ||
2086 | }, | ||
2087 | .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU | | ||
2088 | DESC_HDR_SEL0_MDEUA | | ||
2089 | DESC_HDR_MODE0_MDEU_MD5, | ||
2090 | }, | ||
2091 | { .type = CRYPTO_ALG_TYPE_AHASH, | ||
2092 | .alg.hash = { | ||
2093 | .init = ahash_init, | ||
2094 | .update = ahash_update, | ||
2095 | .final = ahash_final, | ||
2096 | .finup = ahash_finup, | ||
2097 | .digest = ahash_digest, | ||
2098 | .halg.digestsize = SHA1_DIGEST_SIZE, | ||
2099 | .halg.base = { | ||
2100 | .cra_name = "sha1", | ||
2101 | .cra_driver_name = "sha1-talitos", | ||
2102 | .cra_blocksize = SHA1_BLOCK_SIZE, | ||
2103 | .cra_flags = CRYPTO_ALG_TYPE_AHASH | | ||
2104 | CRYPTO_ALG_ASYNC, | ||
2105 | .cra_type = &crypto_ahash_type | ||
2106 | } | ||
2107 | }, | ||
2108 | .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU | | ||
2109 | DESC_HDR_SEL0_MDEUA | | ||
2110 | DESC_HDR_MODE0_MDEU_SHA1, | ||
2111 | }, | ||
2112 | { .type = CRYPTO_ALG_TYPE_AHASH, | ||
2113 | .alg.hash = { | ||
2114 | .init = ahash_init, | ||
2115 | .update = ahash_update, | ||
2116 | .final = ahash_final, | ||
2117 | .finup = ahash_finup, | ||
2118 | .digest = ahash_digest, | ||
2119 | .halg.digestsize = SHA256_DIGEST_SIZE, | ||
2120 | .halg.base = { | ||
2121 | .cra_name = "sha256", | ||
2122 | .cra_driver_name = "sha256-talitos", | ||
2123 | .cra_blocksize = SHA256_BLOCK_SIZE, | ||
2124 | .cra_flags = CRYPTO_ALG_TYPE_AHASH | | ||
2125 | CRYPTO_ALG_ASYNC, | ||
2126 | .cra_type = &crypto_ahash_type | ||
2127 | } | ||
2128 | }, | ||
2129 | .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU | | ||
2130 | DESC_HDR_SEL0_MDEUA | | ||
2131 | DESC_HDR_MODE0_MDEU_SHA256, | ||
2132 | }, | ||
2133 | { .type = CRYPTO_ALG_TYPE_AHASH, | ||
2134 | .alg.hash = { | ||
2135 | .init = ahash_init, | ||
2136 | .update = ahash_update, | ||
2137 | .final = ahash_final, | ||
2138 | .finup = ahash_finup, | ||
2139 | .digest = ahash_digest, | ||
2140 | .halg.digestsize = SHA384_DIGEST_SIZE, | ||
2141 | .halg.base = { | ||
2142 | .cra_name = "sha384", | ||
2143 | .cra_driver_name = "sha384-talitos", | ||
2144 | .cra_blocksize = SHA384_BLOCK_SIZE, | ||
2145 | .cra_flags = CRYPTO_ALG_TYPE_AHASH | | ||
2146 | CRYPTO_ALG_ASYNC, | ||
2147 | .cra_type = &crypto_ahash_type | ||
2148 | } | ||
2149 | }, | ||
2150 | .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU | | ||
2151 | DESC_HDR_SEL0_MDEUB | | ||
2152 | DESC_HDR_MODE0_MDEUB_SHA384, | ||
2153 | }, | ||
2154 | { .type = CRYPTO_ALG_TYPE_AHASH, | ||
2155 | .alg.hash = { | ||
2156 | .init = ahash_init, | ||
2157 | .update = ahash_update, | ||
2158 | .final = ahash_final, | ||
2159 | .finup = ahash_finup, | ||
2160 | .digest = ahash_digest, | ||
2161 | .halg.digestsize = SHA512_DIGEST_SIZE, | ||
2162 | .halg.base = { | ||
2163 | .cra_name = "sha512", | ||
2164 | .cra_driver_name = "sha512-talitos", | ||
2165 | .cra_blocksize = SHA512_BLOCK_SIZE, | ||
2166 | .cra_flags = CRYPTO_ALG_TYPE_AHASH | | ||
2167 | CRYPTO_ALG_ASYNC, | ||
2168 | .cra_type = &crypto_ahash_type | ||
2169 | } | ||
2170 | }, | ||
2171 | .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU | | ||
2172 | DESC_HDR_SEL0_MDEUB | | ||
2173 | DESC_HDR_MODE0_MDEUB_SHA512, | ||
2174 | }, | ||
1699 | }; | 2175 | }; |
1700 | 2176 | ||
1701 | struct talitos_crypto_alg { | 2177 | struct talitos_crypto_alg { |
@@ -1710,8 +2186,13 @@ static int talitos_cra_init(struct crypto_tfm *tfm) | |||
1710 | struct talitos_crypto_alg *talitos_alg; | 2186 | struct talitos_crypto_alg *talitos_alg; |
1711 | struct talitos_ctx *ctx = crypto_tfm_ctx(tfm); | 2187 | struct talitos_ctx *ctx = crypto_tfm_ctx(tfm); |
1712 | 2188 | ||
1713 | talitos_alg = container_of(alg, struct talitos_crypto_alg, | 2189 | if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_AHASH) |
1714 | algt.alg.crypto); | 2190 | talitos_alg = container_of(__crypto_ahash_alg(alg), |
2191 | struct talitos_crypto_alg, | ||
2192 | algt.alg.hash); | ||
2193 | else | ||
2194 | talitos_alg = container_of(alg, struct talitos_crypto_alg, | ||
2195 | algt.alg.crypto); | ||
1715 | 2196 | ||
1716 | /* update context with ptr to dev */ | 2197 | /* update context with ptr to dev */ |
1717 | ctx->dev = talitos_alg->dev; | 2198 | ctx->dev = talitos_alg->dev; |
@@ -1719,12 +2200,34 @@ static int talitos_cra_init(struct crypto_tfm *tfm) | |||
1719 | /* copy descriptor header template value */ | 2200 | /* copy descriptor header template value */ |
1720 | ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template; | 2201 | ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template; |
1721 | 2202 | ||
2203 | return 0; | ||
2204 | } | ||
2205 | |||
2206 | static int talitos_cra_init_aead(struct crypto_tfm *tfm) | ||
2207 | { | ||
2208 | struct talitos_ctx *ctx = crypto_tfm_ctx(tfm); | ||
2209 | |||
2210 | talitos_cra_init(tfm); | ||
2211 | |||
1722 | /* random first IV */ | 2212 | /* random first IV */ |
1723 | get_random_bytes(ctx->iv, TALITOS_MAX_IV_LENGTH); | 2213 | get_random_bytes(ctx->iv, TALITOS_MAX_IV_LENGTH); |
1724 | 2214 | ||
1725 | return 0; | 2215 | return 0; |
1726 | } | 2216 | } |
1727 | 2217 | ||
2218 | static int talitos_cra_init_ahash(struct crypto_tfm *tfm) | ||
2219 | { | ||
2220 | struct talitos_ctx *ctx = crypto_tfm_ctx(tfm); | ||
2221 | |||
2222 | talitos_cra_init(tfm); | ||
2223 | |||
2224 | ctx->keylen = 0; | ||
2225 | crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm), | ||
2226 | sizeof(struct talitos_ahash_req_ctx)); | ||
2227 | |||
2228 | return 0; | ||
2229 | } | ||
2230 | |||
1728 | /* | 2231 | /* |
1729 | * given the alg's descriptor header template, determine whether descriptor | 2232 | * given the alg's descriptor header template, determine whether descriptor |
1730 | * type and primary/secondary execution units required match the hw | 2233 | * type and primary/secondary execution units required match the hw |
@@ -1806,15 +2309,20 @@ static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev, | |||
1806 | 2309 | ||
1807 | switch (t_alg->algt.type) { | 2310 | switch (t_alg->algt.type) { |
1808 | case CRYPTO_ALG_TYPE_ABLKCIPHER: | 2311 | case CRYPTO_ALG_TYPE_ABLKCIPHER: |
2312 | alg = &t_alg->algt.alg.crypto; | ||
2313 | alg->cra_init = talitos_cra_init; | ||
2314 | break; | ||
1809 | case CRYPTO_ALG_TYPE_AEAD: | 2315 | case CRYPTO_ALG_TYPE_AEAD: |
1810 | alg = &t_alg->algt.alg.crypto; | 2316 | alg = &t_alg->algt.alg.crypto; |
2317 | alg->cra_init = talitos_cra_init_aead; | ||
1811 | break; | 2318 | break; |
1812 | case CRYPTO_ALG_TYPE_AHASH: | 2319 | case CRYPTO_ALG_TYPE_AHASH: |
1813 | alg = &t_alg->algt.alg.hash.halg.base; | 2320 | alg = &t_alg->algt.alg.hash.halg.base; |
2321 | alg->cra_init = talitos_cra_init_ahash; | ||
2322 | break; | ||
1814 | } | 2323 | } |
1815 | 2324 | ||
1816 | alg->cra_module = THIS_MODULE; | 2325 | alg->cra_module = THIS_MODULE; |
1817 | alg->cra_init = talitos_cra_init; | ||
1818 | alg->cra_priority = TALITOS_CRA_PRIORITY; | 2326 | alg->cra_priority = TALITOS_CRA_PRIORITY; |
1819 | alg->cra_alignmask = 0; | 2327 | alg->cra_alignmask = 0; |
1820 | alg->cra_ctxsize = sizeof(struct talitos_ctx); | 2328 | alg->cra_ctxsize = sizeof(struct talitos_ctx); |
diff --git a/drivers/crypto/talitos.h b/drivers/crypto/talitos.h index ff5a1450e145..05c57b730e99 100644 --- a/drivers/crypto/talitos.h +++ b/drivers/crypto/talitos.h | |||
@@ -130,6 +130,9 @@ | |||
130 | #define TALITOS_CRCUISR 0xf030 /* cyclic redundancy check unit*/ | 130 | #define TALITOS_CRCUISR 0xf030 /* cyclic redundancy check unit*/ |
131 | #define TALITOS_CRCUISR_LO 0xf034 | 131 | #define TALITOS_CRCUISR_LO 0xf034 |
132 | 132 | ||
133 | #define TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256 0x28 | ||
134 | #define TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512 0x48 | ||
135 | |||
133 | /* | 136 | /* |
134 | * talitos descriptor header (hdr) bits | 137 | * talitos descriptor header (hdr) bits |
135 | */ | 138 | */ |
@@ -157,12 +160,15 @@ | |||
157 | #define DESC_HDR_MODE0_AESU_CBC cpu_to_be32(0x00200000) | 160 | #define DESC_HDR_MODE0_AESU_CBC cpu_to_be32(0x00200000) |
158 | #define DESC_HDR_MODE0_DEU_CBC cpu_to_be32(0x00400000) | 161 | #define DESC_HDR_MODE0_DEU_CBC cpu_to_be32(0x00400000) |
159 | #define DESC_HDR_MODE0_DEU_3DES cpu_to_be32(0x00200000) | 162 | #define DESC_HDR_MODE0_DEU_3DES cpu_to_be32(0x00200000) |
163 | #define DESC_HDR_MODE0_MDEU_CONT cpu_to_be32(0x08000000) | ||
160 | #define DESC_HDR_MODE0_MDEU_INIT cpu_to_be32(0x01000000) | 164 | #define DESC_HDR_MODE0_MDEU_INIT cpu_to_be32(0x01000000) |
161 | #define DESC_HDR_MODE0_MDEU_HMAC cpu_to_be32(0x00800000) | 165 | #define DESC_HDR_MODE0_MDEU_HMAC cpu_to_be32(0x00800000) |
162 | #define DESC_HDR_MODE0_MDEU_PAD cpu_to_be32(0x00400000) | 166 | #define DESC_HDR_MODE0_MDEU_PAD cpu_to_be32(0x00400000) |
163 | #define DESC_HDR_MODE0_MDEU_MD5 cpu_to_be32(0x00200000) | 167 | #define DESC_HDR_MODE0_MDEU_MD5 cpu_to_be32(0x00200000) |
164 | #define DESC_HDR_MODE0_MDEU_SHA256 cpu_to_be32(0x00100000) | 168 | #define DESC_HDR_MODE0_MDEU_SHA256 cpu_to_be32(0x00100000) |
165 | #define DESC_HDR_MODE0_MDEU_SHA1 cpu_to_be32(0x00000000) | 169 | #define DESC_HDR_MODE0_MDEU_SHA1 cpu_to_be32(0x00000000) |
170 | #define DESC_HDR_MODE0_MDEUB_SHA384 cpu_to_be32(0x00000000) | ||
171 | #define DESC_HDR_MODE0_MDEUB_SHA512 cpu_to_be32(0x00200000) | ||
166 | #define DESC_HDR_MODE0_MDEU_MD5_HMAC (DESC_HDR_MODE0_MDEU_MD5 | \ | 172 | #define DESC_HDR_MODE0_MDEU_MD5_HMAC (DESC_HDR_MODE0_MDEU_MD5 | \ |
167 | DESC_HDR_MODE0_MDEU_HMAC) | 173 | DESC_HDR_MODE0_MDEU_HMAC) |
168 | #define DESC_HDR_MODE0_MDEU_SHA256_HMAC (DESC_HDR_MODE0_MDEU_SHA256 | \ | 174 | #define DESC_HDR_MODE0_MDEU_SHA256_HMAC (DESC_HDR_MODE0_MDEU_SHA256 | \ |
@@ -184,6 +190,8 @@ | |||
184 | #define DESC_HDR_MODE1_MDEU_MD5 cpu_to_be32(0x00000200) | 190 | #define DESC_HDR_MODE1_MDEU_MD5 cpu_to_be32(0x00000200) |
185 | #define DESC_HDR_MODE1_MDEU_SHA256 cpu_to_be32(0x00000100) | 191 | #define DESC_HDR_MODE1_MDEU_SHA256 cpu_to_be32(0x00000100) |
186 | #define DESC_HDR_MODE1_MDEU_SHA1 cpu_to_be32(0x00000000) | 192 | #define DESC_HDR_MODE1_MDEU_SHA1 cpu_to_be32(0x00000000) |
193 | #define DESC_HDR_MODE1_MDEUB_SHA384 cpu_to_be32(0x00000000) | ||
194 | #define DESC_HDR_MODE1_MDEUB_SHA512 cpu_to_be32(0x00000200) | ||
187 | #define DESC_HDR_MODE1_MDEU_MD5_HMAC (DESC_HDR_MODE1_MDEU_MD5 | \ | 195 | #define DESC_HDR_MODE1_MDEU_MD5_HMAC (DESC_HDR_MODE1_MDEU_MD5 | \ |
188 | DESC_HDR_MODE1_MDEU_HMAC) | 196 | DESC_HDR_MODE1_MDEU_HMAC) |
189 | #define DESC_HDR_MODE1_MDEU_SHA256_HMAC (DESC_HDR_MODE1_MDEU_SHA256 | \ | 197 | #define DESC_HDR_MODE1_MDEU_SHA256_HMAC (DESC_HDR_MODE1_MDEU_SHA256 | \ |