Diffstat (limited to 'drivers')
-rw-r--r--  drivers/char/hw_random/tx4939-rng.c |    5
-rw-r--r--  drivers/crypto/Kconfig              |   11
-rw-r--r--  drivers/crypto/Makefile             |    1
-rw-r--r--  drivers/crypto/caam/caamalg.c       |  126
-rw-r--r--  drivers/crypto/caam/ctrl.c          |    2
-rw-r--r--  drivers/crypto/geode-aes.c          |    6
-rw-r--r--  drivers/crypto/hifn_795x.c          |    3
-rw-r--r--  drivers/crypto/ixp4xx_crypto.c      |    4
-rw-r--r--  drivers/crypto/mv_cesa.c            |   12
-rw-r--r--  drivers/crypto/n2_core.c            |    7
-rw-r--r--  drivers/crypto/omap-aes.c           |    8
-rw-r--r--  drivers/crypto/omap-sham.c          |    4
-rw-r--r--  drivers/crypto/picoxcell_crypto.c   |   46
-rw-r--r--  drivers/crypto/s5p-sss.c            |    6
-rw-r--r--  drivers/crypto/talitos.c            |    1
-rw-r--r--  drivers/crypto/tegra-aes.c          | 1096
-rw-r--r--  drivers/crypto/tegra-aes.h          |  103
17 files changed, 1406 insertions(+), 35 deletions(-)
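
Note on the recurring change below: most hunks add the new CRYPTO_ALG_KERN_DRIVER_ONLY flag, which marks an implementation as available only through a kernel driver (typically a hardware engine), as opposed to a CPU-instruction implementation that could equally run in userspace. A minimal hedged sketch of how a caller could test the flag on whichever implementation the crypto core selects; the direct cra_flags read below is an assumption for illustration, not an accessor this series relies on:

#include <linux/crypto.h>
#include <linux/err.h>

/* Hedged sketch, not part of this patch: check whether the cipher the
 * core picked for "cbc(aes)" is kernel-driver-only. */
static bool cbc_aes_is_kern_driver_only(void)
{
        struct crypto_ablkcipher *tfm;
        u32 flags;

        tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
        if (IS_ERR(tfm))
                return false;

        /* cra_flags holds the CRYPTO_ALG_* bits set at registration */
        flags = crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_flags;
        crypto_free_ablkcipher(tfm);

        return flags & CRYPTO_ALG_KERN_DRIVER_ONLY;
}

Selection between, say, "mv-cbc-aes" and "aes-generic" is still driven by cra_priority; the new flag only annotates the winner.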
diff --git a/drivers/char/hw_random/tx4939-rng.c b/drivers/char/hw_random/tx4939-rng.c
index 0bc0cb70210b..de473ef3882b 100644
--- a/drivers/char/hw_random/tx4939-rng.c
+++ b/drivers/char/hw_random/tx4939-rng.c
@@ -115,10 +115,7 @@ static int __init tx4939_rng_probe(struct platform_device *dev)
 	rngdev = devm_kzalloc(&dev->dev, sizeof(*rngdev), GFP_KERNEL);
 	if (!rngdev)
 		return -ENOMEM;
-	if (!devm_request_mem_region(&dev->dev, r->start, resource_size(r),
-				     dev_name(&dev->dev)))
-		return -EBUSY;
-	rngdev->base = devm_ioremap(&dev->dev, r->start, resource_size(r));
+	rngdev->base = devm_request_and_ioremap(&dev->dev, r);
 	if (!rngdev->base)
 		return -EBUSY;
 
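
For context, devm_request_and_ioremap() folds the removed request-and-map pair into one managed call that yields NULL on failure. A sketch of the open-coded equivalent it replaces, using the same r and dev as the probe function above:

        void __iomem *base = NULL;

        /* what the removed lines did by hand: claim the region, then map it */
        if (devm_request_mem_region(&dev->dev, r->start, resource_size(r),
                                    dev_name(&dev->dev)))
                base = devm_ioremap(&dev->dev, r->start, resource_size(r));
        /* base stays NULL on failure, matching the -EBUSY path above */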
diff --git a/drivers/crypto/Kconfig b/drivers/crypto/Kconfig
index 6d16b4b0d7a0..e707979767fb 100644
--- a/drivers/crypto/Kconfig
+++ b/drivers/crypto/Kconfig
@@ -293,4 +293,15 @@ config CRYPTO_DEV_S5P
 	  Select this to offload Samsung S5PV210 or S5PC110 from AES
 	  algorithms execution.
 
+config CRYPTO_DEV_TEGRA_AES
+	tristate "Support for TEGRA AES hw engine"
+	depends on ARCH_TEGRA
+	select CRYPTO_AES
+	help
+	  TEGRA processors have an AES hardware accelerator module.
+	  Select this if you want to use the TEGRA module for AES
+	  algorithms.
+
+	  To compile this driver as a module, choose M here: the module
+	  will be called tegra-aes.
+
 endif # CRYPTO_HW
diff --git a/drivers/crypto/Makefile b/drivers/crypto/Makefile
index 53ea50155319..f3e64eadd7af 100644
--- a/drivers/crypto/Makefile
+++ b/drivers/crypto/Makefile
@@ -13,3 +13,4 @@ obj-$(CONFIG_CRYPTO_DEV_OMAP_SHAM) += omap-sham.o
 obj-$(CONFIG_CRYPTO_DEV_OMAP_AES) += omap-aes.o
 obj-$(CONFIG_CRYPTO_DEV_PICOXCELL) += picoxcell_crypto.o
 obj-$(CONFIG_CRYPTO_DEV_S5P) += s5p-sss.o
+obj-$(CONFIG_CRYPTO_DEV_TEGRA_AES) += tegra-aes.o
diff --git a/drivers/crypto/caam/caamalg.c b/drivers/crypto/caam/caamalg.c
index e73cf2e8110a..534a36469d57 100644
--- a/drivers/crypto/caam/caamalg.c
+++ b/drivers/crypto/caam/caamalg.c
@@ -1844,6 +1844,25 @@ static struct caam_alg_template driver_algs[] = {
 		.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
 	},
 	{
+		.name = "authenc(hmac(sha224),cbc(aes))",
+		.driver_name = "authenc-hmac-sha224-cbc-aes-caam",
+		.blocksize = AES_BLOCK_SIZE,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = AES_BLOCK_SIZE,
+			.maxauthsize = SHA224_DIGEST_SIZE,
+		},
+		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
+				   OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
+	},
+	{
 		.name = "authenc(hmac(sha256),cbc(aes))",
 		.driver_name = "authenc-hmac-sha256-cbc-aes-caam",
 		.blocksize = AES_BLOCK_SIZE,
@@ -1864,6 +1883,26 @@ static struct caam_alg_template driver_algs[] = {
 		.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
 	},
 	{
+		.name = "authenc(hmac(sha384),cbc(aes))",
+		.driver_name = "authenc-hmac-sha384-cbc-aes-caam",
+		.blocksize = AES_BLOCK_SIZE,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = AES_BLOCK_SIZE,
+			.maxauthsize = SHA384_DIGEST_SIZE,
+		},
+		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
+				   OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
+	},
+
+	{
 		.name = "authenc(hmac(sha512),cbc(aes))",
 		.driver_name = "authenc-hmac-sha512-cbc-aes-caam",
 		.blocksize = AES_BLOCK_SIZE,
@@ -1922,6 +1961,25 @@ static struct caam_alg_template driver_algs[] = {
 		.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
 	},
 	{
+		.name = "authenc(hmac(sha224),cbc(des3_ede))",
+		.driver_name = "authenc-hmac-sha224-cbc-des3_ede-caam",
+		.blocksize = DES3_EDE_BLOCK_SIZE,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = DES3_EDE_BLOCK_SIZE,
+			.maxauthsize = SHA224_DIGEST_SIZE,
+		},
+		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
+				   OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
+	},
+	{
 		.name = "authenc(hmac(sha256),cbc(des3_ede))",
 		.driver_name = "authenc-hmac-sha256-cbc-des3_ede-caam",
 		.blocksize = DES3_EDE_BLOCK_SIZE,
@@ -1942,6 +2000,25 @@ static struct caam_alg_template driver_algs[] = {
 		.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
 	},
 	{
+		.name = "authenc(hmac(sha384),cbc(des3_ede))",
+		.driver_name = "authenc-hmac-sha384-cbc-des3_ede-caam",
+		.blocksize = DES3_EDE_BLOCK_SIZE,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = DES3_EDE_BLOCK_SIZE,
+			.maxauthsize = SHA384_DIGEST_SIZE,
+		},
+		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
+				   OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
+	},
+	{
 		.name = "authenc(hmac(sha512),cbc(des3_ede))",
 		.driver_name = "authenc-hmac-sha512-cbc-des3_ede-caam",
 		.blocksize = DES3_EDE_BLOCK_SIZE,
@@ -2000,6 +2077,25 @@ static struct caam_alg_template driver_algs[] = {
 		.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
 	},
 	{
+		.name = "authenc(hmac(sha224),cbc(des))",
+		.driver_name = "authenc-hmac-sha224-cbc-des-caam",
+		.blocksize = DES_BLOCK_SIZE,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = DES_BLOCK_SIZE,
+			.maxauthsize = SHA224_DIGEST_SIZE,
+		},
+		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
+				   OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
+	},
+	{
 		.name = "authenc(hmac(sha256),cbc(des))",
 		.driver_name = "authenc-hmac-sha256-cbc-des-caam",
 		.blocksize = DES_BLOCK_SIZE,
@@ -2020,6 +2116,25 @@ static struct caam_alg_template driver_algs[] = {
 		.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
 	},
 	{
+		.name = "authenc(hmac(sha384),cbc(des))",
+		.driver_name = "authenc-hmac-sha384-cbc-des-caam",
+		.blocksize = DES_BLOCK_SIZE,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = DES_BLOCK_SIZE,
+			.maxauthsize = SHA384_DIGEST_SIZE,
+		},
+		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
+				   OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
+	},
+	{
 		.name = "authenc(hmac(sha512),cbc(des))",
 		.driver_name = "authenc-hmac-sha512-cbc-des-caam",
 		.blocksize = DES_BLOCK_SIZE,
@@ -2205,7 +2320,8 @@ static struct caam_crypto_alg *caam_alg_alloc(struct device *ctrldev,
 	alg->cra_blocksize = template->blocksize;
 	alg->cra_alignmask = 0;
 	alg->cra_ctxsize = sizeof(struct caam_ctx);
-	alg->cra_flags = CRYPTO_ALG_ASYNC | template->type;
+	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
+			 template->type;
 	switch (template->type) {
 	case CRYPTO_ALG_TYPE_ABLKCIPHER:
 		alg->cra_type = &crypto_ablkcipher_type;
@@ -2285,12 +2401,12 @@ static int __init caam_algapi_init(void)
 			dev_warn(ctrldev, "%s alg registration failed\n",
 				 t_alg->crypto_alg.cra_driver_name);
 			kfree(t_alg);
-		} else {
+		} else
 			list_add_tail(&t_alg->entry, &priv->alg_list);
-			dev_info(ctrldev, "%s\n",
-				 t_alg->crypto_alg.cra_driver_name);
-		}
 	}
+	if (!list_empty(&priv->alg_list))
+		dev_info(ctrldev, "%s algorithms registered in /proc/crypto\n",
+			 (char *)of_get_property(dev_node, "compatible", NULL));
 
 	return err;
 }
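
The sha224/sha384 authenc templates added above become reachable through the normal AEAD API once CAAM registers them. A minimal hedged sketch of instantiating one (generic crypto API usage, not code from this patch; the hardware "authenc-hmac-sha224-cbc-aes-caam" wins by priority when the CAAM is present):

        #include <linux/crypto.h>
        #include <linux/err.h>

        struct crypto_aead *tfm;

        tfm = crypto_alloc_aead("authenc(hmac(sha224),cbc(aes))", 0, 0);
        if (!IS_ERR(tfm)) {
                /* ICV length may be anything up to .maxauthsize above */
                crypto_aead_setauthsize(tfm, SHA224_DIGEST_SIZE);
                /* ...then setkey with an RTA-encoded authenc key blob and
                 * drive it via aead_request_alloc()/crypto_aead_encrypt() */
                crypto_free_aead(tfm);
        }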
diff --git a/drivers/crypto/caam/ctrl.c b/drivers/crypto/caam/ctrl.c
index 8ae3ba2a160d..c5f61c55d923 100644
--- a/drivers/crypto/caam/ctrl.c
+++ b/drivers/crypto/caam/ctrl.c
@@ -46,7 +46,7 @@ static int caam_remove(struct platform_device *pdev)
 /* Probe routine for CAAM top (controller) level */
 static int caam_probe(struct platform_device *pdev)
 {
-	int d, ring, rspec;
+	int ring, rspec;
 	struct device *dev;
 	struct device_node *nprop, *np;
 	struct caam_ctrl __iomem *ctrl;
diff --git a/drivers/crypto/geode-aes.c b/drivers/crypto/geode-aes.c
index 219d09cbb0d1..f3e36c86b6c3 100644
--- a/drivers/crypto/geode-aes.c
+++ b/drivers/crypto/geode-aes.c
@@ -393,7 +393,8 @@ static struct crypto_alg geode_cbc_alg = {
 	.cra_driver_name	= "cbc-aes-geode",
 	.cra_priority		= 400,
 	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
-				  CRYPTO_ALG_NEED_FALLBACK,
+				  CRYPTO_ALG_KERN_DRIVER_ONLY |
+				  CRYPTO_ALG_NEED_FALLBACK,
 	.cra_init		= fallback_init_blk,
 	.cra_exit		= fallback_exit_blk,
 	.cra_blocksize		= AES_MIN_BLOCK_SIZE,
@@ -479,7 +480,8 @@ static struct crypto_alg geode_ecb_alg = {
 	.cra_driver_name	= "ecb-aes-geode",
 	.cra_priority		= 400,
 	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
-				  CRYPTO_ALG_NEED_FALLBACK,
+				  CRYPTO_ALG_KERN_DRIVER_ONLY |
+				  CRYPTO_ALG_NEED_FALLBACK,
 	.cra_init		= fallback_init_blk,
 	.cra_exit		= fallback_exit_blk,
 	.cra_blocksize		= AES_MIN_BLOCK_SIZE,
diff --git a/drivers/crypto/hifn_795x.c b/drivers/crypto/hifn_795x.c
index 76368f984023..c9c4befb5a8d 100644
--- a/drivers/crypto/hifn_795x.c
+++ b/drivers/crypto/hifn_795x.c
@@ -2494,7 +2494,8 @@ static int hifn_alg_alloc(struct hifn_device *dev, struct hifn_alg_template *t)
 		 t->drv_name, dev->name);
 
 	alg->alg.cra_priority = 300;
-	alg->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
+	alg->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+			     CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC;
 	alg->alg.cra_blocksize = t->bsize;
 	alg->alg.cra_ctxsize = sizeof(struct hifn_context);
 	alg->alg.cra_alignmask = 0;
diff --git a/drivers/crypto/ixp4xx_crypto.c b/drivers/crypto/ixp4xx_crypto.c
index 4c20c5bf6058..0053d7ebb5ca 100644
--- a/drivers/crypto/ixp4xx_crypto.c
+++ b/drivers/crypto/ixp4xx_crypto.c
@@ -265,7 +265,7 @@ static int setup_crypt_desc(void)
 	BUILD_BUG_ON(sizeof(struct crypt_ctl) != 64);
 	crypt_virt = dma_alloc_coherent(dev,
 					NPE_QLEN * sizeof(struct crypt_ctl),
-					&crypt_phys, GFP_KERNEL);
+					&crypt_phys, GFP_ATOMIC);
 	if (!crypt_virt)
 		return -ENOMEM;
 	memset(crypt_virt, 0, NPE_QLEN * sizeof(struct crypt_ctl));
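
The GFP_KERNEL to GFP_ATOMIC switch above is the classic fix for an allocation reached from a context that must not sleep; that rationale is an assumption here (it is not stated in the hunk). A hedged, self-contained sketch of the constraint, with some_lock and the other names purely hypothetical:

        #include <linux/dma-mapping.h>
        #include <linux/spinlock.h>

        /* Hedged sketch, not from this patch: why an atomic-context caller
         * forces GFP_ATOMIC. */
        static void *alloc_desc_atomic(struct device *dev, size_t len,
                                       dma_addr_t *phys, spinlock_t *some_lock)
        {
                void *p;

                spin_lock_bh(some_lock);
                /* GFP_KERNEL could sleep here, which is illegal under a lock
                 * or in softirq context; GFP_ATOMIC fails fast instead. */
                p = dma_alloc_coherent(dev, len, phys, GFP_ATOMIC);
                spin_unlock_bh(some_lock);
                return p;
        }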
@@ -1449,6 +1449,7 @@ static int __init ixp_module_init(void)
 		/* block ciphers */
 		cra->cra_type = &crypto_ablkcipher_type;
 		cra->cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+				 CRYPTO_ALG_KERN_DRIVER_ONLY |
 				 CRYPTO_ALG_ASYNC;
 		if (!cra->cra_ablkcipher.setkey)
 			cra->cra_ablkcipher.setkey = ablk_setkey;
@@ -1461,6 +1462,7 @@ static int __init ixp_module_init(void)
 		/* authenc */
 		cra->cra_type = &crypto_aead_type;
 		cra->cra_flags = CRYPTO_ALG_TYPE_AEAD |
+				 CRYPTO_ALG_KERN_DRIVER_ONLY |
 				 CRYPTO_ALG_ASYNC;
 		cra->cra_aead.setkey = aead_setkey;
 		cra->cra_aead.setauthsize = aead_setauthsize;
diff --git a/drivers/crypto/mv_cesa.c b/drivers/crypto/mv_cesa.c
index 0d40cf66b3cc..e6ecc5f23943 100644
--- a/drivers/crypto/mv_cesa.c
+++ b/drivers/crypto/mv_cesa.c
@@ -899,7 +899,8 @@ struct crypto_alg mv_aes_alg_ecb = {
 	.cra_name	= "ecb(aes)",
 	.cra_driver_name = "mv-ecb-aes",
 	.cra_priority	= 300,
-	.cra_flags	= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+	.cra_flags	= CRYPTO_ALG_TYPE_ABLKCIPHER |
+			  CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC,
 	.cra_blocksize	= 16,
 	.cra_ctxsize	= sizeof(struct mv_ctx),
 	.cra_alignmask	= 0,
@@ -921,7 +922,8 @@ struct crypto_alg mv_aes_alg_cbc = {
 	.cra_name	= "cbc(aes)",
 	.cra_driver_name = "mv-cbc-aes",
 	.cra_priority	= 300,
-	.cra_flags	= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+	.cra_flags	= CRYPTO_ALG_TYPE_ABLKCIPHER |
+			  CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC,
 	.cra_blocksize	= AES_BLOCK_SIZE,
 	.cra_ctxsize	= sizeof(struct mv_ctx),
 	.cra_alignmask	= 0,
@@ -953,7 +955,8 @@ struct ahash_alg mv_sha1_alg = {
 		.cra_driver_name = "mv-sha1",
 		.cra_priority = 300,
 		.cra_flags =
-			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
+			CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
+			CRYPTO_ALG_NEED_FALLBACK,
 		.cra_blocksize = SHA1_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct mv_tfm_hash_ctx),
 		.cra_init = mv_cra_hash_sha1_init,
@@ -977,7 +980,8 @@ struct ahash_alg mv_hmac_sha1_alg = {
 		.cra_driver_name = "mv-hmac-sha1",
 		.cra_priority = 300,
 		.cra_flags =
-			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
+			CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
+			CRYPTO_ALG_NEED_FALLBACK,
 		.cra_blocksize = SHA1_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct mv_tfm_hash_ctx),
 		.cra_init = mv_cra_hash_hmac_sha1_init,
diff --git a/drivers/crypto/n2_core.c b/drivers/crypto/n2_core.c
index 8944dabc0e3c..67b97c5fd859 100644
--- a/drivers/crypto/n2_core.c
+++ b/drivers/crypto/n2_core.c
@@ -1402,7 +1402,8 @@ static int __devinit __n2_register_one_cipher(const struct n2_cipher_tmpl *tmpl)
 	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
 	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s-n2", tmpl->drv_name);
 	alg->cra_priority = N2_CRA_PRIORITY;
-	alg->cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
+	alg->cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+			 CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC;
 	alg->cra_blocksize = tmpl->block_size;
 	p->enc_type = tmpl->enc_type;
 	alg->cra_ctxsize = sizeof(struct n2_cipher_context);
@@ -1493,7 +1494,9 @@ static int __devinit __n2_register_one_ahash(const struct n2_hash_tmpl *tmpl)
 	snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
 	snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s-n2", tmpl->name);
 	base->cra_priority = N2_CRA_PRIORITY;
-	base->cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK;
+	base->cra_flags = CRYPTO_ALG_TYPE_AHASH |
+			  CRYPTO_ALG_KERN_DRIVER_ONLY |
+			  CRYPTO_ALG_NEED_FALLBACK;
 	base->cra_blocksize = tmpl->block_size;
 	base->cra_ctxsize = sizeof(struct n2_hash_ctx);
 	base->cra_module = THIS_MODULE;
diff --git a/drivers/crypto/omap-aes.c b/drivers/crypto/omap-aes.c
index 5b970d9e9956..63e57b57a12c 100644
--- a/drivers/crypto/omap-aes.c
+++ b/drivers/crypto/omap-aes.c
@@ -756,7 +756,9 @@ static struct crypto_alg algs[] = {
 	.cra_name		= "ecb(aes)",
 	.cra_driver_name	= "ecb-aes-omap",
 	.cra_priority		= 100,
-	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
+				  CRYPTO_ALG_KERN_DRIVER_ONLY |
+				  CRYPTO_ALG_ASYNC,
 	.cra_blocksize		= AES_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct omap_aes_ctx),
 	.cra_alignmask		= 0,
@@ -776,7 +778,9 @@ static struct crypto_alg algs[] = {
 	.cra_name		= "cbc(aes)",
 	.cra_driver_name	= "cbc-aes-omap",
 	.cra_priority		= 100,
-	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
+				  CRYPTO_ALG_KERN_DRIVER_ONLY |
+				  CRYPTO_ALG_ASYNC,
 	.cra_blocksize		= AES_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct omap_aes_ctx),
 	.cra_alignmask		= 0,
diff --git a/drivers/crypto/omap-sham.c b/drivers/crypto/omap-sham.c
index 6399a8f1938a..a3fd6fc504b1 100644
--- a/drivers/crypto/omap-sham.c
+++ b/drivers/crypto/omap-sham.c
@@ -953,6 +953,7 @@ static struct ahash_alg algs[] = {
 		.cra_driver_name	= "omap-sha1",
 		.cra_priority		= 100,
 		.cra_flags		= CRYPTO_ALG_TYPE_AHASH |
+						CRYPTO_ALG_KERN_DRIVER_ONLY |
 						CRYPTO_ALG_ASYNC |
 						CRYPTO_ALG_NEED_FALLBACK,
 		.cra_blocksize		= SHA1_BLOCK_SIZE,
@@ -975,6 +976,7 @@ static struct ahash_alg algs[] = {
 		.cra_driver_name	= "omap-md5",
 		.cra_priority		= 100,
 		.cra_flags		= CRYPTO_ALG_TYPE_AHASH |
+						CRYPTO_ALG_KERN_DRIVER_ONLY |
 						CRYPTO_ALG_ASYNC |
 						CRYPTO_ALG_NEED_FALLBACK,
 		.cra_blocksize		= SHA1_BLOCK_SIZE,
@@ -998,6 +1000,7 @@ static struct ahash_alg algs[] = {
 		.cra_driver_name	= "omap-hmac-sha1",
 		.cra_priority		= 100,
 		.cra_flags		= CRYPTO_ALG_TYPE_AHASH |
+						CRYPTO_ALG_KERN_DRIVER_ONLY |
 						CRYPTO_ALG_ASYNC |
 						CRYPTO_ALG_NEED_FALLBACK,
 		.cra_blocksize		= SHA1_BLOCK_SIZE,
@@ -1022,6 +1025,7 @@ static struct ahash_alg algs[] = {
 		.cra_driver_name	= "omap-hmac-md5",
 		.cra_priority		= 100,
 		.cra_flags		= CRYPTO_ALG_TYPE_AHASH |
+						CRYPTO_ALG_KERN_DRIVER_ONLY |
 						CRYPTO_ALG_ASYNC |
 						CRYPTO_ALG_NEED_FALLBACK,
 		.cra_blocksize		= SHA1_BLOCK_SIZE,
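
From the caller's side the four omap-sham entries behave exactly as before; the flag only annotates them. For reference, a hedged sketch of generic 3.x-era ahash usage that would land on "omap-sha1" when it wins priority selection (illustrative, not patch code; a real caller must not pass stack buffers for DMA):

        #include <crypto/hash.h>
        #include <linux/scatterlist.h>
        #include <linux/err.h>

        static int sha1_digest_example(const u8 *data, unsigned int len, u8 *out)
        {
                struct crypto_ahash *tfm;
                struct ahash_request *req;
                struct scatterlist sg;
                int ret;

                tfm = crypto_alloc_ahash("sha1", 0, 0);
                if (IS_ERR(tfm))
                        return PTR_ERR(tfm);

                req = ahash_request_alloc(tfm, GFP_KERNEL);
                if (!req) {
                        crypto_free_ahash(tfm);
                        return -ENOMEM;
                }

                sg_init_one(&sg, data, len);
                ahash_request_set_crypt(req, &sg, out, len);
                ret = crypto_ahash_digest(req); /* async: may be -EINPROGRESS */

                ahash_request_free(req);
                crypto_free_ahash(tfm);
                return ret;
        }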
diff --git a/drivers/crypto/picoxcell_crypto.c b/drivers/crypto/picoxcell_crypto.c
index 58480d009324..410a03c01ca4 100644
--- a/drivers/crypto/picoxcell_crypto.c
+++ b/drivers/crypto/picoxcell_crypto.c
@@ -1322,6 +1322,7 @@ static struct spacc_alg ipsec_engine_algs[] = {
 		.cra_driver_name = "cbc-aes-picoxcell",
 		.cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
 		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+				CRYPTO_ALG_KERN_DRIVER_ONLY |
 				CRYPTO_ALG_ASYNC |
 				CRYPTO_ALG_NEED_FALLBACK,
 		.cra_blocksize = AES_BLOCK_SIZE,
@@ -1349,6 +1350,7 @@ static struct spacc_alg ipsec_engine_algs[] = {
 		.cra_driver_name = "ecb-aes-picoxcell",
 		.cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
 		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+			CRYPTO_ALG_KERN_DRIVER_ONLY |
 			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct spacc_ablk_ctx),
@@ -1373,7 +1375,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
 		.cra_name = "cbc(des)",
 		.cra_driver_name = "cbc-des-picoxcell",
 		.cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
-		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+				CRYPTO_ALG_ASYNC |
+				CRYPTO_ALG_KERN_DRIVER_ONLY,
 		.cra_blocksize = DES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct spacc_ablk_ctx),
 		.cra_type = &crypto_ablkcipher_type,
@@ -1398,7 +1402,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
 		.cra_name = "ecb(des)",
 		.cra_driver_name = "ecb-des-picoxcell",
 		.cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
-		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+				CRYPTO_ALG_ASYNC |
+				CRYPTO_ALG_KERN_DRIVER_ONLY,
 		.cra_blocksize = DES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct spacc_ablk_ctx),
 		.cra_type = &crypto_ablkcipher_type,
@@ -1422,7 +1428,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
 		.cra_name = "cbc(des3_ede)",
 		.cra_driver_name = "cbc-des3-ede-picoxcell",
 		.cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
-		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+				CRYPTO_ALG_ASYNC |
+				CRYPTO_ALG_KERN_DRIVER_ONLY,
 		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct spacc_ablk_ctx),
 		.cra_type = &crypto_ablkcipher_type,
@@ -1447,7 +1455,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
 		.cra_name = "ecb(des3_ede)",
 		.cra_driver_name = "ecb-des3-ede-picoxcell",
 		.cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
-		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+				CRYPTO_ALG_ASYNC |
+				CRYPTO_ALG_KERN_DRIVER_ONLY,
 		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct spacc_ablk_ctx),
 		.cra_type = &crypto_ablkcipher_type,
@@ -1472,7 +1482,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
 		.cra_name = "authenc(hmac(sha1),cbc(aes))",
 		.cra_driver_name = "authenc-hmac-sha1-cbc-aes-picoxcell",
 		.cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
-		.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
+		.cra_flags = CRYPTO_ALG_TYPE_AEAD |
+				CRYPTO_ALG_ASYNC |
+				CRYPTO_ALG_KERN_DRIVER_ONLY,
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct spacc_aead_ctx),
 		.cra_type = &crypto_aead_type,
@@ -1500,7 +1512,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
 		.cra_name = "authenc(hmac(sha256),cbc(aes))",
 		.cra_driver_name = "authenc-hmac-sha256-cbc-aes-picoxcell",
 		.cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
-		.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
+		.cra_flags = CRYPTO_ALG_TYPE_AEAD |
+				CRYPTO_ALG_ASYNC |
+				CRYPTO_ALG_KERN_DRIVER_ONLY,
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct spacc_aead_ctx),
 		.cra_type = &crypto_aead_type,
@@ -1527,7 +1541,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
 		.cra_name = "authenc(hmac(md5),cbc(aes))",
 		.cra_driver_name = "authenc-hmac-md5-cbc-aes-picoxcell",
 		.cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
-		.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
+		.cra_flags = CRYPTO_ALG_TYPE_AEAD |
+				CRYPTO_ALG_ASYNC |
+				CRYPTO_ALG_KERN_DRIVER_ONLY,
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct spacc_aead_ctx),
 		.cra_type = &crypto_aead_type,
@@ -1554,7 +1570,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
 		.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
 		.cra_driver_name = "authenc-hmac-sha1-cbc-3des-picoxcell",
 		.cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
-		.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
+		.cra_flags = CRYPTO_ALG_TYPE_AEAD |
+				CRYPTO_ALG_ASYNC |
+				CRYPTO_ALG_KERN_DRIVER_ONLY,
 		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct spacc_aead_ctx),
 		.cra_type = &crypto_aead_type,
@@ -1582,7 +1600,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
 		.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
 		.cra_driver_name = "authenc-hmac-sha256-cbc-3des-picoxcell",
 		.cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
-		.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
+		.cra_flags = CRYPTO_ALG_TYPE_AEAD |
+				CRYPTO_ALG_ASYNC |
+				CRYPTO_ALG_KERN_DRIVER_ONLY,
 		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct spacc_aead_ctx),
 		.cra_type = &crypto_aead_type,
@@ -1609,7 +1629,9 @@ static struct spacc_alg ipsec_engine_algs[] = {
 		.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
 		.cra_driver_name = "authenc-hmac-md5-cbc-3des-picoxcell",
 		.cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
-		.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
+		.cra_flags = CRYPTO_ALG_TYPE_AEAD |
+				CRYPTO_ALG_ASYNC |
+				CRYPTO_ALG_KERN_DRIVER_ONLY,
 		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct spacc_aead_ctx),
 		.cra_type = &crypto_aead_type,
@@ -1639,7 +1661,9 @@ static struct spacc_alg l2_engine_algs[] = {
 		.cra_name = "f8(kasumi)",
 		.cra_driver_name = "f8-kasumi-picoxcell",
 		.cra_priority = SPACC_CRYPTO_ALG_PRIORITY,
-		.cra_flags = CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_ASYNC,
+		.cra_flags = CRYPTO_ALG_TYPE_GIVCIPHER |
+				CRYPTO_ALG_ASYNC |
+				CRYPTO_ALG_KERN_DRIVER_ONLY,
 		.cra_blocksize = 8,
 		.cra_ctxsize = sizeof(struct spacc_ablk_ctx),
 		.cra_type = &crypto_ablkcipher_type,
diff --git a/drivers/crypto/s5p-sss.c b/drivers/crypto/s5p-sss.c
index 3376bca200fc..bc986f806086 100644
--- a/drivers/crypto/s5p-sss.c
+++ b/drivers/crypto/s5p-sss.c
@@ -518,7 +518,8 @@ static struct crypto_alg algs[] = {
 		.cra_driver_name	= "ecb-aes-s5p",
 		.cra_priority		= 100,
 		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
-					  CRYPTO_ALG_ASYNC,
+					  CRYPTO_ALG_ASYNC |
+					  CRYPTO_ALG_KERN_DRIVER_ONLY,
 		.cra_blocksize		= AES_BLOCK_SIZE,
 		.cra_ctxsize		= sizeof(struct s5p_aes_ctx),
 		.cra_alignmask		= 0x0f,
@@ -538,7 +539,8 @@ static struct crypto_alg algs[] = {
 		.cra_driver_name	= "cbc-aes-s5p",
 		.cra_priority		= 100,
 		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
-					  CRYPTO_ALG_ASYNC,
+					  CRYPTO_ALG_ASYNC |
+					  CRYPTO_ALG_KERN_DRIVER_ONLY,
 		.cra_blocksize		= AES_BLOCK_SIZE,
 		.cra_ctxsize		= sizeof(struct s5p_aes_ctx),
 		.cra_alignmask		= 0x0f,
diff --git a/drivers/crypto/talitos.c b/drivers/crypto/talitos.c
index 2d8c78901686..dc641c796526 100644
--- a/drivers/crypto/talitos.c
+++ b/drivers/crypto/talitos.c
@@ -2648,6 +2648,7 @@ static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
 	alg->cra_priority = TALITOS_CRA_PRIORITY;
 	alg->cra_alignmask = 0;
 	alg->cra_ctxsize = sizeof(struct talitos_ctx);
+	alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;
 
 	t_alg->dev = dev;
 
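
The new file below implements ecb/cbc/ofb ablkcipher modes plus a DT-based AES RNG on top of the Tegra BSEV engine. From a caller's point of view it sits behind the ordinary ablkcipher API; a hedged sketch of one CBC pass (generic API usage, not code from the patch; "cbc(aes)" resolves to "cbc-aes-tegra" when this driver wins priority selection):

        #include <linux/crypto.h>
        #include <linux/scatterlist.h>
        #include <linux/err.h>
        #include <crypto/aes.h>

        static int tegra_cbc_example(u8 *buf, unsigned int len,
                                     const u8 *key, u8 *iv)
        {
                struct crypto_ablkcipher *tfm;
                struct ablkcipher_request *req;
                struct scatterlist sg;
                int ret;

                tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
                if (IS_ERR(tfm))
                        return PTR_ERR(tfm);

                ret = crypto_ablkcipher_setkey(tfm, key, AES_KEYSIZE_128);
                if (ret)
                        goto out;

                req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
                if (!req) {
                        ret = -ENOMEM;
                        goto out;
                }

                sg_init_one(&sg, buf, len);
                ablkcipher_request_set_crypt(req, &sg, &sg, len, iv);
                ret = crypto_ablkcipher_encrypt(req); /* async: may be -EINPROGRESS */

                ablkcipher_request_free(req);
        out:
                crypto_free_ablkcipher(tfm);
                return ret;
        }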
diff --git a/drivers/crypto/tegra-aes.c b/drivers/crypto/tegra-aes.c
new file mode 100644
index 000000000000..422a9766c7c9
--- /dev/null
+++ b/drivers/crypto/tegra-aes.c
@@ -0,0 +1,1096 @@
+/*
+ * drivers/crypto/tegra-aes.c
+ *
+ * Driver for NVIDIA Tegra AES hardware engine residing inside the
+ * Bit Stream Engine for Video (BSEV) hardware block.
+ *
+ * The engine is programmed through commands which travel via a command
+ * queue residing between the CPU and the BSEV block.  The BSEV engine
+ * has an internal RAM (VRAM) where the final input plaintext, keys and
+ * the IV have to be copied before starting the encrypt/decrypt
+ * operation.
+ *
+ * Copyright (c) 2010, NVIDIA Corporation.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License along
+ * with this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+#include <linux/module.h>
+#include <linux/init.h>
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/clk.h>
+#include <linux/platform_device.h>
+#include <linux/scatterlist.h>
+#include <linux/dma-mapping.h>
+#include <linux/io.h>
+#include <linux/mutex.h>
+#include <linux/interrupt.h>
+#include <linux/completion.h>
+#include <linux/workqueue.h>
+
+#include <mach/clk.h>
+
+#include <crypto/scatterwalk.h>
+#include <crypto/aes.h>
+#include <crypto/internal/rng.h>
+
+#include "tegra-aes.h"
+
+#define FLAGS_MODE_MASK		0x00FF
+#define FLAGS_ENCRYPT		BIT(0)
+#define FLAGS_CBC		BIT(1)
+#define FLAGS_GIV		BIT(2)
+#define FLAGS_RNG		BIT(3)
+#define FLAGS_OFB		BIT(4)
+#define FLAGS_NEW_KEY		BIT(5)
+#define FLAGS_NEW_IV		BIT(6)
+#define FLAGS_INIT		BIT(7)
+#define FLAGS_FAST		BIT(8)
+#define FLAGS_BUSY		9
+
+/*
+ * Defines the maximum number of bytes the AES engine processes in one
+ * go, which takes about 1 msec.  The engine spends roughly 176 cycles
+ * per 16-byte block, i.e. 11 cycles/byte.  The CPU may occupy the BSE
+ * for up to 1 msec at a time, in which the AVP/BSE has about 216K
+ * cycles available, so AES can process about 216K/11 ~= 19KB in that
+ * window.  Based on this, AES_HW_DMA_BUFFER_SIZE_BYTES is configured
+ * to 16KB.
+ */
+#define AES_HW_DMA_BUFFER_SIZE_BYTES 0x4000
+
+/*
+ * The key table length is 64 bytes
+ * (up to 32 bytes of key, followed by the 16-byte original initial
+ * vector and the 16-byte updated initial vector).
+ */
+#define AES_HW_KEY_TABLE_LENGTH_BYTES 64
+
+/*
+ * The memory being used is divided as follows:
+ * 1. Key - 32 bytes
+ * 2. Original IV - 16 bytes
+ * 3. Updated IV - 16 bytes
+ * 4. Key schedule - 256 bytes
+ *
+ * 1+2+3 constitute the hw key table.
+ */
+#define AES_HW_IV_SIZE 16
+#define AES_HW_KEYSCHEDULE_LEN 256
+#define AES_IVKEY_SIZE (AES_HW_KEY_TABLE_LENGTH_BYTES + AES_HW_KEYSCHEDULE_LEN)
+
+/* Define commands required for AES operation */
+enum {
+	CMD_BLKSTARTENGINE = 0x0E,
+	CMD_DMASETUP = 0x10,
+	CMD_DMACOMPLETE = 0x11,
+	CMD_SETTABLE = 0x15,
+	CMD_MEMDMAVD = 0x22,
+};
+
+/* Define sub-commands */
+enum {
+	SUBCMD_VRAM_SEL = 0x1,
+	SUBCMD_CRYPTO_TABLE_SEL = 0x3,
+	SUBCMD_KEY_TABLE_SEL = 0x8,
+};
+
+/* memdma_vd command */
+#define MEMDMA_DIR_DTOVRAM	0 /* sdram -> vram */
+#define MEMDMA_DIR_VTODRAM	1 /* vram -> sdram */
+#define MEMDMA_DIR_SHIFT	25
+#define MEMDMA_NUM_WORDS_SHIFT	12
+
+/* command queue bit shifts */
+enum {
+	CMDQ_KEYTABLEADDR_SHIFT = 0,
+	CMDQ_KEYTABLEID_SHIFT = 17,
+	CMDQ_VRAMSEL_SHIFT = 23,
+	CMDQ_TABLESEL_SHIFT = 24,
+	CMDQ_OPCODE_SHIFT = 26,
+};
+
+/*
+ * The secure key slot contains a unique secure key generated
+ * and loaded by the bootloader.  This slot is marked as non-accessible
+ * to the kernel.
+ */
+#define SSK_SLOT_NUM	4
+
+#define AES_NR_KEYSLOTS	8
+#define TEGRA_AES_QUEUE_LENGTH	50
+#define DEFAULT_RNG_BLK_SZ	16
+
+/* The command queue depth */
+#define AES_HW_MAX_ICQ_LENGTH	5
+
+struct tegra_aes_slot {
+	struct list_head node;
+	int slot_num;
+};
+
+static struct tegra_aes_slot ssk = {
+	.slot_num = SSK_SLOT_NUM,
+};
+
+struct tegra_aes_reqctx {
+	unsigned long mode;
+};
+
+struct tegra_aes_dev {
+	struct device *dev;
+	void __iomem *io_base;
+	dma_addr_t ivkey_phys_base;
+	void __iomem *ivkey_base;
+	struct clk *aes_clk;
+	struct tegra_aes_ctx *ctx;
+	int irq;
+	unsigned long flags;
+	struct completion op_complete;
+	u32 *buf_in;
+	dma_addr_t dma_buf_in;
+	u32 *buf_out;
+	dma_addr_t dma_buf_out;
+	u8 *iv;
+	u8 dt[DEFAULT_RNG_BLK_SZ];
+	int ivlen;
+	u64 ctr;
+	spinlock_t lock;
+	struct crypto_queue queue;
+	struct tegra_aes_slot *slots;
+	struct ablkcipher_request *req;
+	size_t total;
+	struct scatterlist *in_sg;
+	size_t in_offset;
+	struct scatterlist *out_sg;
+	size_t out_offset;
+};
+
+static struct tegra_aes_dev *aes_dev;
+
+struct tegra_aes_ctx {
+	struct tegra_aes_dev *dd;
+	unsigned long flags;
+	struct tegra_aes_slot *slot;
+	u8 key[AES_MAX_KEY_SIZE];
+	size_t keylen;
+};
+
+static struct tegra_aes_ctx rng_ctx = {
+	.flags = FLAGS_NEW_KEY,
+	.keylen = AES_KEYSIZE_128,
+};
+
+/* keep registered devices data here */
+static struct list_head dev_list;
+static DEFINE_SPINLOCK(list_lock);
+static DEFINE_MUTEX(aes_lock);
+
+static void aes_workqueue_handler(struct work_struct *work);
+static DECLARE_WORK(aes_work, aes_workqueue_handler);
+static struct workqueue_struct *aes_wq;
+
+extern unsigned long long tegra_chip_uid(void);
+
+static inline u32 aes_readl(struct tegra_aes_dev *dd, u32 offset)
+{
+	return readl(dd->io_base + offset);
+}
+
+static inline void aes_writel(struct tegra_aes_dev *dd, u32 val, u32 offset)
+{
+	writel(val, dd->io_base + offset);
+}
+
+static int aes_start_crypt(struct tegra_aes_dev *dd, u32 in_addr, u32 out_addr,
+			   int nblocks, int mode, bool upd_iv)
+{
+	u32 cmdq[AES_HW_MAX_ICQ_LENGTH];
+	int i, eng_busy, icq_empty, ret;
+	u32 value;
+
+	/* reset all the interrupt bits */
+	aes_writel(dd, 0xFFFFFFFF, TEGRA_AES_INTR_STATUS);
+
+	/* enable error, dma xfer complete interrupts */
+	aes_writel(dd, 0x33, TEGRA_AES_INT_ENB);
+
+	cmdq[0] = CMD_DMASETUP << CMDQ_OPCODE_SHIFT;
+	cmdq[1] = in_addr;
+	cmdq[2] = CMD_BLKSTARTENGINE << CMDQ_OPCODE_SHIFT | (nblocks-1);
+	cmdq[3] = CMD_DMACOMPLETE << CMDQ_OPCODE_SHIFT;
+
+	value = aes_readl(dd, TEGRA_AES_CMDQUE_CONTROL);
+	/* access SDRAM through AHB */
+	value &= ~TEGRA_AES_CMDQ_CTRL_SRC_STM_SEL_FIELD;
+	value &= ~TEGRA_AES_CMDQ_CTRL_DST_STM_SEL_FIELD;
+	value |= TEGRA_AES_CMDQ_CTRL_SRC_STM_SEL_FIELD |
+		 TEGRA_AES_CMDQ_CTRL_DST_STM_SEL_FIELD |
+		 TEGRA_AES_CMDQ_CTRL_ICMDQEN_FIELD;
+	aes_writel(dd, value, TEGRA_AES_CMDQUE_CONTROL);
+	dev_dbg(dd->dev, "cmd_q_ctrl=0x%x", value);
+
+	value = (0x1 << TEGRA_AES_SECURE_INPUT_ALG_SEL_SHIFT) |
+		((dd->ctx->keylen * 8) <<
+			TEGRA_AES_SECURE_INPUT_KEY_LEN_SHIFT) |
+		((u32)upd_iv << TEGRA_AES_SECURE_IV_SELECT_SHIFT);
+
+	if (mode & FLAGS_CBC) {
+		value |= ((((mode & FLAGS_ENCRYPT) ? 2 : 3)
+				<< TEGRA_AES_SECURE_XOR_POS_SHIFT) |
+			(((mode & FLAGS_ENCRYPT) ? 2 : 3)
+				<< TEGRA_AES_SECURE_VCTRAM_SEL_SHIFT) |
+			((mode & FLAGS_ENCRYPT) ? 1 : 0)
+				<< TEGRA_AES_SECURE_CORE_SEL_SHIFT);
+	} else if (mode & FLAGS_OFB) {
+		value |= ((TEGRA_AES_SECURE_XOR_POS_FIELD) |
+			(2 << TEGRA_AES_SECURE_INPUT_SEL_SHIFT) |
+			(TEGRA_AES_SECURE_CORE_SEL_FIELD));
+	} else if (mode & FLAGS_RNG) {
+		value |= (((mode & FLAGS_ENCRYPT) ? 1 : 0)
+				<< TEGRA_AES_SECURE_CORE_SEL_SHIFT |
+			TEGRA_AES_SECURE_RNG_ENB_FIELD);
+	} else {
+		value |= (((mode & FLAGS_ENCRYPT) ? 1 : 0)
+				<< TEGRA_AES_SECURE_CORE_SEL_SHIFT);
+	}
+
+	dev_dbg(dd->dev, "secure_in_sel=0x%x", value);
+	aes_writel(dd, value, TEGRA_AES_SECURE_INPUT_SELECT);
+
+	aes_writel(dd, out_addr, TEGRA_AES_SECURE_DEST_ADDR);
+	INIT_COMPLETION(dd->op_complete);
+
+	for (i = 0; i < AES_HW_MAX_ICQ_LENGTH - 1; i++) {
+		do {
+			value = aes_readl(dd, TEGRA_AES_INTR_STATUS);
+			eng_busy = value & TEGRA_AES_ENGINE_BUSY_FIELD;
+			icq_empty = value & TEGRA_AES_ICQ_EMPTY_FIELD;
+		} while (eng_busy && !icq_empty);
+		aes_writel(dd, cmdq[i], TEGRA_AES_ICMDQUE_WR);
+	}
+
+	ret = wait_for_completion_timeout(&dd->op_complete,
+					  msecs_to_jiffies(150));
+	if (ret == 0) {
+		dev_err(dd->dev, "timed out (0x%x)\n",
+			aes_readl(dd, TEGRA_AES_INTR_STATUS));
+		return -ETIMEDOUT;
+	}
+
+	aes_writel(dd, cmdq[AES_HW_MAX_ICQ_LENGTH - 1], TEGRA_AES_ICMDQUE_WR);
+	return 0;
+}
+
+static void aes_release_key_slot(struct tegra_aes_slot *slot)
+{
+	if (slot->slot_num == SSK_SLOT_NUM)
+		return;
+
+	spin_lock(&list_lock);
+	list_add_tail(&slot->node, &dev_list);
+	slot = NULL;
+	spin_unlock(&list_lock);
+}
+
+static struct tegra_aes_slot *aes_find_key_slot(void)
+{
+	struct tegra_aes_slot *slot = NULL;
+	struct list_head *new_head;
+	int empty;
+
+	spin_lock(&list_lock);
+	empty = list_empty(&dev_list);
+	if (!empty) {
+		slot = list_entry(&dev_list, struct tegra_aes_slot, node);
+		new_head = dev_list.next;
+		list_del(&dev_list);
+		dev_list.next = new_head->next;
+		dev_list.prev = NULL;
+	}
+	spin_unlock(&list_lock);
+
+	return slot;
+}
+
+static int aes_set_key(struct tegra_aes_dev *dd)
+{
+	u32 value, cmdq[2];
+	struct tegra_aes_ctx *ctx = dd->ctx;
+	int eng_busy, icq_empty, dma_busy;
+	bool use_ssk = false;
+
+	/* use ssk? */
+	if (!dd->ctx->slot) {
+		dev_dbg(dd->dev, "using ssk");
+		dd->ctx->slot = &ssk;
+		use_ssk = true;
+	}
+
+	/* enable key schedule generation in hardware */
+	value = aes_readl(dd, TEGRA_AES_SECURE_CONFIG_EXT);
+	value &= ~TEGRA_AES_SECURE_KEY_SCH_DIS_FIELD;
+	aes_writel(dd, value, TEGRA_AES_SECURE_CONFIG_EXT);
+
+	/* select the key slot */
+	value = aes_readl(dd, TEGRA_AES_SECURE_CONFIG);
+	value &= ~TEGRA_AES_SECURE_KEY_INDEX_FIELD;
+	value |= (ctx->slot->slot_num << TEGRA_AES_SECURE_KEY_INDEX_SHIFT);
+	aes_writel(dd, value, TEGRA_AES_SECURE_CONFIG);
+
+	if (use_ssk)
+		return 0;
+
+	/* copy the key table from sdram to vram */
+	cmdq[0] = CMD_MEMDMAVD << CMDQ_OPCODE_SHIFT |
+		MEMDMA_DIR_DTOVRAM << MEMDMA_DIR_SHIFT |
+		AES_HW_KEY_TABLE_LENGTH_BYTES / sizeof(u32) <<
+			MEMDMA_NUM_WORDS_SHIFT;
+	cmdq[1] = (u32)dd->ivkey_phys_base;
+
+	aes_writel(dd, cmdq[0], TEGRA_AES_ICMDQUE_WR);
+	aes_writel(dd, cmdq[1], TEGRA_AES_ICMDQUE_WR);
+
+	do {
+		value = aes_readl(dd, TEGRA_AES_INTR_STATUS);
+		eng_busy = value & TEGRA_AES_ENGINE_BUSY_FIELD;
+		icq_empty = value & TEGRA_AES_ICQ_EMPTY_FIELD;
+		dma_busy = value & TEGRA_AES_DMA_BUSY_FIELD;
+	} while (eng_busy && !icq_empty && dma_busy);
+
+	/* settable command to get key into internal registers */
+	value = CMD_SETTABLE << CMDQ_OPCODE_SHIFT |
+		SUBCMD_CRYPTO_TABLE_SEL << CMDQ_TABLESEL_SHIFT |
+		SUBCMD_VRAM_SEL << CMDQ_VRAMSEL_SHIFT |
+		(SUBCMD_KEY_TABLE_SEL | ctx->slot->slot_num) <<
+			CMDQ_KEYTABLEID_SHIFT;
+	aes_writel(dd, value, TEGRA_AES_ICMDQUE_WR);
+
+	do {
+		value = aes_readl(dd, TEGRA_AES_INTR_STATUS);
+		eng_busy = value & TEGRA_AES_ENGINE_BUSY_FIELD;
+		icq_empty = value & TEGRA_AES_ICQ_EMPTY_FIELD;
+	} while (eng_busy && !icq_empty);
+
+	return 0;
+}
+
+static int tegra_aes_handle_req(struct tegra_aes_dev *dd)
+{
+	struct crypto_async_request *async_req, *backlog;
+	struct crypto_ablkcipher *tfm;
+	struct tegra_aes_ctx *ctx;
+	struct tegra_aes_reqctx *rctx;
+	struct ablkcipher_request *req;
+	unsigned long flags;
+	int dma_max = AES_HW_DMA_BUFFER_SIZE_BYTES;
+	int ret = 0, nblocks, total;
+	int count = 0;
+	dma_addr_t addr_in, addr_out;
+	struct scatterlist *in_sg, *out_sg;
+
+	if (!dd)
+		return -EINVAL;
+
+	spin_lock_irqsave(&dd->lock, flags);
+	backlog = crypto_get_backlog(&dd->queue);
+	async_req = crypto_dequeue_request(&dd->queue);
+	if (!async_req)
+		clear_bit(FLAGS_BUSY, &dd->flags);
+	spin_unlock_irqrestore(&dd->lock, flags);
+
+	if (!async_req)
+		return -ENODATA;
+
+	if (backlog)
+		backlog->complete(backlog, -EINPROGRESS);
+
+	req = ablkcipher_request_cast(async_req);
+
+	dev_dbg(dd->dev, "%s: get new req\n", __func__);
+
+	if (!req->src || !req->dst)
+		return -EINVAL;
+
+	/* take mutex to access the aes hw */
+	mutex_lock(&aes_lock);
+
+	/* assign new request to device */
+	dd->req = req;
+	dd->total = req->nbytes;
+	dd->in_offset = 0;
+	dd->in_sg = req->src;
+	dd->out_offset = 0;
+	dd->out_sg = req->dst;
+
+	in_sg = dd->in_sg;
+	out_sg = dd->out_sg;
+
+	total = dd->total;
+
+	tfm = crypto_ablkcipher_reqtfm(req);
+	rctx = ablkcipher_request_ctx(req);
+	ctx = crypto_ablkcipher_ctx(tfm);
+	rctx->mode &= FLAGS_MODE_MASK;
+	dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode;
+
+	dd->iv = (u8 *)req->info;
+	dd->ivlen = crypto_ablkcipher_ivsize(tfm);
+
+	/* assign new context to device */
+	ctx->dd = dd;
+	dd->ctx = ctx;
+
+	if (ctx->flags & FLAGS_NEW_KEY) {
+		/* copy the key */
+		memcpy(dd->ivkey_base, ctx->key, ctx->keylen);
+		memset(dd->ivkey_base + ctx->keylen, 0,
+		       AES_HW_KEY_TABLE_LENGTH_BYTES - ctx->keylen);
+		aes_set_key(dd);
+		ctx->flags &= ~FLAGS_NEW_KEY;
+	}
+
+	if (((dd->flags & FLAGS_CBC) || (dd->flags & FLAGS_OFB)) && dd->iv) {
+		/*
+		 * Set the IV in the AES hw slot.  The hw generates the
+		 * updated IV only after the IV has been set in the slot,
+		 * so the key and the IV are passed asynchronously.
+		 */
+		memcpy(dd->buf_in, dd->iv, dd->ivlen);
+
+		ret = aes_start_crypt(dd, (u32)dd->dma_buf_in,
+				      dd->dma_buf_out, 1, FLAGS_CBC, false);
+		if (ret < 0) {
+			dev_err(dd->dev, "aes_start_crypt fail(%d)\n", ret);
+			goto out;
+		}
+	}
+
+	while (total) {
+		dev_dbg(dd->dev, "remain: %d\n", total);
+		ret = dma_map_sg(dd->dev, in_sg, 1, DMA_TO_DEVICE);
+		if (!ret) {
+			dev_err(dd->dev, "dma_map_sg() error\n");
+			goto out;
+		}
+
+		ret = dma_map_sg(dd->dev, out_sg, 1, DMA_FROM_DEVICE);
+		if (!ret) {
+			dev_err(dd->dev, "dma_map_sg() error\n");
+			dma_unmap_sg(dd->dev, dd->in_sg,
+				     1, DMA_TO_DEVICE);
+			goto out;
+		}
+
+		addr_in = sg_dma_address(in_sg);
+		addr_out = sg_dma_address(out_sg);
+		dd->flags |= FLAGS_FAST;
+		count = min_t(int, sg_dma_len(in_sg), dma_max);
+		WARN_ON(sg_dma_len(in_sg) != sg_dma_len(out_sg));
+		nblocks = DIV_ROUND_UP(count, AES_BLOCK_SIZE);
+
+		ret = aes_start_crypt(dd, addr_in, addr_out, nblocks,
+				      dd->flags, true);
+
+		dma_unmap_sg(dd->dev, out_sg, 1, DMA_FROM_DEVICE);
+		dma_unmap_sg(dd->dev, in_sg, 1, DMA_TO_DEVICE);
+
+		if (ret < 0) {
+			dev_err(dd->dev, "aes_start_crypt fail(%d)\n", ret);
+			goto out;
+		}
+		dd->flags &= ~FLAGS_FAST;
+
+		dev_dbg(dd->dev, "out: copied %d\n", count);
+		total -= count;
+		in_sg = sg_next(in_sg);
+		out_sg = sg_next(out_sg);
+		WARN_ON(((total != 0) && (!in_sg || !out_sg)));
+	}
+
+out:
+	mutex_unlock(&aes_lock);
+
+	dd->total = total;
+
+	if (dd->req->base.complete)
+		dd->req->base.complete(&dd->req->base, ret);
+
+	dev_dbg(dd->dev, "%s: exit\n", __func__);
+	return ret;
+}
+
+static int tegra_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+			    unsigned int keylen)
+{
+	struct tegra_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+	struct tegra_aes_dev *dd = aes_dev;
+	struct tegra_aes_slot *key_slot;
+
+	if ((keylen != AES_KEYSIZE_128) && (keylen != AES_KEYSIZE_192) &&
+	    (keylen != AES_KEYSIZE_256)) {
+		dev_err(dd->dev, "unsupported key size\n");
+		crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		return -EINVAL;
+	}
+
+	dev_dbg(dd->dev, "keylen: %d\n", keylen);
+
+	ctx->dd = dd;
+
+	if (key) {
+		if (!ctx->slot) {
+			key_slot = aes_find_key_slot();
+			if (!key_slot) {
+				dev_err(dd->dev, "no empty slot\n");
+				return -ENOMEM;
+			}
+
+			ctx->slot = key_slot;
+		}
+
+		memcpy(ctx->key, key, keylen);
+		ctx->keylen = keylen;
+	}
+
+	ctx->flags |= FLAGS_NEW_KEY;
+	dev_dbg(dd->dev, "done\n");
+	return 0;
+}
+
+static void aes_workqueue_handler(struct work_struct *work)
+{
+	struct tegra_aes_dev *dd = aes_dev;
+	int ret;
+
+	ret = clk_enable(dd->aes_clk);
+	if (ret)
+		BUG_ON("clock enable failed");
+
+	/* empty the crypto queue and then return */
+	do {
+		ret = tegra_aes_handle_req(dd);
+	} while (!ret);
+
+	clk_disable(dd->aes_clk);
+}
+
+static irqreturn_t aes_irq(int irq, void *dev_id)
+{
+	struct tegra_aes_dev *dd = (struct tegra_aes_dev *)dev_id;
+	u32 value = aes_readl(dd, TEGRA_AES_INTR_STATUS);
+	int busy = test_bit(FLAGS_BUSY, &dd->flags);
+
+	if (!busy) {
+		dev_dbg(dd->dev, "spurious interrupt\n");
+		return IRQ_NONE;
+	}
+
+	dev_dbg(dd->dev, "irq_stat: 0x%x\n", value);
+	if (value & TEGRA_AES_INT_ERROR_MASK)
+		aes_writel(dd, TEGRA_AES_INT_ERROR_MASK, TEGRA_AES_INTR_STATUS);
+
+	if (!(value & TEGRA_AES_ENGINE_BUSY_FIELD))
+		complete(&dd->op_complete);
+	else
+		return IRQ_NONE;
+
+	return IRQ_HANDLED;
+}
+
+static int tegra_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
+{
+	struct tegra_aes_reqctx *rctx = ablkcipher_request_ctx(req);
+	struct tegra_aes_dev *dd = aes_dev;
+	unsigned long flags;
+	int err = 0;
+	int busy;
+
+	dev_dbg(dd->dev, "nbytes: %d, enc: %d, cbc: %d, ofb: %d\n",
+		req->nbytes, !!(mode & FLAGS_ENCRYPT),
+		!!(mode & FLAGS_CBC), !!(mode & FLAGS_OFB));
+
+	rctx->mode = mode;
+
+	spin_lock_irqsave(&dd->lock, flags);
+	err = ablkcipher_enqueue_request(&dd->queue, req);
+	busy = test_and_set_bit(FLAGS_BUSY, &dd->flags);
+	spin_unlock_irqrestore(&dd->lock, flags);
+
+	if (!busy)
+		queue_work(aes_wq, &aes_work);
+
+	return err;
+}
+
+static int tegra_aes_ecb_encrypt(struct ablkcipher_request *req)
+{
+	return tegra_aes_crypt(req, FLAGS_ENCRYPT);
+}
+
+static int tegra_aes_ecb_decrypt(struct ablkcipher_request *req)
+{
+	return tegra_aes_crypt(req, 0);
+}
+
+static int tegra_aes_cbc_encrypt(struct ablkcipher_request *req)
+{
+	return tegra_aes_crypt(req, FLAGS_ENCRYPT | FLAGS_CBC);
+}
+
+static int tegra_aes_cbc_decrypt(struct ablkcipher_request *req)
+{
+	return tegra_aes_crypt(req, FLAGS_CBC);
+}
+
+static int tegra_aes_ofb_encrypt(struct ablkcipher_request *req)
+{
+	return tegra_aes_crypt(req, FLAGS_ENCRYPT | FLAGS_OFB);
+}
+
+static int tegra_aes_ofb_decrypt(struct ablkcipher_request *req)
+{
+	return tegra_aes_crypt(req, FLAGS_OFB);
+}
+
+static int tegra_aes_get_random(struct crypto_rng *tfm, u8 *rdata,
+				unsigned int dlen)
+{
+	struct tegra_aes_dev *dd = aes_dev;
+	struct tegra_aes_ctx *ctx = &rng_ctx;
+	int ret, i;
+	u8 *dest = rdata, *dt = dd->dt;
+
+	/* take mutex to access the aes hw */
+	mutex_lock(&aes_lock);
+
+	ret = clk_enable(dd->aes_clk);
+	if (ret) {
+		/* drop the hw mutex on the early-error path */
+		mutex_unlock(&aes_lock);
+		return ret;
+	}
+
+	ctx->dd = dd;
+	dd->ctx = ctx;
+	dd->flags = FLAGS_ENCRYPT | FLAGS_RNG;
+
+	memcpy(dd->buf_in, dt, DEFAULT_RNG_BLK_SZ);
+
+	ret = aes_start_crypt(dd, (u32)dd->dma_buf_in,
+			      (u32)dd->dma_buf_out, 1, dd->flags, true);
+	if (ret < 0) {
+		dev_err(dd->dev, "aes_start_crypt fail(%d)\n", ret);
+		dlen = ret;
+		goto out;
+	}
+	memcpy(dest, dd->buf_out, dlen);
+
+	/* update the DT */
+	for (i = DEFAULT_RNG_BLK_SZ - 1; i >= 0; i--) {
+		dt[i] += 1;
+		if (dt[i] != 0)
+			break;
+	}
+
+out:
+	clk_disable(dd->aes_clk);
+	mutex_unlock(&aes_lock);
+
+	dev_dbg(dd->dev, "%s: done\n", __func__);
+	return dlen;
+}
709
710static int tegra_aes_rng_reset(struct crypto_rng *tfm, u8 *seed,
711 unsigned int slen)
712{
713 struct tegra_aes_dev *dd = aes_dev;
714 struct tegra_aes_ctx *ctx = &rng_ctx;
715 struct tegra_aes_slot *key_slot;
716 struct timespec ts;
717 int ret = 0;
718 u64 nsec, tmp[2];
719 u8 *dt;
720
721	if (!ctx || !dd) {
722		pr_err("%s: device not initialised\n", __func__);
723		return -EINVAL;
724	}
726
727	if (slen < (DEFAULT_RNG_BLK_SZ + AES_KEYSIZE_128)) {
728		dev_err(dd->dev, "seed size invalid\n");
729		return -EINVAL;
730	}
731
732 /* take mutex to access the aes hw */
733 mutex_lock(&aes_lock);
734
735 if (!ctx->slot) {
736 key_slot = aes_find_key_slot();
737 if (!key_slot) {
738 dev_err(dd->dev, "no empty slot\n");
739 mutex_unlock(&aes_lock);
740 return -ENOMEM;
741 }
742 ctx->slot = key_slot;
743 }
744
745 ctx->dd = dd;
746 dd->ctx = ctx;
747 dd->ctr = 0;
748
749 ctx->keylen = AES_KEYSIZE_128;
750 ctx->flags |= FLAGS_NEW_KEY;
751
752 /* copy the key to the key slot */
753 memcpy(dd->ivkey_base, seed + DEFAULT_RNG_BLK_SZ, AES_KEYSIZE_128);
754	memset(dd->ivkey_base + AES_KEYSIZE_128, 0,
755	       AES_HW_KEY_TABLE_LENGTH_BYTES - AES_KEYSIZE_128);
755
756 dd->iv = seed;
757 dd->ivlen = slen;
758
759 dd->flags = FLAGS_ENCRYPT | FLAGS_RNG;
760
761	ret = clk_enable(dd->aes_clk);
762	if (ret) {
763		mutex_unlock(&aes_lock);
764		return ret;
765	}
764
765 aes_set_key(dd);
766
767 /* set seed to the aes hw slot */
768 memcpy(dd->buf_in, dd->iv, DEFAULT_RNG_BLK_SZ);
769	ret = aes_start_crypt(dd, (u32)dd->dma_buf_in,
770			      (u32)dd->dma_buf_out, 1, FLAGS_CBC, false);
771 if (ret < 0) {
772 dev_err(dd->dev, "aes_start_crypt fail(%d)\n", ret);
773 goto out;
774 }
775
776 if (dd->ivlen >= (2 * DEFAULT_RNG_BLK_SZ + AES_KEYSIZE_128)) {
777 dt = dd->iv + DEFAULT_RNG_BLK_SZ + AES_KEYSIZE_128;
778 } else {
779 getnstimeofday(&ts);
780 nsec = timespec_to_ns(&ts);
781 do_div(nsec, 1000);
782 nsec ^= dd->ctr << 56;
783 dd->ctr++;
784 tmp[0] = nsec;
785 tmp[1] = tegra_chip_uid();
786 dt = (u8 *)tmp;
787 }
788 memcpy(dd->dt, dt, DEFAULT_RNG_BLK_SZ);
789
790out:
791 clk_disable(dd->aes_clk);
792 mutex_unlock(&aes_lock);
793
794 dev_dbg(dd->dev, "%s: done\n", __func__);
795 return ret;
796}
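/*
 * The seed consumed above is laid out as
 *
 *	seed = V[DEFAULT_RNG_BLK_SZ] || K[AES_KEYSIZE_128] ||
 *	       (optional DT[DEFAULT_RNG_BLK_SZ])
 *
 * i.e. the first block is the initial vector, the next 16 bytes the key,
 * and, when present, a trailing block seeds the DT; otherwise the DT is
 * derived from the current time and the chip UID, as coded above.
 */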
797
798static int tegra_aes_cra_init(struct crypto_tfm *tfm)
799{
800 tfm->crt_ablkcipher.reqsize = sizeof(struct tegra_aes_reqctx);
801
802 return 0;
803}
804
805static void tegra_aes_cra_exit(struct crypto_tfm *tfm)
806{
807 struct tegra_aes_ctx *ctx =
808 crypto_ablkcipher_ctx((struct crypto_ablkcipher *)tfm);
809
810 if (ctx && ctx->slot)
811 aes_release_key_slot(ctx->slot);
812}
813
814static struct crypto_alg algs[] = {
815 {
816 .cra_name = "ecb(aes)",
817 .cra_driver_name = "ecb-aes-tegra",
818 .cra_priority = 300,
819 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
820 .cra_blocksize = AES_BLOCK_SIZE,
821 .cra_alignmask = 3,
822 .cra_type = &crypto_ablkcipher_type,
823 .cra_u.ablkcipher = {
824 .min_keysize = AES_MIN_KEY_SIZE,
825 .max_keysize = AES_MAX_KEY_SIZE,
826 .setkey = tegra_aes_setkey,
827 .encrypt = tegra_aes_ecb_encrypt,
828 .decrypt = tegra_aes_ecb_decrypt,
829 },
830 }, {
831 .cra_name = "cbc(aes)",
832 .cra_driver_name = "cbc-aes-tegra",
833 .cra_priority = 300,
834 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
835 .cra_blocksize = AES_BLOCK_SIZE,
836 .cra_alignmask = 3,
837 .cra_type = &crypto_ablkcipher_type,
838 .cra_u.ablkcipher = {
839 .min_keysize = AES_MIN_KEY_SIZE,
840 .max_keysize = AES_MAX_KEY_SIZE,
841			.ivsize = AES_BLOCK_SIZE,
842 .setkey = tegra_aes_setkey,
843 .encrypt = tegra_aes_cbc_encrypt,
844 .decrypt = tegra_aes_cbc_decrypt,
845 }
846 }, {
847 .cra_name = "ofb(aes)",
848 .cra_driver_name = "ofb-aes-tegra",
849 .cra_priority = 300,
850 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
851 .cra_blocksize = AES_BLOCK_SIZE,
852 .cra_alignmask = 3,
853 .cra_type = &crypto_ablkcipher_type,
854 .cra_u.ablkcipher = {
855 .min_keysize = AES_MIN_KEY_SIZE,
856 .max_keysize = AES_MAX_KEY_SIZE,
857			.ivsize = AES_BLOCK_SIZE,
858 .setkey = tegra_aes_setkey,
859 .encrypt = tegra_aes_ofb_encrypt,
860 .decrypt = tegra_aes_ofb_decrypt,
861 }
862 }, {
863 .cra_name = "ansi_cprng",
864 .cra_driver_name = "rng-aes-tegra",
865 .cra_flags = CRYPTO_ALG_TYPE_RNG,
866 .cra_ctxsize = sizeof(struct tegra_aes_ctx),
867 .cra_type = &crypto_rng_type,
868 .cra_u.rng = {
869 .rng_make_random = tegra_aes_get_random,
870 .rng_reset = tegra_aes_rng_reset,
871 .seedsize = AES_KEYSIZE_128 + (2 * DEFAULT_RNG_BLK_SZ),
872 }
873 }
874};
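/*
 * The common fields left unset here (cra_priority for the rng entry,
 * cra_ctxsize, cra_module, cra_init/cra_exit) are filled in by
 * tegra_aes_probe() immediately before each algorithm is registered.
 */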
875
876static int tegra_aes_probe(struct platform_device *pdev)
877{
878 struct device *dev = &pdev->dev;
879 struct tegra_aes_dev *dd;
880 struct resource *res;
881 int err = -ENOMEM, i = 0, j;
882
883 dd = devm_kzalloc(dev, sizeof(struct tegra_aes_dev), GFP_KERNEL);
884 if (dd == NULL) {
885 dev_err(dev, "unable to alloc data struct.\n");
886 return err;
887 }
888
889 dd->dev = dev;
890 platform_set_drvdata(pdev, dd);
891
892 dd->slots = devm_kzalloc(dev, sizeof(struct tegra_aes_slot) *
893 AES_NR_KEYSLOTS, GFP_KERNEL);
894 if (dd->slots == NULL) {
895 dev_err(dev, "unable to alloc slot struct.\n");
896 goto out;
897 }
898
899 spin_lock_init(&dd->lock);
900 crypto_init_queue(&dd->queue, TEGRA_AES_QUEUE_LENGTH);
901
902 /* Get the module base address */
903 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
904 if (!res) {
905 dev_err(dev, "invalid resource type: base\n");
906 err = -ENODEV;
907 goto out;
908 }
909
910	if (!devm_request_mem_region(dev, res->start,
911				     resource_size(res),
912				     dev_name(dev))) {
913		dev_err(dev, "Couldn't request MEM resource\n");
914		return -EBUSY;
915	}
916
917 dd->io_base = devm_ioremap(dev, res->start, resource_size(res));
918 if (!dd->io_base) {
919 dev_err(dev, "can't ioremap register space\n");
920 err = -ENOMEM;
921 goto out;
922 }
923
924 /* Initialize the vde clock */
925 dd->aes_clk = clk_get(dev, "vde");
926 if (IS_ERR(dd->aes_clk)) {
927 dev_err(dev, "iclock intialization failed.\n");
928 err = -ENODEV;
929 goto out;
930 }
931
932 err = clk_set_rate(dd->aes_clk, ULONG_MAX);
933 if (err) {
934 dev_err(dd->dev, "iclk set_rate fail(%d)\n", err);
935 goto out;
936 }
937
938	/*
939	 * The following contiguous memory is allocated:
940	 * - hardware key table
941	 * - key schedule
942	 */
943 dd->ivkey_base = dma_alloc_coherent(dev, AES_HW_KEY_TABLE_LENGTH_BYTES,
944 &dd->ivkey_phys_base,
945 GFP_KERNEL);
946 if (!dd->ivkey_base) {
947 dev_err(dev, "can not allocate iv/key buffer\n");
948 err = -ENOMEM;
949 goto out;
950 }
951
952 dd->buf_in = dma_alloc_coherent(dev, AES_HW_DMA_BUFFER_SIZE_BYTES,
953 &dd->dma_buf_in, GFP_KERNEL);
954 if (!dd->buf_in) {
955 dev_err(dev, "can not allocate dma-in buffer\n");
956 err = -ENOMEM;
957 goto out;
958 }
959
960 dd->buf_out = dma_alloc_coherent(dev, AES_HW_DMA_BUFFER_SIZE_BYTES,
961 &dd->dma_buf_out, GFP_KERNEL);
962 if (!dd->buf_out) {
963 dev_err(dev, "can not allocate dma-out buffer\n");
964 err = -ENOMEM;
965 goto out;
966 }
967
968 init_completion(&dd->op_complete);
969 aes_wq = alloc_workqueue("tegra_aes_wq", WQ_HIGHPRI | WQ_UNBOUND, 1);
970	if (!aes_wq) {
971		dev_err(dev, "alloc_workqueue failed\n");
972		err = -ENOMEM;
973		goto out;
974	}
974
975 /* get the irq */
976 res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
977 if (!res) {
978 dev_err(dev, "invalid resource type: base\n");
979 err = -ENODEV;
980 goto out;
981 }
982 dd->irq = res->start;
983
984 err = devm_request_irq(dev, dd->irq, aes_irq, IRQF_TRIGGER_HIGH |
985 IRQF_SHARED, "tegra-aes", dd);
986 if (err) {
987 dev_err(dev, "request_irq failed\n");
988 goto out;
989 }
990
991 mutex_init(&aes_lock);
992 INIT_LIST_HEAD(&dev_list);
993
994 spin_lock_init(&list_lock);
995 spin_lock(&list_lock);
996 for (i = 0; i < AES_NR_KEYSLOTS; i++) {
997 if (i == SSK_SLOT_NUM)
998 continue;
999 dd->slots[i].slot_num = i;
1000 INIT_LIST_HEAD(&dd->slots[i].node);
1001 list_add_tail(&dd->slots[i].node, &dev_list);
1002 }
1003 spin_unlock(&list_lock);
1004
1005 aes_dev = dd;
1006 for (i = 0; i < ARRAY_SIZE(algs); i++) {
1007 INIT_LIST_HEAD(&algs[i].cra_list);
1008
1009 algs[i].cra_priority = 300;
1010 algs[i].cra_ctxsize = sizeof(struct tegra_aes_ctx);
1011 algs[i].cra_module = THIS_MODULE;
1012 algs[i].cra_init = tegra_aes_cra_init;
1013 algs[i].cra_exit = tegra_aes_cra_exit;
1014
1015 err = crypto_register_alg(&algs[i]);
1016 if (err)
1017 goto out;
1018 }
1019
1020	dev_info(dev, "registered\n");
1021 return 0;
1022
1023out:
1024 for (j = 0; j < i; j++)
1025 crypto_unregister_alg(&algs[j]);
1026 if (dd->ivkey_base)
1027 dma_free_coherent(dev, AES_HW_KEY_TABLE_LENGTH_BYTES,
1028 dd->ivkey_base, dd->ivkey_phys_base);
1029 if (dd->buf_in)
1030 dma_free_coherent(dev, AES_HW_DMA_BUFFER_SIZE_BYTES,
1031 dd->buf_in, dd->dma_buf_in);
1032 if (dd->buf_out)
1033 dma_free_coherent(dev, AES_HW_DMA_BUFFER_SIZE_BYTES,
1034 dd->buf_out, dd->dma_buf_out);
1035 if (IS_ERR(dd->aes_clk))
1036 clk_put(dd->aes_clk);
1037 if (aes_wq)
1038 destroy_workqueue(aes_wq);
1039 spin_lock(&list_lock);
1040	list_del_init(&dev_list);
1041 spin_unlock(&list_lock);
1042
1043 aes_dev = NULL;
1044
1045 dev_err(dev, "%s: initialization failed.\n", __func__);
1046 return err;
1047}
1048
1049static int __devexit tegra_aes_remove(struct platform_device *pdev)
1050{
1051 struct device *dev = &pdev->dev;
1052 struct tegra_aes_dev *dd = platform_get_drvdata(pdev);
1053 int i;
1054
1055 for (i = 0; i < ARRAY_SIZE(algs); i++)
1056 crypto_unregister_alg(&algs[i]);
1057
1058 cancel_work_sync(&aes_work);
1059 destroy_workqueue(aes_wq);
1060 spin_lock(&list_lock);
1061	list_del_init(&dev_list);
1062 spin_unlock(&list_lock);
1063
1064 dma_free_coherent(dev, AES_HW_KEY_TABLE_LENGTH_BYTES,
1065 dd->ivkey_base, dd->ivkey_phys_base);
1066 dma_free_coherent(dev, AES_HW_DMA_BUFFER_SIZE_BYTES,
1067 dd->buf_in, dd->dma_buf_in);
1068 dma_free_coherent(dev, AES_HW_DMA_BUFFER_SIZE_BYTES,
1069 dd->buf_out, dd->dma_buf_out);
1070 clk_put(dd->aes_clk);
1071 aes_dev = NULL;
1072
1073 return 0;
1074}
1075
1076static const struct of_device_id tegra_aes_of_match[] = {
1077 { .compatible = "nvidia,tegra20-aes", },
1078 { .compatible = "nvidia,tegra30-aes", },
1079 { },
1080};
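/*
 * An illustrative device tree node binding to this table (a sketch; the
 * unit address, reg window and interrupt are assumptions, not taken from
 * a real board file):
 *
 *	aes@6001a000 {
 *		compatible = "nvidia,tegra20-aes";
 *		reg = <0x6001a000 0x3c00>;
 *		interrupts = <...>;
 *	};
 */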
1081
1082static struct platform_driver tegra_aes_driver = {
1083 .probe = tegra_aes_probe,
1084 .remove = __devexit_p(tegra_aes_remove),
1085 .driver = {
1086 .name = "tegra-aes",
1087 .owner = THIS_MODULE,
1088 .of_match_table = tegra_aes_of_match,
1089 },
1090};
1091
1092module_platform_driver(tegra_aes_driver);
1093
1094MODULE_DESCRIPTION("Tegra AES/OFB/CPRNG hw acceleration support.");
1095MODULE_AUTHOR("NVIDIA Corporation");
1096MODULE_LICENSE("GPL v2");
diff --git a/drivers/crypto/tegra-aes.h b/drivers/crypto/tegra-aes.h
new file mode 100644
index 000000000000..6006333a8934
--- /dev/null
+++ b/drivers/crypto/tegra-aes.h
@@ -0,0 +1,103 @@
1/*
2 * Copyright (c) 2010, NVIDIA Corporation.
3 *
4 * This program is free software; you can redistribute it and/or modify
5 * it under the terms of the GNU General Public License as published by
6 * the Free Software Foundation; either version 2 of the License, or
7 * (at your option) any later version.
8 *
9 * This program is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
12 * more details.
13 *
14 * You should have received a copy of the GNU General Public License along
15 * with this program; if not, write to the Free Software Foundation, Inc.,
16 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
17 */
18
19#ifndef __CRYPTODEV_TEGRA_AES_H
20#define __CRYPTODEV_TEGRA_AES_H
21
22#define TEGRA_AES_ICMDQUE_WR 0x1000
23#define TEGRA_AES_CMDQUE_CONTROL 0x1008
24#define TEGRA_AES_INTR_STATUS 0x1018
25#define TEGRA_AES_INT_ENB 0x1040
26#define TEGRA_AES_CONFIG 0x1044
27#define TEGRA_AES_IRAM_ACCESS_CFG 0x10A0
28#define TEGRA_AES_SECURE_DEST_ADDR 0x1100
29#define TEGRA_AES_SECURE_INPUT_SELECT 0x1104
30#define TEGRA_AES_SECURE_CONFIG 0x1108
31#define TEGRA_AES_SECURE_CONFIG_EXT 0x110C
32#define TEGRA_AES_SECURE_SECURITY 0x1110
33#define TEGRA_AES_SECURE_HASH_RESULT0 0x1120
34#define TEGRA_AES_SECURE_HASH_RESULT1 0x1124
35#define TEGRA_AES_SECURE_HASH_RESULT2 0x1128
36#define TEGRA_AES_SECURE_HASH_RESULT3 0x112C
37#define TEGRA_AES_SECURE_SEC_SEL0 0x1140
38#define TEGRA_AES_SECURE_SEC_SEL1 0x1144
39#define TEGRA_AES_SECURE_SEC_SEL2 0x1148
40#define TEGRA_AES_SECURE_SEC_SEL3 0x114C
41#define TEGRA_AES_SECURE_SEC_SEL4 0x1150
42#define TEGRA_AES_SECURE_SEC_SEL5 0x1154
43#define TEGRA_AES_SECURE_SEC_SEL6 0x1158
44#define TEGRA_AES_SECURE_SEC_SEL7 0x115C
45
46/* interrupt status reg masks and shifts */
47#define TEGRA_AES_ENGINE_BUSY_FIELD BIT(0)
48#define TEGRA_AES_ICQ_EMPTY_FIELD BIT(3)
49#define TEGRA_AES_DMA_BUSY_FIELD BIT(23)
50
51/* secure select reg masks and shifts */
52#define TEGRA_AES_SECURE_SEL0_KEYREAD_ENB0_FIELD BIT(0)
53
54/* secure config ext masks and shifts */
55#define TEGRA_AES_SECURE_KEY_SCH_DIS_FIELD BIT(15)
56
57/* secure config masks and shifts */
58#define TEGRA_AES_SECURE_KEY_INDEX_SHIFT 20
59#define TEGRA_AES_SECURE_KEY_INDEX_FIELD (0x1F << TEGRA_AES_SECURE_KEY_INDEX_SHIFT)
60#define TEGRA_AES_SECURE_BLOCK_CNT_SHIFT 0
61#define TEGRA_AES_SECURE_BLOCK_CNT_FIELD (0xFFFFF << TEGRA_AES_SECURE_BLOCK_CNT_SHIFT)
62
63/* stream interface select masks and shifts */
64#define TEGRA_AES_CMDQ_CTRL_UCMDQEN_FIELD BIT(0)
65#define TEGRA_AES_CMDQ_CTRL_ICMDQEN_FIELD BIT(1)
66#define TEGRA_AES_CMDQ_CTRL_SRC_STM_SEL_FIELD BIT(4)
67#define TEGRA_AES_CMDQ_CTRL_DST_STM_SEL_FIELD BIT(5)
68
69/* config register masks and shifts */
70#define TEGRA_AES_CONFIG_ENDIAN_ENB_FIELD BIT(10)
71#define TEGRA_AES_CONFIG_MODE_SEL_SHIFT 0
72#define TEGRA_AES_CONFIG_MODE_SEL_FIELD (0x1F << TEGRA_AES_CONFIG_MODE_SEL_SHIFT)
73
74/* extended config */
75#define TEGRA_AES_SECURE_OFFSET_CNT_SHIFT 24
76#define TEGRA_AES_SECURE_OFFSET_CNT_FIELD (0xFF << TEGRA_AES_SECURE_OFFSET_CNT_SHIFT)
77#define TEGRA_AES_SECURE_KEYSCHED_GEN_FIELD BIT(15)
78
79/* init vector select */
80#define TEGRA_AES_SECURE_IV_SELECT_SHIFT 10
81#define TEGRA_AES_SECURE_IV_SELECT_FIELD BIT(10)
82
83/* secure engine input */
84#define TEGRA_AES_SECURE_INPUT_ALG_SEL_SHIFT 28
85#define TEGRA_AES_SECURE_INPUT_ALG_SEL_FIELD (0xF << TEGRA_AES_SECURE_INPUT_ALG_SEL_SHIFT)
86#define TEGRA_AES_SECURE_INPUT_KEY_LEN_SHIFT 16
87#define TEGRA_AES_SECURE_INPUT_KEY_LEN_FIELD (0xFFF << TEGRA_AES_SECURE_INPUT_KEY_LEN_SHIFT)
88#define TEGRA_AES_SECURE_RNG_ENB_FIELD BIT(11)
89#define TEGRA_AES_SECURE_CORE_SEL_SHIFT 9
90#define TEGRA_AES_SECURE_CORE_SEL_FIELD BIT(9)
91#define TEGRA_AES_SECURE_VCTRAM_SEL_SHIFT 7
92#define TEGRA_AES_SECURE_VCTRAM_SEL_FIELD (0x3 << TEGRA_AES_SECURE_VCTRAM_SEL_SHIFT)
93#define TEGRA_AES_SECURE_INPUT_SEL_SHIFT 5
94#define TEGRA_AES_SECURE_INPUT_SEL_FIELD (0x3 << TEGRA_AES_SECURE_INPUT_SEL_SHIFT)
95#define TEGRA_AES_SECURE_XOR_POS_SHIFT 3
96#define TEGRA_AES_SECURE_XOR_POS_FIELD (0x3 << TEGRA_AES_SECURE_XOR_POS_SHIFT)
97#define TEGRA_AES_SECURE_HASH_ENB_FIELD BIT(2)
98#define TEGRA_AES_SECURE_ON_THE_FLY_FIELD BIT(0)
99
100/* interrupt error mask */
101#define TEGRA_AES_INT_ERROR_MASK 0xFFF000
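/*
 * The _SHIFT/_FIELD pairs above are composed in the usual
 * read-modify-write way; a sketch (assuming the key length field takes
 * the size in bits):
 *
 *	val &= ~TEGRA_AES_SECURE_INPUT_KEY_LEN_FIELD;
 *	val |= (128 << TEGRA_AES_SECURE_INPUT_KEY_LEN_SHIFT) &
 *	       TEGRA_AES_SECURE_INPUT_KEY_LEN_FIELD;
 */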
102
103#endif