about summary refs log tree commit diff stats
path: root/crypto
diff options
context:
space:
mode:
Diffstat (limited to 'crypto')
-rw-r--r--crypto/Kconfig43
-rw-r--r--crypto/Makefile1
-rw-r--r--crypto/ablkcipher.c12
-rw-r--r--crypto/aead.c15
-rw-r--r--crypto/ahash.c2
-rw-r--r--crypto/algapi.c10
-rw-r--r--crypto/async_tx/async_memcpy.c6
-rw-r--r--crypto/async_tx/async_memset.c1
-rw-r--r--crypto/async_tx/async_tx.c9
-rw-r--r--crypto/async_tx/async_xor.c4
-rw-r--r--crypto/authenc.c3
-rw-r--r--crypto/authencesn.c3
-rw-r--r--crypto/blkcipher.c12
-rw-r--r--crypto/ccm.c23
-rw-r--r--crypto/chainiv.c3
-rw-r--r--crypto/crc32.c158
-rw-r--r--crypto/crypto_user.c38
-rw-r--r--crypto/ctr.c173
-rw-r--r--crypto/cts.c3
-rw-r--r--crypto/gcm.c29
-rw-r--r--crypto/pcompress.c3
-rw-r--r--crypto/rng.c2
-rw-r--r--crypto/seqiv.c3
-rw-r--r--crypto/shash.c3
-rw-r--r--crypto/tcrypt.c4
-rw-r--r--crypto/tcrypt.h1
-rw-r--r--crypto/testmgr.c15
-rw-r--r--crypto/testmgr.h38
28 files changed, 427 insertions(+), 190 deletions(-)
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 4641d95651d3..05c0ce52f96d 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -134,8 +134,8 @@ config CRYPTO_NULL
134 These are 'Null' algorithms, used by IPsec, which do nothing. 134 These are 'Null' algorithms, used by IPsec, which do nothing.
135 135
136config CRYPTO_PCRYPT 136config CRYPTO_PCRYPT
137 tristate "Parallel crypto engine (EXPERIMENTAL)" 137 tristate "Parallel crypto engine"
138 depends on SMP && EXPERIMENTAL 138 depends on SMP
139 select PADATA 139 select PADATA
140 select CRYPTO_MANAGER 140 select CRYPTO_MANAGER
141 select CRYPTO_AEAD 141 select CRYPTO_AEAD
@@ -292,7 +292,6 @@ config CRYPTO_HMAC
292 292
293config CRYPTO_XCBC 293config CRYPTO_XCBC
294 tristate "XCBC support" 294 tristate "XCBC support"
295 depends on EXPERIMENTAL
296 select CRYPTO_HASH 295 select CRYPTO_HASH
297 select CRYPTO_MANAGER 296 select CRYPTO_MANAGER
298 help 297 help
@@ -303,7 +302,6 @@ config CRYPTO_XCBC
303 302
304config CRYPTO_VMAC 303config CRYPTO_VMAC
305 tristate "VMAC support" 304 tristate "VMAC support"
306 depends on EXPERIMENTAL
307 select CRYPTO_HASH 305 select CRYPTO_HASH
308 select CRYPTO_MANAGER 306 select CRYPTO_MANAGER
309 help 307 help
@@ -355,6 +353,27 @@ config CRYPTO_CRC32C_SPARC64
355 CRC32c CRC algorithm implemented using sparc64 crypto instructions, 353 CRC32c CRC algorithm implemented using sparc64 crypto instructions,
356 when available. 354 when available.
357 355
356config CRYPTO_CRC32
357 tristate "CRC32 CRC algorithm"
358 select CRYPTO_HASH
359 select CRC32
360 help
361 CRC-32-IEEE 802.3 cyclic redundancy-check algorithm.
362 Shash crypto api wrappers to crc32_le function.
363
364config CRYPTO_CRC32_PCLMUL
365 tristate "CRC32 PCLMULQDQ hardware acceleration"
366 depends on X86
367 select CRYPTO_HASH
368 select CRC32
369 help
370 From Intel Westmere and AMD Bulldozer processor with SSE4.2
371 and PCLMULQDQ supported, the processor will support
372 CRC32 PCLMULQDQ implementation using hardware accelerated PCLMULQDQ
373 instruction. This option will create 'crc32-plcmul' module,
374 which will enable any routine to use the CRC-32-IEEE 802.3 checksum
375 and gain better performance as compared with the table implementation.
376
358config CRYPTO_GHASH 377config CRYPTO_GHASH
359 tristate "GHASH digest algorithm" 378 tristate "GHASH digest algorithm"
360 select CRYPTO_GF128MUL 379 select CRYPTO_GF128MUL
@@ -479,6 +498,13 @@ config CRYPTO_SHA1_ARM
479 SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2) implemented 498 SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2) implemented
480 using optimized ARM assembler. 499 using optimized ARM assembler.
481 500
501config CRYPTO_SHA1_PPC
502 tristate "SHA1 digest algorithm (powerpc)"
503 depends on PPC
504 help
505 This is the powerpc hardware accelerated implementation of the
506 SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2).
507
482config CRYPTO_SHA256 508config CRYPTO_SHA256
483 tristate "SHA224 and SHA256 digest algorithm" 509 tristate "SHA224 and SHA256 digest algorithm"
484 select CRYPTO_HASH 510 select CRYPTO_HASH
@@ -932,8 +958,7 @@ config CRYPTO_KHAZAD
932 <http://www.larc.usp.br/~pbarreto/KhazadPage.html> 958 <http://www.larc.usp.br/~pbarreto/KhazadPage.html>
933 959
934config CRYPTO_SALSA20 960config CRYPTO_SALSA20
935 tristate "Salsa20 stream cipher algorithm (EXPERIMENTAL)" 961 tristate "Salsa20 stream cipher algorithm"
936 depends on EXPERIMENTAL
937 select CRYPTO_BLKCIPHER 962 select CRYPTO_BLKCIPHER
938 help 963 help
939 Salsa20 stream cipher algorithm. 964 Salsa20 stream cipher algorithm.
@@ -945,9 +970,8 @@ config CRYPTO_SALSA20
945 Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html> 970 Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html>
946 971
947config CRYPTO_SALSA20_586 972config CRYPTO_SALSA20_586
948 tristate "Salsa20 stream cipher algorithm (i586) (EXPERIMENTAL)" 973 tristate "Salsa20 stream cipher algorithm (i586)"
949 depends on (X86 || UML_X86) && !64BIT 974 depends on (X86 || UML_X86) && !64BIT
950 depends on EXPERIMENTAL
951 select CRYPTO_BLKCIPHER 975 select CRYPTO_BLKCIPHER
952 help 976 help
953 Salsa20 stream cipher algorithm. 977 Salsa20 stream cipher algorithm.
@@ -959,9 +983,8 @@ config CRYPTO_SALSA20_586
959 Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html> 983 Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html>
960 984
961config CRYPTO_SALSA20_X86_64 985config CRYPTO_SALSA20_X86_64
962 tristate "Salsa20 stream cipher algorithm (x86_64) (EXPERIMENTAL)" 986 tristate "Salsa20 stream cipher algorithm (x86_64)"
963 depends on (X86 || UML_X86) && 64BIT 987 depends on (X86 || UML_X86) && 64BIT
964 depends on EXPERIMENTAL
965 select CRYPTO_BLKCIPHER 988 select CRYPTO_BLKCIPHER
966 help 989 help
967 Salsa20 stream cipher algorithm. 990 Salsa20 stream cipher algorithm.
diff --git a/crypto/Makefile b/crypto/Makefile
index d59dec749804..be1a1bebbb86 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -81,6 +81,7 @@ obj-$(CONFIG_CRYPTO_DEFLATE) += deflate.o
81obj-$(CONFIG_CRYPTO_ZLIB) += zlib.o 81obj-$(CONFIG_CRYPTO_ZLIB) += zlib.o
82obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o 82obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o
83obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o 83obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o
84obj-$(CONFIG_CRYPTO_CRC32) += crc32.o
84obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o authencesn.o 85obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o authencesn.o
85obj-$(CONFIG_CRYPTO_LZO) += lzo.o 86obj-$(CONFIG_CRYPTO_LZO) += lzo.o
86obj-$(CONFIG_CRYPTO_842) += 842.o 87obj-$(CONFIG_CRYPTO_842) += 842.o
diff --git a/crypto/ablkcipher.c b/crypto/ablkcipher.c
index 533de9550a82..7d4a8d28277e 100644
--- a/crypto/ablkcipher.c
+++ b/crypto/ablkcipher.c
@@ -388,9 +388,9 @@ static int crypto_ablkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
388{ 388{
389 struct crypto_report_blkcipher rblkcipher; 389 struct crypto_report_blkcipher rblkcipher;
390 390
391 snprintf(rblkcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "ablkcipher"); 391 strncpy(rblkcipher.type, "ablkcipher", sizeof(rblkcipher.type));
392 snprintf(rblkcipher.geniv, CRYPTO_MAX_ALG_NAME, "%s", 392 strncpy(rblkcipher.geniv, alg->cra_ablkcipher.geniv ?: "<default>",
393 alg->cra_ablkcipher.geniv ?: "<default>"); 393 sizeof(rblkcipher.geniv));
394 394
395 rblkcipher.blocksize = alg->cra_blocksize; 395 rblkcipher.blocksize = alg->cra_blocksize;
396 rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize; 396 rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize;
@@ -469,9 +469,9 @@ static int crypto_givcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
469{ 469{
470 struct crypto_report_blkcipher rblkcipher; 470 struct crypto_report_blkcipher rblkcipher;
471 471
472 snprintf(rblkcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "givcipher"); 472 strncpy(rblkcipher.type, "givcipher", sizeof(rblkcipher.type));
473 snprintf(rblkcipher.geniv, CRYPTO_MAX_ALG_NAME, "%s", 473 strncpy(rblkcipher.geniv, alg->cra_ablkcipher.geniv ?: "<built-in>",
474 alg->cra_ablkcipher.geniv ?: "<built-in>"); 474 sizeof(rblkcipher.geniv));
475 475
476 rblkcipher.blocksize = alg->cra_blocksize; 476 rblkcipher.blocksize = alg->cra_blocksize;
477 rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize; 477 rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize;
diff --git a/crypto/aead.c b/crypto/aead.c
index 0b8121ebec07..547491e35c63 100644
--- a/crypto/aead.c
+++ b/crypto/aead.c
@@ -117,9 +117,8 @@ static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
117 struct crypto_report_aead raead; 117 struct crypto_report_aead raead;
118 struct aead_alg *aead = &alg->cra_aead; 118 struct aead_alg *aead = &alg->cra_aead;
119 119
120 snprintf(raead.type, CRYPTO_MAX_ALG_NAME, "%s", "aead"); 120 strncpy(raead.type, "aead", sizeof(raead.type));
121 snprintf(raead.geniv, CRYPTO_MAX_ALG_NAME, "%s", 121 strncpy(raead.geniv, aead->geniv ?: "<built-in>", sizeof(raead.geniv));
122 aead->geniv ?: "<built-in>");
123 122
124 raead.blocksize = alg->cra_blocksize; 123 raead.blocksize = alg->cra_blocksize;
125 raead.maxauthsize = aead->maxauthsize; 124 raead.maxauthsize = aead->maxauthsize;
@@ -203,8 +202,8 @@ static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
203 struct crypto_report_aead raead; 202 struct crypto_report_aead raead;
204 struct aead_alg *aead = &alg->cra_aead; 203 struct aead_alg *aead = &alg->cra_aead;
205 204
206 snprintf(raead.type, CRYPTO_MAX_ALG_NAME, "%s", "nivaead"); 205 strncpy(raead.type, "nivaead", sizeof(raead.type));
207 snprintf(raead.geniv, CRYPTO_MAX_ALG_NAME, "%s", aead->geniv); 206 strncpy(raead.geniv, aead->geniv, sizeof(raead.geniv));
208 207
209 raead.blocksize = alg->cra_blocksize; 208 raead.blocksize = alg->cra_blocksize;
210 raead.maxauthsize = aead->maxauthsize; 209 raead.maxauthsize = aead->maxauthsize;
@@ -282,18 +281,16 @@ struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
282 int err; 281 int err;
283 282
284 algt = crypto_get_attr_type(tb); 283 algt = crypto_get_attr_type(tb);
285 err = PTR_ERR(algt);
286 if (IS_ERR(algt)) 284 if (IS_ERR(algt))
287 return ERR_PTR(err); 285 return ERR_CAST(algt);
288 286
289 if ((algt->type ^ (CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV)) & 287 if ((algt->type ^ (CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV)) &
290 algt->mask) 288 algt->mask)
291 return ERR_PTR(-EINVAL); 289 return ERR_PTR(-EINVAL);
292 290
293 name = crypto_attr_alg_name(tb[1]); 291 name = crypto_attr_alg_name(tb[1]);
294 err = PTR_ERR(name);
295 if (IS_ERR(name)) 292 if (IS_ERR(name))
296 return ERR_PTR(err); 293 return ERR_CAST(name);
297 294
298 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); 295 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
299 if (!inst) 296 if (!inst)
diff --git a/crypto/ahash.c b/crypto/ahash.c
index 3887856c2dd6..793a27f2493e 100644
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -404,7 +404,7 @@ static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg)
404{ 404{
405 struct crypto_report_hash rhash; 405 struct crypto_report_hash rhash;
406 406
407 snprintf(rhash.type, CRYPTO_MAX_ALG_NAME, "%s", "ahash"); 407 strncpy(rhash.type, "ahash", sizeof(rhash.type));
408 408
409 rhash.blocksize = alg->cra_blocksize; 409 rhash.blocksize = alg->cra_blocksize;
410 rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize; 410 rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;
diff --git a/crypto/algapi.c b/crypto/algapi.c
index c3b9bfeeb7ff..6149a6e09643 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -447,7 +447,7 @@ EXPORT_SYMBOL_GPL(crypto_register_template);
447void crypto_unregister_template(struct crypto_template *tmpl) 447void crypto_unregister_template(struct crypto_template *tmpl)
448{ 448{
449 struct crypto_instance *inst; 449 struct crypto_instance *inst;
450 struct hlist_node *p, *n; 450 struct hlist_node *n;
451 struct hlist_head *list; 451 struct hlist_head *list;
452 LIST_HEAD(users); 452 LIST_HEAD(users);
453 453
@@ -457,7 +457,7 @@ void crypto_unregister_template(struct crypto_template *tmpl)
457 list_del_init(&tmpl->list); 457 list_del_init(&tmpl->list);
458 458
459 list = &tmpl->instances; 459 list = &tmpl->instances;
460 hlist_for_each_entry(inst, p, list, list) { 460 hlist_for_each_entry(inst, list, list) {
461 int err = crypto_remove_alg(&inst->alg, &users); 461 int err = crypto_remove_alg(&inst->alg, &users);
462 BUG_ON(err); 462 BUG_ON(err);
463 } 463 }
@@ -466,7 +466,7 @@ void crypto_unregister_template(struct crypto_template *tmpl)
466 466
467 up_write(&crypto_alg_sem); 467 up_write(&crypto_alg_sem);
468 468
469 hlist_for_each_entry_safe(inst, p, n, list, list) { 469 hlist_for_each_entry_safe(inst, n, list, list) {
470 BUG_ON(atomic_read(&inst->alg.cra_refcnt) != 1); 470 BUG_ON(atomic_read(&inst->alg.cra_refcnt) != 1);
471 tmpl->free(inst); 471 tmpl->free(inst);
472 } 472 }
@@ -749,12 +749,10 @@ struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
749 u32 type, u32 mask) 749 u32 type, u32 mask)
750{ 750{
751 const char *name; 751 const char *name;
752 int err;
753 752
754 name = crypto_attr_alg_name(rta); 753 name = crypto_attr_alg_name(rta);
755 err = PTR_ERR(name);
756 if (IS_ERR(name)) 754 if (IS_ERR(name))
757 return ERR_PTR(err); 755 return ERR_CAST(name);
758 756
759 return crypto_find_alg(name, frontend, type, mask); 757 return crypto_find_alg(name, frontend, type, mask);
760} 758}
diff --git a/crypto/async_tx/async_memcpy.c b/crypto/async_tx/async_memcpy.c
index 361b5e8239bc..9e62feffb374 100644
--- a/crypto/async_tx/async_memcpy.c
+++ b/crypto/async_tx/async_memcpy.c
@@ -67,6 +67,12 @@ async_memcpy(struct page *dest, struct page *src, unsigned int dest_offset,
67 67
68 tx = device->device_prep_dma_memcpy(chan, dma_dest, dma_src, 68 tx = device->device_prep_dma_memcpy(chan, dma_dest, dma_src,
69 len, dma_prep_flags); 69 len, dma_prep_flags);
70 if (!tx) {
71 dma_unmap_page(device->dev, dma_dest, len,
72 DMA_FROM_DEVICE);
73 dma_unmap_page(device->dev, dma_src, len,
74 DMA_TO_DEVICE);
75 }
70 } 76 }
71 77
72 if (tx) { 78 if (tx) {
diff --git a/crypto/async_tx/async_memset.c b/crypto/async_tx/async_memset.c
index 58e4a8752aee..05a4d1e00148 100644
--- a/crypto/async_tx/async_memset.c
+++ b/crypto/async_tx/async_memset.c
@@ -25,6 +25,7 @@
25 */ 25 */
26#include <linux/kernel.h> 26#include <linux/kernel.h>
27#include <linux/interrupt.h> 27#include <linux/interrupt.h>
28#include <linux/module.h>
28#include <linux/mm.h> 29#include <linux/mm.h>
29#include <linux/dma-mapping.h> 30#include <linux/dma-mapping.h>
30#include <linux/async_tx.h> 31#include <linux/async_tx.h>
diff --git a/crypto/async_tx/async_tx.c b/crypto/async_tx/async_tx.c
index 842120979374..7be34248b450 100644
--- a/crypto/async_tx/async_tx.c
+++ b/crypto/async_tx/async_tx.c
@@ -128,8 +128,8 @@ async_tx_channel_switch(struct dma_async_tx_descriptor *depend_tx,
128 } 128 }
129 device->device_issue_pending(chan); 129 device->device_issue_pending(chan);
130 } else { 130 } else {
131 if (dma_wait_for_async_tx(depend_tx) == DMA_ERROR) 131 if (dma_wait_for_async_tx(depend_tx) != DMA_SUCCESS)
132 panic("%s: DMA_ERROR waiting for depend_tx\n", 132 panic("%s: DMA error waiting for depend_tx\n",
133 __func__); 133 __func__);
134 tx->tx_submit(tx); 134 tx->tx_submit(tx);
135 } 135 }
@@ -280,8 +280,9 @@ void async_tx_quiesce(struct dma_async_tx_descriptor **tx)
280 * we are referring to the correct operation 280 * we are referring to the correct operation
281 */ 281 */
282 BUG_ON(async_tx_test_ack(*tx)); 282 BUG_ON(async_tx_test_ack(*tx));
283 if (dma_wait_for_async_tx(*tx) == DMA_ERROR) 283 if (dma_wait_for_async_tx(*tx) != DMA_SUCCESS)
284 panic("DMA_ERROR waiting for transaction\n"); 284 panic("%s: DMA error waiting for transaction\n",
285 __func__);
285 async_tx_ack(*tx); 286 async_tx_ack(*tx);
286 *tx = NULL; 287 *tx = NULL;
287 } 288 }
diff --git a/crypto/async_tx/async_xor.c b/crypto/async_tx/async_xor.c
index 154cc84381c2..8ade0a0481c6 100644
--- a/crypto/async_tx/async_xor.c
+++ b/crypto/async_tx/async_xor.c
@@ -230,9 +230,7 @@ EXPORT_SYMBOL_GPL(async_xor);
230 230
231static int page_is_zero(struct page *p, unsigned int offset, size_t len) 231static int page_is_zero(struct page *p, unsigned int offset, size_t len)
232{ 232{
233 char *a = page_address(p) + offset; 233 return !memchr_inv(page_address(p) + offset, 0, len);
234 return ((*(u32 *) a) == 0 &&
235 memcmp(a, a + 4, len - 4) == 0);
236} 234}
237 235
238static inline struct dma_chan * 236static inline struct dma_chan *
diff --git a/crypto/authenc.c b/crypto/authenc.c
index d0583a4489e6..ffce19de05cf 100644
--- a/crypto/authenc.c
+++ b/crypto/authenc.c
@@ -592,9 +592,8 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
592 int err; 592 int err;
593 593
594 algt = crypto_get_attr_type(tb); 594 algt = crypto_get_attr_type(tb);
595 err = PTR_ERR(algt);
596 if (IS_ERR(algt)) 595 if (IS_ERR(algt))
597 return ERR_PTR(err); 596 return ERR_CAST(algt);
598 597
599 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 598 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
600 return ERR_PTR(-EINVAL); 599 return ERR_PTR(-EINVAL);
diff --git a/crypto/authencesn.c b/crypto/authencesn.c
index 136b68b9d8d4..ab53762fc309 100644
--- a/crypto/authencesn.c
+++ b/crypto/authencesn.c
@@ -715,9 +715,8 @@ static struct crypto_instance *crypto_authenc_esn_alloc(struct rtattr **tb)
715 int err; 715 int err;
716 716
717 algt = crypto_get_attr_type(tb); 717 algt = crypto_get_attr_type(tb);
718 err = PTR_ERR(algt);
719 if (IS_ERR(algt)) 718 if (IS_ERR(algt))
720 return ERR_PTR(err); 719 return ERR_CAST(algt);
721 720
722 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 721 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
723 return ERR_PTR(-EINVAL); 722 return ERR_PTR(-EINVAL);
diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c
index a8d85a1d670e..a79e7e9ab86e 100644
--- a/crypto/blkcipher.c
+++ b/crypto/blkcipher.c
@@ -499,9 +499,9 @@ static int crypto_blkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
499{ 499{
500 struct crypto_report_blkcipher rblkcipher; 500 struct crypto_report_blkcipher rblkcipher;
501 501
502 snprintf(rblkcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "blkcipher"); 502 strncpy(rblkcipher.type, "blkcipher", sizeof(rblkcipher.type));
503 snprintf(rblkcipher.geniv, CRYPTO_MAX_ALG_NAME, "%s", 503 strncpy(rblkcipher.geniv, alg->cra_blkcipher.geniv ?: "<default>",
504 alg->cra_blkcipher.geniv ?: "<default>"); 504 sizeof(rblkcipher.geniv));
505 505
506 rblkcipher.blocksize = alg->cra_blocksize; 506 rblkcipher.blocksize = alg->cra_blocksize;
507 rblkcipher.min_keysize = alg->cra_blkcipher.min_keysize; 507 rblkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
@@ -588,18 +588,16 @@ struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl,
588 int err; 588 int err;
589 589
590 algt = crypto_get_attr_type(tb); 590 algt = crypto_get_attr_type(tb);
591 err = PTR_ERR(algt);
592 if (IS_ERR(algt)) 591 if (IS_ERR(algt))
593 return ERR_PTR(err); 592 return ERR_CAST(algt);
594 593
595 if ((algt->type ^ (CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV)) & 594 if ((algt->type ^ (CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV)) &
596 algt->mask) 595 algt->mask)
597 return ERR_PTR(-EINVAL); 596 return ERR_PTR(-EINVAL);
598 597
599 name = crypto_attr_alg_name(tb[1]); 598 name = crypto_attr_alg_name(tb[1]);
600 err = PTR_ERR(name);
601 if (IS_ERR(name)) 599 if (IS_ERR(name))
602 return ERR_PTR(err); 600 return ERR_CAST(name);
603 601
604 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); 602 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
605 if (!inst) 603 if (!inst)
diff --git a/crypto/ccm.c b/crypto/ccm.c
index 32fe1bb5decb..499c91717d93 100644
--- a/crypto/ccm.c
+++ b/crypto/ccm.c
@@ -484,18 +484,16 @@ static struct crypto_instance *crypto_ccm_alloc_common(struct rtattr **tb,
484 int err; 484 int err;
485 485
486 algt = crypto_get_attr_type(tb); 486 algt = crypto_get_attr_type(tb);
487 err = PTR_ERR(algt);
488 if (IS_ERR(algt)) 487 if (IS_ERR(algt))
489 return ERR_PTR(err); 488 return ERR_CAST(algt);
490 489
491 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 490 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
492 return ERR_PTR(-EINVAL); 491 return ERR_PTR(-EINVAL);
493 492
494 cipher = crypto_alg_mod_lookup(cipher_name, CRYPTO_ALG_TYPE_CIPHER, 493 cipher = crypto_alg_mod_lookup(cipher_name, CRYPTO_ALG_TYPE_CIPHER,
495 CRYPTO_ALG_TYPE_MASK); 494 CRYPTO_ALG_TYPE_MASK);
496 err = PTR_ERR(cipher);
497 if (IS_ERR(cipher)) 495 if (IS_ERR(cipher))
498 return ERR_PTR(err); 496 return ERR_CAST(cipher);
499 497
500 err = -EINVAL; 498 err = -EINVAL;
501 if (cipher->cra_blocksize != 16) 499 if (cipher->cra_blocksize != 16)
@@ -573,15 +571,13 @@ out_put_cipher:
573 571
574static struct crypto_instance *crypto_ccm_alloc(struct rtattr **tb) 572static struct crypto_instance *crypto_ccm_alloc(struct rtattr **tb)
575{ 573{
576 int err;
577 const char *cipher_name; 574 const char *cipher_name;
578 char ctr_name[CRYPTO_MAX_ALG_NAME]; 575 char ctr_name[CRYPTO_MAX_ALG_NAME];
579 char full_name[CRYPTO_MAX_ALG_NAME]; 576 char full_name[CRYPTO_MAX_ALG_NAME];
580 577
581 cipher_name = crypto_attr_alg_name(tb[1]); 578 cipher_name = crypto_attr_alg_name(tb[1]);
582 err = PTR_ERR(cipher_name);
583 if (IS_ERR(cipher_name)) 579 if (IS_ERR(cipher_name))
584 return ERR_PTR(err); 580 return ERR_CAST(cipher_name);
585 581
586 if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", 582 if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
587 cipher_name) >= CRYPTO_MAX_ALG_NAME) 583 cipher_name) >= CRYPTO_MAX_ALG_NAME)
@@ -612,20 +608,17 @@ static struct crypto_template crypto_ccm_tmpl = {
612 608
613static struct crypto_instance *crypto_ccm_base_alloc(struct rtattr **tb) 609static struct crypto_instance *crypto_ccm_base_alloc(struct rtattr **tb)
614{ 610{
615 int err;
616 const char *ctr_name; 611 const char *ctr_name;
617 const char *cipher_name; 612 const char *cipher_name;
618 char full_name[CRYPTO_MAX_ALG_NAME]; 613 char full_name[CRYPTO_MAX_ALG_NAME];
619 614
620 ctr_name = crypto_attr_alg_name(tb[1]); 615 ctr_name = crypto_attr_alg_name(tb[1]);
621 err = PTR_ERR(ctr_name);
622 if (IS_ERR(ctr_name)) 616 if (IS_ERR(ctr_name))
623 return ERR_PTR(err); 617 return ERR_CAST(ctr_name);
624 618
625 cipher_name = crypto_attr_alg_name(tb[2]); 619 cipher_name = crypto_attr_alg_name(tb[2]);
626 err = PTR_ERR(cipher_name);
627 if (IS_ERR(cipher_name)) 620 if (IS_ERR(cipher_name))
628 return ERR_PTR(err); 621 return ERR_CAST(cipher_name);
629 622
630 if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)", 623 if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
631 ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME) 624 ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
@@ -760,17 +753,15 @@ static struct crypto_instance *crypto_rfc4309_alloc(struct rtattr **tb)
760 int err; 753 int err;
761 754
762 algt = crypto_get_attr_type(tb); 755 algt = crypto_get_attr_type(tb);
763 err = PTR_ERR(algt);
764 if (IS_ERR(algt)) 756 if (IS_ERR(algt))
765 return ERR_PTR(err); 757 return ERR_CAST(algt);
766 758
767 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 759 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
768 return ERR_PTR(-EINVAL); 760 return ERR_PTR(-EINVAL);
769 761
770 ccm_name = crypto_attr_alg_name(tb[1]); 762 ccm_name = crypto_attr_alg_name(tb[1]);
771 err = PTR_ERR(ccm_name);
772 if (IS_ERR(ccm_name)) 763 if (IS_ERR(ccm_name))
773 return ERR_PTR(err); 764 return ERR_CAST(ccm_name);
774 765
775 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); 766 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
776 if (!inst) 767 if (!inst)
diff --git a/crypto/chainiv.c b/crypto/chainiv.c
index ba200b07449d..834d8dd3d4fc 100644
--- a/crypto/chainiv.c
+++ b/crypto/chainiv.c
@@ -291,9 +291,8 @@ static struct crypto_instance *chainiv_alloc(struct rtattr **tb)
291 int err; 291 int err;
292 292
293 algt = crypto_get_attr_type(tb); 293 algt = crypto_get_attr_type(tb);
294 err = PTR_ERR(algt);
295 if (IS_ERR(algt)) 294 if (IS_ERR(algt))
296 return ERR_PTR(err); 295 return ERR_CAST(algt);
297 296
298 err = crypto_get_default_rng(); 297 err = crypto_get_default_rng();
299 if (err) 298 if (err)
diff --git a/crypto/crc32.c b/crypto/crc32.c
new file mode 100644
index 000000000000..9d1c41569898
--- /dev/null
+++ b/crypto/crc32.c
@@ -0,0 +1,158 @@
1/* GPL HEADER START
2 *
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License version 2 only,
7 * as published by the Free Software Foundation.
8 *
9 * This program is distributed in the hope that it will be useful, but
10 * WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * General Public License version 2 for more details (a copy is included
13 * in the LICENSE file that accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License
16 * version 2 along with this program; If not, see http://www.gnu.org/licenses
17 *
18 * Please visit http://www.xyratex.com/contact if you need additional
19 * information or have any questions.
20 *
21 * GPL HEADER END
22 */
23
24/*
25 * Copyright 2012 Xyratex Technology Limited
26 */
27
28/*
29 * This is crypto api shash wrappers to crc32_le.
30 */
31
32#include <linux/crc32.h>
33#include <crypto/internal/hash.h>
34#include <linux/init.h>
35#include <linux/module.h>
36#include <linux/string.h>
37#include <linux/kernel.h>
38
39#define CHKSUM_BLOCK_SIZE 1
40#define CHKSUM_DIGEST_SIZE 4
41
42static u32 __crc32_le(u32 crc, unsigned char const *p, size_t len)
43{
44 return crc32_le(crc, p, len);
45}
46
47/** No default init with ~0 */
48static int crc32_cra_init(struct crypto_tfm *tfm)
49{
50 u32 *key = crypto_tfm_ctx(tfm);
51
52 *key = 0;
53
54 return 0;
55}
56
57
58/*
59 * Setting the seed allows arbitrary accumulators and flexible XOR policy
60 * If your algorithm starts with ~0, then XOR with ~0 before you set
61 * the seed.
62 */
63static int crc32_setkey(struct crypto_shash *hash, const u8 *key,
64 unsigned int keylen)
65{
66 u32 *mctx = crypto_shash_ctx(hash);
67
68 if (keylen != sizeof(u32)) {
69 crypto_shash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN);
70 return -EINVAL;
71 }
72 *mctx = le32_to_cpup((__le32 *)key);
73 return 0;
74}
75
76static int crc32_init(struct shash_desc *desc)
77{
78 u32 *mctx = crypto_shash_ctx(desc->tfm);
79 u32 *crcp = shash_desc_ctx(desc);
80
81 *crcp = *mctx;
82
83 return 0;
84}
85
86static int crc32_update(struct shash_desc *desc, const u8 *data,
87 unsigned int len)
88{
89 u32 *crcp = shash_desc_ctx(desc);
90
91 *crcp = __crc32_le(*crcp, data, len);
92 return 0;
93}
94
95/* No final XOR 0xFFFFFFFF, like crc32_le */
96static int __crc32_finup(u32 *crcp, const u8 *data, unsigned int len,
97 u8 *out)
98{
99 *(__le32 *)out = cpu_to_le32(__crc32_le(*crcp, data, len));
100 return 0;
101}
102
103static int crc32_finup(struct shash_desc *desc, const u8 *data,
104 unsigned int len, u8 *out)
105{
106 return __crc32_finup(shash_desc_ctx(desc), data, len, out);
107}
108
109static int crc32_final(struct shash_desc *desc, u8 *out)
110{
111 u32 *crcp = shash_desc_ctx(desc);
112
113 *(__le32 *)out = cpu_to_le32p(crcp);
114 return 0;
115}
116
117static int crc32_digest(struct shash_desc *desc, const u8 *data,
118 unsigned int len, u8 *out)
119{
120 return __crc32_finup(crypto_shash_ctx(desc->tfm), data, len,
121 out);
122}
123static struct shash_alg alg = {
124 .setkey = crc32_setkey,
125 .init = crc32_init,
126 .update = crc32_update,
127 .final = crc32_final,
128 .finup = crc32_finup,
129 .digest = crc32_digest,
130 .descsize = sizeof(u32),
131 .digestsize = CHKSUM_DIGEST_SIZE,
132 .base = {
133 .cra_name = "crc32",
134 .cra_driver_name = "crc32-table",
135 .cra_priority = 100,
136 .cra_blocksize = CHKSUM_BLOCK_SIZE,
137 .cra_ctxsize = sizeof(u32),
138 .cra_module = THIS_MODULE,
139 .cra_init = crc32_cra_init,
140 }
141};
142
143static int __init crc32_mod_init(void)
144{
145 return crypto_register_shash(&alg);
146}
147
148static void __exit crc32_mod_fini(void)
149{
150 crypto_unregister_shash(&alg);
151}
152
153module_init(crc32_mod_init);
154module_exit(crc32_mod_fini);
155
156MODULE_AUTHOR("Alexander Boyko <alexander_boyko@xyratex.com>");
157MODULE_DESCRIPTION("CRC32 calculations wrapper for lib/crc32");
158MODULE_LICENSE("GPL");
diff --git a/crypto/crypto_user.c b/crypto/crypto_user.c
index 35d700a97d79..dfd511fb39ee 100644
--- a/crypto/crypto_user.c
+++ b/crypto/crypto_user.c
@@ -30,6 +30,8 @@
30 30
31#include "internal.h" 31#include "internal.h"
32 32
33#define null_terminated(x) (strnlen(x, sizeof(x)) < sizeof(x))
34
33static DEFINE_MUTEX(crypto_cfg_mutex); 35static DEFINE_MUTEX(crypto_cfg_mutex);
34 36
35/* The crypto netlink socket */ 37/* The crypto netlink socket */
@@ -75,7 +77,7 @@ static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg)
75{ 77{
76 struct crypto_report_cipher rcipher; 78 struct crypto_report_cipher rcipher;
77 79
78 snprintf(rcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "cipher"); 80 strncpy(rcipher.type, "cipher", sizeof(rcipher.type));
79 81
80 rcipher.blocksize = alg->cra_blocksize; 82 rcipher.blocksize = alg->cra_blocksize;
81 rcipher.min_keysize = alg->cra_cipher.cia_min_keysize; 83 rcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
@@ -94,8 +96,7 @@ static int crypto_report_comp(struct sk_buff *skb, struct crypto_alg *alg)
94{ 96{
95 struct crypto_report_comp rcomp; 97 struct crypto_report_comp rcomp;
96 98
97 snprintf(rcomp.type, CRYPTO_MAX_ALG_NAME, "%s", "compression"); 99 strncpy(rcomp.type, "compression", sizeof(rcomp.type));
98
99 if (nla_put(skb, CRYPTOCFGA_REPORT_COMPRESS, 100 if (nla_put(skb, CRYPTOCFGA_REPORT_COMPRESS,
100 sizeof(struct crypto_report_comp), &rcomp)) 101 sizeof(struct crypto_report_comp), &rcomp))
101 goto nla_put_failure; 102 goto nla_put_failure;
@@ -108,12 +109,14 @@ nla_put_failure:
108static int crypto_report_one(struct crypto_alg *alg, 109static int crypto_report_one(struct crypto_alg *alg,
109 struct crypto_user_alg *ualg, struct sk_buff *skb) 110 struct crypto_user_alg *ualg, struct sk_buff *skb)
110{ 111{
111 memcpy(&ualg->cru_name, &alg->cra_name, sizeof(ualg->cru_name)); 112 strncpy(ualg->cru_name, alg->cra_name, sizeof(ualg->cru_name));
112 memcpy(&ualg->cru_driver_name, &alg->cra_driver_name, 113 strncpy(ualg->cru_driver_name, alg->cra_driver_name,
113 sizeof(ualg->cru_driver_name)); 114 sizeof(ualg->cru_driver_name));
114 memcpy(&ualg->cru_module_name, module_name(alg->cra_module), 115 strncpy(ualg->cru_module_name, module_name(alg->cra_module),
115 CRYPTO_MAX_ALG_NAME); 116 sizeof(ualg->cru_module_name));
116 117
118 ualg->cru_type = 0;
119 ualg->cru_mask = 0;
117 ualg->cru_flags = alg->cra_flags; 120 ualg->cru_flags = alg->cra_flags;
118 ualg->cru_refcnt = atomic_read(&alg->cra_refcnt); 121 ualg->cru_refcnt = atomic_read(&alg->cra_refcnt);
119 122
@@ -122,8 +125,7 @@ static int crypto_report_one(struct crypto_alg *alg,
122 if (alg->cra_flags & CRYPTO_ALG_LARVAL) { 125 if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
123 struct crypto_report_larval rl; 126 struct crypto_report_larval rl;
124 127
125 snprintf(rl.type, CRYPTO_MAX_ALG_NAME, "%s", "larval"); 128 strncpy(rl.type, "larval", sizeof(rl.type));
126
127 if (nla_put(skb, CRYPTOCFGA_REPORT_LARVAL, 129 if (nla_put(skb, CRYPTOCFGA_REPORT_LARVAL,
128 sizeof(struct crypto_report_larval), &rl)) 130 sizeof(struct crypto_report_larval), &rl))
129 goto nla_put_failure; 131 goto nla_put_failure;
@@ -196,7 +198,10 @@ static int crypto_report(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
196 struct crypto_dump_info info; 198 struct crypto_dump_info info;
197 int err; 199 int err;
198 200
199 if (!p->cru_driver_name) 201 if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
202 return -EINVAL;
203
204 if (!p->cru_driver_name[0])
200 return -EINVAL; 205 return -EINVAL;
201 206
202 alg = crypto_alg_match(p, 1); 207 alg = crypto_alg_match(p, 1);
@@ -260,6 +265,9 @@ static int crypto_update_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
260 struct nlattr *priority = attrs[CRYPTOCFGA_PRIORITY_VAL]; 265 struct nlattr *priority = attrs[CRYPTOCFGA_PRIORITY_VAL];
261 LIST_HEAD(list); 266 LIST_HEAD(list);
262 267
268 if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
269 return -EINVAL;
270
263 if (priority && !strlen(p->cru_driver_name)) 271 if (priority && !strlen(p->cru_driver_name))
264 return -EINVAL; 272 return -EINVAL;
265 273
@@ -287,6 +295,9 @@ static int crypto_del_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
287 struct crypto_alg *alg; 295 struct crypto_alg *alg;
288 struct crypto_user_alg *p = nlmsg_data(nlh); 296 struct crypto_user_alg *p = nlmsg_data(nlh);
289 297
298 if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
299 return -EINVAL;
300
290 alg = crypto_alg_match(p, 1); 301 alg = crypto_alg_match(p, 1);
291 if (!alg) 302 if (!alg)
292 return -ENOENT; 303 return -ENOENT;
@@ -368,6 +379,9 @@ static int crypto_add_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
368 struct crypto_user_alg *p = nlmsg_data(nlh); 379 struct crypto_user_alg *p = nlmsg_data(nlh);
369 struct nlattr *priority = attrs[CRYPTOCFGA_PRIORITY_VAL]; 380 struct nlattr *priority = attrs[CRYPTOCFGA_PRIORITY_VAL];
370 381
382 if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
383 return -EINVAL;
384
371 if (strlen(p->cru_driver_name)) 385 if (strlen(p->cru_driver_name))
372 exact = 1; 386 exact = 1;
373 387
diff --git a/crypto/ctr.c b/crypto/ctr.c
index 4ca7222cfeb6..f2b94f27bb2c 100644
--- a/crypto/ctr.c
+++ b/crypto/ctr.c
@@ -12,6 +12,7 @@
12 12
13#include <crypto/algapi.h> 13#include <crypto/algapi.h>
14#include <crypto/ctr.h> 14#include <crypto/ctr.h>
15#include <crypto/internal/skcipher.h>
15#include <linux/err.h> 16#include <linux/err.h>
16#include <linux/init.h> 17#include <linux/init.h>
17#include <linux/kernel.h> 18#include <linux/kernel.h>
@@ -25,10 +26,15 @@ struct crypto_ctr_ctx {
25}; 26};
26 27
27struct crypto_rfc3686_ctx { 28struct crypto_rfc3686_ctx {
28 struct crypto_blkcipher *child; 29 struct crypto_ablkcipher *child;
29 u8 nonce[CTR_RFC3686_NONCE_SIZE]; 30 u8 nonce[CTR_RFC3686_NONCE_SIZE];
30}; 31};
31 32
33struct crypto_rfc3686_req_ctx {
34 u8 iv[CTR_RFC3686_BLOCK_SIZE];
35 struct ablkcipher_request subreq CRYPTO_MINALIGN_ATTR;
36};
37
32static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key, 38static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
33 unsigned int keylen) 39 unsigned int keylen)
34{ 40{
@@ -243,11 +249,11 @@ static struct crypto_template crypto_ctr_tmpl = {
243 .module = THIS_MODULE, 249 .module = THIS_MODULE,
244}; 250};
245 251
246static int crypto_rfc3686_setkey(struct crypto_tfm *parent, const u8 *key, 252static int crypto_rfc3686_setkey(struct crypto_ablkcipher *parent,
247 unsigned int keylen) 253 const u8 *key, unsigned int keylen)
248{ 254{
249 struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(parent); 255 struct crypto_rfc3686_ctx *ctx = crypto_ablkcipher_ctx(parent);
250 struct crypto_blkcipher *child = ctx->child; 256 struct crypto_ablkcipher *child = ctx->child;
251 int err; 257 int err;
252 258
253 /* the nonce is stored in bytes at end of key */ 259 /* the nonce is stored in bytes at end of key */
@@ -259,59 +265,64 @@ static int crypto_rfc3686_setkey(struct crypto_tfm *parent, const u8 *key,
259 265
260 keylen -= CTR_RFC3686_NONCE_SIZE; 266 keylen -= CTR_RFC3686_NONCE_SIZE;
261 267
262 crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK); 268 crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
263 crypto_blkcipher_set_flags(child, crypto_tfm_get_flags(parent) & 269 crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
264 CRYPTO_TFM_REQ_MASK); 270 CRYPTO_TFM_REQ_MASK);
265 err = crypto_blkcipher_setkey(child, key, keylen); 271 err = crypto_ablkcipher_setkey(child, key, keylen);
266 crypto_tfm_set_flags(parent, crypto_blkcipher_get_flags(child) & 272 crypto_ablkcipher_set_flags(parent, crypto_ablkcipher_get_flags(child) &
267 CRYPTO_TFM_RES_MASK); 273 CRYPTO_TFM_RES_MASK);
268 274
269 return err; 275 return err;
270} 276}
271 277
272static int crypto_rfc3686_crypt(struct blkcipher_desc *desc, 278static int crypto_rfc3686_crypt(struct ablkcipher_request *req)
273 struct scatterlist *dst,
274 struct scatterlist *src, unsigned int nbytes)
275{ 279{
276 struct crypto_blkcipher *tfm = desc->tfm; 280 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
277 struct crypto_rfc3686_ctx *ctx = crypto_blkcipher_ctx(tfm); 281 struct crypto_rfc3686_ctx *ctx = crypto_ablkcipher_ctx(tfm);
278 struct crypto_blkcipher *child = ctx->child; 282 struct crypto_ablkcipher *child = ctx->child;
279 unsigned long alignmask = crypto_blkcipher_alignmask(tfm); 283 unsigned long align = crypto_ablkcipher_alignmask(tfm);
280 u8 ivblk[CTR_RFC3686_BLOCK_SIZE + alignmask]; 284 struct crypto_rfc3686_req_ctx *rctx =
281 u8 *iv = PTR_ALIGN(ivblk + 0, alignmask + 1); 285 (void *)PTR_ALIGN((u8 *)ablkcipher_request_ctx(req), align + 1);
282 u8 *info = desc->info; 286 struct ablkcipher_request *subreq = &rctx->subreq;
283 int err; 287 u8 *iv = rctx->iv;
284 288
285 /* set up counter block */ 289 /* set up counter block */
286 memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE); 290 memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
287 memcpy(iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE); 291 memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->info, CTR_RFC3686_IV_SIZE);
288 292
289 /* initialize counter portion of counter block */ 293 /* initialize counter portion of counter block */
290 *(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) = 294 *(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
291 cpu_to_be32(1); 295 cpu_to_be32(1);
292 296
293 desc->tfm = child; 297 ablkcipher_request_set_tfm(subreq, child);
294 desc->info = iv; 298 ablkcipher_request_set_callback(subreq, req->base.flags,
295 err = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes); 299 req->base.complete, req->base.data);
296 desc->tfm = tfm; 300 ablkcipher_request_set_crypt(subreq, req->src, req->dst, req->nbytes,
297 desc->info = info; 301 iv);
298 302
299 return err; 303 return crypto_ablkcipher_encrypt(subreq);
300} 304}
301 305
302static int crypto_rfc3686_init_tfm(struct crypto_tfm *tfm) 306static int crypto_rfc3686_init_tfm(struct crypto_tfm *tfm)
303{ 307{
304 struct crypto_instance *inst = (void *)tfm->__crt_alg; 308 struct crypto_instance *inst = (void *)tfm->__crt_alg;
305 struct crypto_spawn *spawn = crypto_instance_ctx(inst); 309 struct crypto_skcipher_spawn *spawn = crypto_instance_ctx(inst);
306 struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm); 310 struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
307 struct crypto_blkcipher *cipher; 311 struct crypto_ablkcipher *cipher;
312 unsigned long align;
308 313
309 cipher = crypto_spawn_blkcipher(spawn); 314 cipher = crypto_spawn_skcipher(spawn);
310 if (IS_ERR(cipher)) 315 if (IS_ERR(cipher))
311 return PTR_ERR(cipher); 316 return PTR_ERR(cipher);
312 317
313 ctx->child = cipher; 318 ctx->child = cipher;
314 319
320 align = crypto_tfm_alg_alignmask(tfm);
321 align &= ~(crypto_tfm_ctx_alignment() - 1);
322 tfm->crt_ablkcipher.reqsize = align +
323 sizeof(struct crypto_rfc3686_req_ctx) +
324 crypto_ablkcipher_reqsize(cipher);
325
315 return 0; 326 return 0;
316} 327}
317 328
@@ -319,74 +330,108 @@ static void crypto_rfc3686_exit_tfm(struct crypto_tfm *tfm)
319{ 330{
320 struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm); 331 struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
321 332
322 crypto_free_blkcipher(ctx->child); 333 crypto_free_ablkcipher(ctx->child);
323} 334}
324 335
325static struct crypto_instance *crypto_rfc3686_alloc(struct rtattr **tb) 336static struct crypto_instance *crypto_rfc3686_alloc(struct rtattr **tb)
326{ 337{
338 struct crypto_attr_type *algt;
327 struct crypto_instance *inst; 339 struct crypto_instance *inst;
328 struct crypto_alg *alg; 340 struct crypto_alg *alg;
341 struct crypto_skcipher_spawn *spawn;
342 const char *cipher_name;
329 int err; 343 int err;
330 344
331 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER); 345 algt = crypto_get_attr_type(tb);
346 if (IS_ERR(algt))
347 return ERR_CAST(algt);
348
349 if ((algt->type ^ CRYPTO_ALG_TYPE_BLKCIPHER) & algt->mask)
350 return ERR_PTR(-EINVAL);
351
352 cipher_name = crypto_attr_alg_name(tb[1]);
353 if (IS_ERR(cipher_name))
354 return ERR_CAST(cipher_name);
355
356 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
357 if (!inst)
358 return ERR_PTR(-ENOMEM);
359
360 spawn = crypto_instance_ctx(inst);
361
362 crypto_set_skcipher_spawn(spawn, inst);
363 err = crypto_grab_skcipher(spawn, cipher_name, 0,
364 crypto_requires_sync(algt->type,
365 algt->mask));
332 if (err) 366 if (err)
333 return ERR_PTR(err); 367 goto err_free_inst;
334 368
335 alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER, 369 alg = crypto_skcipher_spawn_alg(spawn);
336 CRYPTO_ALG_TYPE_MASK);
337 err = PTR_ERR(alg);
338 if (IS_ERR(alg))
339 return ERR_PTR(err);
340 370
341 /* We only support 16-byte blocks. */ 371 /* We only support 16-byte blocks. */
342 err = -EINVAL; 372 err = -EINVAL;
343 if (alg->cra_blkcipher.ivsize != CTR_RFC3686_BLOCK_SIZE) 373 if (alg->cra_ablkcipher.ivsize != CTR_RFC3686_BLOCK_SIZE)
344 goto out_put_alg; 374 goto err_drop_spawn;
345 375
346 /* Not a stream cipher? */ 376 /* Not a stream cipher? */
347 if (alg->cra_blocksize != 1) 377 if (alg->cra_blocksize != 1)
348 goto out_put_alg; 378 goto err_drop_spawn;
349 379
350 inst = crypto_alloc_instance("rfc3686", alg); 380 err = -ENAMETOOLONG;
351 if (IS_ERR(inst)) 381 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "rfc3686(%s)",
352 goto out; 382 alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
383 goto err_drop_spawn;
384 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
385 "rfc3686(%s)", alg->cra_driver_name) >=
386 CRYPTO_MAX_ALG_NAME)
387 goto err_drop_spawn;
353 388
354 inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
355 inst->alg.cra_priority = alg->cra_priority; 389 inst->alg.cra_priority = alg->cra_priority;
356 inst->alg.cra_blocksize = 1; 390 inst->alg.cra_blocksize = 1;
357 inst->alg.cra_alignmask = alg->cra_alignmask; 391 inst->alg.cra_alignmask = alg->cra_alignmask;
358 inst->alg.cra_type = &crypto_blkcipher_type;
359 392
360 inst->alg.cra_blkcipher.ivsize = CTR_RFC3686_IV_SIZE; 393 inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
361 inst->alg.cra_blkcipher.min_keysize = alg->cra_blkcipher.min_keysize 394 (alg->cra_flags & CRYPTO_ALG_ASYNC);
362 + CTR_RFC3686_NONCE_SIZE; 395 inst->alg.cra_type = &crypto_ablkcipher_type;
363 inst->alg.cra_blkcipher.max_keysize = alg->cra_blkcipher.max_keysize 396
364 + CTR_RFC3686_NONCE_SIZE; 397 inst->alg.cra_ablkcipher.ivsize = CTR_RFC3686_IV_SIZE;
398 inst->alg.cra_ablkcipher.min_keysize =
399 alg->cra_ablkcipher.min_keysize + CTR_RFC3686_NONCE_SIZE;
400 inst->alg.cra_ablkcipher.max_keysize =
401 alg->cra_ablkcipher.max_keysize + CTR_RFC3686_NONCE_SIZE;
365 402
366 inst->alg.cra_blkcipher.geniv = "seqiv"; 403 inst->alg.cra_ablkcipher.geniv = "seqiv";
404
405 inst->alg.cra_ablkcipher.setkey = crypto_rfc3686_setkey;
406 inst->alg.cra_ablkcipher.encrypt = crypto_rfc3686_crypt;
407 inst->alg.cra_ablkcipher.decrypt = crypto_rfc3686_crypt;
367 408
368 inst->alg.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx); 409 inst->alg.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);
369 410
370 inst->alg.cra_init = crypto_rfc3686_init_tfm; 411 inst->alg.cra_init = crypto_rfc3686_init_tfm;
371 inst->alg.cra_exit = crypto_rfc3686_exit_tfm; 412 inst->alg.cra_exit = crypto_rfc3686_exit_tfm;
372 413
373 inst->alg.cra_blkcipher.setkey = crypto_rfc3686_setkey;
374 inst->alg.cra_blkcipher.encrypt = crypto_rfc3686_crypt;
375 inst->alg.cra_blkcipher.decrypt = crypto_rfc3686_crypt;
376
377out:
378 crypto_mod_put(alg);
379 return inst; 414 return inst;
380 415
381out_put_alg: 416err_drop_spawn:
382 inst = ERR_PTR(err); 417 crypto_drop_skcipher(spawn);
383 goto out; 418err_free_inst:
419 kfree(inst);
420 return ERR_PTR(err);
421}
422
423static void crypto_rfc3686_free(struct crypto_instance *inst)
424{
425 struct crypto_skcipher_spawn *spawn = crypto_instance_ctx(inst);
426
427 crypto_drop_skcipher(spawn);
428 kfree(inst);
384} 429}
385 430
386static struct crypto_template crypto_rfc3686_tmpl = { 431static struct crypto_template crypto_rfc3686_tmpl = {
387 .name = "rfc3686", 432 .name = "rfc3686",
388 .alloc = crypto_rfc3686_alloc, 433 .alloc = crypto_rfc3686_alloc,
389 .free = crypto_ctr_free, 434 .free = crypto_rfc3686_free,
390 .module = THIS_MODULE, 435 .module = THIS_MODULE,
391}; 436};
392 437
diff --git a/crypto/cts.c b/crypto/cts.c
index ccf9c5de3958..042223f8e733 100644
--- a/crypto/cts.c
+++ b/crypto/cts.c
@@ -282,9 +282,8 @@ static struct crypto_instance *crypto_cts_alloc(struct rtattr **tb)
282 282
283 alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER, 283 alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER,
284 CRYPTO_ALG_TYPE_MASK); 284 CRYPTO_ALG_TYPE_MASK);
285 err = PTR_ERR(alg);
286 if (IS_ERR(alg)) 285 if (IS_ERR(alg))
287 return ERR_PTR(err); 286 return ERR_CAST(alg);
288 287
289 inst = ERR_PTR(-EINVAL); 288 inst = ERR_PTR(-EINVAL);
290 if (!is_power_of_2(alg->cra_blocksize)) 289 if (!is_power_of_2(alg->cra_blocksize))
diff --git a/crypto/gcm.c b/crypto/gcm.c
index 1a252639ef91..137ad1ec5438 100644
--- a/crypto/gcm.c
+++ b/crypto/gcm.c
@@ -701,9 +701,8 @@ static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
701 int err; 701 int err;
702 702
703 algt = crypto_get_attr_type(tb); 703 algt = crypto_get_attr_type(tb);
704 err = PTR_ERR(algt);
705 if (IS_ERR(algt)) 704 if (IS_ERR(algt))
706 return ERR_PTR(err); 705 return ERR_CAST(algt);
707 706
708 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 707 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
709 return ERR_PTR(-EINVAL); 708 return ERR_PTR(-EINVAL);
@@ -711,9 +710,8 @@ static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
711 ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type, 710 ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type,
712 CRYPTO_ALG_TYPE_HASH, 711 CRYPTO_ALG_TYPE_HASH,
713 CRYPTO_ALG_TYPE_AHASH_MASK); 712 CRYPTO_ALG_TYPE_AHASH_MASK);
714 err = PTR_ERR(ghash_alg);
715 if (IS_ERR(ghash_alg)) 713 if (IS_ERR(ghash_alg))
716 return ERR_PTR(err); 714 return ERR_CAST(ghash_alg);
717 715
718 err = -ENOMEM; 716 err = -ENOMEM;
719 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 717 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
@@ -787,15 +785,13 @@ out_put_ghash:
787 785
788static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb) 786static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
789{ 787{
790 int err;
791 const char *cipher_name; 788 const char *cipher_name;
792 char ctr_name[CRYPTO_MAX_ALG_NAME]; 789 char ctr_name[CRYPTO_MAX_ALG_NAME];
793 char full_name[CRYPTO_MAX_ALG_NAME]; 790 char full_name[CRYPTO_MAX_ALG_NAME];
794 791
795 cipher_name = crypto_attr_alg_name(tb[1]); 792 cipher_name = crypto_attr_alg_name(tb[1]);
796 err = PTR_ERR(cipher_name);
797 if (IS_ERR(cipher_name)) 793 if (IS_ERR(cipher_name))
798 return ERR_PTR(err); 794 return ERR_CAST(cipher_name);
799 795
800 if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >= 796 if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
801 CRYPTO_MAX_ALG_NAME) 797 CRYPTO_MAX_ALG_NAME)
@@ -826,20 +822,17 @@ static struct crypto_template crypto_gcm_tmpl = {
826 822
827static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb) 823static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb)
828{ 824{
829 int err;
830 const char *ctr_name; 825 const char *ctr_name;
831 const char *ghash_name; 826 const char *ghash_name;
832 char full_name[CRYPTO_MAX_ALG_NAME]; 827 char full_name[CRYPTO_MAX_ALG_NAME];
833 828
834 ctr_name = crypto_attr_alg_name(tb[1]); 829 ctr_name = crypto_attr_alg_name(tb[1]);
835 err = PTR_ERR(ctr_name);
836 if (IS_ERR(ctr_name)) 830 if (IS_ERR(ctr_name))
837 return ERR_PTR(err); 831 return ERR_CAST(ctr_name);
838 832
839 ghash_name = crypto_attr_alg_name(tb[2]); 833 ghash_name = crypto_attr_alg_name(tb[2]);
840 err = PTR_ERR(ghash_name);
841 if (IS_ERR(ghash_name)) 834 if (IS_ERR(ghash_name))
842 return ERR_PTR(err); 835 return ERR_CAST(ghash_name);
843 836
844 if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s,%s)", 837 if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s,%s)",
845 ctr_name, ghash_name) >= CRYPTO_MAX_ALG_NAME) 838 ctr_name, ghash_name) >= CRYPTO_MAX_ALG_NAME)
@@ -971,17 +964,15 @@ static struct crypto_instance *crypto_rfc4106_alloc(struct rtattr **tb)
971 int err; 964 int err;
972 965
973 algt = crypto_get_attr_type(tb); 966 algt = crypto_get_attr_type(tb);
974 err = PTR_ERR(algt);
975 if (IS_ERR(algt)) 967 if (IS_ERR(algt))
976 return ERR_PTR(err); 968 return ERR_CAST(algt);
977 969
978 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 970 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
979 return ERR_PTR(-EINVAL); 971 return ERR_PTR(-EINVAL);
980 972
981 ccm_name = crypto_attr_alg_name(tb[1]); 973 ccm_name = crypto_attr_alg_name(tb[1]);
982 err = PTR_ERR(ccm_name);
983 if (IS_ERR(ccm_name)) 974 if (IS_ERR(ccm_name))
984 return ERR_PTR(err); 975 return ERR_CAST(ccm_name);
985 976
986 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); 977 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
987 if (!inst) 978 if (!inst)
@@ -1222,17 +1213,15 @@ static struct crypto_instance *crypto_rfc4543_alloc(struct rtattr **tb)
1222 int err; 1213 int err;
1223 1214
1224 algt = crypto_get_attr_type(tb); 1215 algt = crypto_get_attr_type(tb);
1225 err = PTR_ERR(algt);
1226 if (IS_ERR(algt)) 1216 if (IS_ERR(algt))
1227 return ERR_PTR(err); 1217 return ERR_CAST(algt);
1228 1218
1229 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 1219 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
1230 return ERR_PTR(-EINVAL); 1220 return ERR_PTR(-EINVAL);
1231 1221
1232 ccm_name = crypto_attr_alg_name(tb[1]); 1222 ccm_name = crypto_attr_alg_name(tb[1]);
1233 err = PTR_ERR(ccm_name);
1234 if (IS_ERR(ccm_name)) 1223 if (IS_ERR(ccm_name))
1235 return ERR_PTR(err); 1224 return ERR_CAST(ccm_name);
1236 1225
1237 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); 1226 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
1238 if (!inst) 1227 if (!inst)
diff --git a/crypto/pcompress.c b/crypto/pcompress.c
index 04e083ff5373..7140fe70c7af 100644
--- a/crypto/pcompress.c
+++ b/crypto/pcompress.c
@@ -53,8 +53,7 @@ static int crypto_pcomp_report(struct sk_buff *skb, struct crypto_alg *alg)
53{ 53{
54 struct crypto_report_comp rpcomp; 54 struct crypto_report_comp rpcomp;
55 55
56 snprintf(rpcomp.type, CRYPTO_MAX_ALG_NAME, "%s", "pcomp"); 56 strncpy(rpcomp.type, "pcomp", sizeof(rpcomp.type));
57
58 if (nla_put(skb, CRYPTOCFGA_REPORT_COMPRESS, 57 if (nla_put(skb, CRYPTOCFGA_REPORT_COMPRESS,
59 sizeof(struct crypto_report_comp), &rpcomp)) 58 sizeof(struct crypto_report_comp), &rpcomp))
60 goto nla_put_failure; 59 goto nla_put_failure;
diff --git a/crypto/rng.c b/crypto/rng.c
index f3b7894dec00..e0a25c2456de 100644
--- a/crypto/rng.c
+++ b/crypto/rng.c
@@ -65,7 +65,7 @@ static int crypto_rng_report(struct sk_buff *skb, struct crypto_alg *alg)
65{ 65{
66 struct crypto_report_rng rrng; 66 struct crypto_report_rng rrng;
67 67
68 snprintf(rrng.type, CRYPTO_MAX_ALG_NAME, "%s", "rng"); 68 strncpy(rrng.type, "rng", sizeof(rrng.type));
69 69
70 rrng.seedsize = alg->cra_rng.seedsize; 70 rrng.seedsize = alg->cra_rng.seedsize;
71 71
diff --git a/crypto/seqiv.c b/crypto/seqiv.c
index 4c4491229417..f2cba4ed6f25 100644
--- a/crypto/seqiv.c
+++ b/crypto/seqiv.c
@@ -305,9 +305,8 @@ static struct crypto_instance *seqiv_alloc(struct rtattr **tb)
305 int err; 305 int err;
306 306
307 algt = crypto_get_attr_type(tb); 307 algt = crypto_get_attr_type(tb);
308 err = PTR_ERR(algt);
309 if (IS_ERR(algt)) 308 if (IS_ERR(algt))
310 return ERR_PTR(err); 309 return ERR_CAST(algt);
311 310
312 err = crypto_get_default_rng(); 311 err = crypto_get_default_rng();
313 if (err) 312 if (err)
diff --git a/crypto/shash.c b/crypto/shash.c
index f426330f1017..929058a68561 100644
--- a/crypto/shash.c
+++ b/crypto/shash.c
@@ -530,7 +530,8 @@ static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
530 struct crypto_report_hash rhash; 530 struct crypto_report_hash rhash;
531 struct shash_alg *salg = __crypto_shash_alg(alg); 531 struct shash_alg *salg = __crypto_shash_alg(alg);
532 532
533 snprintf(rhash.type, CRYPTO_MAX_ALG_NAME, "%s", "shash"); 533 strncpy(rhash.type, "shash", sizeof(rhash.type));
534
534 rhash.blocksize = alg->cra_blocksize; 535 rhash.blocksize = alg->cra_blocksize;
535 rhash.digestsize = salg->digestsize; 536 rhash.digestsize = salg->digestsize;
536 537
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index 7ae2130e1b00..87ef7d66bc20 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -1591,6 +1591,10 @@ static int do_test(int m)
1591 speed_template_16_24_32); 1591 speed_template_16_24_32);
1592 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0, 1592 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
1593 speed_template_16_24_32); 1593 speed_template_16_24_32);
1594 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
1595 speed_template_20_28_36);
1596 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
1597 speed_template_20_28_36);
1594 break; 1598 break;
1595 1599
1596 case 501: 1600 case 501:
diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h
index cd2068524f3f..ecdeeb1a7b05 100644
--- a/crypto/tcrypt.h
+++ b/crypto/tcrypt.h
@@ -51,6 +51,7 @@ static u8 speed_template_8_16[] = {8, 16, 0};
51static u8 speed_template_8_32[] = {8, 32, 0}; 51static u8 speed_template_8_32[] = {8, 32, 0};
52static u8 speed_template_16_32[] = {16, 32, 0}; 52static u8 speed_template_16_32[] = {16, 32, 0};
53static u8 speed_template_16_24_32[] = {16, 24, 32, 0}; 53static u8 speed_template_16_24_32[] = {16, 24, 32, 0};
54static u8 speed_template_20_28_36[] = {20, 28, 36, 0};
54static u8 speed_template_32_40_48[] = {32, 40, 48, 0}; 55static u8 speed_template_32_40_48[] = {32, 40, 48, 0};
55static u8 speed_template_32_48[] = {32, 48, 0}; 56static u8 speed_template_32_48[] = {32, 48, 0};
56static u8 speed_template_32_48_64[] = {32, 48, 64, 0}; 57static u8 speed_template_32_48_64[] = {32, 48, 64, 0};
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
index edf4a0818773..efd8b20e13dc 100644
--- a/crypto/testmgr.c
+++ b/crypto/testmgr.c
@@ -2269,6 +2269,21 @@ static const struct alg_test_desc alg_test_descs[] = {
2269 } 2269 }
2270 } 2270 }
2271 }, { 2271 }, {
2272 .alg = "ecb(fcrypt)",
2273 .test = alg_test_skcipher,
2274 .suite = {
2275 .cipher = {
2276 .enc = {
2277 .vecs = fcrypt_pcbc_enc_tv_template,
2278 .count = 1
2279 },
2280 .dec = {
2281 .vecs = fcrypt_pcbc_dec_tv_template,
2282 .count = 1
2283 }
2284 }
2285 }
2286 }, {
2272 .alg = "ecb(khazad)", 2287 .alg = "ecb(khazad)",
2273 .test = alg_test_skcipher, 2288 .test = alg_test_skcipher,
2274 .suite = { 2289 .suite = {
diff --git a/crypto/testmgr.h b/crypto/testmgr.h
index b5721e0b979c..3db1b7591559 100644
--- a/crypto/testmgr.h
+++ b/crypto/testmgr.h
@@ -25084,38 +25084,40 @@ static struct pcomp_testvec zlib_decomp_tv_template[] = {
25084static struct comp_testvec lzo_comp_tv_template[] = { 25084static struct comp_testvec lzo_comp_tv_template[] = {
25085 { 25085 {
25086 .inlen = 70, 25086 .inlen = 70,
25087 .outlen = 46, 25087 .outlen = 57,
25088 .input = "Join us now and share the software " 25088 .input = "Join us now and share the software "
25089 "Join us now and share the software ", 25089 "Join us now and share the software ",
25090 .output = "\x00\x0d\x4a\x6f\x69\x6e\x20\x75" 25090 .output = "\x00\x0d\x4a\x6f\x69\x6e\x20\x75"
25091 "\x73\x20\x6e\x6f\x77\x20\x61\x6e" 25091 "\x73\x20\x6e\x6f\x77\x20\x61\x6e"
25092 "\x64\x20\x73\x68\x61\x72\x65\x20" 25092 "\x64\x20\x73\x68\x61\x72\x65\x20"
25093 "\x74\x68\x65\x20\x73\x6f\x66\x74" 25093 "\x74\x68\x65\x20\x73\x6f\x66\x74"
25094 "\x77\x70\x01\x01\x4a\x6f\x69\x6e" 25094 "\x77\x70\x01\x32\x88\x00\x0c\x65"
25095 "\x3d\x88\x00\x11\x00\x00", 25095 "\x20\x74\x68\x65\x20\x73\x6f\x66"
25096 "\x74\x77\x61\x72\x65\x20\x11\x00"
25097 "\x00",
25096 }, { 25098 }, {
25097 .inlen = 159, 25099 .inlen = 159,
25098 .outlen = 133, 25100 .outlen = 131,
25099 .input = "This document describes a compression method based on the LZO " 25101 .input = "This document describes a compression method based on the LZO "
25100 "compression algorithm. This document defines the application of " 25102 "compression algorithm. This document defines the application of "
25101 "the LZO algorithm used in UBIFS.", 25103 "the LZO algorithm used in UBIFS.",
25102 .output = "\x00\x2b\x54\x68\x69\x73\x20\x64" 25104 .output = "\x00\x2c\x54\x68\x69\x73\x20\x64"
25103 "\x6f\x63\x75\x6d\x65\x6e\x74\x20" 25105 "\x6f\x63\x75\x6d\x65\x6e\x74\x20"
25104 "\x64\x65\x73\x63\x72\x69\x62\x65" 25106 "\x64\x65\x73\x63\x72\x69\x62\x65"
25105 "\x73\x20\x61\x20\x63\x6f\x6d\x70" 25107 "\x73\x20\x61\x20\x63\x6f\x6d\x70"
25106 "\x72\x65\x73\x73\x69\x6f\x6e\x20" 25108 "\x72\x65\x73\x73\x69\x6f\x6e\x20"
25107 "\x6d\x65\x74\x68\x6f\x64\x20\x62" 25109 "\x6d\x65\x74\x68\x6f\x64\x20\x62"
25108 "\x61\x73\x65\x64\x20\x6f\x6e\x20" 25110 "\x61\x73\x65\x64\x20\x6f\x6e\x20"
25109 "\x74\x68\x65\x20\x4c\x5a\x4f\x2b" 25111 "\x74\x68\x65\x20\x4c\x5a\x4f\x20"
25110 "\x8c\x00\x0d\x61\x6c\x67\x6f\x72" 25112 "\x2a\x8c\x00\x09\x61\x6c\x67\x6f"
25111 "\x69\x74\x68\x6d\x2e\x20\x20\x54" 25113 "\x72\x69\x74\x68\x6d\x2e\x20\x20"
25112 "\x68\x69\x73\x2a\x54\x01\x02\x66" 25114 "\x2e\x54\x01\x03\x66\x69\x6e\x65"
25113 "\x69\x6e\x65\x73\x94\x06\x05\x61" 25115 "\x73\x20\x74\x06\x05\x61\x70\x70"
25114 "\x70\x70\x6c\x69\x63\x61\x74\x76" 25116 "\x6c\x69\x63\x61\x74\x76\x0a\x6f"
25115 "\x0a\x6f\x66\x88\x02\x60\x09\x27" 25117 "\x66\x88\x02\x60\x09\x27\xf0\x00"
25116 "\xf0\x00\x0c\x20\x75\x73\x65\x64" 25118 "\x0c\x20\x75\x73\x65\x64\x20\x69"
25117 "\x20\x69\x6e\x20\x55\x42\x49\x46" 25119 "\x6e\x20\x55\x42\x49\x46\x53\x2e"
25118 "\x53\x2e\x11\x00\x00", 25120 "\x11\x00\x00",
25119 }, 25121 },
25120}; 25122};
25121 25123