diff options
author | Linus Torvalds <torvalds@linux-foundation.org> | 2017-02-23 12:54:19 -0500 |
---|---|---|
committer | Linus Torvalds <torvalds@linux-foundation.org> | 2017-02-23 12:54:19 -0500 |
commit | 5bcbe22ca47da04cda3a858cef67f55b550c1d13 (patch) | |
tree | 49bd61e32eb2d652085a49182436322a3e0e9840 /crypto/skcipher.c | |
parent | 1db934a5b77a9e37c4742c704fde6af233187a98 (diff) | |
parent | 12cb3a1c4184f891d965d1f39f8cfcc9ef617647 (diff) |
Merge branch 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto update from Herbert Xu:
"API:
- Try to catch hash output overrun in testmgr
- Introduce walksize attribute for batched walking
- Make crypto_xor() and crypto_inc() alignment agnostic
Algorithms:
- Add time-invariant AES algorithm
- Add standalone CBCMAC algorithm
Drivers:
- Add NEON accelerated chacha20 on ARM/ARM64
- Expose AES-CTR as synchronous skcipher on ARM64
- Add scalar AES implementation on ARM64
- Improve scalar AES implementation on ARM
- Improve NEON AES implementation on ARM/ARM64
- Merge CRC32 and PMULL instruction based drivers on ARM64
- Add NEON accelerated CBCMAC/CMAC/XCBC AES on ARM64
- Add IPsec AUTHENC implementation in atmel
- Add Support for Octeon-tx CPT Engine
- Add Broadcom SPU driver
- Add MediaTek driver"
* 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (142 commits)
crypto: xts - Add ECB dependency
crypto: cavium - switch to pci_alloc_irq_vectors
crypto: cavium - switch to pci_alloc_irq_vectors
crypto: cavium - remove dead MSI-X related define
crypto: brcm - Avoid double free in ahash_finup()
crypto: cavium - fix Kconfig dependencies
crypto: cavium - cpt_bind_vq_to_grp could return an error code
crypto: doc - fix typo
hwrng: omap - update Kconfig help description
crypto: ccm - drop unnecessary minimum 32-bit alignment
crypto: ccm - honour alignmask of subordinate MAC cipher
crypto: caam - fix state buffer DMA (un)mapping
crypto: caam - abstract ahash request double buffering
crypto: caam - fix error path for ctx_dma mapping failure
crypto: caam - fix DMA API leaks for multiple setkey() calls
crypto: caam - don't dma_map key for hash algorithms
crypto: caam - use dma_map_sg() return code
crypto: caam - replace sg_count() with sg_nents_for_len()
crypto: caam - check sg_count() return value
crypto: caam - fix HW S/G in ablkcipher_giv_edesc_alloc()
...
Diffstat (limited to 'crypto/skcipher.c')
-rw-r--r-- | crypto/skcipher.c | 23 |
1 files changed, 14 insertions, 9 deletions
diff --git a/crypto/skcipher.c b/crypto/skcipher.c index 0e1e6c35188e..014af741fc6a 100644 --- a/crypto/skcipher.c +++ b/crypto/skcipher.c | |||
@@ -19,6 +19,7 @@ | |||
19 | #include <crypto/scatterwalk.h> | 19 | #include <crypto/scatterwalk.h> |
20 | #include <linux/bug.h> | 20 | #include <linux/bug.h> |
21 | #include <linux/cryptouser.h> | 21 | #include <linux/cryptouser.h> |
22 | #include <linux/compiler.h> | ||
22 | #include <linux/list.h> | 23 | #include <linux/list.h> |
23 | #include <linux/module.h> | 24 | #include <linux/module.h> |
24 | #include <linux/rtnetlink.h> | 25 | #include <linux/rtnetlink.h> |
@@ -185,12 +186,12 @@ void skcipher_walk_complete(struct skcipher_walk *walk, int err) | |||
185 | data = p->data; | 186 | data = p->data; |
186 | if (!data) { | 187 | if (!data) { |
187 | data = PTR_ALIGN(&p->buffer[0], walk->alignmask + 1); | 188 | data = PTR_ALIGN(&p->buffer[0], walk->alignmask + 1); |
188 | data = skcipher_get_spot(data, walk->chunksize); | 189 | data = skcipher_get_spot(data, walk->stride); |
189 | } | 190 | } |
190 | 191 | ||
191 | scatterwalk_copychunks(data, &p->dst, p->len, 1); | 192 | scatterwalk_copychunks(data, &p->dst, p->len, 1); |
192 | 193 | ||
193 | if (offset_in_page(p->data) + p->len + walk->chunksize > | 194 | if (offset_in_page(p->data) + p->len + walk->stride > |
194 | PAGE_SIZE) | 195 | PAGE_SIZE) |
195 | free_page((unsigned long)p->data); | 196 | free_page((unsigned long)p->data); |
196 | 197 | ||
@@ -299,7 +300,7 @@ static int skcipher_next_copy(struct skcipher_walk *walk) | |||
299 | p->len = walk->nbytes; | 300 | p->len = walk->nbytes; |
300 | skcipher_queue_write(walk, p); | 301 | skcipher_queue_write(walk, p); |
301 | 302 | ||
302 | if (offset_in_page(walk->page) + walk->nbytes + walk->chunksize > | 303 | if (offset_in_page(walk->page) + walk->nbytes + walk->stride > |
303 | PAGE_SIZE) | 304 | PAGE_SIZE) |
304 | walk->page = NULL; | 305 | walk->page = NULL; |
305 | else | 306 | else |
@@ -344,7 +345,7 @@ static int skcipher_walk_next(struct skcipher_walk *walk) | |||
344 | SKCIPHER_WALK_DIFF); | 345 | SKCIPHER_WALK_DIFF); |
345 | 346 | ||
346 | n = walk->total; | 347 | n = walk->total; |
347 | bsize = min(walk->chunksize, max(n, walk->blocksize)); | 348 | bsize = min(walk->stride, max(n, walk->blocksize)); |
348 | n = scatterwalk_clamp(&walk->in, n); | 349 | n = scatterwalk_clamp(&walk->in, n); |
349 | n = scatterwalk_clamp(&walk->out, n); | 350 | n = scatterwalk_clamp(&walk->out, n); |
350 | 351 | ||
@@ -393,7 +394,7 @@ static int skcipher_copy_iv(struct skcipher_walk *walk) | |||
393 | unsigned a = crypto_tfm_ctx_alignment() - 1; | 394 | unsigned a = crypto_tfm_ctx_alignment() - 1; |
394 | unsigned alignmask = walk->alignmask; | 395 | unsigned alignmask = walk->alignmask; |
395 | unsigned ivsize = walk->ivsize; | 396 | unsigned ivsize = walk->ivsize; |
396 | unsigned bs = walk->chunksize; | 397 | unsigned bs = walk->stride; |
397 | unsigned aligned_bs; | 398 | unsigned aligned_bs; |
398 | unsigned size; | 399 | unsigned size; |
399 | u8 *iv; | 400 | u8 *iv; |
@@ -463,7 +464,7 @@ static int skcipher_walk_skcipher(struct skcipher_walk *walk, | |||
463 | SKCIPHER_WALK_SLEEP : 0; | 464 | SKCIPHER_WALK_SLEEP : 0; |
464 | 465 | ||
465 | walk->blocksize = crypto_skcipher_blocksize(tfm); | 466 | walk->blocksize = crypto_skcipher_blocksize(tfm); |
466 | walk->chunksize = crypto_skcipher_chunksize(tfm); | 467 | walk->stride = crypto_skcipher_walksize(tfm); |
467 | walk->ivsize = crypto_skcipher_ivsize(tfm); | 468 | walk->ivsize = crypto_skcipher_ivsize(tfm); |
468 | walk->alignmask = crypto_skcipher_alignmask(tfm); | 469 | walk->alignmask = crypto_skcipher_alignmask(tfm); |
469 | 470 | ||
@@ -525,7 +526,7 @@ static int skcipher_walk_aead_common(struct skcipher_walk *walk, | |||
525 | walk->flags &= ~SKCIPHER_WALK_SLEEP; | 526 | walk->flags &= ~SKCIPHER_WALK_SLEEP; |
526 | 527 | ||
527 | walk->blocksize = crypto_aead_blocksize(tfm); | 528 | walk->blocksize = crypto_aead_blocksize(tfm); |
528 | walk->chunksize = crypto_aead_chunksize(tfm); | 529 | walk->stride = crypto_aead_chunksize(tfm); |
529 | walk->ivsize = crypto_aead_ivsize(tfm); | 530 | walk->ivsize = crypto_aead_ivsize(tfm); |
530 | walk->alignmask = crypto_aead_alignmask(tfm); | 531 | walk->alignmask = crypto_aead_alignmask(tfm); |
531 | 532 | ||
@@ -807,7 +808,7 @@ static void crypto_skcipher_free_instance(struct crypto_instance *inst) | |||
807 | } | 808 | } |
808 | 809 | ||
809 | static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg) | 810 | static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg) |
810 | __attribute__ ((unused)); | 811 | __maybe_unused; |
811 | static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg) | 812 | static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg) |
812 | { | 813 | { |
813 | struct skcipher_alg *skcipher = container_of(alg, struct skcipher_alg, | 814 | struct skcipher_alg *skcipher = container_of(alg, struct skcipher_alg, |
@@ -821,6 +822,7 @@ static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg) | |||
821 | seq_printf(m, "max keysize : %u\n", skcipher->max_keysize); | 822 | seq_printf(m, "max keysize : %u\n", skcipher->max_keysize); |
822 | seq_printf(m, "ivsize : %u\n", skcipher->ivsize); | 823 | seq_printf(m, "ivsize : %u\n", skcipher->ivsize); |
823 | seq_printf(m, "chunksize : %u\n", skcipher->chunksize); | 824 | seq_printf(m, "chunksize : %u\n", skcipher->chunksize); |
825 | seq_printf(m, "walksize : %u\n", skcipher->walksize); | ||
824 | } | 826 | } |
825 | 827 | ||
826 | #ifdef CONFIG_NET | 828 | #ifdef CONFIG_NET |
@@ -893,11 +895,14 @@ static int skcipher_prepare_alg(struct skcipher_alg *alg) | |||
893 | { | 895 | { |
894 | struct crypto_alg *base = &alg->base; | 896 | struct crypto_alg *base = &alg->base; |
895 | 897 | ||
896 | if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8) | 898 | if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8 || |
899 | alg->walksize > PAGE_SIZE / 8) | ||
897 | return -EINVAL; | 900 | return -EINVAL; |
898 | 901 | ||
899 | if (!alg->chunksize) | 902 | if (!alg->chunksize) |
900 | alg->chunksize = base->cra_blocksize; | 903 | alg->chunksize = base->cra_blocksize; |
904 | if (!alg->walksize) | ||
905 | alg->walksize = alg->chunksize; | ||
901 | 906 | ||
902 | base->cra_type = &crypto_skcipher_type2; | 907 | base->cra_type = &crypto_skcipher_type2; |
903 | base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; | 908 | base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; |