Diffstat (limited to 'crypto')
-rw-r--r--   crypto/Kconfig                  |   1
-rw-r--r--   crypto/Makefile                 |   4
-rw-r--r--   crypto/ablkcipher.c             |   3
-rw-r--r--   crypto/async_tx/async_memcpy.c  |   6
-rw-r--r--   crypto/async_tx/async_memset.c  |   6
-rw-r--r--   crypto/async_tx/async_tx.c      |   6
-rw-r--r--   crypto/async_tx/async_xor.c     |  12
-rw-r--r--   crypto/blkcipher.c              |  29
-rw-r--r--   crypto/chainiv.c                |  10
-rw-r--r--   crypto/digest.c                 |   2
-rw-r--r--   crypto/eseqiv.c                 |  10
-rw-r--r--   crypto/xcbc.c                   |   6
-rw-r--r--   crypto/xts.c                    |  13
13 files changed, 64 insertions, 44 deletions
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 898acc5c1967..69f1be6816f7 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -575,6 +575,7 @@ config CRYPTO_TEST
 config CRYPTO_AUTHENC
 	tristate "Authenc support"
 	select CRYPTO_AEAD
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_MANAGER
 	select CRYPTO_HASH
 	help
diff --git a/crypto/Makefile b/crypto/Makefile
index 48c758379954..7cf36253a75e 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -12,9 +12,9 @@ obj-$(CONFIG_CRYPTO_AEAD) += aead.o
 
 crypto_blkcipher-objs := ablkcipher.o
 crypto_blkcipher-objs += blkcipher.o
+crypto_blkcipher-objs += chainiv.o
+crypto_blkcipher-objs += eseqiv.o
 obj-$(CONFIG_CRYPTO_BLKCIPHER) += crypto_blkcipher.o
-obj-$(CONFIG_CRYPTO_BLKCIPHER) += chainiv.o
-obj-$(CONFIG_CRYPTO_BLKCIPHER) += eseqiv.o
 obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
 
 crypto_hash-objs := hash.o
diff --git a/crypto/ablkcipher.c b/crypto/ablkcipher.c
index 3bcb099b4a85..94140b3756fc 100644
--- a/crypto/ablkcipher.c
+++ b/crypto/ablkcipher.c
@@ -341,6 +341,3 @@ err:
 	return ERR_PTR(err);
 }
 EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher);
-
-MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("Asynchronous block chaining cipher type");
diff --git a/crypto/async_tx/async_memcpy.c b/crypto/async_tx/async_memcpy.c
index 0f6282207b32..84caa4efc0d4 100644
--- a/crypto/async_tx/async_memcpy.c
+++ b/crypto/async_tx/async_memcpy.c
@@ -66,11 +66,11 @@ async_memcpy(struct page *dest, struct page *src, unsigned int dest_offset,
 	}
 
 	if (tx) {
-		pr_debug("%s: (async) len: %zu\n", __FUNCTION__, len);
+		pr_debug("%s: (async) len: %zu\n", __func__, len);
 		async_tx_submit(chan, tx, flags, depend_tx, cb_fn, cb_param);
 	} else {
 		void *dest_buf, *src_buf;
-		pr_debug("%s: (sync) len: %zu\n", __FUNCTION__, len);
+		pr_debug("%s: (sync) len: %zu\n", __func__, len);
 
 		/* wait for any prerequisite operations */
 		if (depend_tx) {
@@ -80,7 +80,7 @@ async_memcpy(struct page *dest, struct page *src, unsigned int dest_offset,
 			BUG_ON(depend_tx->ack);
 			if (dma_wait_for_async_tx(depend_tx) == DMA_ERROR)
 				panic("%s: DMA_ERROR waiting for depend_tx\n",
-					__FUNCTION__);
+					__func__);
 		}
 
 		dest_buf = kmap_atomic(dest, KM_USER0) + dest_offset;
diff --git a/crypto/async_tx/async_memset.c b/crypto/async_tx/async_memset.c
index 09c0e83664bc..f5ff3906b035 100644
--- a/crypto/async_tx/async_memset.c
+++ b/crypto/async_tx/async_memset.c
@@ -63,11 +63,11 @@ async_memset(struct page *dest, int val, unsigned int offset,
 	}
 
 	if (tx) {
-		pr_debug("%s: (async) len: %zu\n", __FUNCTION__, len);
+		pr_debug("%s: (async) len: %zu\n", __func__, len);
 		async_tx_submit(chan, tx, flags, depend_tx, cb_fn, cb_param);
 	} else { /* run the memset synchronously */
 		void *dest_buf;
-		pr_debug("%s: (sync) len: %zu\n", __FUNCTION__, len);
+		pr_debug("%s: (sync) len: %zu\n", __func__, len);
 
 		dest_buf = (void *) (((char *) page_address(dest)) + offset);
 
@@ -79,7 +79,7 @@ async_memset(struct page *dest, int val, unsigned int offset,
 			BUG_ON(depend_tx->ack);
 			if (dma_wait_for_async_tx(depend_tx) == DMA_ERROR)
 				panic("%s: DMA_ERROR waiting for depend_tx\n",
-					__FUNCTION__);
+					__func__);
 		}
 
 		memset(dest_buf, val, len);
diff --git a/crypto/async_tx/async_tx.c b/crypto/async_tx/async_tx.c
index 562882189de5..2be3bae89930 100644
--- a/crypto/async_tx/async_tx.c
+++ b/crypto/async_tx/async_tx.c
@@ -472,11 +472,11 @@ async_trigger_callback(enum async_tx_flags flags,
 		tx = NULL;
 
 	if (tx) {
-		pr_debug("%s: (async)\n", __FUNCTION__);
+		pr_debug("%s: (async)\n", __func__);
 
 		async_tx_submit(chan, tx, flags, depend_tx, cb_fn, cb_param);
 	} else {
-		pr_debug("%s: (sync)\n", __FUNCTION__);
+		pr_debug("%s: (sync)\n", __func__);
 
 		/* wait for any prerequisite operations */
 		if (depend_tx) {
@@ -486,7 +486,7 @@ async_trigger_callback(enum async_tx_flags flags,
 			BUG_ON(depend_tx->ack);
 			if (dma_wait_for_async_tx(depend_tx) == DMA_ERROR)
 				panic("%s: DMA_ERROR waiting for depend_tx\n",
-					__FUNCTION__);
+					__func__);
 		}
 
 		async_tx_sync_epilog(flags, depend_tx, cb_fn, cb_param);
diff --git a/crypto/async_tx/async_xor.c b/crypto/async_tx/async_xor.c
index 2259a4ff15cb..7a9db353f198 100644
--- a/crypto/async_tx/async_xor.c
+++ b/crypto/async_tx/async_xor.c
@@ -47,7 +47,7 @@ do_async_xor(struct dma_device *device,
 	int i;
 	unsigned long dma_prep_flags = cb_fn ? DMA_PREP_INTERRUPT : 0;
 
-	pr_debug("%s: len: %zu\n", __FUNCTION__, len);
+	pr_debug("%s: len: %zu\n", __func__, len);
 
 	dma_dest = dma_map_page(device->dev, dest, offset, len,
 				DMA_FROM_DEVICE);
@@ -86,7 +86,7 @@ do_sync_xor(struct page *dest, struct page **src_list, unsigned int offset,
 	void *_dest;
 	int i;
 
-	pr_debug("%s: len: %zu\n", __FUNCTION__, len);
+	pr_debug("%s: len: %zu\n", __func__, len);
 
 	/* reuse the 'src_list' array to convert to buffer pointers */
 	for (i = 0; i < src_cnt; i++)
@@ -196,7 +196,7 @@ async_xor(struct page *dest, struct page **src_list, unsigned int offset,
 					DMA_ERROR)
 					panic("%s: DMA_ERROR waiting for "
 						"depend_tx\n",
-						__FUNCTION__);
+						__func__);
 			}
 
 			do_sync_xor(dest, &src_list[src_off], offset,
@@ -276,7 +276,7 @@ async_xor_zero_sum(struct page *dest, struct page **src_list,
 		unsigned long dma_prep_flags = cb_fn ? DMA_PREP_INTERRUPT : 0;
 		int i;
 
-		pr_debug("%s: (async) len: %zu\n", __FUNCTION__, len);
+		pr_debug("%s: (async) len: %zu\n", __func__, len);
 
 		for (i = 0; i < src_cnt; i++)
 			dma_src[i] = dma_map_page(device->dev, src_list[i],
@@ -299,7 +299,7 @@ async_xor_zero_sum(struct page *dest, struct page **src_list,
 	} else {
 		unsigned long xor_flags = flags;
 
-		pr_debug("%s: (sync) len: %zu\n", __FUNCTION__, len);
+		pr_debug("%s: (sync) len: %zu\n", __func__, len);
 
 		xor_flags |= ASYNC_TX_XOR_DROP_DST;
 		xor_flags &= ~ASYNC_TX_ACK;
@@ -310,7 +310,7 @@ async_xor_zero_sum(struct page *dest, struct page **src_list,
 		if (tx) {
 			if (dma_wait_for_async_tx(tx) == DMA_ERROR)
 				panic("%s: DMA_ERROR waiting for tx\n",
-					__FUNCTION__);
+					__func__);
 			async_tx_ack(tx);
 		}
 
diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c
index 4a7e65c4df4d..185f955fb0d7 100644
--- a/crypto/blkcipher.c
+++ b/crypto/blkcipher.c
@@ -696,5 +696,34 @@ void skcipher_geniv_exit(struct crypto_tfm *tfm)
 }
 EXPORT_SYMBOL_GPL(skcipher_geniv_exit);
 
+static int __init blkcipher_module_init(void)
+{
+	int err;
+
+	err = chainiv_module_init();
+	if (err)
+		goto out;
+
+	err = eseqiv_module_init();
+	if (err)
+		goto eseqiv_err;
+
+out:
+	return err;
+
+eseqiv_err:
+	chainiv_module_exit();
+	goto out;
+}
+
+static void __exit blkcipher_module_exit(void)
+{
+	eseqiv_module_exit();
+	chainiv_module_exit();
+}
+
+module_init(blkcipher_module_init);
+module_exit(blkcipher_module_exit);
+
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Generic block chaining cipher type");
diff --git a/crypto/chainiv.c b/crypto/chainiv.c
index d17fa0454dc3..6da3f577e4db 100644
--- a/crypto/chainiv.c
+++ b/crypto/chainiv.c
@@ -314,18 +314,12 @@ static struct crypto_template chainiv_tmpl = {
 	.module = THIS_MODULE,
 };
 
-static int __init chainiv_module_init(void)
+int __init chainiv_module_init(void)
 {
 	return crypto_register_template(&chainiv_tmpl);
 }
 
-static void __exit chainiv_module_exit(void)
+void chainiv_module_exit(void)
 {
 	crypto_unregister_template(&chainiv_tmpl);
 }
-
-module_init(chainiv_module_init);
-module_exit(chainiv_module_exit);
-
-MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("Chain IV Generator");
diff --git a/crypto/digest.c b/crypto/digest.c
index 6fd43bddd545..b526cc348b79 100644
--- a/crypto/digest.c
+++ b/crypto/digest.c
@@ -21,6 +21,8 @@
 #include <linux/module.h>
 #include <linux/scatterlist.h>
 
+#include "internal.h"
+
 static int init(struct hash_desc *desc)
 {
 	struct crypto_tfm *tfm = crypto_hash_tfm(desc->tfm);
diff --git a/crypto/eseqiv.c b/crypto/eseqiv.c
index eb90d27ae118..b14f14e314b6 100644
--- a/crypto/eseqiv.c
+++ b/crypto/eseqiv.c
@@ -247,18 +247,12 @@ static struct crypto_template eseqiv_tmpl = {
 	.module = THIS_MODULE,
 };
 
-static int __init eseqiv_module_init(void)
+int __init eseqiv_module_init(void)
 {
 	return crypto_register_template(&eseqiv_tmpl);
 }
 
-static void __exit eseqiv_module_exit(void)
+void __exit eseqiv_module_exit(void)
 {
 	crypto_unregister_template(&eseqiv_tmpl);
 }
-
-module_init(eseqiv_module_init);
-module_exit(eseqiv_module_exit);
-
-MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("Encrypted Sequence Number IV Generator");
diff --git a/crypto/xcbc.c b/crypto/xcbc.c
index 86727403e5ab..2feb0f239c38 100644
--- a/crypto/xcbc.c
+++ b/crypto/xcbc.c
@@ -124,6 +124,11 @@ static int crypto_xcbc_digest_update2(struct hash_desc *pdesc,
 		unsigned int offset = sg[i].offset;
 		unsigned int slen = sg[i].length;
 
+		if (unlikely(slen > nbytes))
+			slen = nbytes;
+
+		nbytes -= slen;
+
 		while (slen > 0) {
 			unsigned int len = min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
 			char *p = crypto_kmap(pg, 0) + offset;
@@ -177,7 +182,6 @@ static int crypto_xcbc_digest_update2(struct hash_desc *pdesc,
 			offset = 0;
 			pg++;
 		}
-		nbytes-=sg[i].length;
 		i++;
 	} while (nbytes>0);
 
183 187
diff --git a/crypto/xts.c b/crypto/xts.c
index 8eb08bfaf7c0..d87b0f3102c3 100644
--- a/crypto/xts.c
+++ b/crypto/xts.c
@@ -77,16 +77,16 @@ static int setkey(struct crypto_tfm *parent, const u8 *key,
 }
 
 struct sinfo {
-	be128 t;
+	be128 *t;
 	struct crypto_tfm *tfm;
 	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
 };
 
 static inline void xts_round(struct sinfo *s, void *dst, const void *src)
 {
-	be128_xor(dst, &s->t, src);		/* PP <- T xor P */
+	be128_xor(dst, s->t, src);		/* PP <- T xor P */
 	s->fn(s->tfm, dst, dst);		/* CC <- E(Key1,PP) */
-	be128_xor(dst, dst, &s->t);		/* C <- T xor CC */
+	be128_xor(dst, dst, s->t);		/* C <- T xor CC */
 }
 
 static int crypt(struct blkcipher_desc *d,
91 91
92static int crypt(struct blkcipher_desc *d, 92static int crypt(struct blkcipher_desc *d,
@@ -101,7 +101,6 @@ static int crypt(struct blkcipher_desc *d,
 		.tfm = crypto_cipher_tfm(ctx->child),
 		.fn = fn
 	};
-	be128 *iv;
 	u8 *wsrc;
 	u8 *wdst;
 
107 106
@@ -109,20 +108,20 @@ static int crypt(struct blkcipher_desc *d,
 	if (!w->nbytes)
 		return err;
 
+	s.t = (be128 *)w->iv;
 	avail = w->nbytes;
 
 	wsrc = w->src.virt.addr;
 	wdst = w->dst.virt.addr;
 
 	/* calculate first value of T */
-	iv = (be128 *)w->iv;
-	tw(crypto_cipher_tfm(ctx->tweak), (void *)&s.t, w->iv);
+	tw(crypto_cipher_tfm(ctx->tweak), w->iv, w->iv);
 
 	goto first;
 
 	for (;;) {
 		do {
-			gf128mul_x_ble(&s.t, &s.t);
+			gf128mul_x_ble(s.t, s.t);
 
 first:
 			xts_round(&s, wdst, wsrc);