Diffstat (limited to 'crypto')
-rw-r--r--  crypto/Kconfig               |  30
-rw-r--r--  crypto/Makefile              |   5
-rw-r--r--  crypto/ablkcipher.c          |  29
-rw-r--r--  crypto/aes_generic.c         |   9
-rw-r--r--  crypto/ahash.c               | 336
-rw-r--r--  crypto/algapi.c              | 191
-rw-r--r--  crypto/algboss.c             |   5
-rw-r--r--  crypto/ansi_cprng.c          |  43
-rw-r--r--  crypto/api.c                 |  54
-rw-r--r--  crypto/async_tx/async_xor.c  |   2
-rw-r--r--  crypto/authenc.c             | 358
-rw-r--r--  crypto/cryptd.c              | 321
-rw-r--r--  crypto/ctr.c                 |   2
-rw-r--r--  crypto/gcm.c                 | 580
-rw-r--r--  crypto/ghash-generic.c       | 170
-rw-r--r--  crypto/hmac.c                | 302
-rw-r--r--  crypto/internal.h            |  28
-rw-r--r--  crypto/pcompress.c           |   6
-rw-r--r--  crypto/rng.c                 |   2
-rw-r--r--  crypto/sha1_generic.c        |  41
-rw-r--r--  crypto/sha256_generic.c      | 100
-rw-r--r--  crypto/sha512_generic.c      |  48
-rw-r--r--  crypto/shash.c               | 270
-rw-r--r--  crypto/tcrypt.c              |  22
-rw-r--r--  crypto/testmgr.c             |  30
-rw-r--r--  crypto/testmgr.h             |  16
-rw-r--r--  crypto/vmac.c                | 678
-rw-r--r--  crypto/xcbc.c                | 370
28 files changed, 2970 insertions(+), 1078 deletions(-)
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 4dfdd03e708f..26b5dd0cb564 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -23,11 +23,13 @@ comment "Crypto core or helper"
 
 config CRYPTO_FIPS
 	bool "FIPS 200 compliance"
+	depends on CRYPTO_ANSI_CPRNG
 	help
 	  This options enables the fips boot option which is
 	  required if you want to system to operate in a FIPS 200
 	  certification. You should say no unless you know what
-	  this is.
+	  this is. Note that CRYPTO_ANSI_CPRNG is required if this
+	  option is selected.
 
 config CRYPTO_ALGAPI
 	tristate
@@ -156,7 +158,7 @@ config CRYPTO_GCM
 	tristate "GCM/GMAC support"
 	select CRYPTO_CTR
 	select CRYPTO_AEAD
-	select CRYPTO_GF128MUL
+	select CRYPTO_GHASH
 	help
 	  Support for Galois/Counter Mode (GCM) and Galois Message
 	  Authentication Code (GMAC). Required for IPSec.
@@ -267,6 +269,18 @@ config CRYPTO_XCBC
 	  http://csrc.nist.gov/encryption/modes/proposedmodes/
 	  xcbc-mac/xcbc-mac-spec.pdf
 
+config CRYPTO_VMAC
+	tristate "VMAC support"
+	depends on EXPERIMENTAL
+	select CRYPTO_HASH
+	select CRYPTO_MANAGER
+	help
+	  VMAC is a message authentication algorithm designed for
+	  very high speed on 64-bit architectures.
+
+	  See also:
+	  <http://fastcrypto.org/vmac>
+
 comment "Digest"
 
 config CRYPTO_CRC32C
@@ -289,6 +303,13 @@ config CRYPTO_CRC32C_INTEL
 	  gain performance compared with software implementation.
 	  Module will be crc32c-intel.
 
+config CRYPTO_GHASH
+	tristate "GHASH digest algorithm"
+	select CRYPTO_SHASH
+	select CRYPTO_GF128MUL
+	help
+	  GHASH is the message digest algorithm for GCM (Galois/Counter Mode).
+
 config CRYPTO_MD4
 	tristate "MD4 digest algorithm"
 	select CRYPTO_HASH
@@ -780,13 +801,14 @@ comment "Random Number Generation"
 
 config CRYPTO_ANSI_CPRNG
 	tristate "Pseudo Random Number Generation for Cryptographic modules"
+	default m
 	select CRYPTO_AES
 	select CRYPTO_RNG
-	select CRYPTO_FIPS
 	help
 	  This option enables the generic pseudo random number generator
 	  for cryptographic modules. Uses the Algorithm specified in
-	  ANSI X9.31 A.2.4
+	  ANSI X9.31 A.2.4. Note: this option must be enabled if CRYPTO_FIPS
+	  is selected.
 
 source "drivers/crypto/Kconfig"
 
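
With GHASH split out of gcm.c, GCM's hashing core is reachable on its own. Below is a minimal, hedged sketch of driving the new "ghash" algorithm through the generic shash interface that CRYPTO_GHASH selects; the helper name ghash_demo and the fixed 16-byte key length are assumptions for illustration only, not part of this patch.

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

static int ghash_demo(const u8 key[16], const u8 *data,
		      unsigned int len, u8 out[16])
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	tfm = crypto_alloc_shash("ghash", 0, 0);	/* ghash-generic module */
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_setkey(tfm, key, 16);	/* H, the hash subkey */
	if (err)
		goto out_free_tfm;

	desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	desc->tfm = tfm;
	desc->flags = 0;
	err = crypto_shash_digest(desc, data, len, out);	/* one shot */

	kfree(desc);
out_free_tfm:
	crypto_free_shash(tfm);
	return err;
}
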
diff --git a/crypto/Makefile b/crypto/Makefile
index 673d9f7c1bda..9e8f61908cb5 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -3,7 +3,7 @@
 #
 
 obj-$(CONFIG_CRYPTO) += crypto.o
-crypto-objs := api.o cipher.o digest.o compress.o
+crypto-objs := api.o cipher.o compress.o
 
 obj-$(CONFIG_CRYPTO_WORKQUEUE) += crypto_wq.o
 
@@ -22,7 +22,6 @@ obj-$(CONFIG_CRYPTO_BLKCIPHER2) += chainiv.o
 obj-$(CONFIG_CRYPTO_BLKCIPHER2) += eseqiv.o
 obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
 
-crypto_hash-objs := hash.o
 crypto_hash-objs += ahash.o
 crypto_hash-objs += shash.o
 obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o
@@ -33,6 +32,7 @@ cryptomgr-objs := algboss.o testmgr.o
 
 obj-$(CONFIG_CRYPTO_MANAGER2) += cryptomgr.o
 obj-$(CONFIG_CRYPTO_HMAC) += hmac.o
+obj-$(CONFIG_CRYPTO_VMAC) += vmac.o
 obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o
 obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o
 obj-$(CONFIG_CRYPTO_MD4) += md4.o
@@ -83,6 +83,7 @@ obj-$(CONFIG_CRYPTO_RNG2) += rng.o
 obj-$(CONFIG_CRYPTO_RNG2) += krng.o
 obj-$(CONFIG_CRYPTO_ANSI_CPRNG) += ansi_cprng.o
 obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o
+obj-$(CONFIG_CRYPTO_GHASH) += ghash-generic.o
 
 #
 # generic algorithms and the async_tx api
diff --git a/crypto/ablkcipher.c b/crypto/ablkcipher.c
index e11ce37c7104..f6f08336df5d 100644
--- a/crypto/ablkcipher.c
+++ b/crypto/ablkcipher.c
@@ -14,6 +14,7 @@
  */
 
 #include <crypto/internal/skcipher.h>
+#include <linux/cpumask.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
@@ -25,6 +26,8 @@
 
 #include "internal.h"
 
+static const char *skcipher_default_geniv __read_mostly;
+
 static int setkey_unaligned(struct crypto_ablkcipher *tfm, const u8 *key,
 			    unsigned int keylen)
 {
@@ -180,7 +183,14 @@ EXPORT_SYMBOL_GPL(crypto_givcipher_type);
 
 const char *crypto_default_geniv(const struct crypto_alg *alg)
 {
-	return alg->cra_flags & CRYPTO_ALG_ASYNC ? "eseqiv" : "chainiv";
+	if (((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+	     CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
+					 alg->cra_ablkcipher.ivsize) !=
+	    alg->cra_blocksize)
+		return "chainiv";
+
+	return alg->cra_flags & CRYPTO_ALG_ASYNC ?
+	       "eseqiv" : skcipher_default_geniv;
 }
 
 static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
@@ -201,8 +211,9 @@ static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
 	int err;
 
 	larval = crypto_larval_lookup(alg->cra_driver_name,
+				      (type & ~CRYPTO_ALG_TYPE_MASK) |
 				      CRYPTO_ALG_TYPE_GIVCIPHER,
-				      CRYPTO_ALG_TYPE_MASK);
+				      mask | CRYPTO_ALG_TYPE_MASK);
 	err = PTR_ERR(larval);
 	if (IS_ERR(larval))
 		goto out;
@@ -360,3 +371,17 @@ err:
 	return ERR_PTR(err);
 }
 EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher);
+
+static int __init skcipher_module_init(void)
+{
+	skcipher_default_geniv = num_possible_cpus() > 1 ?
+				 "eseqiv" : "chainiv";
+	return 0;
+}
+
+static void skcipher_module_exit(void)
+{
+}
+
+module_init(skcipher_module_init);
+module_exit(skcipher_module_exit);
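
The geniv default chosen above only takes effect when a givcipher template is instantiated; a plain ablkcipher caller is unaffected. For orientation, a hedged caller-side sketch of the ablkcipher request API that this file services (the helper name and the 16-byte key length are assumptions for the example; the crypto_* and ablkcipher_* calls are the stock kernel interface):

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int cbc_aes_encrypt_once(struct scatterlist *src,
				struct scatterlist *dst,
				unsigned int len, const u8 *key, u8 *iv)
{
	struct crypto_ablkcipher *tfm;
	struct ablkcipher_request *req;
	int err;

	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_ablkcipher_setkey(tfm, key, 16);	/* AES-128 */
	if (err)
		goto out_free_tfm;

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* No completion callback for brevity; an async implementation
	 * may return -EINPROGRESS, which a real caller must handle. */
	ablkcipher_request_set_callback(req, 0, NULL, NULL);
	ablkcipher_request_set_crypt(req, src, dst, len, iv);
	err = crypto_ablkcipher_encrypt(req);

	ablkcipher_request_free(req);
out_free_tfm:
	crypto_free_ablkcipher(tfm);
	return err;
}
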
diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c
index b8b66ec3883b..e78b7ee44a74 100644
--- a/crypto/aes_generic.c
+++ b/crypto/aes_generic.c
@@ -1174,7 +1174,7 @@ EXPORT_SYMBOL_GPL(crypto_il_tab);
 	ctx->key_enc[6 * i + 11] = t;		\
 } while (0)
 
-#define loop8(i)	do {			\
+#define loop8tophalf(i)	do {			\
 	t = ror32(t, 8);			\
 	t = ls_box(t) ^ rco_tab[i];		\
 	t ^= ctx->key_enc[8 * i];		\
@@ -1185,6 +1185,10 @@ EXPORT_SYMBOL_GPL(crypto_il_tab);
 	ctx->key_enc[8 * i + 10] = t;		\
 	t ^= ctx->key_enc[8 * i + 3];		\
 	ctx->key_enc[8 * i + 11] = t;		\
+} while (0)
+
+#define loop8(i)	do {			\
+	loop8tophalf(i);			\
 	t = ctx->key_enc[8 * i + 4] ^ ls_box(t); \
 	ctx->key_enc[8 * i + 12] = t;		\
 	t ^= ctx->key_enc[8 * i + 5];		\
@@ -1245,8 +1249,9 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
 	ctx->key_enc[5] = le32_to_cpu(key[5]);
 	ctx->key_enc[6] = le32_to_cpu(key[6]);
 	t = ctx->key_enc[7] = le32_to_cpu(key[7]);
-	for (i = 0; i < 7; ++i)
+	for (i = 0; i < 6; ++i)
 		loop8(i);
+	loop8tophalf(i);
 	break;
 }
 
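
A quick word count motivates the new loop bound. The AES-256 key schedule needs 4 * (Nr + 1) = 4 * 15 = 60 32-bit words (Nr = 14 rounds), of which Nk = 8 come straight from the key. Each loop8() emits 8 words; loop8tophalf() emits only the first 4 of those:

    old:  8 + 7 * 8      = 64 words generated (4 past the 60 the schedule uses)
    new:  8 + 6 * 8 + 4  = 60 words generated (exactly what is needed)

Splitting the macro lets the final half-iteration stop after the 60th word instead of running a full loop8().
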
diff --git a/crypto/ahash.c b/crypto/ahash.c
index f3476374f764..33a4ff45f842 100644
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -24,6 +24,19 @@
 
 #include "internal.h"
 
+struct ahash_request_priv {
+	crypto_completion_t complete;
+	void *data;
+	u8 *result;
+	void *ubuf[] CRYPTO_MINALIGN_ATTR;
+};
+
+static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
+{
+	return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
+			    halg);
+}
+
 static int hash_walk_next(struct crypto_hash_walk *walk)
 {
 	unsigned int alignmask = walk->alignmask;
@@ -132,36 +145,34 @@ int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
 static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
 				  unsigned int keylen)
 {
-	struct ahash_alg *ahash = crypto_ahash_alg(tfm);
 	unsigned long alignmask = crypto_ahash_alignmask(tfm);
 	int ret;
 	u8 *buffer, *alignbuffer;
 	unsigned long absize;
 
 	absize = keylen + alignmask;
-	buffer = kmalloc(absize, GFP_ATOMIC);
+	buffer = kmalloc(absize, GFP_KERNEL);
 	if (!buffer)
 		return -ENOMEM;
 
 	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 	memcpy(alignbuffer, key, keylen);
-	ret = ahash->setkey(tfm, alignbuffer, keylen);
-	memset(alignbuffer, 0, keylen);
-	kfree(buffer);
+	ret = tfm->setkey(tfm, alignbuffer, keylen);
+	kzfree(buffer);
 	return ret;
 }
 
-static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
-			unsigned int keylen)
+int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
+			unsigned int keylen)
 {
-	struct ahash_alg *ahash = crypto_ahash_alg(tfm);
 	unsigned long alignmask = crypto_ahash_alignmask(tfm);
 
 	if ((unsigned long)key & alignmask)
 		return ahash_setkey_unaligned(tfm, key, keylen);
 
-	return ahash->setkey(tfm, key, keylen);
+	return tfm->setkey(tfm, key, keylen);
 }
+EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
 
 static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
 			  unsigned int keylen)
@@ -169,44 +180,221 @@ static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
 	return -ENOSYS;
 }
 
-int crypto_ahash_import(struct ahash_request *req, const u8 *in)
+static inline unsigned int ahash_align_buffer_size(unsigned len,
+						   unsigned long mask)
+{
+	return len + (mask & ~(crypto_tfm_ctx_alignment() - 1));
+}
+
+static void ahash_op_unaligned_finish(struct ahash_request *req, int err)
+{
+	struct ahash_request_priv *priv = req->priv;
+
+	if (err == -EINPROGRESS)
+		return;
+
+	if (!err)
+		memcpy(priv->result, req->result,
+		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
+
+	kzfree(priv);
+}
+
+static void ahash_op_unaligned_done(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	ahash_op_unaligned_finish(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_op_unaligned(struct ahash_request *req,
+			      int (*op)(struct ahash_request *))
 {
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-	struct ahash_alg *alg = crypto_ahash_alg(tfm);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+	unsigned int ds = crypto_ahash_digestsize(tfm);
+	struct ahash_request_priv *priv;
+	int err;
+
+	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
+		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
+		       GFP_KERNEL : GFP_ATOMIC);
+	if (!priv)
+		return -ENOMEM;
 
-	memcpy(ahash_request_ctx(req), in, crypto_ahash_reqsize(tfm));
+	priv->result = req->result;
+	priv->complete = req->base.complete;
+	priv->data = req->base.data;
 
-	if (alg->reinit)
-		alg->reinit(req);
+	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
+	req->base.complete = ahash_op_unaligned_done;
+	req->base.data = req;
+	req->priv = priv;
 
-	return 0;
+	err = op(req);
+	ahash_op_unaligned_finish(req, err);
+
+	return err;
 }
-EXPORT_SYMBOL_GPL(crypto_ahash_import);
 
-static unsigned int crypto_ahash_ctxsize(struct crypto_alg *alg, u32 type,
-					 u32 mask)
+static int crypto_ahash_op(struct ahash_request *req,
+			   int (*op)(struct ahash_request *))
 {
-	return alg->cra_ctxsize;
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+
+	if ((unsigned long)req->result & alignmask)
+		return ahash_op_unaligned(req, op);
+
+	return op(req);
 }
 
-static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
+int crypto_ahash_final(struct ahash_request *req)
 {
-	struct ahash_alg *alg = &tfm->__crt_alg->cra_ahash;
-	struct ahash_tfm *crt = &tfm->crt_ahash;
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_final);
 
-	if (alg->digestsize > PAGE_SIZE / 8)
-		return -EINVAL;
+int crypto_ahash_finup(struct ahash_request *req)
+{
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_finup);
+
+int crypto_ahash_digest(struct ahash_request *req)
+{
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->digest);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_digest);
+
+static void ahash_def_finup_finish2(struct ahash_request *req, int err)
+{
+	struct ahash_request_priv *priv = req->priv;
+
+	if (err == -EINPROGRESS)
+		return;
+
+	if (!err)
+		memcpy(priv->result, req->result,
+		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
 
-	crt->init = alg->init;
-	crt->update = alg->update;
-	crt->final = alg->final;
-	crt->digest = alg->digest;
-	crt->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey;
-	crt->digestsize = alg->digestsize;
+	kzfree(priv);
+}
+
+static void ahash_def_finup_done2(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	ahash_def_finup_finish2(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_def_finup_finish1(struct ahash_request *req, int err)
+{
+	if (err)
+		goto out;
+
+	req->base.complete = ahash_def_finup_done2;
+	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+	err = crypto_ahash_reqtfm(req)->final(req);
+
+out:
+	ahash_def_finup_finish2(req, err);
+	return err;
+}
+
+static void ahash_def_finup_done1(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	err = ahash_def_finup_finish1(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_def_finup(struct ahash_request *req)
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+	unsigned int ds = crypto_ahash_digestsize(tfm);
+	struct ahash_request_priv *priv;
+
+	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
+		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
+		       GFP_KERNEL : GFP_ATOMIC);
+	if (!priv)
+		return -ENOMEM;
+
+	priv->result = req->result;
+	priv->complete = req->base.complete;
+	priv->data = req->base.data;
+
+	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
+	req->base.complete = ahash_def_finup_done1;
+	req->base.data = req;
+	req->priv = priv;
+
+	return ahash_def_finup_finish1(req, tfm->update(req));
+}
+
+static int ahash_no_export(struct ahash_request *req, void *out)
+{
+	return -ENOSYS;
+}
+
+static int ahash_no_import(struct ahash_request *req, const void *in)
+{
+	return -ENOSYS;
+}
+
+static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
+{
+	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
+	struct ahash_alg *alg = crypto_ahash_alg(hash);
+
+	hash->setkey = ahash_nosetkey;
+	hash->export = ahash_no_export;
+	hash->import = ahash_no_import;
+
+	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
+		return crypto_init_shash_ops_async(tfm);
+
+	hash->init = alg->init;
+	hash->update = alg->update;
+	hash->final = alg->final;
+	hash->finup = alg->finup ?: ahash_def_finup;
+	hash->digest = alg->digest;
+
+	if (alg->setkey)
+		hash->setkey = alg->setkey;
+	if (alg->export)
+		hash->export = alg->export;
+	if (alg->import)
+		hash->import = alg->import;
 
 	return 0;
 }
 
+static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
+{
+	if (alg->cra_type == &crypto_ahash_type)
+		return alg->cra_ctxsize;
+
+	return sizeof(struct crypto_shash *);
+}
+
 static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
 	__attribute__ ((unused));
 static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
@@ -215,17 +403,101 @@ static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
 	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
 					     "yes" : "no");
 	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
-	seq_printf(m, "digestsize   : %u\n", alg->cra_ahash.digestsize);
+	seq_printf(m, "digestsize   : %u\n",
+		   __crypto_hash_alg_common(alg)->digestsize);
 }
 
 const struct crypto_type crypto_ahash_type = {
-	.ctxsize = crypto_ahash_ctxsize,
-	.init = crypto_init_ahash_ops,
+	.extsize = crypto_ahash_extsize,
+	.init_tfm = crypto_ahash_init_tfm,
 #ifdef CONFIG_PROC_FS
 	.show = crypto_ahash_show,
 #endif
+	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
+	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
+	.type = CRYPTO_ALG_TYPE_AHASH,
+	.tfmsize = offsetof(struct crypto_ahash, base),
 };
 EXPORT_SYMBOL_GPL(crypto_ahash_type);
 
+struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
+					u32 mask)
+{
+	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_ahash);
+
+static int ahash_prepare_alg(struct ahash_alg *alg)
+{
+	struct crypto_alg *base = &alg->halg.base;
+
+	if (alg->halg.digestsize > PAGE_SIZE / 8 ||
+	    alg->halg.statesize > PAGE_SIZE / 8)
+		return -EINVAL;
+
+	base->cra_type = &crypto_ahash_type;
+	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
+	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;
+
+	return 0;
+}
+
+int crypto_register_ahash(struct ahash_alg *alg)
+{
+	struct crypto_alg *base = &alg->halg.base;
+	int err;
+
+	err = ahash_prepare_alg(alg);
+	if (err)
+		return err;
+
+	return crypto_register_alg(base);
+}
+EXPORT_SYMBOL_GPL(crypto_register_ahash);
+
+int crypto_unregister_ahash(struct ahash_alg *alg)
+{
+	return crypto_unregister_alg(&alg->halg.base);
+}
+EXPORT_SYMBOL_GPL(crypto_unregister_ahash);
+
+int ahash_register_instance(struct crypto_template *tmpl,
+			    struct ahash_instance *inst)
+{
+	int err;
+
+	err = ahash_prepare_alg(&inst->alg);
+	if (err)
+		return err;
+
+	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
+}
+EXPORT_SYMBOL_GPL(ahash_register_instance);
+
+void ahash_free_instance(struct crypto_instance *inst)
+{
+	crypto_drop_spawn(crypto_instance_ctx(inst));
+	kfree(ahash_instance(inst));
+}
+EXPORT_SYMBOL_GPL(ahash_free_instance);
+
+int crypto_init_ahash_spawn(struct crypto_ahash_spawn *spawn,
+			    struct hash_alg_common *alg,
+			    struct crypto_instance *inst)
+{
+	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
+				  &crypto_ahash_type);
+}
+EXPORT_SYMBOL_GPL(crypto_init_ahash_spawn);
+
+struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
+{
+	struct crypto_alg *alg;
+
+	alg = crypto_attr_alg2(rta, &crypto_ahash_type, type, mask);
+	return IS_ERR(alg) ? ERR_CAST(alg) : __crypto_hash_alg_common(alg);
+}
+EXPORT_SYMBOL_GPL(ahash_attr_alg);
+
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Asynchronous cryptographic hash type");
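
For orientation, a hedged caller-side sketch of the reworked ahash front end (the demo_* names are assumptions for the example; the crypto_ahash_* and ahash_request_* calls are the interface defined above). Note that with the new crypto_ahash_op() path an unaligned req->result is handled transparently via the bounce buffer in ahash_request_priv:

#include <crypto/hash.h>
#include <linux/completion.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

struct demo_wait {
	struct completion done;
	int err;
};

static void demo_complete(struct crypto_async_request *req, int err)
{
	struct demo_wait *w = req->data;

	if (err == -EINPROGRESS)	/* backlogged request restarted */
		return;
	w->err = err;
	complete(&w->done);
}

static int sha1_digest_demo(struct scatterlist *sg, unsigned int len, u8 *out)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct demo_wait w;
	int err;

	/* a plain shash implementation is wrapped automatically via
	 * crypto_init_shash_ops_async() in crypto_ahash_init_tfm() */
	tfm = crypto_alloc_ahash("sha1", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	init_completion(&w.done);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   demo_complete, &w);
	ahash_request_set_crypt(req, sg, out, len);

	err = crypto_ahash_digest(req);
	if (err == -EINPROGRESS || err == -EBUSY) {
		wait_for_completion(&w.done);
		err = w.err;
	}

	ahash_request_free(req);
out_free_tfm:
	crypto_free_ahash(tfm);
	return err;
}
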
diff --git a/crypto/algapi.c b/crypto/algapi.c
index 56c62e2858d5..f149b1c8b76d 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -81,16 +81,35 @@ static void crypto_destroy_instance(struct crypto_alg *alg)
 	crypto_tmpl_put(tmpl);
 }
 
+static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
+					    struct list_head *stack,
+					    struct list_head *top,
+					    struct list_head *secondary_spawns)
+{
+	struct crypto_spawn *spawn, *n;
+
+	if (list_empty(stack))
+		return NULL;
+
+	spawn = list_first_entry(stack, struct crypto_spawn, list);
+	n = list_entry(spawn->list.next, struct crypto_spawn, list);
+
+	if (spawn->alg && &n->list != stack && !n->alg)
+		n->alg = (n->list.next == stack) ? alg :
+			 &list_entry(n->list.next, struct crypto_spawn,
+				     list)->inst->alg;
+
+	list_move(&spawn->list, secondary_spawns);
+
+	return &n->list == stack ? top : &n->inst->alg.cra_users;
+}
+
 static void crypto_remove_spawn(struct crypto_spawn *spawn,
-				struct list_head *list,
-				struct list_head *secondary_spawns)
+				struct list_head *list)
 {
 	struct crypto_instance *inst = spawn->inst;
 	struct crypto_template *tmpl = inst->tmpl;
 
-	list_del_init(&spawn->list);
-	spawn->alg = NULL;
-
 	if (crypto_is_dead(&inst->alg))
 		return;
 
@@ -106,25 +125,55 @@ static void crypto_remove_spawn(struct crypto_spawn *spawn,
 	hlist_del(&inst->list);
 	inst->alg.cra_destroy = crypto_destroy_instance;
 
-	list_splice(&inst->alg.cra_users, secondary_spawns);
+	BUG_ON(!list_empty(&inst->alg.cra_users));
 }
 
-static void crypto_remove_spawns(struct list_head *spawns,
-				 struct list_head *list, u32 new_type)
+static void crypto_remove_spawns(struct crypto_alg *alg,
+				 struct list_head *list,
+				 struct crypto_alg *nalg)
 {
+	u32 new_type = (nalg ?: alg)->cra_flags;
 	struct crypto_spawn *spawn, *n;
 	LIST_HEAD(secondary_spawns);
+	struct list_head *spawns;
+	LIST_HEAD(stack);
+	LIST_HEAD(top);
 
+	spawns = &alg->cra_users;
 	list_for_each_entry_safe(spawn, n, spawns, list) {
 		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
 			continue;
 
-		crypto_remove_spawn(spawn, list, &secondary_spawns);
+		list_move(&spawn->list, &top);
 	}
 
-	while (!list_empty(&secondary_spawns)) {
-		list_for_each_entry_safe(spawn, n, &secondary_spawns, list)
-			crypto_remove_spawn(spawn, list, &secondary_spawns);
+	spawns = &top;
+	do {
+		while (!list_empty(spawns)) {
+			struct crypto_instance *inst;
+
+			spawn = list_first_entry(spawns, struct crypto_spawn,
+						 list);
+			inst = spawn->inst;
+
+			BUG_ON(&inst->alg == alg);
+
+			list_move(&spawn->list, &stack);
+
+			if (&inst->alg == nalg)
+				break;
+
+			spawn->alg = NULL;
+			spawns = &inst->alg.cra_users;
+		}
+	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
+					      &secondary_spawns)));
+
+	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
+		if (spawn->alg)
+			list_move(&spawn->list, &spawn->alg->cra_users);
+		else
+			crypto_remove_spawn(spawn, list);
 	}
 }
 
@@ -258,7 +307,7 @@ found:
 		    q->cra_priority > alg->cra_priority)
 			continue;
 
-		crypto_remove_spawns(&q->cra_users, &list, alg->cra_flags);
+		crypto_remove_spawns(q, &list, alg);
 	}
 
 complete:
@@ -330,7 +379,7 @@ static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
 
 	crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg);
 	list_del_init(&alg->cra_list);
-	crypto_remove_spawns(&alg->cra_users, list, alg->cra_flags);
+	crypto_remove_spawns(alg, list, NULL);
 
 	return 0;
 }
@@ -488,20 +537,38 @@ int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
 }
 EXPORT_SYMBOL_GPL(crypto_init_spawn);
 
+int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
+		       struct crypto_instance *inst,
+		       const struct crypto_type *frontend)
+{
+	int err = -EINVAL;
+
+	if (frontend && (alg->cra_flags ^ frontend->type) & frontend->maskset)
+		goto out;
+
+	spawn->frontend = frontend;
+	err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);
+
+out:
+	return err;
+}
+EXPORT_SYMBOL_GPL(crypto_init_spawn2);
+
 void crypto_drop_spawn(struct crypto_spawn *spawn)
 {
+	if (!spawn->alg)
+		return;
+
 	down_write(&crypto_alg_sem);
 	list_del(&spawn->list);
 	up_write(&crypto_alg_sem);
 }
 EXPORT_SYMBOL_GPL(crypto_drop_spawn);
 
-struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
-				    u32 mask)
+static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
 {
 	struct crypto_alg *alg;
 	struct crypto_alg *alg2;
-	struct crypto_tfm *tfm;
 
 	down_read(&crypto_alg_sem);
 	alg = spawn->alg;
@@ -516,6 +583,19 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
 		return ERR_PTR(-EAGAIN);
 	}
 
+	return alg;
+}
+
+struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
+				    u32 mask)
+{
+	struct crypto_alg *alg;
+	struct crypto_tfm *tfm;
+
+	alg = crypto_spawn_alg(spawn);
+	if (IS_ERR(alg))
+		return ERR_CAST(alg);
+
 	tfm = ERR_PTR(-EINVAL);
 	if (unlikely((alg->cra_flags ^ type) & mask))
 		goto out_put_alg;
@@ -532,6 +612,27 @@ out_put_alg:
 }
 EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
 
+void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
+{
+	struct crypto_alg *alg;
+	struct crypto_tfm *tfm;
+
+	alg = crypto_spawn_alg(spawn);
+	if (IS_ERR(alg))
+		return ERR_CAST(alg);
+
+	tfm = crypto_create_tfm(alg, spawn->frontend);
+	if (IS_ERR(tfm))
+		goto out_put_alg;
+
+	return tfm;
+
+out_put_alg:
+	crypto_mod_put(alg);
+	return tfm;
+}
+EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
+
 int crypto_register_notifier(struct notifier_block *nb)
 {
 	return blocking_notifier_chain_register(&crypto_chain, nb);
@@ -595,7 +696,9 @@ const char *crypto_attr_alg_name(struct rtattr *rta)
 }
 EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
 
-struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask)
+struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
+				    const struct crypto_type *frontend,
+				    u32 type, u32 mask)
 {
 	const char *name;
 	int err;
@@ -605,9 +708,9 @@ struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask)
 	if (IS_ERR(name))
 		return ERR_PTR(err);
 
-	return crypto_alg_mod_lookup(name, type, mask);
+	return crypto_find_alg(name, frontend, type, mask);
 }
-EXPORT_SYMBOL_GPL(crypto_attr_alg);
+EXPORT_SYMBOL_GPL(crypto_attr_alg2);
 
 int crypto_attr_u32(struct rtattr *rta, u32 *num)
 {
@@ -627,17 +730,20 @@ int crypto_attr_u32(struct rtattr *rta, u32 *num)
 }
 EXPORT_SYMBOL_GPL(crypto_attr_u32);
 
-struct crypto_instance *crypto_alloc_instance(const char *name,
-					      struct crypto_alg *alg)
+void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
+			     unsigned int head)
 {
 	struct crypto_instance *inst;
-	struct crypto_spawn *spawn;
+	char *p;
 	int err;
 
-	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
-	if (!inst)
+	p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
+		    GFP_KERNEL);
+	if (!p)
 		return ERR_PTR(-ENOMEM);
 
+	inst = (void *)(p + head);
+
 	err = -ENAMETOOLONG;
 	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
 		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
@@ -647,6 +753,25 @@ struct crypto_instance *crypto_alloc_instance(const char *name,
 		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
 		goto err_free_inst;
 
+	return p;
+
+err_free_inst:
+	kfree(p);
+	return ERR_PTR(err);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_instance2);
+
+struct crypto_instance *crypto_alloc_instance(const char *name,
+					      struct crypto_alg *alg)
+{
+	struct crypto_instance *inst;
+	struct crypto_spawn *spawn;
+	int err;
+
+	inst = crypto_alloc_instance2(name, alg, 0);
+	if (IS_ERR(inst))
+		goto out;
+
 	spawn = crypto_instance_ctx(inst);
 	err = crypto_init_spawn(spawn, alg, inst,
 				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
@@ -658,7 +783,10 @@ struct crypto_instance *crypto_alloc_instance(const char *name,
 
 err_free_inst:
 	kfree(inst);
-	return ERR_PTR(err);
+	inst = ERR_PTR(err);
+
+out:
+	return inst;
 }
 EXPORT_SYMBOL_GPL(crypto_alloc_instance);
 
@@ -692,7 +820,7 @@ out:
 }
 EXPORT_SYMBOL_GPL(crypto_enqueue_request);
 
-struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
+void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset)
 {
 	struct list_head *request;
 
@@ -707,7 +835,14 @@ struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
 	request = queue->list.next;
 	list_del(request);
 
-	return list_entry(request, struct crypto_async_request, list);
+	return (char *)list_entry(request, struct crypto_async_request, list) -
+	       offset;
+}
+EXPORT_SYMBOL_GPL(__crypto_dequeue_request);
+
+struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
+{
+	return __crypto_dequeue_request(queue, 0);
 }
 EXPORT_SYMBOL_GPL(crypto_dequeue_request);
 
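
The point of the new __crypto_dequeue_request() offset parameter is that a driver can embed the crypto_async_request at a non-zero offset inside its own per-request struct and still get its wrapper back in one step. A hedged sketch (my_driver_req, my_queue and friends are invented names for illustration; NULL-on-empty behaviour follows crypto_dequeue_request()):

#include <crypto/algapi.h>
#include <linux/kernel.h>

struct my_driver_req {
	u32 cookie;				/* example per-request state */
	struct crypto_async_request base;	/* deliberately not first */
};

static struct crypto_queue my_queue;

static void my_queue_setup(void)
{
	crypto_init_queue(&my_queue, 128);	/* max queue length */
}

static int my_enqueue(struct my_driver_req *drv)
{
	return crypto_enqueue_request(&my_queue, &drv->base);
}

static struct my_driver_req *my_dequeue(void)
{
	/* walks back from the embedded base to the wrapper struct */
	return __crypto_dequeue_request(&my_queue,
					offsetof(struct my_driver_req, base));
}
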
diff --git a/crypto/algboss.c b/crypto/algboss.c
index 9908dd830c26..412241ce4cfa 100644
--- a/crypto/algboss.c
+++ b/crypto/algboss.c
@@ -68,6 +68,11 @@ static int cryptomgr_probe(void *data)
 		goto err;
 
 	do {
+		if (tmpl->create) {
+			err = tmpl->create(tmpl, param->tb);
+			continue;
+		}
+
 		inst = tmpl->alloc(param->tb);
 		if (IS_ERR(inst))
 			err = PTR_ERR(inst);
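
The new ->create() hook lets a template register its instance itself instead of returning one from ->alloc(). A hedged sketch of the shape implied by the call site above (my_create and "mytmpl" are placeholders; a real ->create() would parse tb, build an instance and register it, e.g. via ahash_register_instance()):

#include <crypto/algapi.h>
#include <linux/module.h>

static int my_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	/* parse tb, construct an instance, register it; 0 or -errno */
	return -ENOSYS;		/* stub for this sketch */
}

static struct crypto_template my_tmpl = {
	.name = "mytmpl",
	.create = my_create,
	.module = THIS_MODULE,
};
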
diff --git a/crypto/ansi_cprng.c b/crypto/ansi_cprng.c
index d80ed4c1e009..3aa6e3834bfe 100644
--- a/crypto/ansi_cprng.c
+++ b/crypto/ansi_cprng.c
@@ -187,7 +187,6 @@ static int _get_more_prng_bytes(struct prng_context *ctx)
 /*  Our exported functions */
 static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
 {
-	unsigned long flags;
 	unsigned char *ptr = buf;
 	unsigned int byte_count = (unsigned int)nbytes;
 	int err;
@@ -196,7 +195,7 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
 	if (nbytes < 0)
 		return -EINVAL;
 
-	spin_lock_irqsave(&ctx->prng_lock, flags);
+	spin_lock_bh(&ctx->prng_lock);
 
 	err = -EINVAL;
 	if (ctx->flags & PRNG_NEED_RESET)
@@ -268,7 +267,7 @@ empty_rbuf:
 		goto remainder;
 
 done:
-	spin_unlock_irqrestore(&ctx->prng_lock, flags);
+	spin_unlock_bh(&ctx->prng_lock);
 	dbgprint(KERN_CRIT "returning %d from get_prng_bytes in context %p\n",
 		 err, ctx);
 	return err;
@@ -284,10 +283,9 @@ static int reset_prng_context(struct prng_context *ctx,
 			      unsigned char *V, unsigned char *DT)
 {
 	int ret;
-	int rc = -EINVAL;
 	unsigned char *prng_key;
 
-	spin_lock(&ctx->prng_lock);
+	spin_lock_bh(&ctx->prng_lock);
 	ctx->flags |= PRNG_NEED_RESET;
 
 	prng_key = (key != NULL) ? key : (unsigned char *)DEFAULT_PRNG_KEY;
@@ -308,34 +306,20 @@ static int reset_prng_context(struct prng_context *ctx,
 	memset(ctx->rand_data, 0, DEFAULT_BLK_SZ);
 	memset(ctx->last_rand_data, 0, DEFAULT_BLK_SZ);
 
-	if (ctx->tfm)
-		crypto_free_cipher(ctx->tfm);
-
-	ctx->tfm = crypto_alloc_cipher("aes", 0, 0);
-	if (IS_ERR(ctx->tfm)) {
-		dbgprint(KERN_CRIT "Failed to alloc tfm for context %p\n",
-			 ctx);
-		ctx->tfm = NULL;
-		goto out;
-	}
-
 	ctx->rand_data_valid = DEFAULT_BLK_SZ;
 
 	ret = crypto_cipher_setkey(ctx->tfm, prng_key, klen);
 	if (ret) {
 		dbgprint(KERN_CRIT "PRNG: setkey() failed flags=%x\n",
 			 crypto_cipher_get_flags(ctx->tfm));
-		crypto_free_cipher(ctx->tfm);
 		goto out;
 	}
 
-	rc = 0;
+	ret = 0;
 	ctx->flags &= ~PRNG_NEED_RESET;
 out:
-	spin_unlock(&ctx->prng_lock);
-
-	return rc;
-
+	spin_unlock_bh(&ctx->prng_lock);
+	return ret;
 }
 
 static int cprng_init(struct crypto_tfm *tfm)
@@ -343,6 +327,12 @@ static int cprng_init(struct crypto_tfm *tfm)
 	struct prng_context *ctx = crypto_tfm_ctx(tfm);
 
 	spin_lock_init(&ctx->prng_lock);
+	ctx->tfm = crypto_alloc_cipher("aes", 0, 0);
+	if (IS_ERR(ctx->tfm)) {
+		dbgprint(KERN_CRIT "Failed to alloc tfm for context %p\n",
+			 ctx);
+		return PTR_ERR(ctx->tfm);
+	}
 
 	if (reset_prng_context(ctx, NULL, DEFAULT_PRNG_KSZ, NULL, NULL) < 0)
 		return -EINVAL;
@@ -418,17 +408,10 @@ static struct crypto_alg rng_alg = {
 /* Module initalization */
 static int __init prng_mod_init(void)
 {
-	int ret = 0;
-
 	if (fips_enabled)
 		rng_alg.cra_priority += 200;
 
-	ret = crypto_register_alg(&rng_alg);
-
-	if (ret)
-		goto out;
-out:
-	return 0;
+	return crypto_register_alg(&rng_alg);
 }
 
 static void __exit prng_mod_fini(void)
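
For context, a hedged sketch of consuming this generator through the generic rng API (the helper name is an assumption; the crypto_rng_* calls are the stock interface from crypto/rng.c). "stdrng" resolves to the highest-priority registered RNG, and prng_mod_init() above bumps ansi_cprng's priority by 200 when fips=1:

#include <crypto/rng.h>
#include <linux/err.h>

static int cprng_demo(u8 *buf, unsigned int len)
{
	struct crypto_rng *rng;
	int err;

	rng = crypto_alloc_rng("stdrng", 0, 0);
	if (IS_ERR(rng))
		return PTR_ERR(rng);

	err = crypto_rng_get_bytes(rng, buf, len);	/* negative on error */

	crypto_free_rng(rng);
	return err;
}
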
diff --git a/crypto/api.c b/crypto/api.c
index d5944f92b416..798526d90538 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -285,13 +285,6 @@ static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
 	switch (crypto_tfm_alg_type(tfm)) {
 	case CRYPTO_ALG_TYPE_CIPHER:
 		return crypto_init_cipher_ops(tfm);
-
-	case CRYPTO_ALG_TYPE_DIGEST:
-		if ((mask & CRYPTO_ALG_TYPE_HASH_MASK) !=
-		    CRYPTO_ALG_TYPE_HASH_MASK)
-			return crypto_init_digest_ops_async(tfm);
-		else
-			return crypto_init_digest_ops(tfm);
 
 	case CRYPTO_ALG_TYPE_COMPRESS:
 		return crypto_init_compress_ops(tfm);
@@ -318,11 +311,7 @@ static void crypto_exit_ops(struct crypto_tfm *tfm)
 	case CRYPTO_ALG_TYPE_CIPHER:
 		crypto_exit_cipher_ops(tfm);
 		break;
 
-	case CRYPTO_ALG_TYPE_DIGEST:
-		crypto_exit_digest_ops(tfm);
-		break;
-
 	case CRYPTO_ALG_TYPE_COMPRESS:
 		crypto_exit_compress_ops(tfm);
 		break;
@@ -349,11 +338,7 @@ static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
 	case CRYPTO_ALG_TYPE_CIPHER:
 		len += crypto_cipher_ctxsize(alg);
 		break;
 
-	case CRYPTO_ALG_TYPE_DIGEST:
-		len += crypto_digest_ctxsize(alg);
-		break;
-
 	case CRYPTO_ALG_TYPE_COMPRESS:
 		len += crypto_compress_ctxsize(alg);
 		break;
@@ -472,7 +457,7 @@ void *crypto_create_tfm(struct crypto_alg *alg,
 	int err = -ENOMEM;
 
 	tfmsize = frontend->tfmsize;
-	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend);
+	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);
 
 	mem = kzalloc(total, GFP_KERNEL);
 	if (mem == NULL)
@@ -481,7 +466,7 @@ void *crypto_create_tfm(struct crypto_alg *alg,
 	tfm = (struct crypto_tfm *)(mem + tfmsize);
 	tfm->__crt_alg = alg;
 
-	err = frontend->init_tfm(tfm, frontend);
+	err = frontend->init_tfm(tfm);
 	if (err)
 		goto out_free_tfm;
 
@@ -503,6 +488,27 @@ out:
 }
 EXPORT_SYMBOL_GPL(crypto_create_tfm);
 
+struct crypto_alg *crypto_find_alg(const char *alg_name,
+				   const struct crypto_type *frontend,
+				   u32 type, u32 mask)
+{
+	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
+		crypto_alg_mod_lookup;
+
+	if (frontend) {
+		type &= frontend->maskclear;
+		mask &= frontend->maskclear;
+		type |= frontend->type;
+		mask |= frontend->maskset;
+
+		if (frontend->lookup)
+			lookup = frontend->lookup;
+	}
+
+	return lookup(alg_name, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_find_alg);
+
 /*
  *	crypto_alloc_tfm - Locate algorithm and allocate transform
  *	@alg_name: Name of algorithm
@@ -526,21 +532,13 @@ EXPORT_SYMBOL_GPL(crypto_create_tfm);
 void *crypto_alloc_tfm(const char *alg_name,
 		       const struct crypto_type *frontend, u32 type, u32 mask)
 {
-	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
 	void *tfm;
 	int err;
 
-	type &= frontend->maskclear;
-	mask &= frontend->maskclear;
-	type |= frontend->type;
-	mask |= frontend->maskset;
-
-	lookup = frontend->lookup ?: crypto_alg_mod_lookup;
-
 	for (;;) {
 		struct crypto_alg *alg;
 
-		alg = lookup(alg_name, type, mask);
+		alg = crypto_find_alg(alg_name, frontend, type, mask);
 		if (IS_ERR(alg)) {
 			err = PTR_ERR(alg);
 			goto err;
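
In effect, for the ahash front end defined earlier (type CRYPTO_ALG_TYPE_AHASH, maskclear ~CRYPTO_ALG_TYPE_MASK, maskset CRYPTO_ALG_TYPE_AHASH_MASK), crypto_find_alg() computes:

	type = (type & ~CRYPTO_ALG_TYPE_MASK) | CRYPTO_ALG_TYPE_AHASH;
	mask = (mask & ~CRYPTO_ALG_TYPE_MASK) | CRYPTO_ALG_TYPE_AHASH_MASK;

before the lookup: caller-supplied bits outside the type field survive, while the type field itself is pinned to the front end's own type. Factoring this out lets crypto_attr_alg2() and crypto_alloc_tfm() share one lookup path.
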
diff --git a/crypto/async_tx/async_xor.c b/crypto/async_tx/async_xor.c
index 95fe2c8d6c51..90dd3f8bd283 100644
--- a/crypto/async_tx/async_xor.c
+++ b/crypto/async_tx/async_xor.c
@@ -300,7 +300,7 @@ EXPORT_SYMBOL_GPL(async_xor_zero_sum);
 
 static int __init async_xor_init(void)
 {
-	#ifdef CONFIG_DMA_ENGINE
+	#ifdef CONFIG_ASYNC_TX_DMA
 	/* To conserve stack space the input src_list (array of page pointers)
 	 * is reused to hold the array of dma addresses passed to the driver.
 	 * This conversion is only possible when dma_addr_t is less than the
306 * This conversion is only possible when dma_addr_t is less than the 306 * This conversion is only possible when dma_addr_t is less than the
diff --git a/crypto/authenc.c b/crypto/authenc.c
index 5793b64c81a8..4d6f49a5daeb 100644
--- a/crypto/authenc.c
+++ b/crypto/authenc.c
@@ -23,24 +23,36 @@
23#include <linux/slab.h> 23#include <linux/slab.h>
24#include <linux/spinlock.h> 24#include <linux/spinlock.h>
25 25
26typedef u8 *(*authenc_ahash_t)(struct aead_request *req, unsigned int flags);
27
26struct authenc_instance_ctx { 28struct authenc_instance_ctx {
27 struct crypto_spawn auth; 29 struct crypto_ahash_spawn auth;
28 struct crypto_skcipher_spawn enc; 30 struct crypto_skcipher_spawn enc;
29}; 31};
30 32
31struct crypto_authenc_ctx { 33struct crypto_authenc_ctx {
32 spinlock_t auth_lock; 34 unsigned int reqoff;
33 struct crypto_hash *auth; 35 struct crypto_ahash *auth;
34 struct crypto_ablkcipher *enc; 36 struct crypto_ablkcipher *enc;
35}; 37};
36 38
39struct authenc_request_ctx {
40 unsigned int cryptlen;
41 struct scatterlist *sg;
42 struct scatterlist asg[2];
43 struct scatterlist cipher[2];
44 crypto_completion_t complete;
45 crypto_completion_t update_complete;
46 char tail[];
47};
48
37static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key, 49static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
38 unsigned int keylen) 50 unsigned int keylen)
39{ 51{
40 unsigned int authkeylen; 52 unsigned int authkeylen;
41 unsigned int enckeylen; 53 unsigned int enckeylen;
42 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); 54 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
43 struct crypto_hash *auth = ctx->auth; 55 struct crypto_ahash *auth = ctx->auth;
44 struct crypto_ablkcipher *enc = ctx->enc; 56 struct crypto_ablkcipher *enc = ctx->enc;
45 struct rtattr *rta = (void *)key; 57 struct rtattr *rta = (void *)key;
46 struct crypto_authenc_key_param *param; 58 struct crypto_authenc_key_param *param;
@@ -64,11 +76,11 @@ static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
64 76
65 authkeylen = keylen - enckeylen; 77 authkeylen = keylen - enckeylen;
66 78
67 crypto_hash_clear_flags(auth, CRYPTO_TFM_REQ_MASK); 79 crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
68 crypto_hash_set_flags(auth, crypto_aead_get_flags(authenc) & 80 crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc) &
69 CRYPTO_TFM_REQ_MASK); 81 CRYPTO_TFM_REQ_MASK);
70 err = crypto_hash_setkey(auth, key, authkeylen); 82 err = crypto_ahash_setkey(auth, key, authkeylen);
71 crypto_aead_set_flags(authenc, crypto_hash_get_flags(auth) & 83 crypto_aead_set_flags(authenc, crypto_ahash_get_flags(auth) &
72 CRYPTO_TFM_RES_MASK); 84 CRYPTO_TFM_RES_MASK);
73 85
74 if (err) 86 if (err)
@@ -103,40 +115,198 @@ static void authenc_chain(struct scatterlist *head, struct scatterlist *sg,
103 sg_mark_end(head); 115 sg_mark_end(head);
104} 116}
105 117
106static u8 *crypto_authenc_hash(struct aead_request *req, unsigned int flags, 118static void authenc_geniv_ahash_update_done(struct crypto_async_request *areq,
107 struct scatterlist *cipher, 119 int err)
108 unsigned int cryptlen) 120{
121 struct aead_request *req = areq->data;
122 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
123 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
124 struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
125 struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
126
127 if (err)
128 goto out;
129
130 ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result,
131 areq_ctx->cryptlen);
132 ahash_request_set_callback(ahreq, aead_request_flags(req) &
133 CRYPTO_TFM_REQ_MAY_SLEEP,
134 areq_ctx->complete, req);
135
136 err = crypto_ahash_finup(ahreq);
137 if (err)
138 goto out;
139
140 scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg,
141 areq_ctx->cryptlen,
142 crypto_aead_authsize(authenc), 1);
143
144out:
145 aead_request_complete(req, err);
146}
147
148static void authenc_geniv_ahash_done(struct crypto_async_request *areq, int err)
149{
150 struct aead_request *req = areq->data;
151 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
152 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
153 struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
154 struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
155
156 if (err)
157 goto out;
158
159 scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg,
160 areq_ctx->cryptlen,
161 crypto_aead_authsize(authenc), 1);
162
163out:
164 aead_request_complete(req, err);
165}
166
167static void authenc_verify_ahash_update_done(struct crypto_async_request *areq,
168 int err)
109{ 169{
170 u8 *ihash;
171 unsigned int authsize;
172 struct ablkcipher_request *abreq;
173 struct aead_request *req = areq->data;
110 struct crypto_aead *authenc = crypto_aead_reqtfm(req); 174 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
111 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); 175 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
112 struct crypto_hash *auth = ctx->auth; 176 struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
113 struct hash_desc desc = { 177 struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
114 .tfm = auth, 178
115 .flags = aead_request_flags(req) & flags, 179 if (err)
116 }; 180 goto out;
117 u8 *hash = aead_request_ctx(req); 181
182 ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result,
183 areq_ctx->cryptlen);
184 ahash_request_set_callback(ahreq, aead_request_flags(req) &
185 CRYPTO_TFM_REQ_MAY_SLEEP,
186 areq_ctx->complete, req);
187
188 err = crypto_ahash_finup(ahreq);
189 if (err)
190 goto out;
191
192 authsize = crypto_aead_authsize(authenc);
193 ihash = ahreq->result + authsize;
194 scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
195 authsize, 0);
196
197 err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG: 0;
198 if (err)
199 goto out;
200
201 abreq = aead_request_ctx(req);
202 ablkcipher_request_set_tfm(abreq, ctx->enc);
203 ablkcipher_request_set_callback(abreq, aead_request_flags(req),
204 req->base.complete, req->base.data);
205 ablkcipher_request_set_crypt(abreq, req->src, req->dst,
206 req->cryptlen, req->iv);
207
208 err = crypto_ablkcipher_decrypt(abreq);
209
210out:
211 aead_request_complete(req, err);
212}
213
214static void authenc_verify_ahash_done(struct crypto_async_request *areq,
215 int err)
216{
217 u8 *ihash;
218 unsigned int authsize;
219 struct ablkcipher_request *abreq;
220 struct aead_request *req = areq->data;
221 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
222 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
223 struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
224 struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
225
226 if (err)
227 goto out;
228
229 authsize = crypto_aead_authsize(authenc);
230 ihash = ahreq->result + authsize;
231 scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
232 authsize, 0);
233
234 err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG: 0;
235 if (err)
236 goto out;
237
238 abreq = aead_request_ctx(req);
239 ablkcipher_request_set_tfm(abreq, ctx->enc);
240 ablkcipher_request_set_callback(abreq, aead_request_flags(req),
241 req->base.complete, req->base.data);
242 ablkcipher_request_set_crypt(abreq, req->src, req->dst,
243 req->cryptlen, req->iv);
244
245 err = crypto_ablkcipher_decrypt(abreq);
246
247out:
248 aead_request_complete(req, err);
249}
250
251static u8 *crypto_authenc_ahash_fb(struct aead_request *req, unsigned int flags)
252{
253 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
254 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
255 struct crypto_ahash *auth = ctx->auth;
256 struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
257 struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
258 u8 *hash = areq_ctx->tail;
118 int err; 259 int err;
119 260
120 hash = (u8 *)ALIGN((unsigned long)hash + crypto_hash_alignmask(auth), 261 hash = (u8 *)ALIGN((unsigned long)hash + crypto_ahash_alignmask(auth),
121 crypto_hash_alignmask(auth) + 1); 262 crypto_ahash_alignmask(auth) + 1);
263
264 ahash_request_set_tfm(ahreq, auth);
122 265
123 spin_lock_bh(&ctx->auth_lock); 266 err = crypto_ahash_init(ahreq);
124 err = crypto_hash_init(&desc);
125 if (err) 267 if (err)
126 goto auth_unlock; 268 return ERR_PTR(err);
269
270 ahash_request_set_crypt(ahreq, req->assoc, hash, req->assoclen);
271 ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
272 areq_ctx->update_complete, req);
127 273
128 err = crypto_hash_update(&desc, req->assoc, req->assoclen); 274 err = crypto_ahash_update(ahreq);
129 if (err) 275 if (err)
130 goto auth_unlock; 276 return ERR_PTR(err);
277
278 ahash_request_set_crypt(ahreq, areq_ctx->sg, hash,
279 areq_ctx->cryptlen);
280 ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
281 areq_ctx->complete, req);
131 282
132 err = crypto_hash_update(&desc, cipher, cryptlen); 283 err = crypto_ahash_finup(ahreq);
133 if (err) 284 if (err)
134 goto auth_unlock; 285 return ERR_PTR(err);
135 286
136 err = crypto_hash_final(&desc, hash); 287 return hash;
137auth_unlock: 288}
138 spin_unlock_bh(&ctx->auth_lock); 289
290static u8 *crypto_authenc_ahash(struct aead_request *req, unsigned int flags)
291{
292 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
293 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
294 struct crypto_ahash *auth = ctx->auth;
295 struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
296 struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
297 u8 *hash = areq_ctx->tail;
298 int err;
139 299
300 hash = (u8 *)ALIGN((unsigned long)hash + crypto_ahash_alignmask(auth),
301 crypto_ahash_alignmask(auth) + 1);
302
303 ahash_request_set_tfm(ahreq, auth);
304 ahash_request_set_crypt(ahreq, areq_ctx->sg, hash,
305 areq_ctx->cryptlen);
306 ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
307 areq_ctx->complete, req);
308
309 err = crypto_ahash_digest(ahreq);
140 if (err) 310 if (err)
141 return ERR_PTR(err); 311 return ERR_PTR(err);
142 312
@@ -147,11 +317,15 @@ static int crypto_authenc_genicv(struct aead_request *req, u8 *iv,
147 unsigned int flags) 317 unsigned int flags)
148{ 318{
149 struct crypto_aead *authenc = crypto_aead_reqtfm(req); 319 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
320 struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
150 struct scatterlist *dst = req->dst; 321 struct scatterlist *dst = req->dst;
151 struct scatterlist cipher[2]; 322 struct scatterlist *assoc = req->assoc;
152 struct page *dstp; 323 struct scatterlist *cipher = areq_ctx->cipher;
324 struct scatterlist *asg = areq_ctx->asg;
153 unsigned int ivsize = crypto_aead_ivsize(authenc); 325 unsigned int ivsize = crypto_aead_ivsize(authenc);
154 unsigned int cryptlen; 326 unsigned int cryptlen = req->cryptlen;
327 authenc_ahash_t authenc_ahash_fn = crypto_authenc_ahash_fb;
328 struct page *dstp;
155 u8 *vdst; 329 u8 *vdst;
156 u8 *hash; 330 u8 *hash;
157 331
@@ -163,10 +337,25 @@ static int crypto_authenc_genicv(struct aead_request *req, u8 *iv,
163 sg_set_buf(cipher, iv, ivsize); 337 sg_set_buf(cipher, iv, ivsize);
164 authenc_chain(cipher, dst, vdst == iv + ivsize); 338 authenc_chain(cipher, dst, vdst == iv + ivsize);
165 dst = cipher; 339 dst = cipher;
340 cryptlen += ivsize;
166 } 341 }
167 342
168 cryptlen = req->cryptlen + ivsize; 343 if (sg_is_last(assoc)) {
169 hash = crypto_authenc_hash(req, flags, dst, cryptlen); 344 authenc_ahash_fn = crypto_authenc_ahash;
345 sg_init_table(asg, 2);
346 sg_set_page(asg, sg_page(assoc), assoc->length, assoc->offset);
347 authenc_chain(asg, dst, 0);
348 dst = asg;
349 cryptlen += req->assoclen;
350 }
351
352 areq_ctx->cryptlen = cryptlen;
353 areq_ctx->sg = dst;
354
355 areq_ctx->complete = authenc_geniv_ahash_done;
356 areq_ctx->update_complete = authenc_geniv_ahash_update_done;
357
358 hash = authenc_ahash_fn(req, flags);
170 if (IS_ERR(hash)) 359 if (IS_ERR(hash))
171 return PTR_ERR(hash); 360 return PTR_ERR(hash);
172 361
@@ -256,22 +445,25 @@ static int crypto_authenc_givencrypt(struct aead_givcrypt_request *req)
256} 445}
257 446
258static int crypto_authenc_verify(struct aead_request *req, 447static int crypto_authenc_verify(struct aead_request *req,
259 struct scatterlist *cipher, 448 authenc_ahash_t authenc_ahash_fn)
260 unsigned int cryptlen)
261{ 449{
262 struct crypto_aead *authenc = crypto_aead_reqtfm(req); 450 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
451 struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
263 u8 *ohash; 452 u8 *ohash;
264 u8 *ihash; 453 u8 *ihash;
265 unsigned int authsize; 454 unsigned int authsize;
266 455
267 ohash = crypto_authenc_hash(req, CRYPTO_TFM_REQ_MAY_SLEEP, cipher, 456 areq_ctx->complete = authenc_verify_ahash_done;
268 cryptlen); 457 areq_ctx->update_complete = authenc_verify_ahash_update_done;
458
459 ohash = authenc_ahash_fn(req, CRYPTO_TFM_REQ_MAY_SLEEP);
269 if (IS_ERR(ohash)) 460 if (IS_ERR(ohash))
270 return PTR_ERR(ohash); 461 return PTR_ERR(ohash);
271 462
272 authsize = crypto_aead_authsize(authenc); 463 authsize = crypto_aead_authsize(authenc);
273 ihash = ohash + authsize; 464 ihash = ohash + authsize;
274 scatterwalk_map_and_copy(ihash, cipher, cryptlen, authsize, 0); 465 scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
466 authsize, 0);
275 return memcmp(ihash, ohash, authsize) ? -EBADMSG : 0; 467 return memcmp(ihash, ohash, authsize) ? -EBADMSG : 0;
276} 468}
277 469
@@ -279,10 +471,14 @@ static int crypto_authenc_iverify(struct aead_request *req, u8 *iv,
279 unsigned int cryptlen) 471 unsigned int cryptlen)
280{ 472{
281 struct crypto_aead *authenc = crypto_aead_reqtfm(req); 473 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
474 struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
282 struct scatterlist *src = req->src; 475 struct scatterlist *src = req->src;
283 struct scatterlist cipher[2]; 476 struct scatterlist *assoc = req->assoc;
284 struct page *srcp; 477 struct scatterlist *cipher = areq_ctx->cipher;
478 struct scatterlist *asg = areq_ctx->asg;
285 unsigned int ivsize = crypto_aead_ivsize(authenc); 479 unsigned int ivsize = crypto_aead_ivsize(authenc);
480 authenc_ahash_t authenc_ahash_fn = crypto_authenc_ahash_fb;
481 struct page *srcp;
286 u8 *vsrc; 482 u8 *vsrc;
287 483
288 srcp = sg_page(src); 484 srcp = sg_page(src);
@@ -293,9 +489,22 @@ static int crypto_authenc_iverify(struct aead_request *req, u8 *iv,
293 sg_set_buf(cipher, iv, ivsize); 489 sg_set_buf(cipher, iv, ivsize);
294 authenc_chain(cipher, src, vsrc == iv + ivsize); 490 authenc_chain(cipher, src, vsrc == iv + ivsize);
295 src = cipher; 491 src = cipher;
492 cryptlen += ivsize;
493 }
494
495 if (sg_is_last(assoc)) {
496 authenc_ahash_fn = crypto_authenc_ahash;
497 sg_init_table(asg, 2);
498 sg_set_page(asg, sg_page(assoc), assoc->length, assoc->offset);
499 authenc_chain(asg, src, 0);
500 src = asg;
501 cryptlen += req->assoclen;
296 } 502 }
297 503
298 return crypto_authenc_verify(req, src, cryptlen + ivsize); 504 areq_ctx->cryptlen = cryptlen;
505 areq_ctx->sg = src;
506
507 return crypto_authenc_verify(req, authenc_ahash_fn);
299} 508}
300 509
301static int crypto_authenc_decrypt(struct aead_request *req) 510static int crypto_authenc_decrypt(struct aead_request *req)
@@ -326,38 +535,41 @@ static int crypto_authenc_decrypt(struct aead_request *req)
326 535
327static int crypto_authenc_init_tfm(struct crypto_tfm *tfm) 536static int crypto_authenc_init_tfm(struct crypto_tfm *tfm)
328{ 537{
329 struct crypto_instance *inst = (void *)tfm->__crt_alg; 538 struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
330 struct authenc_instance_ctx *ictx = crypto_instance_ctx(inst); 539 struct authenc_instance_ctx *ictx = crypto_instance_ctx(inst);
331 struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm); 540 struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm);
332 struct crypto_hash *auth; 541 struct crypto_ahash *auth;
333 struct crypto_ablkcipher *enc; 542 struct crypto_ablkcipher *enc;
334 int err; 543 int err;
335 544
336 auth = crypto_spawn_hash(&ictx->auth); 545 auth = crypto_spawn_ahash(&ictx->auth);
337 if (IS_ERR(auth)) 546 if (IS_ERR(auth))
338 return PTR_ERR(auth); 547 return PTR_ERR(auth);
339 548
549 ctx->reqoff = ALIGN(2 * crypto_ahash_digestsize(auth) +
550 crypto_ahash_alignmask(auth),
551 crypto_ahash_alignmask(auth) + 1);
552
340 enc = crypto_spawn_skcipher(&ictx->enc); 553 enc = crypto_spawn_skcipher(&ictx->enc);
341 err = PTR_ERR(enc); 554 err = PTR_ERR(enc);
342 if (IS_ERR(enc)) 555 if (IS_ERR(enc))
343 goto err_free_hash; 556 goto err_free_ahash;
344 557
345 ctx->auth = auth; 558 ctx->auth = auth;
346 ctx->enc = enc; 559 ctx->enc = enc;
560
347 tfm->crt_aead.reqsize = max_t(unsigned int, 561 tfm->crt_aead.reqsize = max_t(unsigned int,
348 (crypto_hash_alignmask(auth) & 562 crypto_ahash_reqsize(auth) + ctx->reqoff +
349 ~(crypto_tfm_ctx_alignment() - 1)) + 563 sizeof(struct authenc_request_ctx) +
350 crypto_hash_digestsize(auth) * 2, 564 sizeof(struct ahash_request),
351 sizeof(struct skcipher_givcrypt_request) + 565 sizeof(struct skcipher_givcrypt_request) +
352 crypto_ablkcipher_reqsize(enc) + 566 crypto_ablkcipher_reqsize(enc) +
353 crypto_ablkcipher_ivsize(enc)); 567 crypto_ablkcipher_ivsize(enc));
354
355 spin_lock_init(&ctx->auth_lock);
356 568
357 return 0; 569 return 0;
358 570
359err_free_hash: 571err_free_ahash:
360 crypto_free_hash(auth); 572 crypto_free_ahash(auth);
361 return err; 573 return err;
362} 574}
363 575
@@ -365,7 +577,7 @@ static void crypto_authenc_exit_tfm(struct crypto_tfm *tfm)
365{ 577{
366 struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm); 578 struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm);
367 579
368 crypto_free_hash(ctx->auth); 580 crypto_free_ahash(ctx->auth);
369 crypto_free_ablkcipher(ctx->enc); 581 crypto_free_ablkcipher(ctx->enc);
370} 582}
371 583
@@ -373,7 +585,8 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
373{ 585{
374 struct crypto_attr_type *algt; 586 struct crypto_attr_type *algt;
375 struct crypto_instance *inst; 587 struct crypto_instance *inst;
376 struct crypto_alg *auth; 588 struct hash_alg_common *auth;
589 struct crypto_alg *auth_base;
377 struct crypto_alg *enc; 590 struct crypto_alg *enc;
378 struct authenc_instance_ctx *ctx; 591 struct authenc_instance_ctx *ctx;
379 const char *enc_name; 592 const char *enc_name;
@@ -387,11 +600,13 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
387 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 600 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
388 return ERR_PTR(-EINVAL); 601 return ERR_PTR(-EINVAL);
389 602
390 auth = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH, 603 auth = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
391 CRYPTO_ALG_TYPE_HASH_MASK); 604 CRYPTO_ALG_TYPE_AHASH_MASK);
392 if (IS_ERR(auth)) 605 if (IS_ERR(auth))
393 return ERR_PTR(PTR_ERR(auth)); 606 return ERR_PTR(PTR_ERR(auth));
394 607
608 auth_base = &auth->base;
609
395 enc_name = crypto_attr_alg_name(tb[2]); 610 enc_name = crypto_attr_alg_name(tb[2]);
396 err = PTR_ERR(enc_name); 611 err = PTR_ERR(enc_name);
397 if (IS_ERR(enc_name)) 612 if (IS_ERR(enc_name))
@@ -404,7 +619,7 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
404 619
405 ctx = crypto_instance_ctx(inst); 620 ctx = crypto_instance_ctx(inst);
406 621
407 err = crypto_init_spawn(&ctx->auth, auth, inst, CRYPTO_ALG_TYPE_MASK); 622 err = crypto_init_ahash_spawn(&ctx->auth, auth, inst);
408 if (err) 623 if (err)
409 goto err_free_inst; 624 goto err_free_inst;
410 625
@@ -419,28 +634,25 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
419 634
420 err = -ENAMETOOLONG; 635 err = -ENAMETOOLONG;
421 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, 636 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
422 "authenc(%s,%s)", auth->cra_name, enc->cra_name) >= 637 "authenc(%s,%s)", auth_base->cra_name, enc->cra_name) >=
423 CRYPTO_MAX_ALG_NAME) 638 CRYPTO_MAX_ALG_NAME)
424 goto err_drop_enc; 639 goto err_drop_enc;
425 640
426 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, 641 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
427 "authenc(%s,%s)", auth->cra_driver_name, 642 "authenc(%s,%s)", auth_base->cra_driver_name,
428 enc->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) 643 enc->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
429 goto err_drop_enc; 644 goto err_drop_enc;
430 645
431 inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD; 646 inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
432 inst->alg.cra_flags |= enc->cra_flags & CRYPTO_ALG_ASYNC; 647 inst->alg.cra_flags |= enc->cra_flags & CRYPTO_ALG_ASYNC;
433 inst->alg.cra_priority = enc->cra_priority * 10 + auth->cra_priority; 648 inst->alg.cra_priority = enc->cra_priority *
649 10 + auth_base->cra_priority;
434 inst->alg.cra_blocksize = enc->cra_blocksize; 650 inst->alg.cra_blocksize = enc->cra_blocksize;
435 inst->alg.cra_alignmask = auth->cra_alignmask | enc->cra_alignmask; 651 inst->alg.cra_alignmask = auth_base->cra_alignmask | enc->cra_alignmask;
436 inst->alg.cra_type = &crypto_aead_type; 652 inst->alg.cra_type = &crypto_aead_type;
437 653
438 inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize; 654 inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize;
439 inst->alg.cra_aead.maxauthsize = auth->cra_type == &crypto_hash_type ? 655 inst->alg.cra_aead.maxauthsize = auth->digestsize;
440 auth->cra_hash.digestsize :
441 auth->cra_type ?
442 __crypto_shash_alg(auth)->digestsize :
443 auth->cra_digest.dia_digestsize;
444 656
445 inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx); 657 inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx);
446 658
@@ -453,13 +665,13 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
453 inst->alg.cra_aead.givencrypt = crypto_authenc_givencrypt; 665 inst->alg.cra_aead.givencrypt = crypto_authenc_givencrypt;
454 666
455out: 667out:
456 crypto_mod_put(auth); 668 crypto_mod_put(auth_base);
457 return inst; 669 return inst;
458 670
459err_drop_enc: 671err_drop_enc:
460 crypto_drop_skcipher(&ctx->enc); 672 crypto_drop_skcipher(&ctx->enc);
461err_drop_auth: 673err_drop_auth:
462 crypto_drop_spawn(&ctx->auth); 674 crypto_drop_ahash(&ctx->auth);
463err_free_inst: 675err_free_inst:
464 kfree(inst); 676 kfree(inst);
465out_put_auth: 677out_put_auth:
@@ -472,7 +684,7 @@ static void crypto_authenc_free(struct crypto_instance *inst)
472 struct authenc_instance_ctx *ctx = crypto_instance_ctx(inst); 684 struct authenc_instance_ctx *ctx = crypto_instance_ctx(inst);
473 685
474 crypto_drop_skcipher(&ctx->enc); 686 crypto_drop_skcipher(&ctx->enc);
475 crypto_drop_spawn(&ctx->auth); 687 crypto_drop_ahash(&ctx->auth);
476 kfree(inst); 688 kfree(inst);
477} 689}
478 690
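Throughout the converted authenc code above, the digest buffer is carved
out of the per-request context and aligned by hand with
ALIGN(ptr + alignmask, alignmask + 1). A standalone userspace sketch of
that pointer-rounding idiom, assuming only the kernel's ALIGN() semantics
(round up to a power-of-two multiple):

	#include <stdint.h>
	#include <stdio.h>

	#define ALIGN(x, a)	(((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

	int main(void)
	{
		unsigned char tail[64];
		uintptr_t mask = 63;	/* stand-in for crypto_ahash_alignmask() */
		unsigned char *hash;

		/* Rounds the sum up to the next (mask + 1)-byte boundary,
		 * mirroring the expression in crypto_authenc_ahash() above. */
		hash = (unsigned char *)ALIGN((uintptr_t)tail + mask, mask + 1);
		printf("tail=%p hash=%p\n", (void *)tail, (void *)hash);
		return 0;
	}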
diff --git a/crypto/cryptd.c b/crypto/cryptd.c
index ae5fa99d5d36..35335825a4ef 100644
--- a/crypto/cryptd.c
+++ b/crypto/cryptd.c
@@ -39,6 +39,11 @@ struct cryptd_instance_ctx {
39 struct cryptd_queue *queue; 39 struct cryptd_queue *queue;
40}; 40};
41 41
42struct hashd_instance_ctx {
43 struct crypto_shash_spawn spawn;
44 struct cryptd_queue *queue;
45};
46
42struct cryptd_blkcipher_ctx { 47struct cryptd_blkcipher_ctx {
43 struct crypto_blkcipher *child; 48 struct crypto_blkcipher *child;
44}; 49};
@@ -48,11 +53,12 @@ struct cryptd_blkcipher_request_ctx {
48}; 53};
49 54
50struct cryptd_hash_ctx { 55struct cryptd_hash_ctx {
51 struct crypto_hash *child; 56 struct crypto_shash *child;
52}; 57};
53 58
54struct cryptd_hash_request_ctx { 59struct cryptd_hash_request_ctx {
55 crypto_completion_t complete; 60 crypto_completion_t complete;
61 struct shash_desc desc;
56}; 62};
57 63
58static void cryptd_queue_worker(struct work_struct *work); 64static void cryptd_queue_worker(struct work_struct *work);
@@ -249,32 +255,24 @@ static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
249 crypto_free_blkcipher(ctx->child); 255 crypto_free_blkcipher(ctx->child);
250} 256}
251 257
252static struct crypto_instance *cryptd_alloc_instance(struct crypto_alg *alg, 258static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
253 struct cryptd_queue *queue) 259 unsigned int tail)
254{ 260{
261 char *p;
255 struct crypto_instance *inst; 262 struct crypto_instance *inst;
256 struct cryptd_instance_ctx *ctx;
257 int err; 263 int err;
258 264
259 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 265 p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
260 if (!inst) { 266 if (!p)
261 inst = ERR_PTR(-ENOMEM); 267 return ERR_PTR(-ENOMEM);
262 goto out; 268
263 } 269 inst = (void *)(p + head);
264 270
265 err = -ENAMETOOLONG; 271 err = -ENAMETOOLONG;
266 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, 272 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
267 "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) 273 "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
268 goto out_free_inst; 274 goto out_free_inst;
269 275
270 ctx = crypto_instance_ctx(inst);
271 err = crypto_init_spawn(&ctx->spawn, alg, inst,
272 CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
273 if (err)
274 goto out_free_inst;
275
276 ctx->queue = queue;
277
278 memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME); 276 memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
279 277
280 inst->alg.cra_priority = alg->cra_priority + 50; 278 inst->alg.cra_priority = alg->cra_priority + 50;
@@ -282,29 +280,41 @@ static struct crypto_instance *cryptd_alloc_instance(struct crypto_alg *alg,
282 inst->alg.cra_alignmask = alg->cra_alignmask; 280 inst->alg.cra_alignmask = alg->cra_alignmask;
283 281
284out: 282out:
285 return inst; 283 return p;
286 284
287out_free_inst: 285out_free_inst:
288 kfree(inst); 286 kfree(p);
289 inst = ERR_PTR(err); 287 p = ERR_PTR(err);
290 goto out; 288 goto out;
291} 289}
292 290
293static struct crypto_instance *cryptd_alloc_blkcipher( 291static int cryptd_create_blkcipher(struct crypto_template *tmpl,
294 struct rtattr **tb, struct cryptd_queue *queue) 292 struct rtattr **tb,
293 struct cryptd_queue *queue)
295{ 294{
295 struct cryptd_instance_ctx *ctx;
296 struct crypto_instance *inst; 296 struct crypto_instance *inst;
297 struct crypto_alg *alg; 297 struct crypto_alg *alg;
298 int err;
298 299
299 alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER, 300 alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
300 CRYPTO_ALG_TYPE_MASK); 301 CRYPTO_ALG_TYPE_MASK);
301 if (IS_ERR(alg)) 302 if (IS_ERR(alg))
302 return ERR_CAST(alg); 303 return PTR_ERR(alg);
303 304
304 inst = cryptd_alloc_instance(alg, queue); 305 inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
306 err = PTR_ERR(inst);
305 if (IS_ERR(inst)) 307 if (IS_ERR(inst))
306 goto out_put_alg; 308 goto out_put_alg;
307 309
310 ctx = crypto_instance_ctx(inst);
311 ctx->queue = queue;
312
313 err = crypto_init_spawn(&ctx->spawn, alg, inst,
314 CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
315 if (err)
316 goto out_free_inst;
317
308 inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC; 318 inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
309 inst->alg.cra_type = &crypto_ablkcipher_type; 319 inst->alg.cra_type = &crypto_ablkcipher_type;
310 320
@@ -323,26 +333,34 @@ static struct crypto_instance *cryptd_alloc_blkcipher(
323 inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue; 333 inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
324 inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue; 334 inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;
325 335
336 err = crypto_register_instance(tmpl, inst);
337 if (err) {
338 crypto_drop_spawn(&ctx->spawn);
339out_free_inst:
340 kfree(inst);
341 }
342
326out_put_alg: 343out_put_alg:
327 crypto_mod_put(alg); 344 crypto_mod_put(alg);
328 return inst; 345 return err;
329} 346}
330 347
331static int cryptd_hash_init_tfm(struct crypto_tfm *tfm) 348static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
332{ 349{
333 struct crypto_instance *inst = crypto_tfm_alg_instance(tfm); 350 struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
334 struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst); 351 struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
335 struct crypto_spawn *spawn = &ictx->spawn; 352 struct crypto_shash_spawn *spawn = &ictx->spawn;
336 struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm); 353 struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
337 struct crypto_hash *cipher; 354 struct crypto_shash *hash;
338 355
339 cipher = crypto_spawn_hash(spawn); 356 hash = crypto_spawn_shash(spawn);
340 if (IS_ERR(cipher)) 357 if (IS_ERR(hash))
341 return PTR_ERR(cipher); 358 return PTR_ERR(hash);
342 359
343 ctx->child = cipher; 360 ctx->child = hash;
344 tfm->crt_ahash.reqsize = 361 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
345 sizeof(struct cryptd_hash_request_ctx); 362 sizeof(struct cryptd_hash_request_ctx) +
363 crypto_shash_descsize(hash));
346 return 0; 364 return 0;
347} 365}
348 366
@@ -350,22 +368,22 @@ static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
350{ 368{
351 struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm); 369 struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
352 370
353 crypto_free_hash(ctx->child); 371 crypto_free_shash(ctx->child);
354} 372}
355 373
356static int cryptd_hash_setkey(struct crypto_ahash *parent, 374static int cryptd_hash_setkey(struct crypto_ahash *parent,
357 const u8 *key, unsigned int keylen) 375 const u8 *key, unsigned int keylen)
358{ 376{
359 struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent); 377 struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
360 struct crypto_hash *child = ctx->child; 378 struct crypto_shash *child = ctx->child;
361 int err; 379 int err;
362 380
363 crypto_hash_clear_flags(child, CRYPTO_TFM_REQ_MASK); 381 crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
364 crypto_hash_set_flags(child, crypto_ahash_get_flags(parent) & 382 crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
365 CRYPTO_TFM_REQ_MASK); 383 CRYPTO_TFM_REQ_MASK);
366 err = crypto_hash_setkey(child, key, keylen); 384 err = crypto_shash_setkey(child, key, keylen);
367 crypto_ahash_set_flags(parent, crypto_hash_get_flags(child) & 385 crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
368 CRYPTO_TFM_RES_MASK); 386 CRYPTO_TFM_RES_MASK);
369 return err; 387 return err;
370} 388}
371 389
@@ -385,21 +403,19 @@ static int cryptd_hash_enqueue(struct ahash_request *req,
385 403
386static void cryptd_hash_init(struct crypto_async_request *req_async, int err) 404static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
387{ 405{
388 struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm); 406 struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
389 struct crypto_hash *child = ctx->child; 407 struct crypto_shash *child = ctx->child;
390 struct ahash_request *req = ahash_request_cast(req_async); 408 struct ahash_request *req = ahash_request_cast(req_async);
391 struct cryptd_hash_request_ctx *rctx; 409 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
392 struct hash_desc desc; 410 struct shash_desc *desc = &rctx->desc;
393
394 rctx = ahash_request_ctx(req);
395 411
396 if (unlikely(err == -EINPROGRESS)) 412 if (unlikely(err == -EINPROGRESS))
397 goto out; 413 goto out;
398 414
399 desc.tfm = child; 415 desc->tfm = child;
400 desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP; 416 desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
401 417
402 err = crypto_hash_crt(child)->init(&desc); 418 err = crypto_shash_init(desc);
403 419
404 req->base.complete = rctx->complete; 420 req->base.complete = rctx->complete;
405 421
@@ -416,23 +432,15 @@ static int cryptd_hash_init_enqueue(struct ahash_request *req)
416 432
417static void cryptd_hash_update(struct crypto_async_request *req_async, int err) 433static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
418{ 434{
419 struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm); 435 struct ahash_request *req = ahash_request_cast(req_async);
420 struct crypto_hash *child = ctx->child;
421 struct ahash_request *req = ahash_request_cast(req_async);
422 struct cryptd_hash_request_ctx *rctx; 436 struct cryptd_hash_request_ctx *rctx;
423 struct hash_desc desc;
424 437
425 rctx = ahash_request_ctx(req); 438 rctx = ahash_request_ctx(req);
426 439
427 if (unlikely(err == -EINPROGRESS)) 440 if (unlikely(err == -EINPROGRESS))
428 goto out; 441 goto out;
429 442
430 desc.tfm = child; 443 err = shash_ahash_update(req, &rctx->desc);
431 desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
432
433 err = crypto_hash_crt(child)->update(&desc,
434 req->src,
435 req->nbytes);
436 444
437 req->base.complete = rctx->complete; 445 req->base.complete = rctx->complete;
438 446
@@ -449,21 +457,13 @@ static int cryptd_hash_update_enqueue(struct ahash_request *req)
449 457
450static void cryptd_hash_final(struct crypto_async_request *req_async, int err) 458static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
451{ 459{
452 struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm); 460 struct ahash_request *req = ahash_request_cast(req_async);
453 struct crypto_hash *child = ctx->child; 461 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
454 struct ahash_request *req = ahash_request_cast(req_async);
455 struct cryptd_hash_request_ctx *rctx;
456 struct hash_desc desc;
457
458 rctx = ahash_request_ctx(req);
459 462
460 if (unlikely(err == -EINPROGRESS)) 463 if (unlikely(err == -EINPROGRESS))
461 goto out; 464 goto out;
462 465
463 desc.tfm = child; 466 err = crypto_shash_final(&rctx->desc, req->result);
464 desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
465
466 err = crypto_hash_crt(child)->final(&desc, req->result);
467 467
468 req->base.complete = rctx->complete; 468 req->base.complete = rctx->complete;
469 469
@@ -478,26 +478,44 @@ static int cryptd_hash_final_enqueue(struct ahash_request *req)
478 return cryptd_hash_enqueue(req, cryptd_hash_final); 478 return cryptd_hash_enqueue(req, cryptd_hash_final);
479} 479}
480 480
481static void cryptd_hash_digest(struct crypto_async_request *req_async, int err) 481static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
482{ 482{
483 struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm); 483 struct ahash_request *req = ahash_request_cast(req_async);
484 struct crypto_hash *child = ctx->child; 484 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
485 struct ahash_request *req = ahash_request_cast(req_async);
486 struct cryptd_hash_request_ctx *rctx;
487 struct hash_desc desc;
488 485
489 rctx = ahash_request_ctx(req); 486 if (unlikely(err == -EINPROGRESS))
487 goto out;
488
489 err = shash_ahash_finup(req, &rctx->desc);
490
491 req->base.complete = rctx->complete;
492
493out:
494 local_bh_disable();
495 rctx->complete(&req->base, err);
496 local_bh_enable();
497}
498
499static int cryptd_hash_finup_enqueue(struct ahash_request *req)
500{
501 return cryptd_hash_enqueue(req, cryptd_hash_finup);
502}
503
504static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
505{
506 struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
507 struct crypto_shash *child = ctx->child;
508 struct ahash_request *req = ahash_request_cast(req_async);
509 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
510 struct shash_desc *desc = &rctx->desc;
490 511
491 if (unlikely(err == -EINPROGRESS)) 512 if (unlikely(err == -EINPROGRESS))
492 goto out; 513 goto out;
493 514
494 desc.tfm = child; 515 desc->tfm = child;
495 desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP; 516 desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
496 517
497 err = crypto_hash_crt(child)->digest(&desc, 518 err = shash_ahash_digest(req, desc);
498 req->src,
499 req->nbytes,
500 req->result);
501 519
502 req->base.complete = rctx->complete; 520 req->base.complete = rctx->complete;
503 521
@@ -512,64 +530,108 @@ static int cryptd_hash_digest_enqueue(struct ahash_request *req)
512 return cryptd_hash_enqueue(req, cryptd_hash_digest); 530 return cryptd_hash_enqueue(req, cryptd_hash_digest);
513} 531}
514 532
515static struct crypto_instance *cryptd_alloc_hash( 533static int cryptd_hash_export(struct ahash_request *req, void *out)
516 struct rtattr **tb, struct cryptd_queue *queue)
517{ 534{
518 struct crypto_instance *inst; 535 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
536
537 return crypto_shash_export(&rctx->desc, out);
538}
539
540static int cryptd_hash_import(struct ahash_request *req, const void *in)
541{
542 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
543
544 return crypto_shash_import(&rctx->desc, in);
545}
546
547static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
548 struct cryptd_queue *queue)
549{
550 struct hashd_instance_ctx *ctx;
551 struct ahash_instance *inst;
552 struct shash_alg *salg;
519 struct crypto_alg *alg; 553 struct crypto_alg *alg;
554 int err;
520 555
521 alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_HASH, 556 salg = shash_attr_alg(tb[1], 0, 0);
522 CRYPTO_ALG_TYPE_HASH_MASK); 557 if (IS_ERR(salg))
523 if (IS_ERR(alg)) 558 return PTR_ERR(salg);
524 return ERR_PTR(PTR_ERR(alg));
525 559
526 inst = cryptd_alloc_instance(alg, queue); 560 alg = &salg->base;
561 inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
562 sizeof(*ctx));
563 err = PTR_ERR(inst);
527 if (IS_ERR(inst)) 564 if (IS_ERR(inst))
528 goto out_put_alg; 565 goto out_put_alg;
529 566
530 inst->alg.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC; 567 ctx = ahash_instance_ctx(inst);
531 inst->alg.cra_type = &crypto_ahash_type; 568 ctx->queue = queue;
532 569
533 inst->alg.cra_ahash.digestsize = alg->cra_hash.digestsize; 570 err = crypto_init_shash_spawn(&ctx->spawn, salg,
534 inst->alg.cra_ctxsize = sizeof(struct cryptd_hash_ctx); 571 ahash_crypto_instance(inst));
572 if (err)
573 goto out_free_inst;
535 574
536 inst->alg.cra_init = cryptd_hash_init_tfm; 575 inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC;
537 inst->alg.cra_exit = cryptd_hash_exit_tfm;
538 576
539 inst->alg.cra_ahash.init = cryptd_hash_init_enqueue; 577 inst->alg.halg.digestsize = salg->digestsize;
540 inst->alg.cra_ahash.update = cryptd_hash_update_enqueue; 578 inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);
541 inst->alg.cra_ahash.final = cryptd_hash_final_enqueue; 579
542 inst->alg.cra_ahash.setkey = cryptd_hash_setkey; 580 inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
543 inst->alg.cra_ahash.digest = cryptd_hash_digest_enqueue; 581 inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;
582
583 inst->alg.init = cryptd_hash_init_enqueue;
584 inst->alg.update = cryptd_hash_update_enqueue;
585 inst->alg.final = cryptd_hash_final_enqueue;
586 inst->alg.finup = cryptd_hash_finup_enqueue;
587 inst->alg.export = cryptd_hash_export;
588 inst->alg.import = cryptd_hash_import;
589 inst->alg.setkey = cryptd_hash_setkey;
590 inst->alg.digest = cryptd_hash_digest_enqueue;
591
592 err = ahash_register_instance(tmpl, inst);
593 if (err) {
594 crypto_drop_shash(&ctx->spawn);
595out_free_inst:
596 kfree(inst);
597 }
544 598
545out_put_alg: 599out_put_alg:
546 crypto_mod_put(alg); 600 crypto_mod_put(alg);
547 return inst; 601 return err;
548} 602}
549 603
550static struct cryptd_queue queue; 604static struct cryptd_queue queue;
551 605
552static struct crypto_instance *cryptd_alloc(struct rtattr **tb) 606static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
553{ 607{
554 struct crypto_attr_type *algt; 608 struct crypto_attr_type *algt;
555 609
556 algt = crypto_get_attr_type(tb); 610 algt = crypto_get_attr_type(tb);
557 if (IS_ERR(algt)) 611 if (IS_ERR(algt))
558 return ERR_CAST(algt); 612 return PTR_ERR(algt);
559 613
560 switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) { 614 switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
561 case CRYPTO_ALG_TYPE_BLKCIPHER: 615 case CRYPTO_ALG_TYPE_BLKCIPHER:
562 return cryptd_alloc_blkcipher(tb, &queue); 616 return cryptd_create_blkcipher(tmpl, tb, &queue);
563 case CRYPTO_ALG_TYPE_DIGEST: 617 case CRYPTO_ALG_TYPE_DIGEST:
564 return cryptd_alloc_hash(tb, &queue); 618 return cryptd_create_hash(tmpl, tb, &queue);
565 } 619 }
566 620
567 return ERR_PTR(-EINVAL); 621 return -EINVAL;
568} 622}
569 623
570static void cryptd_free(struct crypto_instance *inst) 624static void cryptd_free(struct crypto_instance *inst)
571{ 625{
572 struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst); 626 struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
627 struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
628
629 switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
630 case CRYPTO_ALG_TYPE_AHASH:
631 crypto_drop_shash(&hctx->spawn);
632 kfree(ahash_instance(inst));
633 return;
634 }
573 635
574 crypto_drop_spawn(&ctx->spawn); 636 crypto_drop_spawn(&ctx->spawn);
575 kfree(inst); 637 kfree(inst);
@@ -577,7 +639,7 @@ static void cryptd_free(struct crypto_instance *inst)
577 639
578static struct crypto_template cryptd_tmpl = { 640static struct crypto_template cryptd_tmpl = {
579 .name = "cryptd", 641 .name = "cryptd",
580 .alloc = cryptd_alloc, 642 .create = cryptd_create,
581 .free = cryptd_free, 643 .free = cryptd_free,
582 .module = THIS_MODULE, 644 .module = THIS_MODULE,
583}; 645};
@@ -620,6 +682,41 @@ void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
620} 682}
621EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher); 683EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
622 684
685struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
686 u32 type, u32 mask)
687{
688 char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
689 struct crypto_ahash *tfm;
690
691 if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
692 "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
693 return ERR_PTR(-EINVAL);
694 tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
695 if (IS_ERR(tfm))
696 return ERR_CAST(tfm);
697 if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
698 crypto_free_ahash(tfm);
699 return ERR_PTR(-EINVAL);
700 }
701
702 return __cryptd_ahash_cast(tfm);
703}
704EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
705
706struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
707{
708 struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
709
710 return ctx->child;
711}
712EXPORT_SYMBOL_GPL(cryptd_ahash_child);
713
714void cryptd_free_ahash(struct cryptd_ahash *tfm)
715{
716 crypto_free_ahash(&tfm->base);
717}
718EXPORT_SYMBOL_GPL(cryptd_free_ahash);
719
623static int __init cryptd_init(void) 720static int __init cryptd_init(void)
624{ 721{
625 int err; 722 int err;
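The three exports added above give other kernel code a typed handle on a
cryptd-wrapped hash and direct access to its synchronous child. A hedged
usage sketch (the "sha1" name is only an example; error handling trimmed):

	struct cryptd_ahash *ctfm;
	struct crypto_shash *child;

	ctfm = cryptd_alloc_ahash("sha1", 0, 0);	/* looks up "cryptd(sha1)" */
	if (IS_ERR(ctfm))
		return PTR_ERR(ctfm);

	child = cryptd_ahash_child(ctfm);	/* the underlying shash */
	/* submit requests through &ctfm->base, a struct crypto_ahash */

	cryptd_free_ahash(ctfm);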
diff --git a/crypto/ctr.c b/crypto/ctr.c
index 2d7425f0e7b8..6c3bfabb9d1d 100644
--- a/crypto/ctr.c
+++ b/crypto/ctr.c
@@ -219,6 +219,8 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
219 inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt; 219 inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
220 inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt; 220 inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;
221 221
222 inst->alg.cra_blkcipher.geniv = "chainiv";
223
222out: 224out:
223 crypto_mod_put(alg); 225 crypto_mod_put(alg);
224 return inst; 226 return inst;
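With the geniv default added above, raw counter mode pins its IV
generator to chainiv, so a givcipher lookup for "ctr(aes)" behaves as if
"chainiv(ctr(aes))" had been requested. Illustrative only, assuming the
standard cryptomgr geniv path:

	struct crypto_ablkcipher *tfm;

	/* The default geniv is applied when the instance is used for
	 * IV generation; callers need no changes. */
	tfm = crypto_alloc_ablkcipher("ctr(aes)", 0, 0);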
diff --git a/crypto/gcm.c b/crypto/gcm.c
index e70afd0c73dd..5fc3292483ef 100644
--- a/crypto/gcm.c
+++ b/crypto/gcm.c
@@ -11,7 +11,10 @@
11#include <crypto/gf128mul.h> 11#include <crypto/gf128mul.h>
12#include <crypto/internal/aead.h> 12#include <crypto/internal/aead.h>
13#include <crypto/internal/skcipher.h> 13#include <crypto/internal/skcipher.h>
14#include <crypto/internal/hash.h>
14#include <crypto/scatterwalk.h> 15#include <crypto/scatterwalk.h>
16#include <crypto/hash.h>
17#include "internal.h"
15#include <linux/completion.h> 18#include <linux/completion.h>
16#include <linux/err.h> 19#include <linux/err.h>
17#include <linux/init.h> 20#include <linux/init.h>
@@ -21,11 +24,12 @@
21 24
22struct gcm_instance_ctx { 25struct gcm_instance_ctx {
23 struct crypto_skcipher_spawn ctr; 26 struct crypto_skcipher_spawn ctr;
27 struct crypto_ahash_spawn ghash;
24}; 28};
25 29
26struct crypto_gcm_ctx { 30struct crypto_gcm_ctx {
27 struct crypto_ablkcipher *ctr; 31 struct crypto_ablkcipher *ctr;
28 struct gf128mul_4k *gf128; 32 struct crypto_ahash *ghash;
29}; 33};
30 34
31struct crypto_rfc4106_ctx { 35struct crypto_rfc4106_ctx {
@@ -34,10 +38,9 @@ struct crypto_rfc4106_ctx {
34}; 38};
35 39
36struct crypto_gcm_ghash_ctx { 40struct crypto_gcm_ghash_ctx {
37 u32 bytes; 41 unsigned int cryptlen;
38 u32 flags; 42 struct scatterlist *src;
39 struct gf128mul_4k *gf128; 43 crypto_completion_t complete;
40 u8 buffer[16];
41}; 44};
42 45
43struct crypto_gcm_req_priv_ctx { 46struct crypto_gcm_req_priv_ctx {
@@ -45,8 +48,11 @@ struct crypto_gcm_req_priv_ctx {
45 u8 iauth_tag[16]; 48 u8 iauth_tag[16];
46 struct scatterlist src[2]; 49 struct scatterlist src[2];
47 struct scatterlist dst[2]; 50 struct scatterlist dst[2];
48 struct crypto_gcm_ghash_ctx ghash; 51 struct crypto_gcm_ghash_ctx ghash_ctx;
49 struct ablkcipher_request abreq; 52 union {
53 struct ahash_request ahreq;
54 struct ablkcipher_request abreq;
55 } u;
50}; 56};
51 57
52struct crypto_gcm_setkey_result { 58struct crypto_gcm_setkey_result {
@@ -54,6 +60,8 @@ struct crypto_gcm_setkey_result {
54 struct completion completion; 60 struct completion completion;
55}; 61};
56 62
63static void *gcm_zeroes;
64
57static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx( 65static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
58 struct aead_request *req) 66 struct aead_request *req)
59{ 67{
@@ -62,113 +70,6 @@ static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
62 return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1); 70 return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
63} 71}
64 72
65static void crypto_gcm_ghash_init(struct crypto_gcm_ghash_ctx *ctx, u32 flags,
66 struct gf128mul_4k *gf128)
67{
68 ctx->bytes = 0;
69 ctx->flags = flags;
70 ctx->gf128 = gf128;
71 memset(ctx->buffer, 0, 16);
72}
73
74static void crypto_gcm_ghash_update(struct crypto_gcm_ghash_ctx *ctx,
75 const u8 *src, unsigned int srclen)
76{
77 u8 *dst = ctx->buffer;
78
79 if (ctx->bytes) {
80 int n = min(srclen, ctx->bytes);
81 u8 *pos = dst + (16 - ctx->bytes);
82
83 ctx->bytes -= n;
84 srclen -= n;
85
86 while (n--)
87 *pos++ ^= *src++;
88
89 if (!ctx->bytes)
90 gf128mul_4k_lle((be128 *)dst, ctx->gf128);
91 }
92
93 while (srclen >= 16) {
94 crypto_xor(dst, src, 16);
95 gf128mul_4k_lle((be128 *)dst, ctx->gf128);
96 src += 16;
97 srclen -= 16;
98 }
99
100 if (srclen) {
101 ctx->bytes = 16 - srclen;
102 while (srclen--)
103 *dst++ ^= *src++;
104 }
105}
106
107static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
108 struct scatterlist *sg, int len)
109{
110 struct scatter_walk walk;
111 u8 *src;
112 int n;
113
114 if (!len)
115 return;
116
117 scatterwalk_start(&walk, sg);
118
119 while (len) {
120 n = scatterwalk_clamp(&walk, len);
121
122 if (!n) {
123 scatterwalk_start(&walk, scatterwalk_sg_next(walk.sg));
124 n = scatterwalk_clamp(&walk, len);
125 }
126
127 src = scatterwalk_map(&walk, 0);
128
129 crypto_gcm_ghash_update(ctx, src, n);
130 len -= n;
131
132 scatterwalk_unmap(src, 0);
133 scatterwalk_advance(&walk, n);
134 scatterwalk_done(&walk, 0, len);
135 if (len)
136 crypto_yield(ctx->flags);
137 }
138}
139
140static void crypto_gcm_ghash_flush(struct crypto_gcm_ghash_ctx *ctx)
141{
142 u8 *dst = ctx->buffer;
143
144 if (ctx->bytes) {
145 u8 *tmp = dst + (16 - ctx->bytes);
146
147 while (ctx->bytes--)
148 *tmp++ ^= 0;
149
150 gf128mul_4k_lle((be128 *)dst, ctx->gf128);
151 }
152
153 ctx->bytes = 0;
154}
155
156static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
157 unsigned int authlen,
158 unsigned int cryptlen, u8 *dst)
159{
160 u8 *buf = ctx->buffer;
161 u128 lengths;
162
163 lengths.a = cpu_to_be64(authlen * 8);
164 lengths.b = cpu_to_be64(cryptlen * 8);
165
166 crypto_gcm_ghash_flush(ctx);
167 crypto_xor(buf, (u8 *)&lengths, 16);
168 gf128mul_4k_lle((be128 *)buf, ctx->gf128);
169 crypto_xor(dst, buf, 16);
170}
171
172static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err) 73static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
173{ 74{
174 struct crypto_gcm_setkey_result *result = req->data; 75 struct crypto_gcm_setkey_result *result = req->data;
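The open-coded helpers deleted above implemented GHASH directly on top of
gf128mul; that logic now sits behind the hash interface as the ghash
ahash that crypto_gcm_alloc_common() looks up by name further below. Per
16-byte block the removed code computed Y <- (Y xor X) * H in GF(2^128),
with H the hash key installed at setkey time; the core step was:

	/* One GHASH block step as the deleted helper performed it:
	 * dst holds the running digest Y, src the next 16-byte block,
	 * ctx->gf128 the precomputed table for the hash key H. */
	crypto_xor(dst, src, 16);
	gf128mul_4k_lle((be128 *)dst, ctx->gf128);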
@@ -184,6 +85,7 @@ static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
184 unsigned int keylen) 85 unsigned int keylen)
185{ 86{
186 struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead); 87 struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
88 struct crypto_ahash *ghash = ctx->ghash;
187 struct crypto_ablkcipher *ctr = ctx->ctr; 89 struct crypto_ablkcipher *ctr = ctx->ctr;
188 struct { 90 struct {
189 be128 hash; 91 be128 hash;
@@ -233,13 +135,12 @@ static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
233 if (err) 135 if (err)
234 goto out; 136 goto out;
235 137
236 if (ctx->gf128 != NULL) 138 crypto_ahash_clear_flags(ghash, CRYPTO_TFM_REQ_MASK);
237 gf128mul_free_4k(ctx->gf128); 139 crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) &
238 140 CRYPTO_TFM_REQ_MASK);
239 ctx->gf128 = gf128mul_init_4k_lle(&data->hash); 141 err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128));
240 142 crypto_aead_set_flags(aead, crypto_ahash_get_flags(ghash) &
241 if (ctx->gf128 == NULL) 143 CRYPTO_TFM_RES_MASK);
242 err = -ENOMEM;
243 144
244out: 145out:
245 kfree(data); 146 kfree(data);
@@ -272,8 +173,6 @@ static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
272 struct crypto_aead *aead = crypto_aead_reqtfm(req); 173 struct crypto_aead *aead = crypto_aead_reqtfm(req);
273 struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead); 174 struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
274 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); 175 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
275 u32 flags = req->base.tfm->crt_flags;
276 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
277 struct scatterlist *dst; 176 struct scatterlist *dst;
278 __be32 counter = cpu_to_be32(1); 177 __be32 counter = cpu_to_be32(1);
279 178
@@ -296,108 +195,398 @@ static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
296 ablkcipher_request_set_crypt(ablk_req, pctx->src, dst, 195 ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
297 cryptlen + sizeof(pctx->auth_tag), 196 cryptlen + sizeof(pctx->auth_tag),
298 req->iv); 197 req->iv);
198}
199
200static inline unsigned int gcm_remain(unsigned int len)
201{
202 len &= 0xfU;
203 return len ? 16 - len : 0;
204}
205
206static void gcm_hash_len_done(struct crypto_async_request *areq, int err);
207static void gcm_hash_final_done(struct crypto_async_request *areq, int err);
299 208
300 crypto_gcm_ghash_init(ghash, flags, ctx->gf128); 209static int gcm_hash_update(struct aead_request *req,
210 struct crypto_gcm_req_priv_ctx *pctx,
211 crypto_completion_t complete,
212 struct scatterlist *src,
213 unsigned int len)
214{
215 struct ahash_request *ahreq = &pctx->u.ahreq;
301 216
302 crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen); 217 ahash_request_set_callback(ahreq, aead_request_flags(req),
303 crypto_gcm_ghash_flush(ghash); 218 complete, req);
219 ahash_request_set_crypt(ahreq, src, NULL, len);
220
221 return crypto_ahash_update(ahreq);
304} 222}
305 223
306static int crypto_gcm_hash(struct aead_request *req) 224static int gcm_hash_remain(struct aead_request *req,
225 struct crypto_gcm_req_priv_ctx *pctx,
226 unsigned int remain,
227 crypto_completion_t complete)
307{ 228{
308 struct crypto_aead *aead = crypto_aead_reqtfm(req); 229 struct ahash_request *ahreq = &pctx->u.ahreq;
230
231 ahash_request_set_callback(ahreq, aead_request_flags(req),
232 complete, req);
233 sg_init_one(pctx->src, gcm_zeroes, remain);
234 ahash_request_set_crypt(ahreq, pctx->src, NULL, remain);
235
236 return crypto_ahash_update(ahreq);
237}
238
239static int gcm_hash_len(struct aead_request *req,
240 struct crypto_gcm_req_priv_ctx *pctx)
241{
242 struct ahash_request *ahreq = &pctx->u.ahreq;
243 struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
244 u128 lengths;
245
246 lengths.a = cpu_to_be64(req->assoclen * 8);
247 lengths.b = cpu_to_be64(gctx->cryptlen * 8);
248 memcpy(pctx->iauth_tag, &lengths, 16);
249 sg_init_one(pctx->src, pctx->iauth_tag, 16);
250 ahash_request_set_callback(ahreq, aead_request_flags(req),
251 gcm_hash_len_done, req);
252 ahash_request_set_crypt(ahreq, pctx->src,
253 NULL, sizeof(lengths));
254
255 return crypto_ahash_update(ahreq);
256}
257
258static int gcm_hash_final(struct aead_request *req,
259 struct crypto_gcm_req_priv_ctx *pctx)
260{
261 struct ahash_request *ahreq = &pctx->u.ahreq;
262
263 ahash_request_set_callback(ahreq, aead_request_flags(req),
264 gcm_hash_final_done, req);
265 ahash_request_set_crypt(ahreq, NULL, pctx->iauth_tag, 0);
266
267 return crypto_ahash_final(ahreq);
268}
269
270static void gcm_hash_final_done(struct crypto_async_request *areq,
271 int err)
272{
273 struct aead_request *req = areq->data;
309 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); 274 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
310 u8 *auth_tag = pctx->auth_tag; 275 struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
311 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash; 276
277 if (!err)
278 crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16);
312 279
313 crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen); 280 gctx->complete(areq, err);
314 crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen, 281}
315 auth_tag); 282
283static void gcm_hash_len_done(struct crypto_async_request *areq,
284 int err)
285{
286 struct aead_request *req = areq->data;
287 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
288
289 if (!err) {
290 err = gcm_hash_final(req, pctx);
291 if (err == -EINPROGRESS || err == -EBUSY)
292 return;
293 }
294
295 gcm_hash_final_done(areq, err);
296}
297
298static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq,
299 int err)
300{
301 struct aead_request *req = areq->data;
302 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
303
304 if (!err) {
305 err = gcm_hash_len(req, pctx);
306 if (err == -EINPROGRESS || err == -EBUSY)
307 return;
308 }
309
310 gcm_hash_len_done(areq, err);
311}
312
313static void gcm_hash_crypt_done(struct crypto_async_request *areq,
314 int err)
315{
316 struct aead_request *req = areq->data;
317 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
318 struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
319 unsigned int remain;
320
321 if (!err) {
322 remain = gcm_remain(gctx->cryptlen);
323 BUG_ON(!remain);
324 err = gcm_hash_remain(req, pctx, remain,
325 gcm_hash_crypt_remain_done);
326 if (err == -EINPROGRESS || err == -EBUSY)
327 return;
328 }
329
330 gcm_hash_crypt_remain_done(areq, err);
331}
332
333static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq,
334 int err)
335{
336 struct aead_request *req = areq->data;
337 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
338 struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
339 crypto_completion_t complete;
340 unsigned int remain = 0;
341
342 if (!err && gctx->cryptlen) {
343 remain = gcm_remain(gctx->cryptlen);
344 complete = remain ? gcm_hash_crypt_done :
345 gcm_hash_crypt_remain_done;
346 err = gcm_hash_update(req, pctx, complete,
347 gctx->src, gctx->cryptlen);
348 if (err == -EINPROGRESS || err == -EBUSY)
349 return;
350 }
351
352 if (remain)
353 gcm_hash_crypt_done(areq, err);
354 else
355 gcm_hash_crypt_remain_done(areq, err);
356}
357
358static void gcm_hash_assoc_done(struct crypto_async_request *areq,
359 int err)
360{
361 struct aead_request *req = areq->data;
362 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
363 unsigned int remain;
364
365 if (!err) {
366 remain = gcm_remain(req->assoclen);
367 BUG_ON(!remain);
368 err = gcm_hash_remain(req, pctx, remain,
369 gcm_hash_assoc_remain_done);
370 if (err == -EINPROGRESS || err == -EBUSY)
371 return;
372 }
373
374 gcm_hash_assoc_remain_done(areq, err);
375}
376
377static void gcm_hash_init_done(struct crypto_async_request *areq,
378 int err)
379{
380 struct aead_request *req = areq->data;
381 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
382 crypto_completion_t complete;
383 unsigned int remain = 0;
384
385 if (!err && req->assoclen) {
386 remain = gcm_remain(req->assoclen);
387 complete = remain ? gcm_hash_assoc_done :
388 gcm_hash_assoc_remain_done;
389 err = gcm_hash_update(req, pctx, complete,
390 req->assoc, req->assoclen);
391 if (err == -EINPROGRESS || err == -EBUSY)
392 return;
393 }
394
395 if (remain)
396 gcm_hash_assoc_done(areq, err);
397 else
398 gcm_hash_assoc_remain_done(areq, err);
399}
400
401static int gcm_hash(struct aead_request *req,
402 struct crypto_gcm_req_priv_ctx *pctx)
403{
404 struct ahash_request *ahreq = &pctx->u.ahreq;
405 struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
406 struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
407 unsigned int remain;
408 crypto_completion_t complete;
409 int err;
410
411 ahash_request_set_tfm(ahreq, ctx->ghash);
412
413 ahash_request_set_callback(ahreq, aead_request_flags(req),
414 gcm_hash_init_done, req);
415 err = crypto_ahash_init(ahreq);
416 if (err)
417 return err;
418 remain = gcm_remain(req->assoclen);
419 complete = remain ? gcm_hash_assoc_done : gcm_hash_assoc_remain_done;
420 err = gcm_hash_update(req, pctx, complete, req->assoc, req->assoclen);
421 if (err)
422 return err;
423 if (remain) {
424 err = gcm_hash_remain(req, pctx, remain,
425 gcm_hash_assoc_remain_done);
426 if (err)
427 return err;
428 }
429 remain = gcm_remain(gctx->cryptlen);
430 complete = remain ? gcm_hash_crypt_done : gcm_hash_crypt_remain_done;
431 err = gcm_hash_update(req, pctx, complete, gctx->src, gctx->cryptlen);
432 if (err)
433 return err;
434 if (remain) {
435 err = gcm_hash_remain(req, pctx, remain,
436 gcm_hash_crypt_remain_done);
437 if (err)
438 return err;
439 }
440 err = gcm_hash_len(req, pctx);
441 if (err)
442 return err;
443 err = gcm_hash_final(req, pctx);
444 if (err)
445 return err;
446
447 return 0;
448}
449
450static void gcm_enc_copy_hash(struct aead_request *req,
451 struct crypto_gcm_req_priv_ctx *pctx)
452{
453 struct crypto_aead *aead = crypto_aead_reqtfm(req);
454 u8 *auth_tag = pctx->auth_tag;
316 455
317 scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen, 456 scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
318 crypto_aead_authsize(aead), 1); 457 crypto_aead_authsize(aead), 1);
319 return 0;
320} 458}
321 459
322static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err) 460static void gcm_enc_hash_done(struct crypto_async_request *areq,
461 int err)
323{ 462{
324 struct aead_request *req = areq->data; 463 struct aead_request *req = areq->data;
464 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
325 465
326 if (!err) 466 if (!err)
327 err = crypto_gcm_hash(req); 467 gcm_enc_copy_hash(req, pctx);
328 468
329 aead_request_complete(req, err); 469 aead_request_complete(req, err);
330} 470}
331 471
472static void gcm_encrypt_done(struct crypto_async_request *areq,
473 int err)
474{
475 struct aead_request *req = areq->data;
476 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
477
478 if (!err) {
479 err = gcm_hash(req, pctx);
480 if (err == -EINPROGRESS || err == -EBUSY)
481 return;
482 }
483
484 gcm_enc_hash_done(areq, err);
485}
486
332static int crypto_gcm_encrypt(struct aead_request *req) 487static int crypto_gcm_encrypt(struct aead_request *req)
333{ 488{
334 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); 489 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
335 struct ablkcipher_request *abreq = &pctx->abreq; 490 struct ablkcipher_request *abreq = &pctx->u.abreq;
491 struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
336 int err; 492 int err;
337 493
338 crypto_gcm_init_crypt(abreq, req, req->cryptlen); 494 crypto_gcm_init_crypt(abreq, req, req->cryptlen);
339 ablkcipher_request_set_callback(abreq, aead_request_flags(req), 495 ablkcipher_request_set_callback(abreq, aead_request_flags(req),
340 crypto_gcm_encrypt_done, req); 496 gcm_encrypt_done, req);
497
498 gctx->src = req->dst;
499 gctx->cryptlen = req->cryptlen;
500 gctx->complete = gcm_enc_hash_done;
341 501
342 err = crypto_ablkcipher_encrypt(abreq); 502 err = crypto_ablkcipher_encrypt(abreq);
343 if (err) 503 if (err)
344 return err; 504 return err;
345 505
346 return crypto_gcm_hash(req); 506 err = gcm_hash(req, pctx);
507 if (err)
508 return err;
509
510 crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16);
511 gcm_enc_copy_hash(req, pctx);
512
513 return 0;
347} 514}
348 515
349static int crypto_gcm_verify(struct aead_request *req) 516static int crypto_gcm_verify(struct aead_request *req,
517 struct crypto_gcm_req_priv_ctx *pctx)
350{ 518{
351 struct crypto_aead *aead = crypto_aead_reqtfm(req); 519 struct crypto_aead *aead = crypto_aead_reqtfm(req);
352 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
353 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
354 u8 *auth_tag = pctx->auth_tag; 520 u8 *auth_tag = pctx->auth_tag;
355 u8 *iauth_tag = pctx->iauth_tag; 521 u8 *iauth_tag = pctx->iauth_tag;
356 unsigned int authsize = crypto_aead_authsize(aead); 522 unsigned int authsize = crypto_aead_authsize(aead);
357 unsigned int cryptlen = req->cryptlen - authsize; 523 unsigned int cryptlen = req->cryptlen - authsize;
358 524
359 crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag); 525 crypto_xor(auth_tag, iauth_tag, 16);
360
361 authsize = crypto_aead_authsize(aead);
362 scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0); 526 scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
363 return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0; 527 return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
364} 528}
365 529
366static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err) 530static void gcm_decrypt_done(struct crypto_async_request *areq, int err)
367{ 531{
368 struct aead_request *req = areq->data; 532 struct aead_request *req = areq->data;
533 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
369 534
370 if (!err) 535 if (!err)
371 err = crypto_gcm_verify(req); 536 err = crypto_gcm_verify(req, pctx);
372 537
373 aead_request_complete(req, err); 538 aead_request_complete(req, err);
374} 539}
375 540
541static void gcm_dec_hash_done(struct crypto_async_request *areq, int err)
542{
543 struct aead_request *req = areq->data;
544 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
545 struct ablkcipher_request *abreq = &pctx->u.abreq;
546 struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
547
548 if (!err) {
549 ablkcipher_request_set_callback(abreq, aead_request_flags(req),
550 gcm_decrypt_done, req);
551 crypto_gcm_init_crypt(abreq, req, gctx->cryptlen);
552 err = crypto_ablkcipher_decrypt(abreq);
553 if (err == -EINPROGRESS || err == -EBUSY)
554 return;
555 }
556
557 gcm_decrypt_done(areq, err);
558}
559
376static int crypto_gcm_decrypt(struct aead_request *req) 560static int crypto_gcm_decrypt(struct aead_request *req)
377{ 561{
378 struct crypto_aead *aead = crypto_aead_reqtfm(req); 562 struct crypto_aead *aead = crypto_aead_reqtfm(req);
379 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req); 563 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
380 struct ablkcipher_request *abreq = &pctx->abreq; 564 struct ablkcipher_request *abreq = &pctx->u.abreq;
381 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash; 565 struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
382 unsigned int cryptlen = req->cryptlen;
383 unsigned int authsize = crypto_aead_authsize(aead); 566 unsigned int authsize = crypto_aead_authsize(aead);
567 unsigned int cryptlen = req->cryptlen;
384 int err; 568 int err;
385 569
386 if (cryptlen < authsize) 570 if (cryptlen < authsize)
387 return -EINVAL; 571 return -EINVAL;
388 cryptlen -= authsize; 572 cryptlen -= authsize;
389 573
390 crypto_gcm_init_crypt(abreq, req, cryptlen); 574 gctx->src = req->src;
391 ablkcipher_request_set_callback(abreq, aead_request_flags(req), 575 gctx->cryptlen = cryptlen;
392 crypto_gcm_decrypt_done, req); 576 gctx->complete = gcm_dec_hash_done;
393 577
394 crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen); 578 err = gcm_hash(req, pctx);
579 if (err)
580 return err;
395 581
582 ablkcipher_request_set_callback(abreq, aead_request_flags(req),
583 gcm_decrypt_done, req);
584 crypto_gcm_init_crypt(abreq, req, cryptlen);
396 err = crypto_ablkcipher_decrypt(abreq); 585 err = crypto_ablkcipher_decrypt(abreq);
397 if (err) 586 if (err)
398 return err; 587 return err;
399 588
400 return crypto_gcm_verify(req); 589 return crypto_gcm_verify(req, pctx);
401} 590}
402 591
403static int crypto_gcm_init_tfm(struct crypto_tfm *tfm) 592static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
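gcm_remain() above drives the zero padding: GHASH consumes 16-byte
blocks, so any partial block of associated data or ciphertext is topped
up from gcm_zeroes before hashing, and the bit lengths are hashed last.
A standalone sketch of the padding arithmetic (userspace, illustrative):

	#include <stdio.h>

	static unsigned int gcm_remain(unsigned int len)
	{
		len &= 0xfU;
		return len ? 16 - len : 0;
	}

	int main(void)
	{
		/* prints "0 15 1": full blocks need no padding */
		printf("%u %u %u\n", gcm_remain(0), gcm_remain(1),
		       gcm_remain(31));
		return 0;
	}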
@@ -406,43 +595,56 @@ static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
406 struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst); 595 struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
407 struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm); 596 struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
408 struct crypto_ablkcipher *ctr; 597 struct crypto_ablkcipher *ctr;
598 struct crypto_ahash *ghash;
409 unsigned long align; 599 unsigned long align;
410 int err; 600 int err;
411 601
602 ghash = crypto_spawn_ahash(&ictx->ghash);
603 if (IS_ERR(ghash))
604 return PTR_ERR(ghash);
605
412 ctr = crypto_spawn_skcipher(&ictx->ctr); 606 ctr = crypto_spawn_skcipher(&ictx->ctr);
413 err = PTR_ERR(ctr); 607 err = PTR_ERR(ctr);
414 if (IS_ERR(ctr)) 608 if (IS_ERR(ctr))
415 return err; 609 goto err_free_hash;
416 610
417 ctx->ctr = ctr; 611 ctx->ctr = ctr;
418 ctx->gf128 = NULL; 612 ctx->ghash = ghash;
419 613
420 align = crypto_tfm_alg_alignmask(tfm); 614 align = crypto_tfm_alg_alignmask(tfm);
421 align &= ~(crypto_tfm_ctx_alignment() - 1); 615 align &= ~(crypto_tfm_ctx_alignment() - 1);
422 tfm->crt_aead.reqsize = align + 616 tfm->crt_aead.reqsize = align +
423 sizeof(struct crypto_gcm_req_priv_ctx) + 617 offsetof(struct crypto_gcm_req_priv_ctx, u) +
424 crypto_ablkcipher_reqsize(ctr); 618 max(sizeof(struct ablkcipher_request) +
619 crypto_ablkcipher_reqsize(ctr),
620 sizeof(struct ahash_request) +
621 crypto_ahash_reqsize(ghash));
425 622
426 return 0; 623 return 0;
624
625err_free_hash:
626 crypto_free_ahash(ghash);
627 return err;
427} 628}
428 629
429static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm) 630static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
430{ 631{
431 struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm); 632 struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
432 633
433 if (ctx->gf128 != NULL) 634 crypto_free_ahash(ctx->ghash);
434 gf128mul_free_4k(ctx->gf128);
435
436 crypto_free_ablkcipher(ctx->ctr); 635 crypto_free_ablkcipher(ctx->ctr);
437} 636}
438 637
439static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb, 638static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
440 const char *full_name, 639 const char *full_name,
441 const char *ctr_name) 640 const char *ctr_name,
641 const char *ghash_name)
442{ 642{
443 struct crypto_attr_type *algt; 643 struct crypto_attr_type *algt;
444 struct crypto_instance *inst; 644 struct crypto_instance *inst;
445 struct crypto_alg *ctr; 645 struct crypto_alg *ctr;
646 struct crypto_alg *ghash_alg;
647 struct ahash_alg *ghash_ahash_alg;
446 struct gcm_instance_ctx *ctx; 648 struct gcm_instance_ctx *ctx;
447 int err; 649 int err;
448 650
@@ -454,17 +656,31 @@ static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
454 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 656 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
455 return ERR_PTR(-EINVAL); 657 return ERR_PTR(-EINVAL);
456 658
659 ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type,
660 CRYPTO_ALG_TYPE_HASH,
661 CRYPTO_ALG_TYPE_AHASH_MASK);
662 err = PTR_ERR(ghash_alg);
663 if (IS_ERR(ghash_alg))
664 return ERR_PTR(err);
665
666 err = -ENOMEM;
457 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 667 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
458 if (!inst) 668 if (!inst)
459 return ERR_PTR(-ENOMEM); 669 goto out_put_ghash;
460 670
461 ctx = crypto_instance_ctx(inst); 671 ctx = crypto_instance_ctx(inst);
672 ghash_ahash_alg = container_of(ghash_alg, struct ahash_alg, halg.base);
673 err = crypto_init_ahash_spawn(&ctx->ghash, &ghash_ahash_alg->halg,
674 inst);
675 if (err)
676 goto err_free_inst;
677
462 crypto_set_skcipher_spawn(&ctx->ctr, inst); 678 crypto_set_skcipher_spawn(&ctx->ctr, inst);
463 err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0, 679 err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
464 crypto_requires_sync(algt->type, 680 crypto_requires_sync(algt->type,
465 algt->mask)); 681 algt->mask));
466 if (err) 682 if (err)
467 goto err_free_inst; 683 goto err_drop_ghash;
468 684
469 ctr = crypto_skcipher_spawn_alg(&ctx->ctr); 685 ctr = crypto_skcipher_spawn_alg(&ctx->ctr);
470 686
@@ -479,7 +695,8 @@ static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
479 695
480 err = -ENAMETOOLONG; 696 err = -ENAMETOOLONG;
481 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, 697 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
482 "gcm_base(%s)", ctr->cra_driver_name) >= 698 "gcm_base(%s,%s)", ctr->cra_driver_name,
699 ghash_alg->cra_driver_name) >=
483 CRYPTO_MAX_ALG_NAME) 700 CRYPTO_MAX_ALG_NAME)
484 goto out_put_ctr; 701 goto out_put_ctr;
485 702
@@ -502,12 +719,16 @@ static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
502 inst->alg.cra_aead.decrypt = crypto_gcm_decrypt; 719 inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;
503 720
504out: 721out:
722 crypto_mod_put(ghash_alg);
505 return inst; 723 return inst;
506 724
507out_put_ctr: 725out_put_ctr:
508 crypto_drop_skcipher(&ctx->ctr); 726 crypto_drop_skcipher(&ctx->ctr);
727err_drop_ghash:
728 crypto_drop_ahash(&ctx->ghash);
509err_free_inst: 729err_free_inst:
510 kfree(inst); 730 kfree(inst);
731out_put_ghash:
511 inst = ERR_PTR(err); 732 inst = ERR_PTR(err);
512 goto out; 733 goto out;
513} 734}
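[Note] The unwind labels above are ordered newest-resource-first, and the success path still ends with crypto_mod_put(ghash_alg): the algorithm reference taken by crypto_find_alg() is only needed until crypto_init_ahash_spawn() has taken its own. In outline:

    /* Cleanup chain in crypto_gcm_alloc_common(), falling through:
     *   out_put_ctr:    crypto_drop_skcipher(&ctx->ctr);
     *   err_drop_ghash: crypto_drop_ahash(&ctx->ghash);
     *   err_free_inst:  kfree(inst);
     *   out_put_ghash:  crypto_mod_put(ghash_alg);   (also on success)
     */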
@@ -532,7 +753,7 @@ static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
532 CRYPTO_MAX_ALG_NAME) 753 CRYPTO_MAX_ALG_NAME)
533 return ERR_PTR(-ENAMETOOLONG); 754 return ERR_PTR(-ENAMETOOLONG);
534 755
535 return crypto_gcm_alloc_common(tb, full_name, ctr_name); 756 return crypto_gcm_alloc_common(tb, full_name, ctr_name, "ghash");
536} 757}
537 758
538static void crypto_gcm_free(struct crypto_instance *inst) 759static void crypto_gcm_free(struct crypto_instance *inst)
@@ -540,6 +761,7 @@ static void crypto_gcm_free(struct crypto_instance *inst)
540 struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst); 761 struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);
541 762
542 crypto_drop_skcipher(&ctx->ctr); 763 crypto_drop_skcipher(&ctx->ctr);
764 crypto_drop_ahash(&ctx->ghash);
543 kfree(inst); 765 kfree(inst);
544} 766}
545 767
@@ -554,6 +776,7 @@ static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb)
554{ 776{
555 int err; 777 int err;
556 const char *ctr_name; 778 const char *ctr_name;
779 const char *ghash_name;
557 char full_name[CRYPTO_MAX_ALG_NAME]; 780 char full_name[CRYPTO_MAX_ALG_NAME];
558 781
559 ctr_name = crypto_attr_alg_name(tb[1]); 782 ctr_name = crypto_attr_alg_name(tb[1]);
@@ -561,11 +784,16 @@ static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb)
561 if (IS_ERR(ctr_name)) 784 if (IS_ERR(ctr_name))
562 return ERR_PTR(err); 785 return ERR_PTR(err);
563 786
564 if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s)", 787 ghash_name = crypto_attr_alg_name(tb[2]);
565 ctr_name) >= CRYPTO_MAX_ALG_NAME) 788 err = PTR_ERR(ghash_name);
789 if (IS_ERR(ghash_name))
790 return ERR_PTR(err);
791
792 if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s,%s)",
793 ctr_name, ghash_name) >= CRYPTO_MAX_ALG_NAME)
566 return ERR_PTR(-ENAMETOOLONG); 794 return ERR_PTR(-ENAMETOOLONG);
567 795
568 return crypto_gcm_alloc_common(tb, full_name, ctr_name); 796 return crypto_gcm_alloc_common(tb, full_name, ctr_name, ghash_name);
569} 797}
570 798
571static struct crypto_template crypto_gcm_base_tmpl = { 799static struct crypto_template crypto_gcm_base_tmpl = {
@@ -784,6 +1012,10 @@ static int __init crypto_gcm_module_init(void)
784{ 1012{
785 int err; 1013 int err;
786 1014
1015 gcm_zeroes = kzalloc(16, GFP_KERNEL);
1016 if (!gcm_zeroes)
1017 return -ENOMEM;
1018
787 err = crypto_register_template(&crypto_gcm_base_tmpl); 1019 err = crypto_register_template(&crypto_gcm_base_tmpl);
788 if (err) 1020 if (err)
789 goto out; 1021 goto out;
@@ -796,18 +1028,20 @@ static int __init crypto_gcm_module_init(void)
796 if (err) 1028 if (err)
797 goto out_undo_gcm; 1029 goto out_undo_gcm;
798 1030
799out: 1031 return 0;
800 return err;
801 1032
802out_undo_gcm: 1033out_undo_gcm:
803 crypto_unregister_template(&crypto_gcm_tmpl); 1034 crypto_unregister_template(&crypto_gcm_tmpl);
804out_undo_base: 1035out_undo_base:
805 crypto_unregister_template(&crypto_gcm_base_tmpl); 1036 crypto_unregister_template(&crypto_gcm_base_tmpl);
806 goto out; 1037out:
1038 kfree(gcm_zeroes);
1039 return err;
807} 1040}
808 1041
809static void __exit crypto_gcm_module_exit(void) 1042static void __exit crypto_gcm_module_exit(void)
810{ 1043{
1044 kfree(gcm_zeroes);
811 crypto_unregister_template(&crypto_rfc4106_tmpl); 1045 crypto_unregister_template(&crypto_rfc4106_tmpl);
812 crypto_unregister_template(&crypto_gcm_tmpl); 1046 crypto_unregister_template(&crypto_gcm_tmpl);
813 crypto_unregister_template(&crypto_gcm_base_tmpl); 1047 crypto_unregister_template(&crypto_gcm_base_tmpl);
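[Note] With these changes the template expansion names the hash explicitly: "gcm(aes)" now instantiates as "gcm_base(ctr(aes),ghash)", and gcm_zeroes (allocated once at module init, freed on the failure path and at exit) supplies the zero block used to pad partial GHASH inputs. A hedged usage sketch with the standard AEAD API; error handling is trimmed and the 16-byte key length is an example value:

    struct crypto_aead *tfm;

    /* "gcm(aes)" and "gcm_base(ctr(aes),ghash)" name the same construction. */
    tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
    if (IS_ERR(tfm))
            return PTR_ERR(tfm);

    crypto_aead_setkey(tfm, key, 16);     /* AES-128 */
    crypto_aead_setauthsize(tfm, 16);     /* full 128-bit tag */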
diff --git a/crypto/ghash-generic.c b/crypto/ghash-generic.c
new file mode 100644
index 000000000000..be4425616931
--- /dev/null
+++ b/crypto/ghash-generic.c
@@ -0,0 +1,170 @@
1/*
2 * GHASH: digest algorithm for GCM (Galois/Counter Mode).
3 *
4 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
5 * Copyright (c) 2009 Intel Corp.
6 * Author: Huang Ying <ying.huang@intel.com>
7 *
8 * The algorithm implementation is copied from gcm.c.
9 *
10 * This program is free software; you can redistribute it and/or modify it
11 * under the terms of the GNU General Public License version 2 as published
12 * by the Free Software Foundation.
13 */
14
15#include <crypto/algapi.h>
16#include <crypto/gf128mul.h>
17#include <crypto/internal/hash.h>
18#include <linux/crypto.h>
19#include <linux/init.h>
20#include <linux/kernel.h>
21#include <linux/module.h>
22
23#define GHASH_BLOCK_SIZE 16
24#define GHASH_DIGEST_SIZE 16
25
26struct ghash_ctx {
27 struct gf128mul_4k *gf128;
28};
29
30struct ghash_desc_ctx {
31 u8 buffer[GHASH_BLOCK_SIZE];
32 u32 bytes;
33};
34
35static int ghash_init(struct shash_desc *desc)
36{
37 struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
38
39 memset(dctx, 0, sizeof(*dctx));
40
41 return 0;
42}
43
44static int ghash_setkey(struct crypto_shash *tfm,
45 const u8 *key, unsigned int keylen)
46{
47 struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
48
49 if (keylen != GHASH_BLOCK_SIZE) {
50 crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
51 return -EINVAL;
52 }
53
54 if (ctx->gf128)
55 gf128mul_free_4k(ctx->gf128);
56 ctx->gf128 = gf128mul_init_4k_lle((be128 *)key);
57 if (!ctx->gf128)
58 return -ENOMEM;
59
60 return 0;
61}
62
63static int ghash_update(struct shash_desc *desc,
64 const u8 *src, unsigned int srclen)
65{
66 struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
67 struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
68 u8 *dst = dctx->buffer;
69
70 if (dctx->bytes) {
71 int n = min(srclen, dctx->bytes);
72 u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
73
74 dctx->bytes -= n;
75 srclen -= n;
76
77 while (n--)
78 *pos++ ^= *src++;
79
80 if (!dctx->bytes)
81 gf128mul_4k_lle((be128 *)dst, ctx->gf128);
82 }
83
84 while (srclen >= GHASH_BLOCK_SIZE) {
85 crypto_xor(dst, src, GHASH_BLOCK_SIZE);
86 gf128mul_4k_lle((be128 *)dst, ctx->gf128);
87 src += GHASH_BLOCK_SIZE;
88 srclen -= GHASH_BLOCK_SIZE;
89 }
90
91 if (srclen) {
92 dctx->bytes = GHASH_BLOCK_SIZE - srclen;
93 while (srclen--)
94 *dst++ ^= *src++;
95 }
96
97 return 0;
98}
99
100static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
101{
102 u8 *dst = dctx->buffer;
103
104 if (dctx->bytes) {
105 u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
106
107 while (dctx->bytes--)
108 *tmp++ ^= 0;
109
110 gf128mul_4k_lle((be128 *)dst, ctx->gf128);
111 }
112
113 dctx->bytes = 0;
114}
115
116static int ghash_final(struct shash_desc *desc, u8 *dst)
117{
118 struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
119 struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
120 u8 *buf = dctx->buffer;
121
122 ghash_flush(ctx, dctx);
123 memcpy(dst, buf, GHASH_BLOCK_SIZE);
124
125 return 0;
126}
127
128static void ghash_exit_tfm(struct crypto_tfm *tfm)
129{
130 struct ghash_ctx *ctx = crypto_tfm_ctx(tfm);
131 if (ctx->gf128)
132 gf128mul_free_4k(ctx->gf128);
133}
134
135static struct shash_alg ghash_alg = {
136 .digestsize = GHASH_DIGEST_SIZE,
137 .init = ghash_init,
138 .update = ghash_update,
139 .final = ghash_final,
140 .setkey = ghash_setkey,
141 .descsize = sizeof(struct ghash_desc_ctx),
142 .base = {
143 .cra_name = "ghash",
144 .cra_driver_name = "ghash-generic",
145 .cra_priority = 100,
146 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
147 .cra_blocksize = GHASH_BLOCK_SIZE,
148 .cra_ctxsize = sizeof(struct ghash_ctx),
149 .cra_module = THIS_MODULE,
150 .cra_list = LIST_HEAD_INIT(ghash_alg.base.cra_list),
151 .cra_exit = ghash_exit_tfm,
152 },
153};
154
155static int __init ghash_mod_init(void)
156{
157 return crypto_register_shash(&ghash_alg);
158}
159
160static void __exit ghash_mod_exit(void)
161{
162 crypto_unregister_shash(&ghash_alg);
163}
164
165module_init(ghash_mod_init);
166module_exit(ghash_mod_exit);
167
168MODULE_LICENSE("GPL");
169MODULE_DESCRIPTION("GHASH Message Digest Algorithm");
170MODULE_ALIAS("ghash");
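[Note] ghash_update/ghash_final implement the GHASH recurrence Y_0 = 0, Y_i = (Y_{i-1} xor X_i) * H over GF(2^128), with partial blocks buffered in dctx->buffer and zero-padded by ghash_flush(). Since the algorithm is now a plain shash, it can be driven directly; a sketch using the on-stack descriptor idiom seen in hmac_setkey() below (the wrapper itself is illustrative, not part of the patch):

    /* Sketch: one-shot GHASH of 'len' bytes under the 16-byte subkey H. */
    static int ghash_digest_once(const u8 *h, const u8 *data,
                                 unsigned int len, u8 out[16])
    {
            struct crypto_shash *tfm;
            int err;

            tfm = crypto_alloc_shash("ghash", 0, 0);
            if (IS_ERR(tfm))
                    return PTR_ERR(tfm);

            err = crypto_shash_setkey(tfm, h, 16);
            if (!err) {
                    struct {
                            struct shash_desc shash;
                            char ctx[crypto_shash_descsize(tfm)];
                    } desc;

                    desc.shash.tfm = tfm;
                    desc.shash.flags = 0;
                    err = crypto_shash_digest(&desc.shash, data, len, out);
            }

            crypto_free_shash(tfm);
            return err;
    }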
diff --git a/crypto/hmac.c b/crypto/hmac.c
index 0ad39c374963..15c2eb534541 100644
--- a/crypto/hmac.c
+++ b/crypto/hmac.c
@@ -27,7 +27,7 @@
27#include <linux/string.h> 27#include <linux/string.h>
28 28
29struct hmac_ctx { 29struct hmac_ctx {
30 struct crypto_hash *child; 30 struct crypto_shash *hash;
31}; 31};
32 32
33static inline void *align_ptr(void *p, unsigned int align) 33static inline void *align_ptr(void *p, unsigned int align)
@@ -35,65 +35,45 @@ static inline void *align_ptr(void *p, unsigned int align)
35 return (void *)ALIGN((unsigned long)p, align); 35 return (void *)ALIGN((unsigned long)p, align);
36} 36}
37 37
38static inline struct hmac_ctx *hmac_ctx(struct crypto_hash *tfm) 38static inline struct hmac_ctx *hmac_ctx(struct crypto_shash *tfm)
39{ 39{
40 return align_ptr(crypto_hash_ctx_aligned(tfm) + 40 return align_ptr(crypto_shash_ctx_aligned(tfm) +
41 crypto_hash_blocksize(tfm) * 2 + 41 crypto_shash_statesize(tfm) * 2,
42 crypto_hash_digestsize(tfm), sizeof(void *)); 42 crypto_tfm_ctx_alignment());
43} 43}
44 44
45static int hmac_setkey(struct crypto_hash *parent, 45static int hmac_setkey(struct crypto_shash *parent,
46 const u8 *inkey, unsigned int keylen) 46 const u8 *inkey, unsigned int keylen)
47{ 47{
48 int bs = crypto_hash_blocksize(parent); 48 int bs = crypto_shash_blocksize(parent);
49 int ds = crypto_hash_digestsize(parent); 49 int ds = crypto_shash_digestsize(parent);
50 char *ipad = crypto_hash_ctx_aligned(parent); 50 int ss = crypto_shash_statesize(parent);
51 char *opad = ipad + bs; 51 char *ipad = crypto_shash_ctx_aligned(parent);
52 char *digest = opad + bs; 52 char *opad = ipad + ss;
53 struct hmac_ctx *ctx = align_ptr(digest + ds, sizeof(void *)); 53 struct hmac_ctx *ctx = align_ptr(opad + ss,
54 struct crypto_hash *tfm = ctx->child; 54 crypto_tfm_ctx_alignment());
55 struct crypto_shash *hash = ctx->hash;
56 struct {
57 struct shash_desc shash;
58 char ctx[crypto_shash_descsize(hash)];
59 } desc;
55 unsigned int i; 60 unsigned int i;
56 61
62 desc.shash.tfm = hash;
63 desc.shash.flags = crypto_shash_get_flags(parent) &
64 CRYPTO_TFM_REQ_MAY_SLEEP;
65
57 if (keylen > bs) { 66 if (keylen > bs) {
58 struct hash_desc desc;
59 struct scatterlist tmp;
60 int tmplen;
61 int err; 67 int err;
62 68
63 desc.tfm = tfm; 69 err = crypto_shash_digest(&desc.shash, inkey, keylen, ipad);
64 desc.flags = crypto_hash_get_flags(parent);
65 desc.flags &= CRYPTO_TFM_REQ_MAY_SLEEP;
66
67 err = crypto_hash_init(&desc);
68 if (err) 70 if (err)
69 return err; 71 return err;
70 72
71 tmplen = bs * 2 + ds;
72 sg_init_one(&tmp, ipad, tmplen);
73
74 for (; keylen > tmplen; inkey += tmplen, keylen -= tmplen) {
75 memcpy(ipad, inkey, tmplen);
76 err = crypto_hash_update(&desc, &tmp, tmplen);
77 if (err)
78 return err;
79 }
80
81 if (keylen) {
82 memcpy(ipad, inkey, keylen);
83 err = crypto_hash_update(&desc, &tmp, keylen);
84 if (err)
85 return err;
86 }
87
88 err = crypto_hash_final(&desc, digest);
89 if (err)
90 return err;
91
92 inkey = digest;
93 keylen = ds; 73 keylen = ds;
94 } 74 } else
75 memcpy(ipad, inkey, keylen);
95 76
96 memcpy(ipad, inkey, keylen);
97 memset(ipad + keylen, 0, bs - keylen); 77 memset(ipad + keylen, 0, bs - keylen);
98 memcpy(opad, ipad, bs); 78 memcpy(opad, ipad, bs);
99 79
@@ -102,184 +82,178 @@ static int hmac_setkey(struct crypto_hash *parent,
102 opad[i] ^= 0x5c; 82 opad[i] ^= 0x5c;
103 } 83 }
104 84
105 return 0; 85 return crypto_shash_init(&desc.shash) ?:
86 crypto_shash_update(&desc.shash, ipad, bs) ?:
87 crypto_shash_export(&desc.shash, ipad) ?:
88 crypto_shash_init(&desc.shash) ?:
89 crypto_shash_update(&desc.shash, opad, bs) ?:
90 crypto_shash_export(&desc.shash, opad);
106} 91}
107 92
108static int hmac_init(struct hash_desc *pdesc) 93static int hmac_export(struct shash_desc *pdesc, void *out)
109{ 94{
110 struct crypto_hash *parent = pdesc->tfm; 95 struct shash_desc *desc = shash_desc_ctx(pdesc);
111 int bs = crypto_hash_blocksize(parent);
112 int ds = crypto_hash_digestsize(parent);
113 char *ipad = crypto_hash_ctx_aligned(parent);
114 struct hmac_ctx *ctx = align_ptr(ipad + bs * 2 + ds, sizeof(void *));
115 struct hash_desc desc;
116 struct scatterlist tmp;
117 int err;
118 96
119 desc.tfm = ctx->child; 97 desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
120 desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
121 sg_init_one(&tmp, ipad, bs);
122 98
123 err = crypto_hash_init(&desc); 99 return crypto_shash_export(desc, out);
124 if (unlikely(err))
125 return err;
126
127 return crypto_hash_update(&desc, &tmp, bs);
128} 100}
129 101
130static int hmac_update(struct hash_desc *pdesc, 102static int hmac_import(struct shash_desc *pdesc, const void *in)
131 struct scatterlist *sg, unsigned int nbytes)
132{ 103{
104 struct shash_desc *desc = shash_desc_ctx(pdesc);
133 struct hmac_ctx *ctx = hmac_ctx(pdesc->tfm); 105 struct hmac_ctx *ctx = hmac_ctx(pdesc->tfm);
134 struct hash_desc desc;
135 106
136 desc.tfm = ctx->child; 107 desc->tfm = ctx->hash;
137 desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP; 108 desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
138 109
139 return crypto_hash_update(&desc, sg, nbytes); 110 return crypto_shash_import(desc, in);
140} 111}
141 112
142static int hmac_final(struct hash_desc *pdesc, u8 *out) 113static int hmac_init(struct shash_desc *pdesc)
143{ 114{
144 struct crypto_hash *parent = pdesc->tfm; 115 return hmac_import(pdesc, crypto_shash_ctx_aligned(pdesc->tfm));
145 int bs = crypto_hash_blocksize(parent); 116}
146 int ds = crypto_hash_digestsize(parent);
147 char *opad = crypto_hash_ctx_aligned(parent) + bs;
148 char *digest = opad + bs;
149 struct hmac_ctx *ctx = align_ptr(digest + ds, sizeof(void *));
150 struct hash_desc desc;
151 struct scatterlist tmp;
152 int err;
153 117
154 desc.tfm = ctx->child; 118static int hmac_update(struct shash_desc *pdesc,
155 desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP; 119 const u8 *data, unsigned int nbytes)
156 sg_init_one(&tmp, opad, bs + ds); 120{
121 struct shash_desc *desc = shash_desc_ctx(pdesc);
157 122
158 err = crypto_hash_final(&desc, digest); 123 desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
159 if (unlikely(err))
160 return err;
161 124
162 return crypto_hash_digest(&desc, &tmp, bs + ds, out); 125 return crypto_shash_update(desc, data, nbytes);
163} 126}
164 127
165static int hmac_digest(struct hash_desc *pdesc, struct scatterlist *sg, 128static int hmac_final(struct shash_desc *pdesc, u8 *out)
166 unsigned int nbytes, u8 *out)
167{ 129{
168 struct crypto_hash *parent = pdesc->tfm; 130 struct crypto_shash *parent = pdesc->tfm;
169 int bs = crypto_hash_blocksize(parent); 131 int ds = crypto_shash_digestsize(parent);
170 int ds = crypto_hash_digestsize(parent); 132 int ss = crypto_shash_statesize(parent);
171 char *ipad = crypto_hash_ctx_aligned(parent); 133 char *opad = crypto_shash_ctx_aligned(parent) + ss;
172 char *opad = ipad + bs; 134 struct shash_desc *desc = shash_desc_ctx(pdesc);
173 char *digest = opad + bs;
174 struct hmac_ctx *ctx = align_ptr(digest + ds, sizeof(void *));
175 struct hash_desc desc;
176 struct scatterlist sg1[2];
177 struct scatterlist sg2[1];
178 int err;
179 135
180 desc.tfm = ctx->child; 136 desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
181 desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
182 137
183 sg_init_table(sg1, 2); 138 return crypto_shash_final(desc, out) ?:
184 sg_set_buf(sg1, ipad, bs); 139 crypto_shash_import(desc, opad) ?:
185 scatterwalk_sg_chain(sg1, 2, sg); 140 crypto_shash_finup(desc, out, ds, out);
141}
186 142
187 sg_init_table(sg2, 1); 143static int hmac_finup(struct shash_desc *pdesc, const u8 *data,
188 sg_set_buf(sg2, opad, bs + ds); 144 unsigned int nbytes, u8 *out)
145{
189 146
190 err = crypto_hash_digest(&desc, sg1, nbytes + bs, digest); 147 struct crypto_shash *parent = pdesc->tfm;
191 if (unlikely(err)) 148 int ds = crypto_shash_digestsize(parent);
192 return err; 149 int ss = crypto_shash_statesize(parent);
150 char *opad = crypto_shash_ctx_aligned(parent) + ss;
151 struct shash_desc *desc = shash_desc_ctx(pdesc);
193 152
194 return crypto_hash_digest(&desc, sg2, bs + ds, out); 153 desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
154
155 return crypto_shash_finup(desc, data, nbytes, out) ?:
156 crypto_shash_import(desc, opad) ?:
157 crypto_shash_finup(desc, out, ds, out);
195} 158}
196 159
197static int hmac_init_tfm(struct crypto_tfm *tfm) 160static int hmac_init_tfm(struct crypto_tfm *tfm)
198{ 161{
199 struct crypto_hash *hash; 162 struct crypto_shash *parent = __crypto_shash_cast(tfm);
163 struct crypto_shash *hash;
200 struct crypto_instance *inst = (void *)tfm->__crt_alg; 164 struct crypto_instance *inst = (void *)tfm->__crt_alg;
201 struct crypto_spawn *spawn = crypto_instance_ctx(inst); 165 struct crypto_shash_spawn *spawn = crypto_instance_ctx(inst);
202 struct hmac_ctx *ctx = hmac_ctx(__crypto_hash_cast(tfm)); 166 struct hmac_ctx *ctx = hmac_ctx(parent);
203 167
204 hash = crypto_spawn_hash(spawn); 168 hash = crypto_spawn_shash(spawn);
205 if (IS_ERR(hash)) 169 if (IS_ERR(hash))
206 return PTR_ERR(hash); 170 return PTR_ERR(hash);
207 171
208 ctx->child = hash; 172 parent->descsize = sizeof(struct shash_desc) +
173 crypto_shash_descsize(hash);
174
175 ctx->hash = hash;
209 return 0; 176 return 0;
210} 177}
211 178
212static void hmac_exit_tfm(struct crypto_tfm *tfm) 179static void hmac_exit_tfm(struct crypto_tfm *tfm)
213{ 180{
214 struct hmac_ctx *ctx = hmac_ctx(__crypto_hash_cast(tfm)); 181 struct hmac_ctx *ctx = hmac_ctx(__crypto_shash_cast(tfm));
215 crypto_free_hash(ctx->child); 182 crypto_free_shash(ctx->hash);
216} 183}
217 184
218static void hmac_free(struct crypto_instance *inst) 185static int hmac_create(struct crypto_template *tmpl, struct rtattr **tb)
219{ 186{
220 crypto_drop_spawn(crypto_instance_ctx(inst)); 187 struct shash_instance *inst;
221 kfree(inst);
222}
223
224static struct crypto_instance *hmac_alloc(struct rtattr **tb)
225{
226 struct crypto_instance *inst;
227 struct crypto_alg *alg; 188 struct crypto_alg *alg;
189 struct shash_alg *salg;
228 int err; 190 int err;
229 int ds; 191 int ds;
192 int ss;
230 193
231 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_HASH); 194 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
232 if (err) 195 if (err)
233 return ERR_PTR(err); 196 return err;
234 197
235 alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_HASH, 198 salg = shash_attr_alg(tb[1], 0, 0);
236 CRYPTO_ALG_TYPE_HASH_MASK); 199 if (IS_ERR(salg))
237 if (IS_ERR(alg)) 200 return PTR_ERR(salg);
238 return ERR_CAST(alg); 201
239 202 err = -EINVAL;
240 inst = ERR_PTR(-EINVAL); 203 ds = salg->digestsize;
241 ds = alg->cra_type == &crypto_hash_type ? 204 ss = salg->statesize;
242 alg->cra_hash.digestsize : 205 alg = &salg->base;
243 alg->cra_type ? 206 if (ds > alg->cra_blocksize ||
244 __crypto_shash_alg(alg)->digestsize : 207 ss < alg->cra_blocksize)
245 alg->cra_digest.dia_digestsize;
246 if (ds > alg->cra_blocksize)
247 goto out_put_alg; 208 goto out_put_alg;
248 209
249 inst = crypto_alloc_instance("hmac", alg); 210 inst = shash_alloc_instance("hmac", alg);
211 err = PTR_ERR(inst);
250 if (IS_ERR(inst)) 212 if (IS_ERR(inst))
251 goto out_put_alg; 213 goto out_put_alg;
252 214
253 inst->alg.cra_flags = CRYPTO_ALG_TYPE_HASH; 215 err = crypto_init_shash_spawn(shash_instance_ctx(inst), salg,
254 inst->alg.cra_priority = alg->cra_priority; 216 shash_crypto_instance(inst));
255 inst->alg.cra_blocksize = alg->cra_blocksize; 217 if (err)
256 inst->alg.cra_alignmask = alg->cra_alignmask; 218 goto out_free_inst;
257 inst->alg.cra_type = &crypto_hash_type; 219
258 220 inst->alg.base.cra_priority = alg->cra_priority;
259 inst->alg.cra_hash.digestsize = ds; 221 inst->alg.base.cra_blocksize = alg->cra_blocksize;
260 222 inst->alg.base.cra_alignmask = alg->cra_alignmask;
261 inst->alg.cra_ctxsize = sizeof(struct hmac_ctx) + 223
262 ALIGN(inst->alg.cra_blocksize * 2 + ds, 224 ss = ALIGN(ss, alg->cra_alignmask + 1);
263 sizeof(void *)); 225 inst->alg.digestsize = ds;
264 226 inst->alg.statesize = ss;
265 inst->alg.cra_init = hmac_init_tfm; 227
266 inst->alg.cra_exit = hmac_exit_tfm; 228 inst->alg.base.cra_ctxsize = sizeof(struct hmac_ctx) +
267 229 ALIGN(ss * 2, crypto_tfm_ctx_alignment());
268 inst->alg.cra_hash.init = hmac_init; 230
269 inst->alg.cra_hash.update = hmac_update; 231 inst->alg.base.cra_init = hmac_init_tfm;
270 inst->alg.cra_hash.final = hmac_final; 232 inst->alg.base.cra_exit = hmac_exit_tfm;
271 inst->alg.cra_hash.digest = hmac_digest; 233
272 inst->alg.cra_hash.setkey = hmac_setkey; 234 inst->alg.init = hmac_init;
235 inst->alg.update = hmac_update;
236 inst->alg.final = hmac_final;
237 inst->alg.finup = hmac_finup;
238 inst->alg.export = hmac_export;
239 inst->alg.import = hmac_import;
240 inst->alg.setkey = hmac_setkey;
241
242 err = shash_register_instance(tmpl, inst);
243 if (err) {
244out_free_inst:
245 shash_free_instance(shash_crypto_instance(inst));
246 }
273 247
274out_put_alg: 248out_put_alg:
275 crypto_mod_put(alg); 249 crypto_mod_put(alg);
276 return inst; 250 return err;
277} 251}
278 252
279static struct crypto_template hmac_tmpl = { 253static struct crypto_template hmac_tmpl = {
280 .name = "hmac", 254 .name = "hmac",
281 .alloc = hmac_alloc, 255 .create = hmac_create,
282 .free = hmac_free, 256 .free = shash_free_instance,
283 .module = THIS_MODULE, 257 .module = THIS_MODULE,
284}; 258};
285 259
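[Note] The conversion rests on HMAC(K, m) = H((K xor opad) || H((K xor ipad) || m)). hmac_setkey() hashes one block of K xor ipad and one of K xor opad, then exports the two partial states into the context; hmac_init() merely imports the ipad state, and hmac_final()/hmac_finup() import the opad state, so no key block is re-hashed per message. The RFC 2104 key schedule it performs, in outline (keylen <= bs at this point because longer keys were already digested down; bs is the block size):

    memcpy(ipad, key, keylen);
    memset(ipad + keylen, 0, bs - keylen);   /* pad key to one block */
    memcpy(opad, ipad, bs);
    for (i = 0; i < bs; i++) {
            ipad[i] ^= 0x36;                 /* inner pad constant */
            opad[i] ^= 0x5c;                 /* outer pad constant */
    }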
diff --git a/crypto/internal.h b/crypto/internal.h
index 113579a82dff..2d226362e594 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -25,12 +25,7 @@
25#include <linux/notifier.h> 25#include <linux/notifier.h>
26#include <linux/rwsem.h> 26#include <linux/rwsem.h>
27#include <linux/slab.h> 27#include <linux/slab.h>
28 28#include <linux/fips.h>
29#ifdef CONFIG_CRYPTO_FIPS
30extern int fips_enabled;
31#else
32#define fips_enabled 0
33#endif
34 29
35/* Crypto notification events. */ 30/* Crypto notification events. */
36enum { 31enum {
@@ -65,18 +60,6 @@ static inline void crypto_exit_proc(void)
65{ } 60{ }
66#endif 61#endif
67 62
68static inline unsigned int crypto_digest_ctxsize(struct crypto_alg *alg)
69{
70 unsigned int len = alg->cra_ctxsize;
71
72 if (alg->cra_alignmask) {
73 len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1);
74 len += alg->cra_digest.dia_digestsize;
75 }
76
77 return len;
78}
79
80static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg) 63static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg)
81{ 64{
82 return alg->cra_ctxsize; 65 return alg->cra_ctxsize;
@@ -91,12 +74,9 @@ struct crypto_alg *crypto_mod_get(struct crypto_alg *alg);
91struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask); 74struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask);
92struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask); 75struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask);
93 76
94int crypto_init_digest_ops(struct crypto_tfm *tfm);
95int crypto_init_digest_ops_async(struct crypto_tfm *tfm);
96int crypto_init_cipher_ops(struct crypto_tfm *tfm); 77int crypto_init_cipher_ops(struct crypto_tfm *tfm);
97int crypto_init_compress_ops(struct crypto_tfm *tfm); 78int crypto_init_compress_ops(struct crypto_tfm *tfm);
98 79
99void crypto_exit_digest_ops(struct crypto_tfm *tfm);
100void crypto_exit_cipher_ops(struct crypto_tfm *tfm); 80void crypto_exit_cipher_ops(struct crypto_tfm *tfm);
101void crypto_exit_compress_ops(struct crypto_tfm *tfm); 81void crypto_exit_compress_ops(struct crypto_tfm *tfm);
102 82
@@ -111,12 +91,12 @@ struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
111 u32 mask); 91 u32 mask);
112void *crypto_create_tfm(struct crypto_alg *alg, 92void *crypto_create_tfm(struct crypto_alg *alg,
113 const struct crypto_type *frontend); 93 const struct crypto_type *frontend);
94struct crypto_alg *crypto_find_alg(const char *alg_name,
95 const struct crypto_type *frontend,
96 u32 type, u32 mask);
114void *crypto_alloc_tfm(const char *alg_name, 97void *crypto_alloc_tfm(const char *alg_name,
115 const struct crypto_type *frontend, u32 type, u32 mask); 98 const struct crypto_type *frontend, u32 type, u32 mask);
116 99
117int crypto_register_instance(struct crypto_template *tmpl,
118 struct crypto_instance *inst);
119
120int crypto_register_notifier(struct notifier_block *nb); 100int crypto_register_notifier(struct notifier_block *nb);
121int crypto_unregister_notifier(struct notifier_block *nb); 101int crypto_unregister_notifier(struct notifier_block *nb);
122int crypto_probing_notify(unsigned long val, void *v); 102int crypto_probing_notify(unsigned long val, void *v);
diff --git a/crypto/pcompress.c b/crypto/pcompress.c
index bcadc03726b7..f7c4a7d7412e 100644
--- a/crypto/pcompress.c
+++ b/crypto/pcompress.c
@@ -36,14 +36,12 @@ static int crypto_pcomp_init(struct crypto_tfm *tfm, u32 type, u32 mask)
36 return 0; 36 return 0;
37} 37}
38 38
39static unsigned int crypto_pcomp_extsize(struct crypto_alg *alg, 39static unsigned int crypto_pcomp_extsize(struct crypto_alg *alg)
40 const struct crypto_type *frontend)
41{ 40{
42 return alg->cra_ctxsize; 41 return alg->cra_ctxsize;
43} 42}
44 43
45static int crypto_pcomp_init_tfm(struct crypto_tfm *tfm, 44static int crypto_pcomp_init_tfm(struct crypto_tfm *tfm)
46 const struct crypto_type *frontend)
47{ 45{
48 return 0; 46 return 0;
49} 47}
diff --git a/crypto/rng.c b/crypto/rng.c
index 6e94bc735578..ba05e7380e76 100644
--- a/crypto/rng.c
+++ b/crypto/rng.c
@@ -123,4 +123,4 @@ void crypto_put_default_rng(void)
123EXPORT_SYMBOL_GPL(crypto_put_default_rng); 123EXPORT_SYMBOL_GPL(crypto_put_default_rng);
124 124
125MODULE_LICENSE("GPL"); 125MODULE_LICENSE("GPL");
126MODULE_DESCRIPTION("Random Number Genertor"); 126MODULE_DESCRIPTION("Random Number Generator");
diff --git a/crypto/sha1_generic.c b/crypto/sha1_generic.c
index 9efef20454cb..0416091bf45a 100644
--- a/crypto/sha1_generic.c
+++ b/crypto/sha1_generic.c
@@ -25,31 +25,21 @@
25#include <crypto/sha.h> 25#include <crypto/sha.h>
26#include <asm/byteorder.h> 26#include <asm/byteorder.h>
27 27
28struct sha1_ctx {
29 u64 count;
30 u32 state[5];
31 u8 buffer[64];
32};
33
34static int sha1_init(struct shash_desc *desc) 28static int sha1_init(struct shash_desc *desc)
35{ 29{
36 struct sha1_ctx *sctx = shash_desc_ctx(desc); 30 struct sha1_state *sctx = shash_desc_ctx(desc);
37 31
38 static const struct sha1_ctx initstate = { 32 *sctx = (struct sha1_state){
39 0, 33 .state = { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
40 { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
41 { 0, }
42 }; 34 };
43 35
44 *sctx = initstate;
45
46 return 0; 36 return 0;
47} 37}
48 38
49static int sha1_update(struct shash_desc *desc, const u8 *data, 39static int sha1_update(struct shash_desc *desc, const u8 *data,
50 unsigned int len) 40 unsigned int len)
51{ 41{
52 struct sha1_ctx *sctx = shash_desc_ctx(desc); 42 struct sha1_state *sctx = shash_desc_ctx(desc);
53 unsigned int partial, done; 43 unsigned int partial, done;
54 const u8 *src; 44 const u8 *src;
55 45
@@ -85,7 +75,7 @@ static int sha1_update(struct shash_desc *desc, const u8 *data,
85/* Add padding and return the message digest. */ 75/* Add padding and return the message digest. */
86static int sha1_final(struct shash_desc *desc, u8 *out) 76static int sha1_final(struct shash_desc *desc, u8 *out)
87{ 77{
88 struct sha1_ctx *sctx = shash_desc_ctx(desc); 78 struct sha1_state *sctx = shash_desc_ctx(desc);
89 __be32 *dst = (__be32 *)out; 79 __be32 *dst = (__be32 *)out;
90 u32 i, index, padlen; 80 u32 i, index, padlen;
91 __be64 bits; 81 __be64 bits;
@@ -111,12 +101,31 @@ static int sha1_final(struct shash_desc *desc, u8 *out)
111 return 0; 101 return 0;
112} 102}
113 103
104static int sha1_export(struct shash_desc *desc, void *out)
105{
106 struct sha1_state *sctx = shash_desc_ctx(desc);
107
108 memcpy(out, sctx, sizeof(*sctx));
109 return 0;
110}
111
112static int sha1_import(struct shash_desc *desc, const void *in)
113{
114 struct sha1_state *sctx = shash_desc_ctx(desc);
115
116 memcpy(sctx, in, sizeof(*sctx));
117 return 0;
118}
119
114static struct shash_alg alg = { 120static struct shash_alg alg = {
115 .digestsize = SHA1_DIGEST_SIZE, 121 .digestsize = SHA1_DIGEST_SIZE,
116 .init = sha1_init, 122 .init = sha1_init,
117 .update = sha1_update, 123 .update = sha1_update,
118 .final = sha1_final, 124 .final = sha1_final,
119 .descsize = sizeof(struct sha1_ctx), 125 .export = sha1_export,
126 .import = sha1_import,
127 .descsize = sizeof(struct sha1_state),
128 .statesize = sizeof(struct sha1_state),
120 .base = { 129 .base = {
121 .cra_name = "sha1", 130 .cra_name = "sha1",
122 .cra_driver_name= "sha1-generic", 131 .cra_driver_name= "sha1-generic",
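[Note] Besides switching to the shared struct sha1_state, the point of sha1_export/sha1_import is to expose the mid-stream state as a flat blob (hence the new .statesize), which is exactly what the reworked HMAC uses to checkpoint its precomputed pads. A sketch of checkpoint-and-resume over any shash with these hooks (desc is an initialised shash_desc; state must hold crypto_shash_statesize() bytes):

    crypto_shash_init(desc);
    crypto_shash_update(desc, prefix, prefix_len);
    crypto_shash_export(desc, state);   /* snapshot after the prefix */

    /* Later, possibly on a fresh descriptor for the same tfm: */
    crypto_shash_import(desc, state);
    crypto_shash_finup(desc, suffix, suffix_len, digest);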
diff --git a/crypto/sha256_generic.c b/crypto/sha256_generic.c
index 6349d8339d37..c48459ebf05b 100644
--- a/crypto/sha256_generic.c
+++ b/crypto/sha256_generic.c
@@ -25,12 +25,6 @@
25#include <crypto/sha.h> 25#include <crypto/sha.h>
26#include <asm/byteorder.h> 26#include <asm/byteorder.h>
27 27
28struct sha256_ctx {
29 u32 count[2];
30 u32 state[8];
31 u8 buf[128];
32};
33
34static inline u32 Ch(u32 x, u32 y, u32 z) 28static inline u32 Ch(u32 x, u32 y, u32 z)
35{ 29{
36 return z ^ (x & (y ^ z)); 30 return z ^ (x & (y ^ z));
@@ -222,7 +216,7 @@ static void sha256_transform(u32 *state, const u8 *input)
222 216
223static int sha224_init(struct shash_desc *desc) 217static int sha224_init(struct shash_desc *desc)
224{ 218{
225 struct sha256_ctx *sctx = shash_desc_ctx(desc); 219 struct sha256_state *sctx = shash_desc_ctx(desc);
226 sctx->state[0] = SHA224_H0; 220 sctx->state[0] = SHA224_H0;
227 sctx->state[1] = SHA224_H1; 221 sctx->state[1] = SHA224_H1;
228 sctx->state[2] = SHA224_H2; 222 sctx->state[2] = SHA224_H2;
@@ -231,15 +225,14 @@ static int sha224_init(struct shash_desc *desc)
231 sctx->state[5] = SHA224_H5; 225 sctx->state[5] = SHA224_H5;
232 sctx->state[6] = SHA224_H6; 226 sctx->state[6] = SHA224_H6;
233 sctx->state[7] = SHA224_H7; 227 sctx->state[7] = SHA224_H7;
234 sctx->count[0] = 0; 228 sctx->count = 0;
235 sctx->count[1] = 0;
236 229
237 return 0; 230 return 0;
238} 231}
239 232
240static int sha256_init(struct shash_desc *desc) 233static int sha256_init(struct shash_desc *desc)
241{ 234{
242 struct sha256_ctx *sctx = shash_desc_ctx(desc); 235 struct sha256_state *sctx = shash_desc_ctx(desc);
243 sctx->state[0] = SHA256_H0; 236 sctx->state[0] = SHA256_H0;
244 sctx->state[1] = SHA256_H1; 237 sctx->state[1] = SHA256_H1;
245 sctx->state[2] = SHA256_H2; 238 sctx->state[2] = SHA256_H2;
@@ -248,7 +241,7 @@ static int sha256_init(struct shash_desc *desc)
248 sctx->state[5] = SHA256_H5; 241 sctx->state[5] = SHA256_H5;
249 sctx->state[6] = SHA256_H6; 242 sctx->state[6] = SHA256_H6;
250 sctx->state[7] = SHA256_H7; 243 sctx->state[7] = SHA256_H7;
251 sctx->count[0] = sctx->count[1] = 0; 244 sctx->count = 0;
252 245
253 return 0; 246 return 0;
254} 247}
@@ -256,58 +249,54 @@ static int sha256_init(struct shash_desc *desc)
256static int sha256_update(struct shash_desc *desc, const u8 *data, 249static int sha256_update(struct shash_desc *desc, const u8 *data,
257 unsigned int len) 250 unsigned int len)
258{ 251{
259 struct sha256_ctx *sctx = shash_desc_ctx(desc); 252 struct sha256_state *sctx = shash_desc_ctx(desc);
260 unsigned int i, index, part_len; 253 unsigned int partial, done;
261 254 const u8 *src;
262 /* Compute number of bytes mod 128 */ 255
263 index = (unsigned int)((sctx->count[0] >> 3) & 0x3f); 256 partial = sctx->count & 0x3f;
264 257 sctx->count += len;
265 /* Update number of bits */ 258 done = 0;
266 if ((sctx->count[0] += (len << 3)) < (len << 3)) { 259 src = data;
267 sctx->count[1]++; 260
268 sctx->count[1] += (len >> 29); 261 if ((partial + len) > 63) {
269 } 262 if (partial) {
270 263 done = -partial;
271 part_len = 64 - index; 264 memcpy(sctx->buf + partial, data, done + 64);
272 265 src = sctx->buf;
273 /* Transform as many times as possible. */ 266 }
274 if (len >= part_len) { 267
275 memcpy(&sctx->buf[index], data, part_len); 268 do {
276 sha256_transform(sctx->state, sctx->buf); 269 sha256_transform(sctx->state, src);
277 270 done += 64;
278 for (i = part_len; i + 63 < len; i += 64) 271 src = data + done;
279 sha256_transform(sctx->state, &data[i]); 272 } while (done + 63 < len);
280 index = 0; 273
281 } else { 274 partial = 0;
282 i = 0;
283 } 275 }
284 276 memcpy(sctx->buf + partial, src, len - done);
285 /* Buffer remaining input */
286 memcpy(&sctx->buf[index], &data[i], len-i);
287 277
288 return 0; 278 return 0;
289} 279}
290 280
291static int sha256_final(struct shash_desc *desc, u8 *out) 281static int sha256_final(struct shash_desc *desc, u8 *out)
292{ 282{
293 struct sha256_ctx *sctx = shash_desc_ctx(desc); 283 struct sha256_state *sctx = shash_desc_ctx(desc);
294 __be32 *dst = (__be32 *)out; 284 __be32 *dst = (__be32 *)out;
295 __be32 bits[2]; 285 __be64 bits;
296 unsigned int index, pad_len; 286 unsigned int index, pad_len;
297 int i; 287 int i;
298 static const u8 padding[64] = { 0x80, }; 288 static const u8 padding[64] = { 0x80, };
299 289
300 /* Save number of bits */ 290 /* Save number of bits */
301 bits[1] = cpu_to_be32(sctx->count[0]); 291 bits = cpu_to_be64(sctx->count << 3);
302 bits[0] = cpu_to_be32(sctx->count[1]);
303 292
304 /* Pad out to 56 mod 64. */ 293 /* Pad out to 56 mod 64. */
305 index = (sctx->count[0] >> 3) & 0x3f; 294 index = sctx->count & 0x3f;
306 pad_len = (index < 56) ? (56 - index) : ((64+56) - index); 295 pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
307 sha256_update(desc, padding, pad_len); 296 sha256_update(desc, padding, pad_len);
308 297
309 /* Append length (before padding) */ 298 /* Append length (before padding) */
310 sha256_update(desc, (const u8 *)bits, sizeof(bits)); 299 sha256_update(desc, (const u8 *)&bits, sizeof(bits));
311 300
312 /* Store state in digest */ 301 /* Store state in digest */
313 for (i = 0; i < 8; i++) 302 for (i = 0; i < 8; i++)
@@ -331,12 +320,31 @@ static int sha224_final(struct shash_desc *desc, u8 *hash)
331 return 0; 320 return 0;
332} 321}
333 322
323static int sha256_export(struct shash_desc *desc, void *out)
324{
325 struct sha256_state *sctx = shash_desc_ctx(desc);
326
327 memcpy(out, sctx, sizeof(*sctx));
328 return 0;
329}
330
331static int sha256_import(struct shash_desc *desc, const void *in)
332{
333 struct sha256_state *sctx = shash_desc_ctx(desc);
334
335 memcpy(sctx, in, sizeof(*sctx));
336 return 0;
337}
338
334static struct shash_alg sha256 = { 339static struct shash_alg sha256 = {
335 .digestsize = SHA256_DIGEST_SIZE, 340 .digestsize = SHA256_DIGEST_SIZE,
336 .init = sha256_init, 341 .init = sha256_init,
337 .update = sha256_update, 342 .update = sha256_update,
338 .final = sha256_final, 343 .final = sha256_final,
339 .descsize = sizeof(struct sha256_ctx), 344 .export = sha256_export,
345 .import = sha256_import,
346 .descsize = sizeof(struct sha256_state),
347 .statesize = sizeof(struct sha256_state),
340 .base = { 348 .base = {
341 .cra_name = "sha256", 349 .cra_name = "sha256",
342 .cra_driver_name= "sha256-generic", 350 .cra_driver_name= "sha256-generic",
@@ -351,7 +359,7 @@ static struct shash_alg sha224 = {
351 .init = sha224_init, 359 .init = sha224_init,
352 .update = sha256_update, 360 .update = sha256_update,
353 .final = sha224_final, 361 .final = sha224_final,
354 .descsize = sizeof(struct sha256_ctx), 362 .descsize = sizeof(struct sha256_state),
355 .base = { 363 .base = {
356 .cra_name = "sha224", 364 .cra_name = "sha224",
357 .cra_driver_name= "sha224-generic", 365 .cra_driver_name= "sha224-generic",
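[Note] The sha256 rework collapses the two 32-bit bit counters into a single 64-bit byte counter, and rewrites the update path to the same buffer-then-transform structure sha1_update uses. The MD-style length trailer then takes one shift:

    /* Length trailer with a byte counter (new code) vs. bit counters:
     *   bits = cpu_to_be64(sctx->count << 3);   bytes -> bits
     * e.g. count = 100 bytes  ->  trailer = be64(800).
     * A u64 byte counter covers well past SHA-256's nominal
     * 2^64 - 1 bit message limit. */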
diff --git a/crypto/sha512_generic.c b/crypto/sha512_generic.c
index 3bea38d12242..9ed9f60316e5 100644
--- a/crypto/sha512_generic.c
+++ b/crypto/sha512_generic.c
@@ -21,12 +21,6 @@
21#include <linux/percpu.h> 21#include <linux/percpu.h>
22#include <asm/byteorder.h> 22#include <asm/byteorder.h>
23 23
24struct sha512_ctx {
25 u64 state[8];
26 u32 count[4];
27 u8 buf[128];
28};
29
30static DEFINE_PER_CPU(u64[80], msg_schedule); 24static DEFINE_PER_CPU(u64[80], msg_schedule);
31 25
32static inline u64 Ch(u64 x, u64 y, u64 z) 26static inline u64 Ch(u64 x, u64 y, u64 z)
@@ -141,7 +135,7 @@ sha512_transform(u64 *state, const u8 *input)
141static int 135static int
142sha512_init(struct shash_desc *desc) 136sha512_init(struct shash_desc *desc)
143{ 137{
144 struct sha512_ctx *sctx = shash_desc_ctx(desc); 138 struct sha512_state *sctx = shash_desc_ctx(desc);
145 sctx->state[0] = SHA512_H0; 139 sctx->state[0] = SHA512_H0;
146 sctx->state[1] = SHA512_H1; 140 sctx->state[1] = SHA512_H1;
147 sctx->state[2] = SHA512_H2; 141 sctx->state[2] = SHA512_H2;
@@ -150,7 +144,7 @@ sha512_init(struct shash_desc *desc)
150 sctx->state[5] = SHA512_H5; 144 sctx->state[5] = SHA512_H5;
151 sctx->state[6] = SHA512_H6; 145 sctx->state[6] = SHA512_H6;
152 sctx->state[7] = SHA512_H7; 146 sctx->state[7] = SHA512_H7;
153 sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; 147 sctx->count[0] = sctx->count[1] = 0;
154 148
155 return 0; 149 return 0;
156} 150}
@@ -158,7 +152,7 @@ sha512_init(struct shash_desc *desc)
158static int 152static int
159sha384_init(struct shash_desc *desc) 153sha384_init(struct shash_desc *desc)
160{ 154{
161 struct sha512_ctx *sctx = shash_desc_ctx(desc); 155 struct sha512_state *sctx = shash_desc_ctx(desc);
162 sctx->state[0] = SHA384_H0; 156 sctx->state[0] = SHA384_H0;
163 sctx->state[1] = SHA384_H1; 157 sctx->state[1] = SHA384_H1;
164 sctx->state[2] = SHA384_H2; 158 sctx->state[2] = SHA384_H2;
@@ -167,7 +161,7 @@ sha384_init(struct shash_desc *desc)
167 sctx->state[5] = SHA384_H5; 161 sctx->state[5] = SHA384_H5;
168 sctx->state[6] = SHA384_H6; 162 sctx->state[6] = SHA384_H6;
169 sctx->state[7] = SHA384_H7; 163 sctx->state[7] = SHA384_H7;
170 sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; 164 sctx->count[0] = sctx->count[1] = 0;
171 165
172 return 0; 166 return 0;
173} 167}
@@ -175,20 +169,16 @@ sha384_init(struct shash_desc *desc)
175static int 169static int
176sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len) 170sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len)
177{ 171{
178 struct sha512_ctx *sctx = shash_desc_ctx(desc); 172 struct sha512_state *sctx = shash_desc_ctx(desc);
179 173
180 unsigned int i, index, part_len; 174 unsigned int i, index, part_len;
181 175
182 /* Compute number of bytes mod 128 */ 176 /* Compute number of bytes mod 128 */
183 index = (unsigned int)((sctx->count[0] >> 3) & 0x7F); 177 index = sctx->count[0] & 0x7f;
184 178
185 /* Update number of bits */ 179 /* Update number of bytes */
186 if ((sctx->count[0] += (len << 3)) < (len << 3)) { 180 if (!(sctx->count[0] += len))
187 if ((sctx->count[1] += 1) < 1) 181 sctx->count[1]++;
188 if ((sctx->count[2] += 1) < 1)
189 sctx->count[3]++;
190 sctx->count[1] += (len >> 29);
191 }
192 182
193 part_len = 128 - index; 183 part_len = 128 - index;
194 184
@@ -214,21 +204,19 @@ sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len)
214static int 204static int
215sha512_final(struct shash_desc *desc, u8 *hash) 205sha512_final(struct shash_desc *desc, u8 *hash)
216{ 206{
217 struct sha512_ctx *sctx = shash_desc_ctx(desc); 207 struct sha512_state *sctx = shash_desc_ctx(desc);
218 static u8 padding[128] = { 0x80, }; 208 static u8 padding[128] = { 0x80, };
219 __be64 *dst = (__be64 *)hash; 209 __be64 *dst = (__be64 *)hash;
220 __be32 bits[4]; 210 __be64 bits[2];
221 unsigned int index, pad_len; 211 unsigned int index, pad_len;
222 int i; 212 int i;
223 213
224 /* Save number of bits */ 214 /* Save number of bits */
225 bits[3] = cpu_to_be32(sctx->count[0]); 215 bits[1] = cpu_to_be64(sctx->count[0] << 3);
226 bits[2] = cpu_to_be32(sctx->count[1]); 216 bits[0] = cpu_to_be64(sctx->count[1] << 3 | sctx->count[0] >> 61);
227 bits[1] = cpu_to_be32(sctx->count[2]);
228 bits[0] = cpu_to_be32(sctx->count[3]);
229 217
230 /* Pad out to 112 mod 128. */ 218 /* Pad out to 112 mod 128. */
231 index = (sctx->count[0] >> 3) & 0x7f; 219 index = sctx->count[0] & 0x7f;
232 pad_len = (index < 112) ? (112 - index) : ((128+112) - index); 220 pad_len = (index < 112) ? (112 - index) : ((128+112) - index);
233 sha512_update(desc, padding, pad_len); 221 sha512_update(desc, padding, pad_len);
234 222
@@ -240,7 +228,7 @@ sha512_final(struct shash_desc *desc, u8 *hash)
240 dst[i] = cpu_to_be64(sctx->state[i]); 228 dst[i] = cpu_to_be64(sctx->state[i]);
241 229
242 /* Zeroize sensitive information. */ 230 /* Zeroize sensitive information. */
243 memset(sctx, 0, sizeof(struct sha512_ctx)); 231 memset(sctx, 0, sizeof(struct sha512_state));
244 232
245 return 0; 233 return 0;
246} 234}
@@ -262,7 +250,7 @@ static struct shash_alg sha512 = {
262 .init = sha512_init, 250 .init = sha512_init,
263 .update = sha512_update, 251 .update = sha512_update,
264 .final = sha512_final, 252 .final = sha512_final,
265 .descsize = sizeof(struct sha512_ctx), 253 .descsize = sizeof(struct sha512_state),
266 .base = { 254 .base = {
267 .cra_name = "sha512", 255 .cra_name = "sha512",
268 .cra_flags = CRYPTO_ALG_TYPE_SHASH, 256 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
@@ -276,7 +264,7 @@ static struct shash_alg sha384 = {
276 .init = sha384_init, 264 .init = sha384_init,
277 .update = sha512_update, 265 .update = sha512_update,
278 .final = sha384_final, 266 .final = sha384_final,
279 .descsize = sizeof(struct sha512_ctx), 267 .descsize = sizeof(struct sha512_state),
280 .base = { 268 .base = {
281 .cra_name = "sha384", 269 .cra_name = "sha384",
282 .cra_flags = CRYPTO_ALG_TYPE_SHASH, 270 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
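[Note] sha512 keeps two counters but reinterprets them as a 128-bit byte count (count[0] low, count[1] high). The 128-bit bit length required by the padding is then recovered with a cross-word shift:

    /* bits = bytes * 8, spread across two 64-bit words:
     *   bits[1] = cpu_to_be64(count[0] << 3);                  low word
     *   bits[0] = cpu_to_be64(count[1] << 3 | count[0] >> 61); high word
     * The >> 61 (= 64 - 3) carries the three bits shifted out of the
     * low word into the high word, so no length bits are lost. */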
diff --git a/crypto/shash.c b/crypto/shash.c
index 2ccc8b0076ce..91f7b9d83881 100644
--- a/crypto/shash.c
+++ b/crypto/shash.c
@@ -22,6 +22,12 @@
22 22
23static const struct crypto_type crypto_shash_type; 23static const struct crypto_type crypto_shash_type;
24 24
25static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
26 unsigned int keylen)
27{
28 return -ENOSYS;
29}
30
25static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key, 31static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
26 unsigned int keylen) 32 unsigned int keylen)
27{ 33{
@@ -39,8 +45,7 @@ static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
39 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); 45 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
40 memcpy(alignbuffer, key, keylen); 46 memcpy(alignbuffer, key, keylen);
41 err = shash->setkey(tfm, alignbuffer, keylen); 47 err = shash->setkey(tfm, alignbuffer, keylen);
42 memset(alignbuffer, 0, keylen); 48 kzfree(buffer);
43 kfree(buffer);
44 return err; 49 return err;
45} 50}
46 51
@@ -50,9 +55,6 @@ int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
50 struct shash_alg *shash = crypto_shash_alg(tfm); 55 struct shash_alg *shash = crypto_shash_alg(tfm);
51 unsigned long alignmask = crypto_shash_alignmask(tfm); 56 unsigned long alignmask = crypto_shash_alignmask(tfm);
52 57
53 if (!shash->setkey)
54 return -ENOSYS;
55
56 if ((unsigned long)key & alignmask) 58 if ((unsigned long)key & alignmask)
57 return shash_setkey_unaligned(tfm, key, keylen); 59 return shash_setkey_unaligned(tfm, key, keylen);
58 60
@@ -74,15 +76,19 @@ static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
74 unsigned long alignmask = crypto_shash_alignmask(tfm); 76 unsigned long alignmask = crypto_shash_alignmask(tfm);
75 unsigned int unaligned_len = alignmask + 1 - 77 unsigned int unaligned_len = alignmask + 1 -
76 ((unsigned long)data & alignmask); 78 ((unsigned long)data & alignmask);
77 u8 buf[shash_align_buffer_size(unaligned_len, alignmask)] 79 u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
78 __attribute__ ((aligned)); 80 __attribute__ ((aligned));
81 u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
82 int err;
79 83
80 if (unaligned_len > len) 84 if (unaligned_len > len)
81 unaligned_len = len; 85 unaligned_len = len;
82 86
83 memcpy(buf, data, unaligned_len); 87 memcpy(buf, data, unaligned_len);
88 err = shash->update(desc, buf, unaligned_len);
89 memset(buf, 0, unaligned_len);
84 90
85 return shash->update(desc, buf, unaligned_len) ?: 91 return err ?:
86 shash->update(desc, data + unaligned_len, len - unaligned_len); 92 shash->update(desc, data + unaligned_len, len - unaligned_len);
87} 93}
88 94
@@ -106,12 +112,19 @@ static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
106 unsigned long alignmask = crypto_shash_alignmask(tfm); 112 unsigned long alignmask = crypto_shash_alignmask(tfm);
107 struct shash_alg *shash = crypto_shash_alg(tfm); 113 struct shash_alg *shash = crypto_shash_alg(tfm);
108 unsigned int ds = crypto_shash_digestsize(tfm); 114 unsigned int ds = crypto_shash_digestsize(tfm);
109 u8 buf[shash_align_buffer_size(ds, alignmask)] 115 u8 ubuf[shash_align_buffer_size(ds, alignmask)]
110 __attribute__ ((aligned)); 116 __attribute__ ((aligned));
117 u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
111 int err; 118 int err;
112 119
113 err = shash->final(desc, buf); 120 err = shash->final(desc, buf);
121 if (err)
122 goto out;
123
114 memcpy(out, buf, ds); 124 memcpy(out, buf, ds);
125
126out:
127 memset(buf, 0, ds);
115 return err; 128 return err;
116} 129}
117 130
@@ -142,8 +155,7 @@ int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
142 struct shash_alg *shash = crypto_shash_alg(tfm); 155 struct shash_alg *shash = crypto_shash_alg(tfm);
143 unsigned long alignmask = crypto_shash_alignmask(tfm); 156 unsigned long alignmask = crypto_shash_alignmask(tfm);
144 157
145 if (((unsigned long)data | (unsigned long)out) & alignmask || 158 if (((unsigned long)data | (unsigned long)out) & alignmask)
146 !shash->finup)
147 return shash_finup_unaligned(desc, data, len, out); 159 return shash_finup_unaligned(desc, data, len, out);
148 160
149 return shash->finup(desc, data, len, out); 161 return shash->finup(desc, data, len, out);
@@ -154,8 +166,7 @@ static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
154 unsigned int len, u8 *out) 166 unsigned int len, u8 *out)
155{ 167{
156 return crypto_shash_init(desc) ?: 168 return crypto_shash_init(desc) ?:
157 crypto_shash_update(desc, data, len) ?: 169 crypto_shash_finup(desc, data, len, out);
158 crypto_shash_final(desc, out);
159} 170}
160 171
161int crypto_shash_digest(struct shash_desc *desc, const u8 *data, 172int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
@@ -165,27 +176,24 @@ int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
165 struct shash_alg *shash = crypto_shash_alg(tfm); 176 struct shash_alg *shash = crypto_shash_alg(tfm);
166 unsigned long alignmask = crypto_shash_alignmask(tfm); 177 unsigned long alignmask = crypto_shash_alignmask(tfm);
167 178
168 if (((unsigned long)data | (unsigned long)out) & alignmask || 179 if (((unsigned long)data | (unsigned long)out) & alignmask)
169 !shash->digest)
170 return shash_digest_unaligned(desc, data, len, out); 180 return shash_digest_unaligned(desc, data, len, out);
171 181
172 return shash->digest(desc, data, len, out); 182 return shash->digest(desc, data, len, out);
173} 183}
174EXPORT_SYMBOL_GPL(crypto_shash_digest); 184EXPORT_SYMBOL_GPL(crypto_shash_digest);
175 185
176int crypto_shash_import(struct shash_desc *desc, const u8 *in) 186static int shash_default_export(struct shash_desc *desc, void *out)
177{ 187{
178 struct crypto_shash *tfm = desc->tfm; 188 memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
179 struct shash_alg *alg = crypto_shash_alg(tfm); 189 return 0;
180 190}
181 memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));
182
183 if (alg->reinit)
184 alg->reinit(desc);
185 191
192static int shash_default_import(struct shash_desc *desc, const void *in)
193{
194 memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
186 return 0; 195 return 0;
187} 196}
188EXPORT_SYMBOL_GPL(crypto_shash_import);
189 197
190static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key, 198static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
191 unsigned int keylen) 199 unsigned int keylen)
@@ -206,9 +214,8 @@ static int shash_async_init(struct ahash_request *req)
206 return crypto_shash_init(desc); 214 return crypto_shash_init(desc);
207} 215}
208 216
209static int shash_async_update(struct ahash_request *req) 217int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
210{ 218{
211 struct shash_desc *desc = ahash_request_ctx(req);
212 struct crypto_hash_walk walk; 219 struct crypto_hash_walk walk;
213 int nbytes; 220 int nbytes;
214 221
@@ -218,13 +225,51 @@ static int shash_async_update(struct ahash_request *req)
218 225
219 return nbytes; 226 return nbytes;
220} 227}
228EXPORT_SYMBOL_GPL(shash_ahash_update);
229
230static int shash_async_update(struct ahash_request *req)
231{
232 return shash_ahash_update(req, ahash_request_ctx(req));
233}
221 234
222static int shash_async_final(struct ahash_request *req) 235static int shash_async_final(struct ahash_request *req)
223{ 236{
224 return crypto_shash_final(ahash_request_ctx(req), req->result); 237 return crypto_shash_final(ahash_request_ctx(req), req->result);
225} 238}
226 239
227static int shash_async_digest(struct ahash_request *req) 240int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
241{
242 struct crypto_hash_walk walk;
243 int nbytes;
244
245 nbytes = crypto_hash_walk_first(req, &walk);
246 if (!nbytes)
247 return crypto_shash_final(desc, req->result);
248
249 do {
250 nbytes = crypto_hash_walk_last(&walk) ?
251 crypto_shash_finup(desc, walk.data, nbytes,
252 req->result) :
253 crypto_shash_update(desc, walk.data, nbytes);
254 nbytes = crypto_hash_walk_done(&walk, nbytes);
255 } while (nbytes > 0);
256
257 return nbytes;
258}
259EXPORT_SYMBOL_GPL(shash_ahash_finup);
260
261static int shash_async_finup(struct ahash_request *req)
262{
263 struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
264 struct shash_desc *desc = ahash_request_ctx(req);
265
266 desc->tfm = *ctx;
267 desc->flags = req->base.flags;
268
269 return shash_ahash_finup(req, desc);
270}
271
272int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
228{ 273{
229 struct scatterlist *sg = req->src; 274 struct scatterlist *sg = req->src;
230 unsigned int offset = sg->offset; 275 unsigned int offset = sg->offset;
@@ -232,34 +277,40 @@ static int shash_async_digest(struct ahash_request *req)
232 int err; 277 int err;
233 278
234 if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) { 279 if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
235 struct crypto_shash **ctx =
236 crypto_ahash_ctx(crypto_ahash_reqtfm(req));
237 struct shash_desc *desc = ahash_request_ctx(req);
238 void *data; 280 void *data;
239 281
240 desc->tfm = *ctx;
241 desc->flags = req->base.flags;
242
243 data = crypto_kmap(sg_page(sg), 0); 282 data = crypto_kmap(sg_page(sg), 0);
244 err = crypto_shash_digest(desc, data + offset, nbytes, 283 err = crypto_shash_digest(desc, data + offset, nbytes,
245 req->result); 284 req->result);
246 crypto_kunmap(data, 0); 285 crypto_kunmap(data, 0);
247 crypto_yield(desc->flags); 286 crypto_yield(desc->flags);
248 goto out; 287 } else
249 } 288 err = crypto_shash_init(desc) ?:
289 shash_ahash_finup(req, desc);
250 290
251 err = shash_async_init(req); 291 return err;
252 if (err) 292}
253 goto out; 293EXPORT_SYMBOL_GPL(shash_ahash_digest);
254 294
255 err = shash_async_update(req); 295static int shash_async_digest(struct ahash_request *req)
256 if (err) 296{
257 goto out; 297 struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
298 struct shash_desc *desc = ahash_request_ctx(req);
258 299
259 err = shash_async_final(req); 300 desc->tfm = *ctx;
301 desc->flags = req->base.flags;
260 302
261out: 303 return shash_ahash_digest(req, desc);
262 return err; 304}
305
306static int shash_async_export(struct ahash_request *req, void *out)
307{
308 return crypto_shash_export(ahash_request_ctx(req), out);
309}
310
311static int shash_async_import(struct ahash_request *req, const void *in)
312{
313 return crypto_shash_import(ahash_request_ctx(req), in);
263} 314}
264 315
265static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm) 316static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
@@ -269,11 +320,11 @@ static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
269 crypto_free_shash(*ctx); 320 crypto_free_shash(*ctx);
270} 321}
271 322
272static int crypto_init_shash_ops_async(struct crypto_tfm *tfm) 323int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
273{ 324{
274 struct crypto_alg *calg = tfm->__crt_alg; 325 struct crypto_alg *calg = tfm->__crt_alg;
275 struct shash_alg *alg = __crypto_shash_alg(calg); 326 struct shash_alg *alg = __crypto_shash_alg(calg);
276 struct ahash_tfm *crt = &tfm->crt_ahash; 327 struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
277 struct crypto_shash **ctx = crypto_tfm_ctx(tfm); 328 struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
278 struct crypto_shash *shash; 329 struct crypto_shash *shash;
279 330
@@ -291,11 +342,17 @@ static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
291 342
292 crt->init = shash_async_init; 343 crt->init = shash_async_init;
293 crt->update = shash_async_update; 344 crt->update = shash_async_update;
294 crt->final = shash_async_final; 345 crt->final = shash_async_final;
346 crt->finup = shash_async_finup;
295 crt->digest = shash_async_digest; 347 crt->digest = shash_async_digest;
296 crt->setkey = shash_async_setkey;
297 348
298 crt->digestsize = alg->digestsize; 349 if (alg->setkey)
350 crt->setkey = shash_async_setkey;
351 if (alg->export)
352 crt->export = shash_async_export;
353 if (alg->import)
354 crt->import = shash_async_import;
355
299 crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash); 356 crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
300 357
301 return 0; 358 return 0;
@@ -304,14 +361,16 @@ static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
304static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key, 361static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
305 unsigned int keylen) 362 unsigned int keylen)
306{ 363{
307 struct shash_desc *desc = crypto_hash_ctx(tfm); 364 struct shash_desc **descp = crypto_hash_ctx(tfm);
365 struct shash_desc *desc = *descp;
308 366
309 return crypto_shash_setkey(desc->tfm, key, keylen); 367 return crypto_shash_setkey(desc->tfm, key, keylen);
310} 368}
311 369
312static int shash_compat_init(struct hash_desc *hdesc) 370static int shash_compat_init(struct hash_desc *hdesc)
313{ 371{
314 struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm); 372 struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
373 struct shash_desc *desc = *descp;
315 374
316 desc->flags = hdesc->flags; 375 desc->flags = hdesc->flags;
317 376
@@ -321,7 +380,8 @@ static int shash_compat_init(struct hash_desc *hdesc)
321static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg, 380static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
322 unsigned int len) 381 unsigned int len)
323{ 382{
324 struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm); 383 struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
384 struct shash_desc *desc = *descp;
325 struct crypto_hash_walk walk; 385 struct crypto_hash_walk walk;
326 int nbytes; 386 int nbytes;
327 387
@@ -334,7 +394,9 @@ static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
334 394
335static int shash_compat_final(struct hash_desc *hdesc, u8 *out) 395static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
336{ 396{
337 return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out); 397 struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
398
399 return crypto_shash_final(*descp, out);
338} 400}
339 401
340static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg, 402static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
@@ -344,7 +406,8 @@ static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
344 int err; 406 int err;
345 407
346 if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) { 408 if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
347 struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm); 409 struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
410 struct shash_desc *desc = *descp;
348 void *data; 411 void *data;
349 412
350 desc->flags = hdesc->flags; 413 desc->flags = hdesc->flags;
@@ -372,9 +435,11 @@ out:
372 435
373static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm) 436static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
374{ 437{
375 struct shash_desc *desc= crypto_tfm_ctx(tfm); 438 struct shash_desc **descp = crypto_tfm_ctx(tfm);
439 struct shash_desc *desc = *descp;
376 440
377 crypto_free_shash(desc->tfm); 441 crypto_free_shash(desc->tfm);
442 kzfree(desc);
378} 443}
379 444
380static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm) 445static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
@@ -382,8 +447,9 @@ static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
382 struct hash_tfm *crt = &tfm->crt_hash; 447 struct hash_tfm *crt = &tfm->crt_hash;
383 struct crypto_alg *calg = tfm->__crt_alg; 448 struct crypto_alg *calg = tfm->__crt_alg;
384 struct shash_alg *alg = __crypto_shash_alg(calg); 449 struct shash_alg *alg = __crypto_shash_alg(calg);
385 struct shash_desc *desc = crypto_tfm_ctx(tfm); 450 struct shash_desc **descp = crypto_tfm_ctx(tfm);
386 struct crypto_shash *shash; 451 struct crypto_shash *shash;
452 struct shash_desc *desc;
387 453
388 if (!crypto_mod_get(calg)) 454 if (!crypto_mod_get(calg))
389 return -EAGAIN; 455 return -EAGAIN;
@@ -394,6 +460,14 @@ static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
394 return PTR_ERR(shash); 460 return PTR_ERR(shash);
395 } 461 }
396 462
463 desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
464 GFP_KERNEL);
465 if (!desc) {
466 crypto_free_shash(shash);
467 return -ENOMEM;
468 }
469
470 *descp = desc;
397 desc->tfm = shash; 471 desc->tfm = shash;
398 tfm->exit = crypto_exit_shash_ops_compat; 472 tfm->exit = crypto_exit_shash_ops_compat;
399 473
@@ -413,8 +487,6 @@ static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
413 switch (mask & CRYPTO_ALG_TYPE_MASK) { 487 switch (mask & CRYPTO_ALG_TYPE_MASK) {
414 case CRYPTO_ALG_TYPE_HASH_MASK: 488 case CRYPTO_ALG_TYPE_HASH_MASK:
415 return crypto_init_shash_ops_compat(tfm); 489 return crypto_init_shash_ops_compat(tfm);
416 case CRYPTO_ALG_TYPE_AHASH_MASK:
417 return crypto_init_shash_ops_async(tfm);
418 } 490 }
419 491
420 return -EINVAL; 492 return -EINVAL;
@@ -423,26 +495,23 @@ static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
423static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type, 495static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
424 u32 mask) 496 u32 mask)
425{ 497{
426 struct shash_alg *salg = __crypto_shash_alg(alg);
427
428 switch (mask & CRYPTO_ALG_TYPE_MASK) { 498 switch (mask & CRYPTO_ALG_TYPE_MASK) {
429 case CRYPTO_ALG_TYPE_HASH_MASK: 499 case CRYPTO_ALG_TYPE_HASH_MASK:
430 return sizeof(struct shash_desc) + salg->descsize; 500 return sizeof(struct shash_desc *);
431 case CRYPTO_ALG_TYPE_AHASH_MASK:
432 return sizeof(struct crypto_shash *);
433 } 501 }
434 502
435 return 0; 503 return 0;
436} 504}
437 505
438static int crypto_shash_init_tfm(struct crypto_tfm *tfm, 506static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
439 const struct crypto_type *frontend)
440{ 507{
508 struct crypto_shash *hash = __crypto_shash_cast(tfm);
509
510 hash->descsize = crypto_shash_alg(hash)->descsize;
441 return 0; 511 return 0;
442} 512}
443 513
444static unsigned int crypto_shash_extsize(struct crypto_alg *alg, 514static unsigned int crypto_shash_extsize(struct crypto_alg *alg)
445 const struct crypto_type *frontend)
446{ 515{
447 return alg->cra_ctxsize; 516 return alg->cra_ctxsize;
448} 517}
@@ -456,7 +525,6 @@ static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
456 seq_printf(m, "type : shash\n"); 525 seq_printf(m, "type : shash\n");
457 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); 526 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
458 seq_printf(m, "digestsize : %u\n", salg->digestsize); 527 seq_printf(m, "digestsize : %u\n", salg->digestsize);
459 seq_printf(m, "descsize : %u\n", salg->descsize);
460} 528}
461 529
462static const struct crypto_type crypto_shash_type = { 530static const struct crypto_type crypto_shash_type = {
@@ -480,18 +548,43 @@ struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
480} 548}
481EXPORT_SYMBOL_GPL(crypto_alloc_shash); 549EXPORT_SYMBOL_GPL(crypto_alloc_shash);
482 550
483int crypto_register_shash(struct shash_alg *alg) 551static int shash_prepare_alg(struct shash_alg *alg)
484{ 552{
485 struct crypto_alg *base = &alg->base; 553 struct crypto_alg *base = &alg->base;
486 554
487 if (alg->digestsize > PAGE_SIZE / 8 || 555 if (alg->digestsize > PAGE_SIZE / 8 ||
488 alg->descsize > PAGE_SIZE / 8) 556 alg->descsize > PAGE_SIZE / 8 ||
557 alg->statesize > PAGE_SIZE / 8)
489 return -EINVAL; 558 return -EINVAL;
490 559
491 base->cra_type = &crypto_shash_type; 560 base->cra_type = &crypto_shash_type;
492 base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; 561 base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
493 base->cra_flags |= CRYPTO_ALG_TYPE_SHASH; 562 base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
494 563
564 if (!alg->finup)
565 alg->finup = shash_finup_unaligned;
566 if (!alg->digest)
567 alg->digest = shash_digest_unaligned;
568 if (!alg->export) {
569 alg->export = shash_default_export;
570 alg->import = shash_default_import;
571 alg->statesize = alg->descsize;
572 }
573 if (!alg->setkey)
574 alg->setkey = shash_no_setkey;
575
576 return 0;
577}
578
579int crypto_register_shash(struct shash_alg *alg)
580{
581 struct crypto_alg *base = &alg->base;
582 int err;
583
584 err = shash_prepare_alg(alg);
585 if (err)
586 return err;
587
495 return crypto_register_alg(base); 588 return crypto_register_alg(base);
496} 589}
497EXPORT_SYMBOL_GPL(crypto_register_shash); 590EXPORT_SYMBOL_GPL(crypto_register_shash);
@@ -502,5 +595,44 @@ int crypto_unregister_shash(struct shash_alg *alg)
502} 595}
503EXPORT_SYMBOL_GPL(crypto_unregister_shash); 596EXPORT_SYMBOL_GPL(crypto_unregister_shash);
504 597
598int shash_register_instance(struct crypto_template *tmpl,
599 struct shash_instance *inst)
600{
601 int err;
602
603 err = shash_prepare_alg(&inst->alg);
604 if (err)
605 return err;
606
607 return crypto_register_instance(tmpl, shash_crypto_instance(inst));
608}
609EXPORT_SYMBOL_GPL(shash_register_instance);
610
611void shash_free_instance(struct crypto_instance *inst)
612{
613 crypto_drop_spawn(crypto_instance_ctx(inst));
614 kfree(shash_instance(inst));
615}
616EXPORT_SYMBOL_GPL(shash_free_instance);
617
618int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
619 struct shash_alg *alg,
620 struct crypto_instance *inst)
621{
622 return crypto_init_spawn2(&spawn->base, &alg->base, inst,
623 &crypto_shash_type);
624}
625EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
626
627struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
628{
629 struct crypto_alg *alg;
630
631 alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
632 return IS_ERR(alg) ? ERR_CAST(alg) :
633 container_of(alg, struct shash_alg, base);
634}
635EXPORT_SYMBOL_GPL(shash_attr_alg);
636
505MODULE_LICENSE("GPL"); 637MODULE_LICENSE("GPL");
506MODULE_DESCRIPTION("Synchronous cryptographic hash type"); 638MODULE_DESCRIPTION("Synchronous cryptographic hash type");
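A small stand-alone illustration of the compat-layer change above: the tfm
context now holds only a struct shash_desc pointer, and the descriptor
itself, whose size depends on the backing shash algorithm, is allocated
separately at init time and wiped at exit. This is a sketch of that
pairing, not kernel code; the my_compat_init/my_compat_exit names are
hypothetical.

	static int my_compat_init(struct crypto_tfm *tfm, struct crypto_shash *shash)
	{
		struct shash_desc **descp = crypto_tfm_ctx(tfm);
		struct shash_desc *desc;

		/* descsize is per-algorithm, so the descriptor can no
		 * longer live in the fixed-size tfm context */
		desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
			       GFP_KERNEL);
		if (!desc)
			return -ENOMEM;

		desc->tfm = shash;
		*descp = desc;
		return 0;
	}

	static void my_compat_exit(struct crypto_tfm *tfm)
	{
		struct shash_desc **descp = crypto_tfm_ctx(tfm);

		crypto_free_shash((*descp)->tfm);
		kzfree(*descp);		/* the descriptor holds hash state */
	}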
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index d59ba5079d14..aa3f84ccc786 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -45,6 +45,9 @@
45 */ 45 */
46static unsigned int sec; 46static unsigned int sec;
47 47
48static char *alg = NULL;
49static u32 type;
50static u32 mask;
48static int mode; 51static int mode;
49static char *tvmem[TVMEMSIZE]; 52static char *tvmem[TVMEMSIZE];
50 53
@@ -716,6 +719,10 @@ static int do_test(int m)
716 ret += tcrypt_test("hmac(rmd160)"); 719 ret += tcrypt_test("hmac(rmd160)");
717 break; 720 break;
718 721
722 case 109:
723 ret += tcrypt_test("vmac(aes)");
724 break;
725
719 case 150: 726 case 150:
720 ret += tcrypt_test("ansi_cprng"); 727 ret += tcrypt_test("ansi_cprng");
721 break; 728 break;
@@ -885,6 +892,12 @@ static int do_test(int m)
885 return ret; 892 return ret;
886} 893}
887 894
895static int do_alg_test(const char *alg, u32 type, u32 mask)
896{
897 return crypto_has_alg(alg, type, mask ?: CRYPTO_ALG_TYPE_MASK) ?
898 0 : -ENOENT;
899}
900
888static int __init tcrypt_mod_init(void) 901static int __init tcrypt_mod_init(void)
889{ 902{
890 int err = -ENOMEM; 903 int err = -ENOMEM;
@@ -896,7 +909,11 @@ static int __init tcrypt_mod_init(void)
896 goto err_free_tv; 909 goto err_free_tv;
897 } 910 }
898 911
899 err = do_test(mode); 912 if (alg)
913 err = do_alg_test(alg, type, mask);
914 else
915 err = do_test(mode);
916
900 if (err) { 917 if (err) {
901 printk(KERN_ERR "tcrypt: one or more tests failed!\n"); 918 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
902 goto err_free_tv; 919 goto err_free_tv;
@@ -928,6 +945,9 @@ static void __exit tcrypt_mod_fini(void) { }
928module_init(tcrypt_mod_init); 945module_init(tcrypt_mod_init);
929module_exit(tcrypt_mod_fini); 946module_exit(tcrypt_mod_fini);
930 947
948module_param(alg, charp, 0);
949module_param(type, uint, 0);
950module_param(mask, uint, 0);
931module_param(mode, int, 0); 951module_param(mode, int, 0);
932module_param(sec, uint, 0); 952module_param(sec, uint, 0);
933MODULE_PARM_DESC(sec, "Length in seconds of speed tests " 953MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
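With the three new module parameters, tcrypt can exercise a single
algorithm by name instead of a numeric mode. Assuming the module is built,
a line such as

	modprobe tcrypt alg="vmac(aes)" type=0 mask=0

is illustrative: crypto_has_alg() instantiates the named algorithm, which
in turn triggers its testmgr self-test; per do_alg_test() above, a zero
mask falls back to CRYPTO_ALG_TYPE_MASK.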
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
index e9e9d84293b9..6d5b746637be 100644
--- a/crypto/testmgr.c
+++ b/crypto/testmgr.c
@@ -190,10 +190,6 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
190 190
191 hash_buff = xbuf[0]; 191 hash_buff = xbuf[0];
192 192
193 ret = -EINVAL;
194 if (WARN_ON(template[i].psize > PAGE_SIZE))
195 goto out;
196
197 memcpy(hash_buff, template[i].plaintext, template[i].psize); 193 memcpy(hash_buff, template[i].plaintext, template[i].psize);
198 sg_init_one(&sg[0], hash_buff, template[i].psize); 194 sg_init_one(&sg[0], hash_buff, template[i].psize);
199 195
@@ -2252,6 +2248,15 @@ static const struct alg_test_desc alg_test_descs[] = {
2252 } 2248 }
2253 } 2249 }
2254 }, { 2250 }, {
2251 .alg = "vmac(aes)",
2252 .test = alg_test_hash,
2253 .suite = {
2254 .hash = {
2255 .vecs = aes_vmac128_tv_template,
2256 .count = VMAC_AES_TEST_VECTORS
2257 }
2258 }
2259 }, {
2255 .alg = "wp256", 2260 .alg = "wp256",
2256 .test = alg_test_hash, 2261 .test = alg_test_hash,
2257 .suite = { 2262 .suite = {
@@ -2348,6 +2353,7 @@ static int alg_find_test(const char *alg)
2348int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 2353int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
2349{ 2354{
2350 int i; 2355 int i;
2356 int j;
2351 int rc; 2357 int rc;
2352 2358
2353 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) { 2359 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
@@ -2369,14 +2375,22 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
2369 } 2375 }
2370 2376
2371 i = alg_find_test(alg); 2377 i = alg_find_test(alg);
2372 if (i < 0) 2378 j = alg_find_test(driver);
2379 if (i < 0 && j < 0)
2373 goto notest; 2380 goto notest;
2374 2381
2375 if (fips_enabled && !alg_test_descs[i].fips_allowed) 2382 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
2383 (j >= 0 && !alg_test_descs[j].fips_allowed)))
2376 goto non_fips_alg; 2384 goto non_fips_alg;
2377 2385
2378 rc = alg_test_descs[i].test(alg_test_descs + i, driver, 2386 rc = 0;
2379 type, mask); 2387 if (i >= 0)
2388 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
2389 type, mask);
2390 if (j >= 0)
2391 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
2392 type, mask);
2393
2380test_done: 2394test_done:
2381 if (fips_enabled && rc) 2395 if (fips_enabled && rc)
2382 panic("%s: %s alg self test failed in fips mode!\n", driver, alg); 2396 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
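The dual lookup above makes alg_test() consult the test table under both
the generic algorithm name and the driver name, OR-ing the results, so a
request is only skipped when neither name has an entry; in FIPS mode, every
matching entry must be marked fips_allowed or the algorithm is rejected.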
diff --git a/crypto/testmgr.h b/crypto/testmgr.h
index 69316228fc19..9963b18983ab 100644
--- a/crypto/testmgr.h
+++ b/crypto/testmgr.h
@@ -1654,6 +1654,22 @@ static struct hash_testvec aes_xcbc128_tv_template[] = {
1654 } 1654 }
1655}; 1655};
1656 1656
1657#define VMAC_AES_TEST_VECTORS 1
1658static char vmac_string[128] = {'\x01', '\x01', '\x01', '\x01',
1659 '\x02', '\x03', '\x02', '\x02',
1660 '\x02', '\x04', '\x01', '\x07',
1661 '\x04', '\x01', '\x04', '\x03',};
1662static struct hash_testvec aes_vmac128_tv_template[] = {
1663 {
1664 .key = "\x00\x01\x02\x03\x04\x05\x06\x07"
1665 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
1666 .plaintext = vmac_string,
1667 .digest = "\xcb\xd7\x8a\xfd\xb7\x33\x79\xe7",
1668 .psize = 128,
1669 .ksize = 16,
1670 },
1671};
1672
1657/* 1673/*
1658 * SHA384 HMAC test vectors from RFC4231 1674 * SHA384 HMAC test vectors from RFC4231
1659 */ 1675 */
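Note on the vmac(aes) vector above: VMAC is nonce-based, so the 64-bit
digest is only reproducible together with the fixed all-zero nonce that
vmac_final() (in the new vmac.c below) feeds to the underlying VHASH and
AES computation.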
diff --git a/crypto/vmac.c b/crypto/vmac.c
new file mode 100644
index 000000000000..0a9468e575de
--- /dev/null
+++ b/crypto/vmac.c
@@ -0,0 +1,678 @@
1/*
2 * Modified to interface to the Linux kernel
3 * Copyright (c) 2009, Intel Corporation.
4 *
5 * This program is free software; you can redistribute it and/or modify it
6 * under the terms and conditions of the GNU General Public License,
7 * version 2, as published by the Free Software Foundation.
8 *
9 * This program is distributed in the hope it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
12 * more details.
13 *
14 * You should have received a copy of the GNU General Public License along with
15 * this program; if not, write to the Free Software Foundation, Inc., 59 Temple
16 * Place - Suite 330, Boston, MA 02111-1307 USA.
17 */
18
19/* --------------------------------------------------------------------------
20 * VMAC and VHASH Implementation by Ted Krovetz (tdk@acm.org) and Wei Dai.
21 * This implementation is hereby placed in the public domain.
22 * The authors offer no warranty. Use at your own risk.
23 * Please send bug reports to the authors.
24 * Last modified: 17 APR 08, 1700 PDT
25 * ----------------------------------------------------------------------- */
26
27#include <linux/init.h>
28#include <linux/types.h>
29#include <linux/crypto.h>
30#include <linux/scatterlist.h>
31#include <asm/byteorder.h>
32#include <crypto/scatterwalk.h>
33#include <crypto/vmac.h>
34#include <crypto/internal/hash.h>
35
36/*
37 * Constants and masks
38 */
39#define UINT64_C(x) x##ULL
40const u64 p64 = UINT64_C(0xfffffffffffffeff); /* 2^64 - 257 prime */
41const u64 m62 = UINT64_C(0x3fffffffffffffff); /* 62-bit mask */
42const u64 m63 = UINT64_C(0x7fffffffffffffff); /* 63-bit mask */
43const u64 m64 = UINT64_C(0xffffffffffffffff); /* 64-bit mask */
44const u64 mpoly = UINT64_C(0x1fffffff1fffffff); /* Poly key mask */
45
46#ifdef __LITTLE_ENDIAN
47#define INDEX_HIGH 1
48#define INDEX_LOW 0
49#else
50#define INDEX_HIGH 0
51#define INDEX_LOW 1
52#endif
53
54/*
55 * The following routines are used in this implementation. They are
56 * written via macros to simulate zero-overhead call-by-reference.
57 *
58 * MUL64: 64x64->128-bit multiplication
59 * PMUL64: assumes top bits cleared on inputs
60 * ADD128: 128x128->128-bit addition
61 */
62
63#define ADD128(rh, rl, ih, il) \
64 do { \
65 u64 _il = (il); \
66 (rl) += (_il); \
67 if ((rl) < (_il)) \
68 (rh)++; \
69 (rh) += (ih); \
70 } while (0)
71
72#define MUL32(i1, i2) ((u64)(u32)(i1)*(u32)(i2))
73
74#define PMUL64(rh, rl, i1, i2) /* Assumes m doesn't overflow */ \
75 do { \
76 u64 _i1 = (i1), _i2 = (i2); \
77 u64 m = MUL32(_i1, _i2>>32) + MUL32(_i1>>32, _i2); \
78 rh = MUL32(_i1>>32, _i2>>32); \
79 rl = MUL32(_i1, _i2); \
80 ADD128(rh, rl, (m >> 32), (m << 32)); \
81 } while (0)
82
83#define MUL64(rh, rl, i1, i2) \
84 do { \
85 u64 _i1 = (i1), _i2 = (i2); \
86 u64 m1 = MUL32(_i1, _i2>>32); \
87 u64 m2 = MUL32(_i1>>32, _i2); \
88 rh = MUL32(_i1>>32, _i2>>32); \
89 rl = MUL32(_i1, _i2); \
90 ADD128(rh, rl, (m1 >> 32), (m1 << 32)); \
91 ADD128(rh, rl, (m2 >> 32), (m2 << 32)); \
92 } while (0)
93
94/*
95 * For highest performance the L1 NH and L2 polynomial hashes should be
96 * carefully implemented to take advantage of one's target architecture.
97 * Here these two hash functions are defined multiple times: once for
98 * 64-bit architectures, once for 32-bit SSE2 architectures, and once
99 * for the remaining (32-bit) architectures.
100 * For each, nh_16 *must* be defined (works on multiples of 16 bytes).
101 * Optionally, nh_vmac_nhbytes can be defined (for multiples of
102 * VMAC_NHBYTES), and nh_16_2 and nh_vmac_nhbytes_2 (versions that do two
103 * NH computations at once).
104 */
105
106#ifdef CONFIG_64BIT
107
108#define nh_16(mp, kp, nw, rh, rl) \
109 do { \
110 int i; u64 th, tl; \
111 rh = rl = 0; \
112 for (i = 0; i < nw; i += 2) { \
113 MUL64(th, tl, le64_to_cpup((mp)+i)+(kp)[i], \
114 le64_to_cpup((mp)+i+1)+(kp)[i+1]); \
115 ADD128(rh, rl, th, tl); \
116 } \
117 } while (0)
118
119#define nh_16_2(mp, kp, nw, rh, rl, rh1, rl1) \
120 do { \
121 int i; u64 th, tl; \
122 rh1 = rl1 = rh = rl = 0; \
123 for (i = 0; i < nw; i += 2) { \
124 MUL64(th, tl, le64_to_cpup((mp)+i)+(kp)[i], \
125 le64_to_cpup((mp)+i+1)+(kp)[i+1]); \
126 ADD128(rh, rl, th, tl); \
127 MUL64(th, tl, le64_to_cpup((mp)+i)+(kp)[i+2], \
128 le64_to_cpup((mp)+i+1)+(kp)[i+3]); \
129 ADD128(rh1, rl1, th, tl); \
130 } \
131 } while (0)
132
133#if (VMAC_NHBYTES >= 64) /* These versions do 64-bytes of message at a time */
134#define nh_vmac_nhbytes(mp, kp, nw, rh, rl) \
135 do { \
136 int i; u64 th, tl; \
137 rh = rl = 0; \
138 for (i = 0; i < nw; i += 8) { \
139 MUL64(th, tl, le64_to_cpup((mp)+i)+(kp)[i], \
140 le64_to_cpup((mp)+i+1)+(kp)[i+1]); \
141 ADD128(rh, rl, th, tl); \
142 MUL64(th, tl, le64_to_cpup((mp)+i+2)+(kp)[i+2], \
143 le64_to_cpup((mp)+i+3)+(kp)[i+3]); \
144 ADD128(rh, rl, th, tl); \
145 MUL64(th, tl, le64_to_cpup((mp)+i+4)+(kp)[i+4], \
146 le64_to_cpup((mp)+i+5)+(kp)[i+5]); \
147 ADD128(rh, rl, th, tl); \
148 MUL64(th, tl, le64_to_cpup((mp)+i+6)+(kp)[i+6], \
149 le64_to_cpup((mp)+i+7)+(kp)[i+7]); \
150 ADD128(rh, rl, th, tl); \
151 } \
152 } while (0)
153
154#define nh_vmac_nhbytes_2(mp, kp, nw, rh, rl, rh1, rl1) \
155 do { \
156 int i; u64 th, tl; \
157 rh1 = rl1 = rh = rl = 0; \
158 for (i = 0; i < nw; i += 8) { \
159 MUL64(th, tl, le64_to_cpup((mp)+i)+(kp)[i], \
160 le64_to_cpup((mp)+i+1)+(kp)[i+1]); \
161 ADD128(rh, rl, th, tl); \
162 MUL64(th, tl, le64_to_cpup((mp)+i)+(kp)[i+2], \
163 le64_to_cpup((mp)+i+1)+(kp)[i+3]); \
164 ADD128(rh1, rl1, th, tl); \
165 MUL64(th, tl, le64_to_cpup((mp)+i+2)+(kp)[i+2], \
166 le64_to_cpup((mp)+i+3)+(kp)[i+3]); \
167 ADD128(rh, rl, th, tl); \
168 MUL64(th, tl, le64_to_cpup((mp)+i+2)+(kp)[i+4], \
169 le64_to_cpup((mp)+i+3)+(kp)[i+5]); \
170 ADD128(rh1, rl1, th, tl); \
171 MUL64(th, tl, le64_to_cpup((mp)+i+4)+(kp)[i+4], \
172 le64_to_cpup((mp)+i+5)+(kp)[i+5]); \
173 ADD128(rh, rl, th, tl); \
174 MUL64(th, tl, le64_to_cpup((mp)+i+4)+(kp)[i+6], \
175 le64_to_cpup((mp)+i+5)+(kp)[i+7]); \
176 ADD128(rh1, rl1, th, tl); \
177 MUL64(th, tl, le64_to_cpup((mp)+i+6)+(kp)[i+6], \
178 le64_to_cpup((mp)+i+7)+(kp)[i+7]); \
179 ADD128(rh, rl, th, tl); \
180 MUL64(th, tl, le64_to_cpup((mp)+i+6)+(kp)[i+8], \
181 le64_to_cpup((mp)+i+7)+(kp)[i+9]); \
182 ADD128(rh1, rl1, th, tl); \
183 } \
184 } while (0)
185#endif
186
187#define poly_step(ah, al, kh, kl, mh, ml) \
188 do { \
189 u64 t1h, t1l, t2h, t2l, t3h, t3l, z = 0; \
190 /* compute ab*cd, put bd into result registers */ \
191 PMUL64(t3h, t3l, al, kh); \
192 PMUL64(t2h, t2l, ah, kl); \
193 PMUL64(t1h, t1l, ah, 2*kh); \
194 PMUL64(ah, al, al, kl); \
195 /* add 2 * ac to result */ \
196 ADD128(ah, al, t1h, t1l); \
197 /* add together ad + bc */ \
198 ADD128(t2h, t2l, t3h, t3l); \
199 /* now (ah,al), (t2l,2*t2h) need summing */ \
200 /* first add the high registers, carrying into t2h */ \
201 ADD128(t2h, ah, z, t2l); \
202 /* double t2h and add top bit of ah */ \
203 t2h = 2 * t2h + (ah >> 63); \
204 ah &= m63; \
205 /* now add the low registers */ \
206 ADD128(ah, al, mh, ml); \
207 ADD128(ah, al, z, t2h); \
208 } while (0)
209
210#else /* ! CONFIG_64BIT */
211
212#ifndef nh_16
213#define nh_16(mp, kp, nw, rh, rl) \
214 do { \
215 u64 t1, t2, m1, m2, t; \
216 int i; \
217 rh = rl = t = 0; \
218 for (i = 0; i < nw; i += 2) { \
219 t1 = le64_to_cpup(mp+i) + kp[i]; \
220 t2 = le64_to_cpup(mp+i+1) + kp[i+1]; \
221 m2 = MUL32(t1 >> 32, t2); \
222 m1 = MUL32(t1, t2 >> 32); \
223 ADD128(rh, rl, MUL32(t1 >> 32, t2 >> 32), \
224 MUL32(t1, t2)); \
225 rh += (u64)(u32)(m1 >> 32) \
226 + (u32)(m2 >> 32); \
227 t += (u64)(u32)m1 + (u32)m2; \
228 } \
229 ADD128(rh, rl, (t >> 32), (t << 32)); \
230 } while (0)
231#endif
232
233static void poly_step_func(u64 *ahi, u64 *alo,
234 const u64 *kh, const u64 *kl,
235 const u64 *mh, const u64 *ml)
236{
237#define a0 (*(((u32 *)alo)+INDEX_LOW))
238#define a1 (*(((u32 *)alo)+INDEX_HIGH))
239#define a2 (*(((u32 *)ahi)+INDEX_LOW))
240#define a3 (*(((u32 *)ahi)+INDEX_HIGH))
241#define k0 (*(((u32 *)kl)+INDEX_LOW))
242#define k1 (*(((u32 *)kl)+INDEX_HIGH))
243#define k2 (*(((u32 *)kh)+INDEX_LOW))
244#define k3 (*(((u32 *)kh)+INDEX_HIGH))
245
246 u64 p, q, t;
247 u32 t2;
248
249 p = MUL32(a3, k3);
250 p += p;
251 p += *(u64 *)mh;
252 p += MUL32(a0, k2);
253 p += MUL32(a1, k1);
254 p += MUL32(a2, k0);
255 t = (u32)(p);
256 p >>= 32;
257 p += MUL32(a0, k3);
258 p += MUL32(a1, k2);
259 p += MUL32(a2, k1);
260 p += MUL32(a3, k0);
261 t |= ((u64)((u32)p & 0x7fffffff)) << 32;
262 p >>= 31;
263 p += (u64)(((u32 *)ml)[INDEX_LOW]);
264 p += MUL32(a0, k0);
265 q = MUL32(a1, k3);
266 q += MUL32(a2, k2);
267 q += MUL32(a3, k1);
268 q += q;
269 p += q;
270 t2 = (u32)(p);
271 p >>= 32;
272 p += (u64)(((u32 *)ml)[INDEX_HIGH]);
273 p += MUL32(a0, k1);
274 p += MUL32(a1, k0);
275 q = MUL32(a2, k3);
276 q += MUL32(a3, k2);
277 q += q;
278 p += q;
279 *(u64 *)(alo) = (p << 32) | t2;
280 p >>= 32;
281 *(u64 *)(ahi) = p + t;
282
283#undef a0
284#undef a1
285#undef a2
286#undef a3
287#undef k0
288#undef k1
289#undef k2
290#undef k3
291}
292
293#define poly_step(ah, al, kh, kl, mh, ml) \
294 poly_step_func(&(ah), &(al), &(kh), &(kl), &(mh), &(ml))
295
296#endif /* end of specialized NH and poly definitions */
297
298/* At least nh_16 is defined. Define the others as needed here. */
299#ifndef nh_16_2
300#define nh_16_2(mp, kp, nw, rh, rl, rh2, rl2) \
301 do { \
302 nh_16(mp, kp, nw, rh, rl); \
303 nh_16(mp, ((kp)+2), nw, rh2, rl2); \
304 } while (0)
305#endif
306#ifndef nh_vmac_nhbytes
307#define nh_vmac_nhbytes(mp, kp, nw, rh, rl) \
308 nh_16(mp, kp, nw, rh, rl)
309#endif
310#ifndef nh_vmac_nhbytes_2
311#define nh_vmac_nhbytes_2(mp, kp, nw, rh, rl, rh2, rl2) \
312 do { \
313 nh_vmac_nhbytes(mp, kp, nw, rh, rl); \
314 nh_vmac_nhbytes(mp, ((kp)+2), nw, rh2, rl2); \
315 } while (0)
316#endif
317
318static void vhash_abort(struct vmac_ctx *ctx)
319{
320 ctx->polytmp[0] = ctx->polykey[0];
321 ctx->polytmp[1] = ctx->polykey[1];
322 ctx->first_block_processed = 0;
323}
324
325static u64 l3hash(u64 p1, u64 p2,
326 u64 k1, u64 k2, u64 len)
327{
328 u64 rh, rl, t, z = 0;
329
330 /* fully reduce (p1,p2)+(len,0) mod p127 */
331 t = p1 >> 63;
332 p1 &= m63;
333 ADD128(p1, p2, len, t);
334 /* At this point, (p1,p2) is at most 2^127+(len<<64) */
335 t = (p1 > m63) + ((p1 == m63) && (p2 == m64));
336 ADD128(p1, p2, z, t);
337 p1 &= m63;
338
339 /* compute (p1,p2)/(2^64-2^32) and (p1,p2)%(2^64-2^32) */
340 t = p1 + (p2 >> 32);
341 t += (t >> 32);
342 t += (u32)t > 0xfffffffeu;
343 p1 += (t >> 32);
344 p2 += (p1 << 32);
345
346 /* compute (p1+k1)%p64 and (p2+k2)%p64 */
347 p1 += k1;
348 p1 += (0 - (p1 < k1)) & 257;
349 p2 += k2;
350 p2 += (0 - (p2 < k2)) & 257;
351
352 /* compute (p1+k1)*(p2+k2)%p64 */
353 MUL64(rh, rl, p1, p2);
354 t = rh >> 56;
355 ADD128(t, rl, z, rh);
356 rh <<= 8;
357 ADD128(t, rl, z, rh);
358 t += t << 8;
359 rl += t;
360 rl += (0 - (rl < t)) & 257;
361 rl += (0 - (rl > p64-1)) & 257;
362 return rl;
363}
364
365static void vhash_update(const unsigned char *m,
366 unsigned int mbytes, /* Positive multiple of VMAC_NHBYTES */
367 struct vmac_ctx *ctx)
368{
369 u64 rh, rl, *mptr;
370 const u64 *kptr = (u64 *)ctx->nhkey;
371 int i;
372 u64 ch, cl;
373 u64 pkh = ctx->polykey[0];
374 u64 pkl = ctx->polykey[1];
375
376 mptr = (u64 *)m;
377 i = mbytes / VMAC_NHBYTES; /* Must be non-zero */
378
379 ch = ctx->polytmp[0];
380 cl = ctx->polytmp[1];
381
382 if (!ctx->first_block_processed) {
383 ctx->first_block_processed = 1;
384 nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, rh, rl);
385 rh &= m62;
386 ADD128(ch, cl, rh, rl);
387 mptr += (VMAC_NHBYTES/sizeof(u64));
388 i--;
389 }
390
391 while (i--) {
392 nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, rh, rl);
393 rh &= m62;
394 poly_step(ch, cl, pkh, pkl, rh, rl);
395 mptr += (VMAC_NHBYTES/sizeof(u64));
396 }
397
398 ctx->polytmp[0] = ch;
399 ctx->polytmp[1] = cl;
400}
401
402static u64 vhash(unsigned char m[], unsigned int mbytes,
403 u64 *tagl, struct vmac_ctx *ctx)
404{
405 u64 rh, rl, *mptr;
406 const u64 *kptr = (u64 *)ctx->nhkey;
407 int i, remaining;
408 u64 ch, cl;
409 u64 pkh = ctx->polykey[0];
410 u64 pkl = ctx->polykey[1];
411
412 mptr = (u64 *)m;
413 i = mbytes / VMAC_NHBYTES;
414 remaining = mbytes % VMAC_NHBYTES;
415
416 if (ctx->first_block_processed) {
417 ch = ctx->polytmp[0];
418 cl = ctx->polytmp[1];
419 } else if (i) {
420 nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, ch, cl);
421 ch &= m62;
422 ADD128(ch, cl, pkh, pkl);
423 mptr += (VMAC_NHBYTES/sizeof(u64));
424 i--;
425 } else if (remaining) {
426 nh_16(mptr, kptr, 2*((remaining+15)/16), ch, cl);
427 ch &= m62;
428 ADD128(ch, cl, pkh, pkl);
429 mptr += (VMAC_NHBYTES/sizeof(u64));
430 goto do_l3;
431 } else { /* Empty String */
432 ch = pkh; cl = pkl;
433 goto do_l3;
434 }
435
436 while (i--) {
437 nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, rh, rl);
438 rh &= m62;
439 poly_step(ch, cl, pkh, pkl, rh, rl);
440 mptr += (VMAC_NHBYTES/sizeof(u64));
441 }
442 if (remaining) {
443 nh_16(mptr, kptr, 2*((remaining+15)/16), rh, rl);
444 rh &= m62;
445 poly_step(ch, cl, pkh, pkl, rh, rl);
446 }
447
448do_l3:
449 vhash_abort(ctx);
450 remaining *= 8;
451 return l3hash(ch, cl, ctx->l3key[0], ctx->l3key[1], remaining);
452}
453
454static u64 vmac(unsigned char m[], unsigned int mbytes,
455 unsigned char n[16], u64 *tagl,
456 struct vmac_ctx_t *ctx)
457{
458 u64 *in_n, *out_p;
459 u64 p, h;
460 int i;
461
462 in_n = ctx->__vmac_ctx.cached_nonce;
463 out_p = ctx->__vmac_ctx.cached_aes;
464
465 i = n[15] & 1;
466 if ((*(u64 *)(n+8) != in_n[1]) || (*(u64 *)(n) != in_n[0])) {
467 in_n[0] = *(u64 *)(n);
468 in_n[1] = *(u64 *)(n+8);
469 ((unsigned char *)in_n)[15] &= 0xFE;
470 crypto_cipher_encrypt_one(ctx->child,
471 (unsigned char *)out_p, (unsigned char *)in_n);
472
473 ((unsigned char *)in_n)[15] |= (unsigned char)(1-i);
474 }
475 p = be64_to_cpup(out_p + i);
476 h = vhash(m, mbytes, (u64 *)0, &ctx->__vmac_ctx);
477 return p + h;
478}
479
480static int vmac_set_key(unsigned char user_key[], struct vmac_ctx_t *ctx)
481{
482 u64 in[2] = {0}, out[2];
483 unsigned i;
484 int err = 0;
485
486 err = crypto_cipher_setkey(ctx->child, user_key, VMAC_KEY_LEN);
487 if (err)
488 return err;
489
490 /* Fill nh key */
491 ((unsigned char *)in)[0] = 0x80;
492 for (i = 0; i < sizeof(ctx->__vmac_ctx.nhkey)/8; i += 2) {
493 crypto_cipher_encrypt_one(ctx->child,
494 (unsigned char *)out, (unsigned char *)in);
495 ctx->__vmac_ctx.nhkey[i] = be64_to_cpup(out);
496 ctx->__vmac_ctx.nhkey[i+1] = be64_to_cpup(out+1);
497 ((unsigned char *)in)[15] += 1;
498 }
499
500 /* Fill poly key */
501 ((unsigned char *)in)[0] = 0xC0;
502 in[1] = 0;
503 for (i = 0; i < sizeof(ctx->__vmac_ctx.polykey)/8; i += 2) {
504 crypto_cipher_encrypt_one(ctx->child,
505 (unsigned char *)out, (unsigned char *)in);
506 ctx->__vmac_ctx.polytmp[i] =
507 ctx->__vmac_ctx.polykey[i] =
508 be64_to_cpup(out) & mpoly;
509 ctx->__vmac_ctx.polytmp[i+1] =
510 ctx->__vmac_ctx.polykey[i+1] =
511 be64_to_cpup(out+1) & mpoly;
512 ((unsigned char *)in)[15] += 1;
513 }
514
515 /* Fill ip key */
516 ((unsigned char *)in)[0] = 0xE0;
517 in[1] = 0;
518 for (i = 0; i < sizeof(ctx->__vmac_ctx.l3key)/8; i += 2) {
519 do {
520 crypto_cipher_encrypt_one(ctx->child,
521 (unsigned char *)out, (unsigned char *)in);
522 ctx->__vmac_ctx.l3key[i] = be64_to_cpup(out);
523 ctx->__vmac_ctx.l3key[i+1] = be64_to_cpup(out+1);
524 ((unsigned char *)in)[15] += 1;
525 } while (ctx->__vmac_ctx.l3key[i] >= p64
526 || ctx->__vmac_ctx.l3key[i+1] >= p64);
527 }
528
529 /* Invalidate nonce/aes cache and reset other elements */
530 ctx->__vmac_ctx.cached_nonce[0] = (u64)-1; /* Ensure illegal nonce */
531 ctx->__vmac_ctx.cached_nonce[1] = (u64)0; /* Ensure illegal nonce */
532 ctx->__vmac_ctx.first_block_processed = 0;
533
534 return err;
535}
536
537static int vmac_setkey(struct crypto_shash *parent,
538 const u8 *key, unsigned int keylen)
539{
540 struct vmac_ctx_t *ctx = crypto_shash_ctx(parent);
541
542 if (keylen != VMAC_KEY_LEN) {
543 crypto_shash_set_flags(parent, CRYPTO_TFM_RES_BAD_KEY_LEN);
544 return -EINVAL;
545 }
546
547 return vmac_set_key((u8 *)key, ctx);
548}
549
550static int vmac_init(struct shash_desc *pdesc)
551{
552 struct crypto_shash *parent = pdesc->tfm;
553 struct vmac_ctx_t *ctx = crypto_shash_ctx(parent);
554
555 memset(&ctx->__vmac_ctx, 0, sizeof(struct vmac_ctx));
556 return 0;
557}
558
559static int vmac_update(struct shash_desc *pdesc, const u8 *p,
560 unsigned int len)
561{
562 struct crypto_shash *parent = pdesc->tfm;
563 struct vmac_ctx_t *ctx = crypto_shash_ctx(parent);
564
565 vhash_update(p, len, &ctx->__vmac_ctx);
566
567 return 0;
568}
569
570static int vmac_final(struct shash_desc *pdesc, u8 *out)
571{
572 struct crypto_shash *parent = pdesc->tfm;
573 struct vmac_ctx_t *ctx = crypto_shash_ctx(parent);
574 vmac_t mac;
575 u8 nonce[16] = {};
576
577 mac = vmac(NULL, 0, nonce, NULL, ctx);
578 memcpy(out, &mac, sizeof(vmac_t));
579 memset(&mac, 0, sizeof(vmac_t));
580 memset(&ctx->__vmac_ctx, 0, sizeof(struct vmac_ctx));
581 return 0;
582}
583
584static int vmac_init_tfm(struct crypto_tfm *tfm)
585{
586 struct crypto_cipher *cipher;
587 struct crypto_instance *inst = (void *)tfm->__crt_alg;
588 struct crypto_spawn *spawn = crypto_instance_ctx(inst);
589 struct vmac_ctx_t *ctx = crypto_tfm_ctx(tfm);
590
591 cipher = crypto_spawn_cipher(spawn);
592 if (IS_ERR(cipher))
593 return PTR_ERR(cipher);
594
595 ctx->child = cipher;
596 return 0;
597}
598
599static void vmac_exit_tfm(struct crypto_tfm *tfm)
600{
601 struct vmac_ctx_t *ctx = crypto_tfm_ctx(tfm);
602 crypto_free_cipher(ctx->child);
603}
604
605static int vmac_create(struct crypto_template *tmpl, struct rtattr **tb)
606{
607 struct shash_instance *inst;
608 struct crypto_alg *alg;
609 int err;
610
611 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
612 if (err)
613 return err;
614
615 alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
616 CRYPTO_ALG_TYPE_MASK);
617 if (IS_ERR(alg))
618 return PTR_ERR(alg);
619
620 inst = shash_alloc_instance("vmac", alg);
621 err = PTR_ERR(inst);
622 if (IS_ERR(inst))
623 goto out_put_alg;
624
625 err = crypto_init_spawn(shash_instance_ctx(inst), alg,
626 shash_crypto_instance(inst),
627 CRYPTO_ALG_TYPE_MASK);
628 if (err)
629 goto out_free_inst;
630
631 inst->alg.base.cra_priority = alg->cra_priority;
632 inst->alg.base.cra_blocksize = alg->cra_blocksize;
633 inst->alg.base.cra_alignmask = alg->cra_alignmask;
634
635 inst->alg.digestsize = sizeof(vmac_t);
636 inst->alg.base.cra_ctxsize = sizeof(struct vmac_ctx_t);
637 inst->alg.base.cra_init = vmac_init_tfm;
638 inst->alg.base.cra_exit = vmac_exit_tfm;
639
640 inst->alg.init = vmac_init;
641 inst->alg.update = vmac_update;
642 inst->alg.final = vmac_final;
643 inst->alg.setkey = vmac_setkey;
644
645 err = shash_register_instance(tmpl, inst);
646 if (err) {
647out_free_inst:
648 shash_free_instance(shash_crypto_instance(inst));
649 }
650
651out_put_alg:
652 crypto_mod_put(alg);
653 return err;
654}
655
656static struct crypto_template vmac_tmpl = {
657 .name = "vmac",
658 .create = vmac_create,
659 .free = shash_free_instance,
660 .module = THIS_MODULE,
661};
662
663static int __init vmac_module_init(void)
664{
665 return crypto_register_template(&vmac_tmpl);
666}
667
668static void __exit vmac_module_exit(void)
669{
670 crypto_unregister_template(&vmac_tmpl);
671}
672
673module_init(vmac_module_init);
674module_exit(vmac_module_exit);
675
676MODULE_LICENSE("GPL");
677MODULE_DESCRIPTION("VMAC hash algorithm");
678
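Everything in VHASH rests on the 64x64->128-bit multiply, so a stand-alone
sketch of the schoolbook decomposition that MUL64 performs may help. This
is a user-space illustration under our own names and stdint types, not
kernel code; the __int128 cross-check is a GCC/Clang extension.

	#include <assert.h>
	#include <stdint.h>
	#include <stdio.h>

	/* 64x64->128 via four 32x32->64 partial products, as in MUL64 */
	static void mul64(uint64_t *rh, uint64_t *rl, uint64_t a, uint64_t b)
	{
		uint64_t hi = (a >> 32) * (b >> 32);                /* hi*hi */
		uint64_t lo = (uint64_t)(uint32_t)a * (uint32_t)b;  /* lo*lo */
		uint64_t m1 = (uint64_t)(uint32_t)a * (b >> 32);    /* lo*hi */
		uint64_t m2 = (a >> 32) * (uint32_t)b;              /* hi*lo */
		uint64_t t;

		t = lo + (m1 << 32);            /* fold lo*hi into (hi,lo) */
		hi += (t < lo) + (m1 >> 32);    /* propagate the carry */
		lo = t;
		t = lo + (m2 << 32);            /* fold hi*lo likewise */
		hi += (t < lo) + (m2 >> 32);
		lo = t;

		*rh = hi;
		*rl = lo;
	}

	int main(void)
	{
		uint64_t a = 0xfffffffffffffeffULL, b = 0x0123456789abcdefULL;
		uint64_t h, l;
		unsigned __int128 ref = (unsigned __int128)a * b;

		mul64(&h, &l, a, b);
		assert(h == (uint64_t)(ref >> 64) && l == (uint64_t)ref);
		printf("%016llx %016llx\n", (unsigned long long)h,
		       (unsigned long long)l);
		return 0;
	}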
diff --git a/crypto/xcbc.c b/crypto/xcbc.c
index b63b633e549c..bb7b67fba349 100644
--- a/crypto/xcbc.c
+++ b/crypto/xcbc.c
@@ -19,211 +19,142 @@
19 * Kazunori Miyazawa <miyazawa@linux-ipv6.org> 19 * Kazunori Miyazawa <miyazawa@linux-ipv6.org>
20 */ 20 */
21 21
22#include <crypto/scatterwalk.h> 22#include <crypto/internal/hash.h>
23#include <linux/crypto.h>
24#include <linux/err.h> 23#include <linux/err.h>
25#include <linux/hardirq.h>
26#include <linux/kernel.h> 24#include <linux/kernel.h>
27#include <linux/mm.h>
28#include <linux/rtnetlink.h>
29#include <linux/slab.h>
30#include <linux/scatterlist.h>
31 25
32static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101, 26static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101,
33 0x02020202, 0x02020202, 0x02020202, 0x02020202, 27 0x02020202, 0x02020202, 0x02020202, 0x02020202,
34 0x03030303, 0x03030303, 0x03030303, 0x03030303}; 28 0x03030303, 0x03030303, 0x03030303, 0x03030303};
29
35/* 30/*
36 * +------------------------ 31 * +------------------------
37 * | <parent tfm> 32 * | <parent tfm>
38 * +------------------------ 33 * +------------------------
39 * | crypto_xcbc_ctx 34 * | xcbc_tfm_ctx
40 * +------------------------ 35 * +------------------------
41 * | odds (block size) 36 * | consts (block size * 2)
42 * +------------------------ 37 * +------------------------
43 * | prev (block size) 38 */
39struct xcbc_tfm_ctx {
40 struct crypto_cipher *child;
41 u8 ctx[];
42};
43
44/*
44 * +------------------------ 45 * +------------------------
45 * | key (block size) 46 * | <shash desc>
46 * +------------------------ 47 * +------------------------
47 * | consts (block size * 3) 48 * | xcbc_desc_ctx
49 * +------------------------
50 * | odds (block size)
51 * +------------------------
52 * | prev (block size)
48 * +------------------------ 53 * +------------------------
49 */ 54 */
50struct crypto_xcbc_ctx { 55struct xcbc_desc_ctx {
51 struct crypto_cipher *child;
52 u8 *odds;
53 u8 *prev;
54 u8 *key;
55 u8 *consts;
56 void (*xor)(u8 *a, const u8 *b, unsigned int bs);
57 unsigned int keylen;
58 unsigned int len; 56 unsigned int len;
57 u8 ctx[];
59}; 58};
60 59
61static void xor_128(u8 *a, const u8 *b, unsigned int bs) 60static int crypto_xcbc_digest_setkey(struct crypto_shash *parent,
62{ 61 const u8 *inkey, unsigned int keylen)
63 ((u32 *)a)[0] ^= ((u32 *)b)[0];
64 ((u32 *)a)[1] ^= ((u32 *)b)[1];
65 ((u32 *)a)[2] ^= ((u32 *)b)[2];
66 ((u32 *)a)[3] ^= ((u32 *)b)[3];
67}
68
69static int _crypto_xcbc_digest_setkey(struct crypto_hash *parent,
70 struct crypto_xcbc_ctx *ctx)
71{ 62{
72 int bs = crypto_hash_blocksize(parent); 63 unsigned long alignmask = crypto_shash_alignmask(parent);
64 struct xcbc_tfm_ctx *ctx = crypto_shash_ctx(parent);
65 int bs = crypto_shash_blocksize(parent);
66 u8 *consts = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
73 int err = 0; 67 int err = 0;
74 u8 key1[bs]; 68 u8 key1[bs];
75 69
76 if ((err = crypto_cipher_setkey(ctx->child, ctx->key, ctx->keylen))) 70 if ((err = crypto_cipher_setkey(ctx->child, inkey, keylen)))
77 return err; 71 return err;
78 72
79 crypto_cipher_encrypt_one(ctx->child, key1, ctx->consts); 73 crypto_cipher_encrypt_one(ctx->child, consts, (u8 *)ks + bs);
74 crypto_cipher_encrypt_one(ctx->child, consts + bs, (u8 *)ks + bs * 2);
75 crypto_cipher_encrypt_one(ctx->child, key1, (u8 *)ks);
80 76
81 return crypto_cipher_setkey(ctx->child, key1, bs); 77 return crypto_cipher_setkey(ctx->child, key1, bs);
82}
83
84static int crypto_xcbc_digest_setkey(struct crypto_hash *parent,
85 const u8 *inkey, unsigned int keylen)
86{
87 struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
88
89 if (keylen != crypto_cipher_blocksize(ctx->child))
90 return -EINVAL;
91 78
92 ctx->keylen = keylen;
93 memcpy(ctx->key, inkey, keylen);
94 ctx->consts = (u8*)ks;
95
96 return _crypto_xcbc_digest_setkey(parent, ctx);
97} 79}
98 80
99static int crypto_xcbc_digest_init(struct hash_desc *pdesc) 81static int crypto_xcbc_digest_init(struct shash_desc *pdesc)
100{ 82{
101 struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(pdesc->tfm); 83 unsigned long alignmask = crypto_shash_alignmask(pdesc->tfm);
102 int bs = crypto_hash_blocksize(pdesc->tfm); 84 struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
85 int bs = crypto_shash_blocksize(pdesc->tfm);
86 u8 *prev = PTR_ALIGN(&ctx->ctx[0], alignmask + 1) + bs;
103 87
104 ctx->len = 0; 88 ctx->len = 0;
105 memset(ctx->odds, 0, bs); 89 memset(prev, 0, bs);
106 memset(ctx->prev, 0, bs);
107 90
108 return 0; 91 return 0;
109} 92}
110 93
111static int crypto_xcbc_digest_update2(struct hash_desc *pdesc, 94static int crypto_xcbc_digest_update(struct shash_desc *pdesc, const u8 *p,
112 struct scatterlist *sg, 95 unsigned int len)
113 unsigned int nbytes)
114{ 96{
115 struct crypto_hash *parent = pdesc->tfm; 97 struct crypto_shash *parent = pdesc->tfm;
116 struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent); 98 unsigned long alignmask = crypto_shash_alignmask(parent);
117 struct crypto_cipher *tfm = ctx->child; 99 struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent);
118 int bs = crypto_hash_blocksize(parent); 100 struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
119 101 struct crypto_cipher *tfm = tctx->child;
120 for (;;) { 102 int bs = crypto_shash_blocksize(parent);
121 struct page *pg = sg_page(sg); 103 u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
122 unsigned int offset = sg->offset; 104 u8 *prev = odds + bs;
123 unsigned int slen = sg->length; 105
124 106 /* checking the data can fill the block */
125 if (unlikely(slen > nbytes)) 107 if ((ctx->len + len) <= bs) {
126 slen = nbytes; 108 memcpy(odds + ctx->len, p, len);
127 109 ctx->len += len;
128 nbytes -= slen; 110 return 0;
129
130 while (slen > 0) {
131 unsigned int len = min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
132 char *p = crypto_kmap(pg, 0) + offset;
133
134 /* checking the data can fill the block */
135 if ((ctx->len + len) <= bs) {
136 memcpy(ctx->odds + ctx->len, p, len);
137 ctx->len += len;
138 slen -= len;
139
140 /* checking the rest of the page */
141 if (len + offset >= PAGE_SIZE) {
142 offset = 0;
143 pg++;
144 } else
145 offset += len;
146
147 crypto_kunmap(p, 0);
148 crypto_yield(pdesc->flags);
149 continue;
150 }
151
152 /* filling odds with new data and encrypting it */
153 memcpy(ctx->odds + ctx->len, p, bs - ctx->len);
154 len -= bs - ctx->len;
155 p += bs - ctx->len;
156
157 ctx->xor(ctx->prev, ctx->odds, bs);
158 crypto_cipher_encrypt_one(tfm, ctx->prev, ctx->prev);
159
160 /* clearing the length */
161 ctx->len = 0;
162
163 /* encrypting the rest of data */
164 while (len > bs) {
165 ctx->xor(ctx->prev, p, bs);
166 crypto_cipher_encrypt_one(tfm, ctx->prev,
167 ctx->prev);
168 p += bs;
169 len -= bs;
170 }
171
172 /* keeping the surplus of blocksize */
173 if (len) {
174 memcpy(ctx->odds, p, len);
175 ctx->len = len;
176 }
177 crypto_kunmap(p, 0);
178 crypto_yield(pdesc->flags);
179 slen -= min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
180 offset = 0;
181 pg++;
182 }
183
184 if (!nbytes)
185 break;
186 sg = scatterwalk_sg_next(sg);
187 } 111 }
188 112
189 return 0; 113 /* filling odds with new data and encrypting it */
190} 114 memcpy(odds + ctx->len, p, bs - ctx->len);
115 len -= bs - ctx->len;
116 p += bs - ctx->len;
191 117
192static int crypto_xcbc_digest_update(struct hash_desc *pdesc, 118 crypto_xor(prev, odds, bs);
193 struct scatterlist *sg, 119 crypto_cipher_encrypt_one(tfm, prev, prev);
194 unsigned int nbytes)
195{
196 if (WARN_ON_ONCE(in_irq()))
197 return -EDEADLK;
198 return crypto_xcbc_digest_update2(pdesc, sg, nbytes);
199}
200 120
201static int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out) 121 /* clearing the length */
202{ 122 ctx->len = 0;
203 struct crypto_hash *parent = pdesc->tfm;
204 struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
205 struct crypto_cipher *tfm = ctx->child;
206 int bs = crypto_hash_blocksize(parent);
207 int err = 0;
208
209 if (ctx->len == bs) {
210 u8 key2[bs];
211 123
212 if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0) 124 /* encrypting the rest of data */
213 return err; 125 while (len > bs) {
126 crypto_xor(prev, p, bs);
127 crypto_cipher_encrypt_one(tfm, prev, prev);
128 p += bs;
129 len -= bs;
130 }
214 131
215 crypto_cipher_encrypt_one(tfm, key2, 132 /* keeping the surplus of blocksize */
216 (u8 *)(ctx->consts + bs)); 133 if (len) {
134 memcpy(odds, p, len);
135 ctx->len = len;
136 }
217 137
218 ctx->xor(ctx->prev, ctx->odds, bs); 138 return 0;
219 ctx->xor(ctx->prev, key2, bs); 139}
220 _crypto_xcbc_digest_setkey(parent, ctx);
221 140
222 crypto_cipher_encrypt_one(tfm, out, ctx->prev); 141static int crypto_xcbc_digest_final(struct shash_desc *pdesc, u8 *out)
223 } else { 142{
224 u8 key3[bs]; 143 struct crypto_shash *parent = pdesc->tfm;
144 unsigned long alignmask = crypto_shash_alignmask(parent);
145 struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent);
146 struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
147 struct crypto_cipher *tfm = tctx->child;
148 int bs = crypto_shash_blocksize(parent);
149 u8 *consts = PTR_ALIGN(&tctx->ctx[0], alignmask + 1);
150 u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
151 u8 *prev = odds + bs;
152 unsigned int offset = 0;
153
154 if (ctx->len != bs) {
225 unsigned int rlen; 155 unsigned int rlen;
226 u8 *p = ctx->odds + ctx->len; 156 u8 *p = odds + ctx->len;
157
227 *p = 0x80; 158 *p = 0x80;
228 p++; 159 p++;
229 160
@@ -231,32 +162,15 @@ static int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out)
231 if (rlen) 162 if (rlen)
232 memset(p, 0, rlen); 163 memset(p, 0, rlen);
233 164
234 if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0) 165 offset += bs;
235 return err;
236
237 crypto_cipher_encrypt_one(tfm, key3,
238 (u8 *)(ctx->consts + bs * 2));
239
240 ctx->xor(ctx->prev, ctx->odds, bs);
241 ctx->xor(ctx->prev, key3, bs);
242
243 _crypto_xcbc_digest_setkey(parent, ctx);
244
245 crypto_cipher_encrypt_one(tfm, out, ctx->prev);
246 } 166 }
247 167
248 return 0; 168 crypto_xor(prev, odds, bs);
249} 169 crypto_xor(prev, consts + offset, bs);
250 170
251static int crypto_xcbc_digest(struct hash_desc *pdesc, 171 crypto_cipher_encrypt_one(tfm, out, prev);
252 struct scatterlist *sg, unsigned int nbytes, u8 *out)
253{
254 if (WARN_ON_ONCE(in_irq()))
255 return -EDEADLK;
256 172
257 crypto_xcbc_digest_init(pdesc); 173 return 0;
258 crypto_xcbc_digest_update2(pdesc, sg, nbytes);
259 return crypto_xcbc_digest_final(pdesc, out);
260} 174}
261 175
262static int xcbc_init_tfm(struct crypto_tfm *tfm) 176static int xcbc_init_tfm(struct crypto_tfm *tfm)
@@ -264,95 +178,95 @@ static int xcbc_init_tfm(struct crypto_tfm *tfm)
264 struct crypto_cipher *cipher; 178 struct crypto_cipher *cipher;
265 struct crypto_instance *inst = (void *)tfm->__crt_alg; 179 struct crypto_instance *inst = (void *)tfm->__crt_alg;
266 struct crypto_spawn *spawn = crypto_instance_ctx(inst); 180 struct crypto_spawn *spawn = crypto_instance_ctx(inst);
267 struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm)); 181 struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
268 int bs = crypto_hash_blocksize(__crypto_hash_cast(tfm));
269 182
270 cipher = crypto_spawn_cipher(spawn); 183 cipher = crypto_spawn_cipher(spawn);
271 if (IS_ERR(cipher)) 184 if (IS_ERR(cipher))
272 return PTR_ERR(cipher); 185 return PTR_ERR(cipher);
273 186
274 switch(bs) {
275 case 16:
276 ctx->xor = xor_128;
277 break;
278 default:
279 return -EINVAL;
280 }
281
282 ctx->child = cipher; 187 ctx->child = cipher;
283 ctx->odds = (u8*)(ctx+1);
284 ctx->prev = ctx->odds + bs;
285 ctx->key = ctx->prev + bs;
286 188
287 return 0; 189 return 0;
288}; 190};
289 191
290static void xcbc_exit_tfm(struct crypto_tfm *tfm) 192static void xcbc_exit_tfm(struct crypto_tfm *tfm)
291{ 193{
292 struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm)); 194 struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
293 crypto_free_cipher(ctx->child); 195 crypto_free_cipher(ctx->child);
294} 196}
295 197
296static struct crypto_instance *xcbc_alloc(struct rtattr **tb) 198static int xcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
297{ 199{
298 struct crypto_instance *inst; 200 struct shash_instance *inst;
299 struct crypto_alg *alg; 201 struct crypto_alg *alg;
202 unsigned long alignmask;
300 int err; 203 int err;
301 204
302 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_HASH); 205 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
303 if (err) 206 if (err)
304 return ERR_PTR(err); 207 return err;
305 208
306 alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, 209 alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
307 CRYPTO_ALG_TYPE_MASK); 210 CRYPTO_ALG_TYPE_MASK);
308 if (IS_ERR(alg)) 211 if (IS_ERR(alg))
309 return ERR_CAST(alg); 212 return PTR_ERR(alg);
310 213
311 switch(alg->cra_blocksize) { 214 switch(alg->cra_blocksize) {
312 case 16: 215 case 16:
313 break; 216 break;
314 default: 217 default:
315 inst = ERR_PTR(-EINVAL);
316 goto out_put_alg; 218 goto out_put_alg;
317 } 219 }
318 220
319 inst = crypto_alloc_instance("xcbc", alg); 221 inst = shash_alloc_instance("xcbc", alg);
222 err = PTR_ERR(inst);
320 if (IS_ERR(inst)) 223 if (IS_ERR(inst))
321 goto out_put_alg; 224 goto out_put_alg;
322 225
323 inst->alg.cra_flags = CRYPTO_ALG_TYPE_HASH; 226 err = crypto_init_spawn(shash_instance_ctx(inst), alg,
324 inst->alg.cra_priority = alg->cra_priority; 227 shash_crypto_instance(inst),
325 inst->alg.cra_blocksize = alg->cra_blocksize; 228 CRYPTO_ALG_TYPE_MASK);
326 inst->alg.cra_alignmask = alg->cra_alignmask; 229 if (err)
327 inst->alg.cra_type = &crypto_hash_type; 230 goto out_free_inst;
328 231
329 inst->alg.cra_hash.digestsize = alg->cra_blocksize; 232 alignmask = alg->cra_alignmask | 3;
330 inst->alg.cra_ctxsize = sizeof(struct crypto_xcbc_ctx) + 233 inst->alg.base.cra_alignmask = alignmask;
331 ALIGN(inst->alg.cra_blocksize * 3, sizeof(void *)); 234 inst->alg.base.cra_priority = alg->cra_priority;
332 inst->alg.cra_init = xcbc_init_tfm; 235 inst->alg.base.cra_blocksize = alg->cra_blocksize;
333 inst->alg.cra_exit = xcbc_exit_tfm; 236
334 237 inst->alg.digestsize = alg->cra_blocksize;
335 inst->alg.cra_hash.init = crypto_xcbc_digest_init; 238 inst->alg.descsize = ALIGN(sizeof(struct xcbc_desc_ctx),
336 inst->alg.cra_hash.update = crypto_xcbc_digest_update; 239 crypto_tfm_ctx_alignment()) +
337 inst->alg.cra_hash.final = crypto_xcbc_digest_final; 240 (alignmask &
338 inst->alg.cra_hash.digest = crypto_xcbc_digest; 241 ~(crypto_tfm_ctx_alignment() - 1)) +
339 inst->alg.cra_hash.setkey = crypto_xcbc_digest_setkey; 242 alg->cra_blocksize * 2;
243
244 inst->alg.base.cra_ctxsize = ALIGN(sizeof(struct xcbc_tfm_ctx),
245 alignmask + 1) +
246 alg->cra_blocksize * 2;
247 inst->alg.base.cra_init = xcbc_init_tfm;
248 inst->alg.base.cra_exit = xcbc_exit_tfm;
249
250 inst->alg.init = crypto_xcbc_digest_init;
251 inst->alg.update = crypto_xcbc_digest_update;
252 inst->alg.final = crypto_xcbc_digest_final;
253 inst->alg.setkey = crypto_xcbc_digest_setkey;
254
255 err = shash_register_instance(tmpl, inst);
256 if (err) {
257out_free_inst:
258 shash_free_instance(shash_crypto_instance(inst));
259 }
340 260
341out_put_alg: 261out_put_alg:
342 crypto_mod_put(alg); 262 crypto_mod_put(alg);
343 return inst; 263 return err;
344}
345
346static void xcbc_free(struct crypto_instance *inst)
347{
348 crypto_drop_spawn(crypto_instance_ctx(inst));
349 kfree(inst);
350} 264}
351 265
352static struct crypto_template crypto_xcbc_tmpl = { 266static struct crypto_template crypto_xcbc_tmpl = {
353 .name = "xcbc", 267 .name = "xcbc",
354 .alloc = xcbc_alloc, 268 .create = xcbc_create,
355 .free = xcbc_free, 269 .free = shash_free_instance,
356 .module = THIS_MODULE, 270 .module = THIS_MODULE,
357}; 271};
358 272
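After the conversion, xcbc(aes) is reached through the shash interface
rather than the legacy hash one. A minimal in-kernel usage sketch, assuming
a sleeping context and with error handling trimmed (key, data, len and out
are placeholders):

	struct crypto_shash *tfm = crypto_alloc_shash("xcbc(aes)", 0, 0);
	struct shash_desc *desc;
	u8 out[16];

	crypto_shash_setkey(tfm, key, 16);	/* AES-128 key */

	/* descsize now covers the alignment padding computed in xcbc_create() */
	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	desc->tfm = tfm;
	desc->flags = 0;

	crypto_shash_digest(desc, data, len, out);	/* one-shot MAC */

	kfree(desc);
	crypto_free_shash(tfm);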