Diffstat (limited to 'crypto/shash.c')
 -rw-r--r--  crypto/shash.c  508
 1 file changed, 508 insertions, 0 deletions
diff --git a/crypto/shash.c b/crypto/shash.c
new file mode 100644
index 000000000000..c9df367332ff
--- /dev/null
+++ b/crypto/shash.c
@@ -0,0 +1,508 @@
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm)
{
        return container_of(tfm, struct crypto_shash, base);
}

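/*
 * The key is copied into a heap bounce buffer aligned to the algorithm's
 * alignment mask, so that ->setkey() never sees a misaligned pointer.
 * The copy is zeroed before the buffer is freed.
 */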
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
                                  unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned long absize;
        u8 *buffer, *alignbuffer;
        int err;

        absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
        buffer = kmalloc(absize, GFP_KERNEL);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        err = shash->setkey(tfm, alignbuffer, keylen);
        memset(alignbuffer, 0, keylen);
        kfree(buffer);
        return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
                        unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (!shash->setkey)
                return -ENOSYS;

        if ((unsigned long)key & alignmask)
                return shash_setkey_unaligned(tfm, key, keylen);

        return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
                                                   unsigned long mask)
{
        return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}

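/*
 * Hash just enough of the head of the buffer through an aligned on-stack
 * copy to bring the remaining data pointer up to the required alignment,
 * then hash the aligned remainder in place.
 */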
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned int unaligned_len = alignmask + 1 -
                                     ((unsigned long)data & alignmask);
        u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
                __attribute__ ((aligned));

        memcpy(buf, data, unaligned_len);

        return shash->update(desc, buf, unaligned_len) ?:
               shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
                        unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)data & alignmask)
                return shash_update_unaligned(desc, data, len);

        return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

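/*
 * Produce the digest into an aligned on-stack buffer first, then copy it
 * out to the caller's (misaligned) output pointer.
 */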
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned int ds = crypto_shash_digestsize(tfm);
        u8 buf[shash_align_buffer_size(ds, alignmask)]
                __attribute__ ((aligned));
        int err;

        err = shash->final(desc, buf);
        memcpy(out, buf, ds);
        return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)out & alignmask)
                return shash_final_unaligned(desc, out);

        return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
                                 unsigned int len, u8 *out)
{
        return crypto_shash_update(desc, data, len) ?:
               crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
                       unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask ||
            !shash->finup)
                return shash_finup_unaligned(desc, data, len, out);

        return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len, u8 *out)
{
        return crypto_shash_init(desc) ?:
               crypto_shash_update(desc, data, len) ?:
               crypto_shash_final(desc, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask ||
            !shash->digest)
                return shash_digest_unaligned(desc, data, len, out);

        return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

int crypto_shash_import(struct shash_desc *desc, const u8 *in)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *alg = crypto_shash_alg(tfm);

        memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));

        if (alg->reinit)
                alg->reinit(desc);

        return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_import);

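/*
 * The wrappers below present a shash algorithm through the ahash
 * interface: the tfm context holds a pointer to the underlying shash
 * tfm, while the shash_desc lives in the ahash request context.
 */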
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

        return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_init(desc);
}

static int shash_async_update(struct ahash_request *req)
{
        struct shash_desc *desc = ahash_request_ctx(req);
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}

static int shash_async_final(struct ahash_request *req)
{
        return crypto_shash_final(ahash_request_ctx(req), req->result);
}

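/*
 * Fast path: if the scatterlist data is contiguous within a single page,
 * map it and hash it with one ->digest() call; otherwise fall back to a
 * full init/update/final walk over the scatterlist.
 */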
static int shash_async_digest(struct ahash_request *req)
{
        struct scatterlist *sg = req->src;
        unsigned int offset = sg->offset;
        unsigned int nbytes = req->nbytes;
        int err;

        if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
                struct crypto_shash **ctx =
                        crypto_ahash_ctx(crypto_ahash_reqtfm(req));
                struct shash_desc *desc = ahash_request_ctx(req);
                void *data;

                desc->tfm = *ctx;
                desc->flags = req->base.flags;

                data = crypto_kmap(sg_page(sg), 0);
                err = crypto_shash_digest(desc, data + offset, nbytes,
                                          req->result);
                crypto_kunmap(data, 0);
                crypto_yield(desc->flags);
                goto out;
        }

        err = shash_async_init(req);
        if (err)
                goto out;

        err = shash_async_update(req);
        if (err)
                goto out;

        err = shash_async_final(req);

out:
        return err;
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(*ctx);
}

static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct ahash_tfm *crt = &tfm->crt_ahash;
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = __crypto_shash_cast(crypto_create_tfm(
                calg, &crypto_shash_type));
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        *ctx = shash;
        tfm->exit = crypto_exit_shash_ops_async;

        crt->init = shash_async_init;
        crt->update = shash_async_update;
        crt->final = shash_async_final;
        crt->digest = shash_async_digest;
        crt->setkey = shash_async_setkey;

        crt->digestsize = alg->digestsize;
        crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

        return 0;
}

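/*
 * The wrappers below adapt a shash algorithm to the legacy crypto_hash
 * interface; here the shash_desc is stored directly in the tfm context.
 */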
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
                               unsigned int keylen)
{
        struct shash_desc *desc = crypto_hash_ctx(tfm);

        return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
        struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);

        desc->flags = hdesc->flags;

        return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
                               unsigned int len)
{
        struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
             nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
        return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
                               unsigned int nbytes, u8 *out)
{
        unsigned int offset = sg->offset;
        int err;

        if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
                struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
                void *data;

                desc->flags = hdesc->flags;

                data = crypto_kmap(sg_page(sg), 0);
                err = crypto_shash_digest(desc, data + offset, nbytes, out);
                crypto_kunmap(data, 0);
                crypto_yield(desc->flags);
                goto out;
        }

        err = shash_compat_init(hdesc);
        if (err)
                goto out;

        err = shash_compat_update(hdesc, sg, nbytes);
        if (err)
                goto out;

        err = shash_compat_final(hdesc, out);

out:
        return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
        struct shash_desc *desc = crypto_tfm_ctx(tfm);

        crypto_free_shash(desc->tfm);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
        struct hash_tfm *crt = &tfm->crt_hash;
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct shash_desc *desc = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;

        shash = __crypto_shash_cast(crypto_create_tfm(
                calg, &crypto_shash_type));
        if (IS_ERR(shash))
                return PTR_ERR(shash);

        desc->tfm = shash;
        tfm->exit = crypto_exit_shash_ops_compat;

        crt->init = shash_compat_init;
        crt->update = shash_compat_update;
        crt->final = shash_compat_final;
        crt->digest = shash_compat_digest;
        crt->setkey = shash_compat_setkey;

        crt->digestsize = alg->digestsize;

        return 0;
}

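/*
 * Dispatch on the type mask requested by the user: a shash algorithm can
 * be instantiated through the legacy hash interface or through the ahash
 * interface.
 */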
static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        switch (mask & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_HASH_MASK:
                return crypto_init_shash_ops_compat(tfm);
        case CRYPTO_ALG_TYPE_AHASH_MASK:
                return crypto_init_shash_ops_async(tfm);
        }

        return -EINVAL;
}

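/*
 * The context size likewise depends on the requested frontend: the compat
 * case embeds a full shash_desc, while the async case only stores a
 * pointer to the underlying shash tfm.
 */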
static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
                                         u32 mask)
{
        struct shash_alg *salg = __crypto_shash_alg(alg);

        switch (mask & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_HASH_MASK:
                return sizeof(struct shash_desc) + salg->descsize;
        case CRYPTO_ALG_TYPE_AHASH_MASK:
                return sizeof(struct crypto_shash *);
        }

        return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
                                 const struct crypto_type *frontend)
{
        if (frontend->type != CRYPTO_ALG_TYPE_SHASH)
                return -EINVAL;

        return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
                                         const struct crypto_type *frontend)
{
        return alg->cra_ctxsize;
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct shash_alg *salg = __crypto_shash_alg(alg);

        seq_printf(m, "type         : shash\n");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "digestsize   : %u\n", salg->digestsize);
        seq_printf(m, "descsize     : %u\n", salg->descsize);
}

static const struct crypto_type crypto_shash_type = {
        .ctxsize = crypto_shash_ctxsize,
        .extsize = crypto_shash_extsize,
        .init = crypto_init_shash_ops,
        .init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
        .show = crypto_shash_show,
#endif
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_MASK,
        .type = CRYPTO_ALG_TYPE_SHASH,
        .tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
                                        u32 mask)
{
        return __crypto_shash_cast(
                crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask));
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

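/* Sanity-check the algorithm's digest and descriptor sizes before
 * registering it. */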
int crypto_register_shash(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;

        if (alg->digestsize > PAGE_SIZE / 8 ||
            alg->descsize > PAGE_SIZE / 8)
                return -EINVAL;

        base->cra_type = &crypto_shash_type;
        base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
        base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

        return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
        return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");
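
For reference, a minimal caller of the new interface could look like the sketch below. This is not part of the patch: example_digest and the "sha1" algorithm name are illustrative assumptions, and it only exercises crypto_alloc_shash(), crypto_shash_descsize() and crypto_shash_digest() as defined above.

/*
 * Hypothetical usage sketch, not part of the patch: compute a digest
 * with a one-shot crypto_shash_digest() call.  Assumes a shash
 * implementation named "sha1" has been registered.
 */
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

static int example_digest(const u8 *data, unsigned int len, u8 *out)
{
        struct crypto_shash *tfm;
        struct shash_desc *desc;
        int err;

        tfm = crypto_alloc_shash("sha1", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        /* The descriptor is followed by the algorithm's private state. */
        desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
                       GFP_KERNEL);
        if (!desc) {
                crypto_free_shash(tfm);
                return -ENOMEM;
        }

        desc->tfm = tfm;
        desc->flags = 0;

        err = crypto_shash_digest(desc, data, len, out);

        kfree(desc);
        crypto_free_shash(tfm);
        return err;
}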