Diffstat (limited to 'crypto/ahash.c')

-rw-r--r--	crypto/ahash.c	336
1 file changed, 304 insertions, 32 deletions
diff --git a/crypto/ahash.c b/crypto/ahash.c
index f3476374f764..33a4ff45f842 100644
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -24,6 +24,19 @@
 
 #include "internal.h"
 
+struct ahash_request_priv {
+	crypto_completion_t complete;
+	void *data;
+	u8 *result;
+	void *ubuf[] CRYPTO_MINALIGN_ATTR;
+};
+
+static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
+{
+	return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
+			    halg);
+}
+
 static int hash_walk_next(struct crypto_hash_walk *walk)
 {
 	unsigned int alignmask = walk->alignmask;
@@ -132,36 +145,34 @@ int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
 static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
 				  unsigned int keylen)
 {
-	struct ahash_alg *ahash = crypto_ahash_alg(tfm);
 	unsigned long alignmask = crypto_ahash_alignmask(tfm);
 	int ret;
 	u8 *buffer, *alignbuffer;
 	unsigned long absize;
 
 	absize = keylen + alignmask;
-	buffer = kmalloc(absize, GFP_ATOMIC);
+	buffer = kmalloc(absize, GFP_KERNEL);
 	if (!buffer)
 		return -ENOMEM;
 
 	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 	memcpy(alignbuffer, key, keylen);
-	ret = ahash->setkey(tfm, alignbuffer, keylen);
-	memset(alignbuffer, 0, keylen);
-	kfree(buffer);
+	ret = tfm->setkey(tfm, alignbuffer, keylen);
+	kzfree(buffer);
 	return ret;
 }
 
-static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
+int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
 			unsigned int keylen)
 {
-	struct ahash_alg *ahash = crypto_ahash_alg(tfm);
 	unsigned long alignmask = crypto_ahash_alignmask(tfm);
 
 	if ((unsigned long)key & alignmask)
 		return ahash_setkey_unaligned(tfm, key, keylen);
 
-	return ahash->setkey(tfm, key, keylen);
+	return tfm->setkey(tfm, key, keylen);
 }
+EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
 
 static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
 			  unsigned int keylen)
@@ -169,44 +180,221 @@ static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
 	return -ENOSYS;
 }
 
-int crypto_ahash_import(struct ahash_request *req, const u8 *in)
+static inline unsigned int ahash_align_buffer_size(unsigned len,
+						   unsigned long mask)
+{
+	return len + (mask & ~(crypto_tfm_ctx_alignment() - 1));
+}
+
+static void ahash_op_unaligned_finish(struct ahash_request *req, int err)
+{
+	struct ahash_request_priv *priv = req->priv;
+
+	if (err == -EINPROGRESS)
+		return;
+
+	if (!err)
+		memcpy(priv->result, req->result,
+		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
+
+	kzfree(priv);
+}
+
+static void ahash_op_unaligned_done(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	ahash_op_unaligned_finish(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_op_unaligned(struct ahash_request *req,
+			      int (*op)(struct ahash_request *))
 {
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-	struct ahash_alg *alg = crypto_ahash_alg(tfm);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+	unsigned int ds = crypto_ahash_digestsize(tfm);
+	struct ahash_request_priv *priv;
+	int err;
+
+	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
+		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
+		       GFP_KERNEL : GFP_ATOMIC);
+	if (!priv)
+		return -ENOMEM;
 
-	memcpy(ahash_request_ctx(req), in, crypto_ahash_reqsize(tfm));
+	priv->result = req->result;
+	priv->complete = req->base.complete;
+	priv->data = req->base.data;
 
-	if (alg->reinit)
-		alg->reinit(req);
+	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
+	req->base.complete = ahash_op_unaligned_done;
+	req->base.data = req;
+	req->priv = priv;
 
-	return 0;
+	err = op(req);
+	ahash_op_unaligned_finish(req, err);
+
+	return err;
 }
-EXPORT_SYMBOL_GPL(crypto_ahash_import);
 
-static unsigned int crypto_ahash_ctxsize(struct crypto_alg *alg, u32 type,
-					  u32 mask)
+static int crypto_ahash_op(struct ahash_request *req,
+			   int (*op)(struct ahash_request *))
 {
-	return alg->cra_ctxsize;
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+
+	if ((unsigned long)req->result & alignmask)
+		return ahash_op_unaligned(req, op);
+
+	return op(req);
 }
 
-static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
+int crypto_ahash_final(struct ahash_request *req)
 {
-	struct ahash_alg *alg = &tfm->__crt_alg->cra_ahash;
-	struct ahash_tfm *crt = &tfm->crt_ahash;
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_final);
 
-	if (alg->digestsize > PAGE_SIZE / 8)
-		return -EINVAL;
+int crypto_ahash_finup(struct ahash_request *req)
+{
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_finup);
+
+int crypto_ahash_digest(struct ahash_request *req)
+{
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->digest);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_digest);
+
+static void ahash_def_finup_finish2(struct ahash_request *req, int err)
+{
+	struct ahash_request_priv *priv = req->priv;
+
+	if (err == -EINPROGRESS)
+		return;
+
+	if (!err)
+		memcpy(priv->result, req->result,
+		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
 
-	crt->init = alg->init;
-	crt->update = alg->update;
-	crt->final = alg->final;
-	crt->digest = alg->digest;
-	crt->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey;
-	crt->digestsize = alg->digestsize;
+	kzfree(priv);
+}
+
+static void ahash_def_finup_done2(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	ahash_def_finup_finish2(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_def_finup_finish1(struct ahash_request *req, int err)
+{
+	if (err)
+		goto out;
+
+	req->base.complete = ahash_def_finup_done2;
+	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+	err = crypto_ahash_reqtfm(req)->final(req);
+
+out:
+	ahash_def_finup_finish2(req, err);
+	return err;
+}
+
+static void ahash_def_finup_done1(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	err = ahash_def_finup_finish1(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_def_finup(struct ahash_request *req)
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+	unsigned int ds = crypto_ahash_digestsize(tfm);
+	struct ahash_request_priv *priv;
+
+	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
+		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
+		       GFP_KERNEL : GFP_ATOMIC);
+	if (!priv)
+		return -ENOMEM;
+
+	priv->result = req->result;
+	priv->complete = req->base.complete;
+	priv->data = req->base.data;
+
+	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
+	req->base.complete = ahash_def_finup_done1;
+	req->base.data = req;
+	req->priv = priv;
+
+	return ahash_def_finup_finish1(req, tfm->update(req));
+}
+
+static int ahash_no_export(struct ahash_request *req, void *out)
+{
+	return -ENOSYS;
+}
+
+static int ahash_no_import(struct ahash_request *req, const void *in)
+{
+	return -ENOSYS;
+}
+
+static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
+{
+	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
+	struct ahash_alg *alg = crypto_ahash_alg(hash);
+
+	hash->setkey = ahash_nosetkey;
+	hash->export = ahash_no_export;
+	hash->import = ahash_no_import;
+
+	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
+		return crypto_init_shash_ops_async(tfm);
+
+	hash->init = alg->init;
+	hash->update = alg->update;
+	hash->final = alg->final;
+	hash->finup = alg->finup ?: ahash_def_finup;
+	hash->digest = alg->digest;
+
+	if (alg->setkey)
+		hash->setkey = alg->setkey;
+	if (alg->export)
+		hash->export = alg->export;
+	if (alg->import)
+		hash->import = alg->import;
 
 	return 0;
 }
 
+static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
+{
+	if (alg->cra_type == &crypto_ahash_type)
+		return alg->cra_ctxsize;
+
+	return sizeof(struct crypto_shash *);
+}
+
 static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
 	__attribute__ ((unused));
 static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
@@ -215,17 +403,101 @@ static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
 	seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
 		   "yes" : "no");
 	seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
-	seq_printf(m, "digestsize : %u\n", alg->cra_ahash.digestsize);
+	seq_printf(m, "digestsize : %u\n",
+		   __crypto_hash_alg_common(alg)->digestsize);
 }
 
 const struct crypto_type crypto_ahash_type = {
-	.ctxsize = crypto_ahash_ctxsize,
-	.init = crypto_init_ahash_ops,
+	.extsize = crypto_ahash_extsize,
+	.init_tfm = crypto_ahash_init_tfm,
 #ifdef CONFIG_PROC_FS
 	.show = crypto_ahash_show,
 #endif
+	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
+	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
+	.type = CRYPTO_ALG_TYPE_AHASH,
+	.tfmsize = offsetof(struct crypto_ahash, base),
 };
 EXPORT_SYMBOL_GPL(crypto_ahash_type);
 
+struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
+					u32 mask)
+{
+	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_ahash);
+
+static int ahash_prepare_alg(struct ahash_alg *alg)
+{
+	struct crypto_alg *base = &alg->halg.base;
+
+	if (alg->halg.digestsize > PAGE_SIZE / 8 ||
+	    alg->halg.statesize > PAGE_SIZE / 8)
+		return -EINVAL;
+
+	base->cra_type = &crypto_ahash_type;
+	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
+	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;
+
+	return 0;
+}
+
+int crypto_register_ahash(struct ahash_alg *alg)
+{
+	struct crypto_alg *base = &alg->halg.base;
+	int err;
+
+	err = ahash_prepare_alg(alg);
+	if (err)
+		return err;
+
+	return crypto_register_alg(base);
+}
+EXPORT_SYMBOL_GPL(crypto_register_ahash);
+
+int crypto_unregister_ahash(struct ahash_alg *alg)
+{
+	return crypto_unregister_alg(&alg->halg.base);
+}
+EXPORT_SYMBOL_GPL(crypto_unregister_ahash);
+
+int ahash_register_instance(struct crypto_template *tmpl,
+			    struct ahash_instance *inst)
+{
+	int err;
+
+	err = ahash_prepare_alg(&inst->alg);
+	if (err)
+		return err;
+
+	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
+}
+EXPORT_SYMBOL_GPL(ahash_register_instance);
+
+void ahash_free_instance(struct crypto_instance *inst)
+{
+	crypto_drop_spawn(crypto_instance_ctx(inst));
+	kfree(ahash_instance(inst));
+}
+EXPORT_SYMBOL_GPL(ahash_free_instance);
+
+int crypto_init_ahash_spawn(struct crypto_ahash_spawn *spawn,
+			    struct hash_alg_common *alg,
+			    struct crypto_instance *inst)
+{
+	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
+				  &crypto_ahash_type);
+}
+EXPORT_SYMBOL_GPL(crypto_init_ahash_spawn);
+
+struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
+{
+	struct crypto_alg *alg;
+
+	alg = crypto_attr_alg2(rta, &crypto_ahash_type, type, mask);
+	return IS_ERR(alg) ? ERR_CAST(alg) : __crypto_hash_alg_common(alg);
+}
+EXPORT_SYMBOL_GPL(ahash_attr_alg);
+
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Asynchronous cryptographic hash type");
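
Usage note (editorial, not part of the commit above): the hunks export a caller-facing ahash API -- crypto_alloc_ahash(), crypto_ahash_setkey() and crypto_ahash_final/finup/digest() -- with misaligned result buffers bounced transparently through crypto_ahash_op(). A minimal synchronous caller might look like the sketch below; the function name example_hmac_digest, the "hmac(sha256)" algorithm and the abbreviated error handling are illustrative assumptions only.

/*
 * Editorial sketch of a synchronous ahash caller; the helper name and
 * the chosen algorithm are assumptions, not part of crypto/ahash.c.
 */
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/scatterlist.h>
#include <crypto/hash.h>

static int example_hmac_digest(const u8 *key, unsigned int keylen,
			       const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct scatterlist sg;
	int err;

	/* Mask out CRYPTO_ALG_ASYNC so a synchronous implementation is
	 * selected and the calls below complete before returning. */
	tfm = crypto_alloc_ahash("hmac(sha256)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_ahash_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* 'data' must be addressable kernel memory suitable for a
	 * scatterlist; if 'out' violates the alignmask, the new
	 * crypto_ahash_op() path copies the digest through a bounce
	 * buffer on our behalf. */
	sg_init_one(&sg, data, len);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP, NULL, NULL);
	ahash_request_set_crypt(req, &sg, out, len);

	err = crypto_ahash_digest(req);

	ahash_request_free(req);
out_free_tfm:
	crypto_free_ahash(tfm);
	return err;
}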