author		Herbert Xu <herbert@gondor.apana.org.au>	2009-07-15 00:40:40 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2009-07-15 00:40:40 -0400
commit		66f6ce5e52f2f209d5bf1f06167cec888f4f4c13 (patch)
tree		aa7b21af00649d2f458b72ebfba071816cb340c3 /crypto
parent		093900c2b964da73daf234374225b5ce5d49f941 (diff)
crypto: ahash - Add unaligned handling and default operations
This patch exports the finup operation where available and adds a
default finup operation for ahash. The final, finup and digest
operations will now also handle unaligned result pointers by copying
the result through an aligned buffer. Finally, the export/import
operations will now be exported too.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
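For context, a caller can now drive init/update/finup through the ahash API
and pass a result pointer that is not aligned to the algorithm's alignmask;
the core bounces the digest through an aligned buffer and copies it back.
A minimal caller-side sketch, assuming a synchronous completion and using
"sha1" plus the local names purely for illustration (none of this is part of
the patch itself):

	#include <linux/crypto.h>
	#include <linux/err.h>
	#include <linux/scatterlist.h>
	#include <crypto/hash.h>

	/* Hash one buffer with init followed by finup (update-and-final).
	 * 'out' may be unaligned; the ahash core now copies the result. */
	static int example_hash(const u8 *buf, unsigned int len, u8 *out)
	{
		struct crypto_ahash *tfm;
		struct ahash_request *req;
		struct scatterlist sg;
		int err;

		tfm = crypto_alloc_ahash("sha1", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		req = ahash_request_alloc(tfm, GFP_KERNEL);
		if (!req) {
			crypto_free_ahash(tfm);
			return -ENOMEM;
		}

		sg_init_one(&sg, buf, len);
		ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
					   NULL, NULL);
		ahash_request_set_crypt(req, &sg, out, len);

		/* crypto_ahash_finup() falls back to the new ahash_def_finup()
		 * when the driver does not provide its own finup. */
		err = crypto_ahash_init(req) ?: crypto_ahash_finup(req);

		ahash_request_free(req);
		crypto_free_ahash(tfm);
		return err;
	}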
Diffstat (limited to 'crypto')
-rw-r--r--	crypto/ahash.c	204
-rw-r--r--	crypto/shash.c	52
2 files changed, 249 insertions(+), 7 deletions(-)
diff --git a/crypto/ahash.c b/crypto/ahash.c
index a196055b73d3..ac0798d2824e 100644
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -24,6 +24,13 @@
 
 #include "internal.h"
 
+struct ahash_request_priv {
+	crypto_completion_t complete;
+	void *data;
+	u8 *result;
+	void *ubuf[] CRYPTO_MINALIGN_ATTR;
+};
+
 static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
 {
 	return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
@@ -156,7 +163,7 @@ static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
 	return ret;
 }
 
-static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
+int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
 			unsigned int keylen)
 {
 	struct ahash_alg *ahash = crypto_ahash_alg(tfm);
@@ -167,6 +174,7 @@ static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
 
 	return ahash->setkey(tfm, key, keylen);
 }
+EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
 
 static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
 			  unsigned int keylen)
@@ -174,19 +182,209 @@ static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
 	return -ENOSYS;
 }
 
+static inline unsigned int ahash_align_buffer_size(unsigned len,
+						   unsigned long mask)
+{
+	return len + (mask & ~(crypto_tfm_ctx_alignment() - 1));
+}
+
+static void ahash_op_unaligned_finish(struct ahash_request *req, int err)
+{
+	struct ahash_request_priv *priv = req->priv;
+
+	if (err == -EINPROGRESS)
+		return;
+
+	if (!err)
+		memcpy(priv->result, req->result,
+		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
+
+	kzfree(priv);
+}
+
+static void ahash_op_unaligned_done(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	ahash_op_unaligned_finish(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_op_unaligned(struct ahash_request *req,
+			      int (*op)(struct ahash_request *))
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+	unsigned int ds = crypto_ahash_digestsize(tfm);
+	struct ahash_request_priv *priv;
+	int err;
+
+	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
+		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
+		       GFP_ATOMIC : GFP_ATOMIC);
+	if (!priv)
+		return -ENOMEM;
+
+	priv->result = req->result;
+	priv->complete = req->base.complete;
+	priv->data = req->base.data;
+
+	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
+	req->base.complete = ahash_op_unaligned_done;
+	req->base.data = req;
+	req->priv = priv;
+
+	err = op(req);
+	ahash_op_unaligned_finish(req, err);
+
+	return err;
+}
+
+static int crypto_ahash_op(struct ahash_request *req,
+			   int (*op)(struct ahash_request *))
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+
+	if ((unsigned long)req->result & alignmask)
+		return ahash_op_unaligned(req, op);
+
+	return op(req);
+}
+
+int crypto_ahash_final(struct ahash_request *req)
+{
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_final);
+
+int crypto_ahash_finup(struct ahash_request *req)
+{
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_finup);
+
+int crypto_ahash_digest(struct ahash_request *req)
+{
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->digest);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_digest);
+
+static void ahash_def_finup_finish2(struct ahash_request *req, int err)
+{
+	struct ahash_request_priv *priv = req->priv;
+
+	if (err == -EINPROGRESS)
+		return;
+
+	if (!err)
+		memcpy(priv->result, req->result,
+		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
+
+	kzfree(priv);
+}
+
+static void ahash_def_finup_done2(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	ahash_def_finup_finish2(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_def_finup_finish1(struct ahash_request *req, int err)
+{
+	if (err)
+		goto out;
+
+	req->base.complete = ahash_def_finup_done2;
+	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+	err = crypto_ahash_reqtfm(req)->final(req);
+
+out:
+	ahash_def_finup_finish2(req, err);
+	return err;
+}
+
+static void ahash_def_finup_done1(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	err = ahash_def_finup_finish1(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_def_finup(struct ahash_request *req)
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+	unsigned int ds = crypto_ahash_digestsize(tfm);
+	struct ahash_request_priv *priv;
+
+	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
+		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
+		       GFP_ATOMIC : GFP_ATOMIC);
+	if (!priv)
+		return -ENOMEM;
+
+	priv->result = req->result;
+	priv->complete = req->base.complete;
+	priv->data = req->base.data;
+
+	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
+	req->base.complete = ahash_def_finup_done1;
+	req->base.data = req;
+	req->priv = priv;
+
+	return ahash_def_finup_finish1(req, tfm->update(req));
+}
+
+static int ahash_no_export(struct ahash_request *req, void *out)
+{
+	return -ENOSYS;
+}
+
+static int ahash_no_import(struct ahash_request *req, const void *in)
+{
+	return -ENOSYS;
+}
+
 static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
 {
 	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
 	struct ahash_alg *alg = crypto_ahash_alg(hash);
 
+	hash->setkey = ahash_nosetkey;
+	hash->export = ahash_no_export;
+	hash->import = ahash_no_import;
+
 	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
 		return crypto_init_shash_ops_async(tfm);
 
 	hash->init = alg->init;
 	hash->update = alg->update;
 	hash->final = alg->final;
+	hash->finup = alg->finup ?: ahash_def_finup;
 	hash->digest = alg->digest;
-	hash->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey;
+
+	if (alg->setkey)
+		hash->setkey = alg->setkey;
+	if (alg->export)
+		hash->export = alg->export;
+	if (alg->import)
+		hash->import = alg->import;
 
 	return 0;
 }
diff --git a/crypto/shash.c b/crypto/shash.c
index 171c8f052f89..834d9d24cdae 100644
--- a/crypto/shash.c
+++ b/crypto/shash.c
@@ -235,6 +235,33 @@ static int shash_async_final(struct ahash_request *req)
 	return crypto_shash_final(ahash_request_ctx(req), req->result);
 }
 
+int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
+{
+	struct crypto_hash_walk walk;
+	int nbytes;
+
+	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
+	     nbytes = crypto_hash_walk_done(&walk, nbytes))
+		nbytes = crypto_hash_walk_last(&walk) ?
+			 crypto_shash_finup(desc, walk.data, nbytes,
+					    req->result) :
+			 crypto_shash_update(desc, walk.data, nbytes);
+
+	return nbytes;
+}
+EXPORT_SYMBOL_GPL(shash_ahash_finup);
+
+static int shash_async_finup(struct ahash_request *req)
+{
+	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
+	struct shash_desc *desc = ahash_request_ctx(req);
+
+	desc->tfm = *ctx;
+	desc->flags = req->base.flags;
+
+	return shash_ahash_finup(req, desc);
+}
+
 int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
 {
 	struct scatterlist *sg = req->src;
@@ -252,8 +279,7 @@ int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
 		crypto_yield(desc->flags);
 	} else
 		err = crypto_shash_init(desc) ?:
-		      shash_ahash_update(req, desc) ?:
-		      crypto_shash_final(desc, req->result);
+		      shash_ahash_finup(req, desc);
 
 	return err;
 }
@@ -270,6 +296,16 @@ static int shash_async_digest(struct ahash_request *req)
 	return shash_ahash_digest(req, desc);
 }
 
+static int shash_async_export(struct ahash_request *req, void *out)
+{
+	return crypto_shash_export(ahash_request_ctx(req), out);
+}
+
+static int shash_async_import(struct ahash_request *req, const void *in)
+{
+	return crypto_shash_import(ahash_request_ctx(req), in);
+}
+
 static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
 {
 	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
@@ -280,6 +316,7 @@ static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
 int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
 {
 	struct crypto_alg *calg = tfm->__crt_alg;
+	struct shash_alg *alg = __crypto_shash_alg(calg);
 	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
 	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
 	struct crypto_shash *shash;
@@ -298,9 +335,16 @@ int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
 
 	crt->init = shash_async_init;
 	crt->update = shash_async_update;
 	crt->final = shash_async_final;
+	crt->finup = shash_async_finup;
 	crt->digest = shash_async_digest;
-	crt->setkey = shash_async_setkey;
+
+	if (alg->setkey)
+		crt->setkey = shash_async_setkey;
+	if (alg->export)
+		crt->export = shash_async_export;
+	if (alg->setkey)
+		crt->import = shash_async_import;
 
 	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
 