author     Herbert Xu <herbert@gondor.apana.org.au>   2008-08-31 04:52:18 -0400
committer  Herbert Xu <herbert@gondor.apana.org.au>   2008-12-24 19:01:28 -0500
commit     3b2f6df08258e2875f42bd630eece7e7241a053b (patch)
tree       f46e989b103580a6286d644b3c88892188339e19 /crypto
parent     7b5a080b3c46f0cac71c0d0262634c6517d4ee4f (diff)
crypto: hash - Export shash through ahash
This patch allows shash algorithms to be used through the ahash
interface. This is required before we can convert digest algorithms
over to shash.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/shash.c  143
1 files changed, 143 insertions, 0 deletions
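
The practical effect is that existing ahash users need no changes when the algorithm they request is provided by an shash implementation: the wrapper installed by crypto_init_shash_ops_async() makes the shash behave as a synchronous ahash. The sketch below is not part of the patch; it only illustrates such a caller. The helper name demo_ahash_digest and the "sha1" algorithm string are illustrative assumptions, and error handling is abbreviated.

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Hypothetical caller: hash a linear buffer through the ahash API.
 * Whether "sha1" is backed by a native ahash driver or by a wrapped
 * shash is invisible at this level. */
static int demo_ahash_digest(const u8 *buf, unsigned int len, u8 *out)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_ahash("sha1", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		crypto_free_ahash(tfm);
		return -ENOMEM;
	}

	sg_init_one(&sg, buf, len);
	/* No completion callback: a shash-backed tfm completes synchronously.
	 * A genuinely asynchronous driver could return -EINPROGRESS instead,
	 * so production code would set a callback and wait for completion. */
	ahash_request_set_callback(req, 0, NULL, NULL);
	ahash_request_set_crypt(req, &sg, out, len);

	err = crypto_ahash_digest(req);	/* lands in shash_async_digest() */

	ahash_request_free(req);
	crypto_free_ahash(tfm);
	return err;
}

Note how the patch keeps this allocation-free per request: crypto_shash_ctxsize() reserves room for a single struct crypto_shash pointer in the tfm context, and crt->reqsize makes each ahash_request large enough to embed the shash_desc that the async entry points fill in.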
diff --git a/crypto/shash.c b/crypto/shash.c
index 82ec4bd8d2f5..3f4c713a21ea 100644
--- a/crypto/shash.c
+++ b/crypto/shash.c
@@ -10,6 +10,7 @@
  *
  */
 
+#include <crypto/scatterwalk.h>
 #include <crypto/internal/hash.h>
 #include <linux/err.h>
 #include <linux/kernel.h>
@@ -17,11 +18,15 @@
 #include <linux/slab.h>
 #include <linux/seq_file.h>
 
+static const struct crypto_type crypto_shash_type;
+
 static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm)
 {
 	return container_of(tfm, struct crypto_shash, base);
 }
 
+#include "internal.h"
+
 static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
 				  unsigned int keylen)
 {
@@ -167,6 +172,142 @@ int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
 }
 EXPORT_SYMBOL_GPL(crypto_shash_digest);
 
+static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
+			      unsigned int keylen)
+{
+	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
+
+	return crypto_shash_setkey(*ctx, key, keylen);
+}
+
+static int shash_async_init(struct ahash_request *req)
+{
+	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
+	struct shash_desc *desc = ahash_request_ctx(req);
+
+	desc->tfm = *ctx;
+	desc->flags = req->base.flags;
+
+	return crypto_shash_init(desc);
+}
+
+static int shash_async_update(struct ahash_request *req)
+{
+	struct shash_desc *desc = ahash_request_ctx(req);
+	struct crypto_hash_walk walk;
+	int nbytes;
+
+	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
+	     nbytes = crypto_hash_walk_done(&walk, nbytes))
+		nbytes = crypto_shash_update(desc, walk.data, nbytes);
+
+	return nbytes;
+}
+
+static int shash_async_final(struct ahash_request *req)
+{
+	return crypto_shash_final(ahash_request_ctx(req), req->result);
+}
+
+static int shash_async_digest(struct ahash_request *req)
+{
+	struct scatterlist *sg = req->src;
+	unsigned int offset = sg->offset;
+	unsigned int nbytes = req->nbytes;
+	int err;
+
+	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
+		struct crypto_shash **ctx =
+			crypto_ahash_ctx(crypto_ahash_reqtfm(req));
+		struct shash_desc *desc = ahash_request_ctx(req);
+		void *data;
+
+		desc->tfm = *ctx;
+		desc->flags = req->base.flags;
+
+		data = crypto_kmap(sg_page(sg), 0);
+		err = crypto_shash_digest(desc, data + offset, nbytes,
+					  req->result);
+		crypto_kunmap(data, 0);
+		crypto_yield(desc->flags);
+		goto out;
+	}
+
+	err = shash_async_init(req);
+	if (err)
+		goto out;
+
+	err = shash_async_update(req);
+	if (err)
+		goto out;
+
+	err = shash_async_final(req);
+
+out:
+	return err;
+}
+
+static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
+{
+	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_shash(*ctx);
+}
+
+static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
+{
+	struct crypto_alg *calg = tfm->__crt_alg;
+	struct shash_alg *alg = __crypto_shash_alg(calg);
+	struct ahash_tfm *crt = &tfm->crt_ahash;
+	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
+	struct crypto_shash *shash;
+
+	if (!crypto_mod_get(calg))
+		return -EAGAIN;
+
+	shash = __crypto_shash_cast(crypto_create_tfm(
+		calg, &crypto_shash_type));
+	if (IS_ERR(shash)) {
+		crypto_mod_put(calg);
+		return PTR_ERR(shash);
+	}
+
+	*ctx = shash;
+	tfm->exit = crypto_exit_shash_ops_async;
+
+	crt->init = shash_async_init;
+	crt->update = shash_async_update;
+	crt->final = shash_async_final;
+	crt->digest = shash_async_digest;
+	crt->setkey = shash_async_setkey;
+
+	crt->digestsize = alg->digestsize;
+	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
+
+	return 0;
+}
+
+static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
+{
+	switch (mask & CRYPTO_ALG_TYPE_MASK) {
+	case CRYPTO_ALG_TYPE_AHASH_MASK:
+		return crypto_init_shash_ops_async(tfm);
+	}
+
+	return -EINVAL;
+}
+
+static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
+					 u32 mask)
+{
+	switch (mask & CRYPTO_ALG_TYPE_MASK) {
+	case CRYPTO_ALG_TYPE_AHASH_MASK:
+		return sizeof(struct crypto_shash *);
+	}
+
+	return 0;
+}
+
 static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
 				 const struct crypto_type *frontend)
 {
@@ -194,7 +335,9 @@ static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
 }
 
 static const struct crypto_type crypto_shash_type = {
+	.ctxsize = crypto_shash_ctxsize,
 	.extsize = crypto_shash_extsize,
+	.init = crypto_init_shash_ops,
 	.init_tfm = crypto_shash_init_tfm,
 #ifdef CONFIG_PROC_FS
 	.show = crypto_shash_show,