about summary refs log tree commit diff stats
path: root/crypto/ablkcipher.c
diff options
context:
space:
mode:
authorHerbert Xu <herbert@gondor.apana.org.au>2007-12-03 20:46:48 -0500
committerHerbert Xu <herbert@gondor.apana.org.au>2008-01-10 16:16:46 -0500
commitb9c55aa475599183d0eab6833ea23e70c52dd24b (patch)
tree74e011e9b95291f230d6e7200730e3bc3b9d7153 /crypto/ablkcipher.c
parent806d183aa6cc565d0f6bd2fb7fc6bfb175cc4813 (diff)
[CRYPTO] skcipher: Create default givcipher instances
This patch makes crypto_alloc_ablkcipher/crypto_grab_skcipher always return algorithms that are capable of generating their own IVs through givencrypt and givdecrypt. Each algorithm may specify its default IV generator through the geniv field. For algorithms that do not set the geniv field, the blkcipher layer will pick a default. Currently it's chainiv for synchronous algorithms and eseqiv for asynchronous algorithms. Note that if these wrappers do not work on an algorithm then that algorithm must specify its own geniv or it can't be used at all. Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'crypto/ablkcipher.c')
-rw-r--r--crypto/ablkcipher.c158
1 file changed, 157 insertions(+), 1 deletion(-)
diff --git a/crypto/ablkcipher.c b/crypto/ablkcipher.c
index 092d9659b861..d1df528c8fa3 100644
--- a/crypto/ablkcipher.c
+++ b/crypto/ablkcipher.c
@@ -18,6 +18,8 @@
18#include <linux/init.h> 18#include <linux/init.h>
19#include <linux/kernel.h> 19#include <linux/kernel.h>
20#include <linux/module.h> 20#include <linux/module.h>
21#include <linux/rtnetlink.h>
22#include <linux/sched.h>
21#include <linux/slab.h> 23#include <linux/slab.h>
22#include <linux/seq_file.h> 24#include <linux/seq_file.h>
23 25
@@ -68,6 +70,16 @@ static unsigned int crypto_ablkcipher_ctxsize(struct crypto_alg *alg, u32 type,
68 return alg->cra_ctxsize; 70 return alg->cra_ctxsize;
69} 71}
70 72
73int skcipher_null_givencrypt(struct skcipher_givcrypt_request *req)
74{
75 return crypto_ablkcipher_encrypt(&req->creq);
76}
77
78int skcipher_null_givdecrypt(struct skcipher_givcrypt_request *req)
79{
80 return crypto_ablkcipher_decrypt(&req->creq);
81}
82
71static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type, 83static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type,
72 u32 mask) 84 u32 mask)
73{ 85{
@@ -80,6 +92,10 @@ static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type,
80 crt->setkey = setkey; 92 crt->setkey = setkey;
81 crt->encrypt = alg->encrypt; 93 crt->encrypt = alg->encrypt;
82 crt->decrypt = alg->decrypt; 94 crt->decrypt = alg->decrypt;
95 if (!alg->ivsize) {
96 crt->givencrypt = skcipher_null_givencrypt;
97 crt->givdecrypt = skcipher_null_givdecrypt;
98 }
83 crt->base = __crypto_ablkcipher_cast(tfm); 99 crt->base = __crypto_ablkcipher_cast(tfm);
84 crt->ivsize = alg->ivsize; 100 crt->ivsize = alg->ivsize;
85 101
@@ -163,6 +179,108 @@ const char *crypto_default_geniv(const struct crypto_alg *alg)
163 return alg->cra_flags & CRYPTO_ALG_ASYNC ? "eseqiv" : "chainiv"; 179 return alg->cra_flags & CRYPTO_ALG_ASYNC ? "eseqiv" : "chainiv";
164} 180}
165 181
/*
 * Construct and register the default IV-generator (geniv) instance for
 * @alg, e.g. wrap it in "eseqiv" or "chainiv".
 *
 * Returns -EAGAIN on success so the caller redoes the lookup and picks
 * up the instance registered here; any other value is a real error.
 * Consumes the caller's reference on @alg in all cases.
 */
static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
{
	/* Hand-built rtattr parameter list for the template's alloc(). */
	struct rtattr *tb[3];
	struct {
		struct rtattr attr;
		struct crypto_attr_type data;
	} ptype;
	struct {
		struct rtattr attr;
		struct crypto_attr_alg data;
	} palg;
	struct crypto_template *tmpl;
	struct crypto_instance *inst;
	struct crypto_alg *larval;
	const char *geniv;
	int err;

	/*
	 * Claim the GIVCIPHER slot for this driver name via a larval.
	 * If someone else got there first (not a larval any more), back
	 * off with -EAGAIN and let the caller's re-lookup find it.
	 */
	larval = crypto_larval_lookup(alg->cra_driver_name,
				      CRYPTO_ALG_TYPE_GIVCIPHER,
				      CRYPTO_ALG_TYPE_MASK);
	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto out;

	err = -EAGAIN;
	if (!crypto_is_larval(larval))
		goto drop_larval;

	ptype.attr.rta_len = sizeof(ptype);
	ptype.attr.rta_type = CRYPTOA_TYPE;
	ptype.data.type = type | CRYPTO_ALG_GENIV;
	/* GENIV tells the template that we're making a default geniv. */
	ptype.data.mask = mask | CRYPTO_ALG_GENIV;
	tb[0] = &ptype.attr;

	palg.attr.rta_len = sizeof(palg);
	palg.attr.rta_type = CRYPTOA_ALG;
	/* Must use the exact name to locate ourselves. */
	memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME);
	tb[1] = &palg.attr;

	tb[2] = NULL;

	/* Pick the algorithm's preferred geniv, falling back to the
	 * global default (chainiv/eseqiv) when none was specified. */
	if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		geniv = alg->cra_blkcipher.geniv;
	else
		geniv = alg->cra_ablkcipher.geniv;

	if (!geniv)
		geniv = crypto_default_geniv(alg);

	tmpl = crypto_lookup_template(geniv);
	err = -ENOENT;
	if (!tmpl)
		goto kill_larval;

	inst = tmpl->alloc(tb);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto put_tmpl;

	if ((err = crypto_register_instance(tmpl, inst))) {
		tmpl->free(inst);
		goto put_tmpl;
	}

	/* Redo the lookup to use the instance we just registered. */
	err = -EAGAIN;

	/* goto-based unwind: labels release resources in reverse order
	 * of acquisition; success deliberately falls through here too. */
put_tmpl:
	crypto_tmpl_put(tmpl);
kill_larval:
	crypto_larval_kill(larval);
drop_larval:
	crypto_mod_put(larval);
out:
	crypto_mod_put(alg);
	return err;
}
262
263static struct crypto_alg *crypto_lookup_skcipher(const char *name, u32 type,
264 u32 mask)
265{
266 struct crypto_alg *alg;
267
268 alg = crypto_alg_mod_lookup(name, type, mask);
269 if (IS_ERR(alg))
270 return alg;
271
272 if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
273 CRYPTO_ALG_TYPE_GIVCIPHER)
274 return alg;
275
276 if (!((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
277 CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
278 alg->cra_ablkcipher.ivsize))
279 return alg;
280
281 return ERR_PTR(crypto_givcipher_default(alg, type, mask));
282}
283
166int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name, 284int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
167 u32 type, u32 mask) 285 u32 type, u32 mask)
168{ 286{
@@ -172,7 +290,7 @@ int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
172 type = crypto_skcipher_type(type); 290 type = crypto_skcipher_type(type);
173 mask = crypto_skcipher_mask(mask); 291 mask = crypto_skcipher_mask(mask);
174 292
175 alg = crypto_alg_mod_lookup(name, type, mask); 293 alg = crypto_lookup_skcipher(name, type, mask);
176 if (IS_ERR(alg)) 294 if (IS_ERR(alg))
177 return PTR_ERR(alg); 295 return PTR_ERR(alg);
178 296
@@ -182,5 +300,43 @@ int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
182} 300}
183EXPORT_SYMBOL_GPL(crypto_grab_skcipher); 301EXPORT_SYMBOL_GPL(crypto_grab_skcipher);
184 302
303struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
304 u32 type, u32 mask)
305{
306 struct crypto_tfm *tfm;
307 int err;
308
309 type = crypto_skcipher_type(type);
310 mask = crypto_skcipher_mask(mask);
311
312 for (;;) {
313 struct crypto_alg *alg;
314
315 alg = crypto_lookup_skcipher(alg_name, type, mask);
316 if (IS_ERR(alg)) {
317 err = PTR_ERR(alg);
318 goto err;
319 }
320
321 tfm = __crypto_alloc_tfm(alg, type, mask);
322 if (!IS_ERR(tfm))
323 return __crypto_ablkcipher_cast(tfm);
324
325 crypto_mod_put(alg);
326 err = PTR_ERR(tfm);
327
328err:
329 if (err != -EAGAIN)
330 break;
331 if (signal_pending(current)) {
332 err = -EINTR;
333 break;
334 }
335 }
336
337 return ERR_PTR(err);
338}
339EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher);
340
185MODULE_LICENSE("GPL"); 341MODULE_LICENSE("GPL");
186MODULE_DESCRIPTION("Asynchronous block chaining cipher type"); 342MODULE_DESCRIPTION("Asynchronous block chaining cipher type");