path: root/crypto/ctr.c
author     Marcelo Cerri <marcelo.cerri@canonical.com>    2017-02-27 07:38:26 -0500
committer  Herbert Xu <herbert@gondor.apana.org.au>       2017-03-09 05:34:39 -0500
commit     d2c2a85cfe829f9d0736dba567edc86ba8524fb2 (patch)
tree       d6e708d6dca58c5660b67c458594fde53f35e059 /crypto/ctr.c
parent     e6c2e65c70a6f606ea764f301e4024c85e0cd7a8 (diff)
crypto: ctr - Propagate NEED_FALLBACK bit
When requesting a fallback algorithm, we should propagate the NEED_FALLBACK bit when searching for the underlying algorithm.

This prevents drivers from allocating unnecessary fallbacks that are never called. For instance, the vmx-crypto driver currently uses the following chain of calls when invoking its fallback implementation:

p8_aes_ctr -> ctr(p8_aes) -> aes-generic

However, p8_aes always delegates its calls to aes-generic. With this patch, p8_aes_ctr can use ctr(aes-generic) directly as its fallback. The same applies to aes_s390.

Signed-off-by: Marcelo Henrique Cerri <marcelo.cerri@canonical.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
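To illustrate the effect described above (an editorial sketch, not part of the patch): a hardware driver that wants a software fallback asks the crypto API for one with CRYPTO_ALG_NEED_FALLBACK set in the mask and cleared in the type, i.e. "give me an implementation that does not itself need a fallback". With this patch the ctr template forwards that requirement to its inner cipher lookup, so the spawn resolves to aes-generic rather than p8_aes. A minimal driver-side sketch, using a hypothetical helper name alloc_ctr_fallback() (the exact call in vmx-crypto or aes_s390 may differ):

/* Editorial sketch only -- not the in-tree driver code. */
#include <linux/crypto.h>
#include <crypto/skcipher.h>

static struct crypto_skcipher *alloc_ctr_fallback(void)
{
	/*
	 * type = 0, mask = CRYPTO_ALG_NEED_FALLBACK: request a ctr(aes)
	 * implementation that does not itself need a fallback.  With the
	 * NEED_FALLBACK bit now propagated by the ctr template, this
	 * resolves to ctr(aes-generic) instead of ctr(p8_aes).
	 */
	return crypto_alloc_skcipher("ctr(aes)", 0, CRYPTO_ALG_NEED_FALLBACK);
}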
Diffstat (limited to 'crypto/ctr.c')
-rw-r--r--   crypto/ctr.c   23
1 file changed, 18 insertions(+), 5 deletions(-)
diff --git a/crypto/ctr.c b/crypto/ctr.c
index a4f4a8983169..477d9226ccaa 100644
--- a/crypto/ctr.c
+++ b/crypto/ctr.c
@@ -181,15 +181,24 @@ static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
 static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
 {
 	struct crypto_instance *inst;
+	struct crypto_attr_type *algt;
 	struct crypto_alg *alg;
+	u32 mask;
 	int err;
 
 	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
 	if (err)
 		return ERR_PTR(err);
 
-	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
-				  CRYPTO_ALG_TYPE_MASK);
+	algt = crypto_get_attr_type(tb);
+	if (IS_ERR(algt))
+		return ERR_CAST(algt);
+
+	mask = CRYPTO_ALG_TYPE_MASK |
+		crypto_requires_off(algt->type, algt->mask,
+				    CRYPTO_ALG_NEED_FALLBACK);
+
+	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER, mask);
 	if (IS_ERR(alg))
 		return ERR_CAST(alg);
 
@@ -350,6 +359,8 @@ static int crypto_rfc3686_create(struct crypto_template *tmpl,
 	struct skcipher_alg *alg;
 	struct crypto_skcipher_spawn *spawn;
 	const char *cipher_name;
+	u32 mask;
+
 	int err;
 
 	algt = crypto_get_attr_type(tb);
@@ -367,12 +378,14 @@ static int crypto_rfc3686_create(struct crypto_template *tmpl,
 	if (!inst)
 		return -ENOMEM;
 
+	mask = crypto_requires_sync(algt->type, algt->mask) |
+		crypto_requires_off(algt->type, algt->mask,
+				     CRYPTO_ALG_NEED_FALLBACK);
+
 	spawn = skcipher_instance_ctx(inst);
 
 	crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
-	err = crypto_grab_skcipher(spawn, cipher_name, 0,
-				   crypto_requires_sync(algt->type,
-							algt->mask));
+	err = crypto_grab_skcipher(spawn, cipher_name, 0, mask);
 	if (err)
 		goto err_free_inst;
 
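For reference, the mask arithmetic in both hunks relies on the crypto_requires_off() helper added in the same series. Its assumed bit manipulation can be reproduced in a small stand-alone sketch (constant values copied from include/linux/crypto.h of that era; treat both the helper body and the values as illustrative):

/* Stand-alone sketch of the assumed crypto_requires_off() semantics. */
#include <stdint.h>
#include <stdio.h>

#define CRYPTO_ALG_TYPE_MASK     0x0000000f
#define CRYPTO_ALG_NEED_FALLBACK 0x00000100

/* Returns 'off' iff the caller's mask selects the bit and type leaves it clear. */
static uint32_t crypto_requires_off(uint32_t type, uint32_t mask, uint32_t off)
{
	return (type ^ off) & mask & off;
}

int main(void)
{
	/* A fallback request: type = 0, mask = CRYPTO_ALG_NEED_FALLBACK. */
	uint32_t type = 0, mask = CRYPTO_ALG_NEED_FALLBACK;

	/* As in crypto_ctr_alloc(): the requirement is folded into the
	 * lookup mask for the inner cipher, excluding NEED_FALLBACK
	 * implementations such as p8_aes. */
	uint32_t lookup_mask = CRYPTO_ALG_TYPE_MASK |
			       crypto_requires_off(type, mask,
						   CRYPTO_ALG_NEED_FALLBACK);

	printf("inner cipher lookup mask: 0x%08x\n", lookup_mask);
	return 0;
}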