author     Eric Biggers <ebiggers@google.com>        2019-04-13 00:23:52 -0400
committer  Herbert Xu <herbert@gondor.apana.org.au>  2019-04-18 10:15:04 -0400
commit     0a877e354a2c09435c8aea3fd2188cdef3c149f7
tree       223f992c09f539e5d109f75e79cabd81587a2f14 /crypto
parent     8c3fffe3993b06dd1955a79bd2f0f3b143d259b3
crypto: cryptd - remove ability to instantiate ablkciphers
Remove cryptd_alloc_ablkcipher() and the ability of cryptd to create
algorithms with the deprecated "ablkcipher" type.
This has been unused since commit 0e145b477dea ("crypto: ablk_helper -
remove ablk_helper"). Instead, cryptd_alloc_skcipher() is used.
Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
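
For context, a minimal sketch of the replacement usage via
cryptd_alloc_skcipher(). The cryptd_* calls are the real API declared in
<crypto/cryptd.h>; the "__aes-aesni" algorithm name and the example_init()
wrapper are illustrative assumptions, not part of this patch:

	#include <crypto/cryptd.h>
	#include <linux/crypto.h>
	#include <linux/err.h>

	/* Sketch: allocate the async cryptd wrapper around an internal
	 * skcipher, as callers now do instead of cryptd_alloc_ablkcipher().
	 * "__aes-aesni" is an assumed example name.
	 */
	static int example_init(void)
	{
		struct cryptd_skcipher *cryptd_tfm;
		struct crypto_skcipher *child;

		/* Requests the "cryptd(__aes-aesni)" instance. */
		cryptd_tfm = cryptd_alloc_skcipher("__aes-aesni",
						   CRYPTO_ALG_INTERNAL,
						   CRYPTO_ALG_INTERNAL);
		if (IS_ERR(cryptd_tfm))
			return PTR_ERR(cryptd_tfm);

		/* Underlying synchronous child transform, replacing the
		 * removed cryptd_ablkcipher_child(). */
		child = cryptd_skcipher_child(cryptd_tfm);
		(void)child;

		cryptd_free_skcipher(cryptd_tfm);
		return 0;
	}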
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/cryptd.c  249
1 file changed, 0 insertions(+), 249 deletions(-)
diff --git a/crypto/cryptd.c b/crypto/cryptd.c
index 5640e5db7bdb..42533cf80acc 100644
--- a/crypto/cryptd.c
+++ b/crypto/cryptd.c
@@ -65,15 +65,6 @@ struct aead_instance_ctx {
 	struct cryptd_queue *queue;
 };
 
-struct cryptd_blkcipher_ctx {
-	atomic_t refcnt;
-	struct crypto_blkcipher *child;
-};
-
-struct cryptd_blkcipher_request_ctx {
-	crypto_completion_t complete;
-};
-
 struct cryptd_skcipher_ctx {
 	atomic_t refcnt;
 	struct crypto_sync_skcipher *child;
@@ -216,129 +207,6 @@ static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
 	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
 }
 
-static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
-				   const u8 *key, unsigned int keylen)
-{
-	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
-	struct crypto_blkcipher *child = ctx->child;
-	int err;
-
-	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
-	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
-					  CRYPTO_TFM_REQ_MASK);
-	err = crypto_blkcipher_setkey(child, key, keylen);
-	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
-					    CRYPTO_TFM_RES_MASK);
-	return err;
-}
-
-static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
-				   struct crypto_blkcipher *child,
-				   int err,
-				   int (*crypt)(struct blkcipher_desc *desc,
-						struct scatterlist *dst,
-						struct scatterlist *src,
-						unsigned int len))
-{
-	struct cryptd_blkcipher_request_ctx *rctx;
-	struct cryptd_blkcipher_ctx *ctx;
-	struct crypto_ablkcipher *tfm;
-	struct blkcipher_desc desc;
-	int refcnt;
-
-	rctx = ablkcipher_request_ctx(req);
-
-	if (unlikely(err == -EINPROGRESS))
-		goto out;
-
-	desc.tfm = child;
-	desc.info = req->info;
-	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
-
-	err = crypt(&desc, req->dst, req->src, req->nbytes);
-
-	req->base.complete = rctx->complete;
-
-out:
-	tfm = crypto_ablkcipher_reqtfm(req);
-	ctx = crypto_ablkcipher_ctx(tfm);
-	refcnt = atomic_read(&ctx->refcnt);
-
-	local_bh_disable();
-	rctx->complete(&req->base, err);
-	local_bh_enable();
-
-	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
-		crypto_free_ablkcipher(tfm);
-}
-
-static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
-{
-	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
-	struct crypto_blkcipher *child = ctx->child;
-
-	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
-			       crypto_blkcipher_crt(child)->encrypt);
-}
-
-static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
-{
-	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
-	struct crypto_blkcipher *child = ctx->child;
-
-	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
-			       crypto_blkcipher_crt(child)->decrypt);
-}
-
-static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
-				    crypto_completion_t compl)
-{
-	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
-	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
-	struct cryptd_queue *queue;
-
-	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
-	rctx->complete = req->base.complete;
-	req->base.complete = compl;
-
-	return cryptd_enqueue_request(queue, &req->base);
-}
-
-static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
-{
-	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
-}
-
-static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
-{
-	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
-}
-
-static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
-{
-	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
-	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
-	struct crypto_spawn *spawn = &ictx->spawn;
-	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
-	struct crypto_blkcipher *cipher;
-
-	cipher = crypto_spawn_blkcipher(spawn);
-	if (IS_ERR(cipher))
-		return PTR_ERR(cipher);
-
-	ctx->child = cipher;
-	tfm->crt_ablkcipher.reqsize =
-		sizeof(struct cryptd_blkcipher_request_ctx);
-	return 0;
-}
-
-static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
-{
-	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
-
-	crypto_free_blkcipher(ctx->child);
-}
-
 static int cryptd_init_instance(struct crypto_instance *inst,
 				struct crypto_alg *alg)
 {
@@ -382,67 +250,6 @@ out_free_inst:
 	goto out;
 }
 
-static int cryptd_create_blkcipher(struct crypto_template *tmpl,
-				   struct rtattr **tb,
-				   struct cryptd_queue *queue)
-{
-	struct cryptd_instance_ctx *ctx;
-	struct crypto_instance *inst;
-	struct crypto_alg *alg;
-	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
-	u32 mask = CRYPTO_ALG_TYPE_MASK;
-	int err;
-
-	cryptd_check_internal(tb, &type, &mask);
-
-	alg = crypto_get_attr_alg(tb, type, mask);
-	if (IS_ERR(alg))
-		return PTR_ERR(alg);
-
-	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
-	err = PTR_ERR(inst);
-	if (IS_ERR(inst))
-		goto out_put_alg;
-
-	ctx = crypto_instance_ctx(inst);
-	ctx->queue = queue;
-
-	err = crypto_init_spawn(&ctx->spawn, alg, inst,
-				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
-	if (err)
-		goto out_free_inst;
-
-	type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
-	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
-		type |= CRYPTO_ALG_INTERNAL;
-	inst->alg.cra_flags = type;
-	inst->alg.cra_type = &crypto_ablkcipher_type;
-
-	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
-	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
-	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;
-
-	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);
-
-	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
-	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;
-
-	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
-	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
-	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;
-
-	err = crypto_register_instance(tmpl, inst);
-	if (err) {
-		crypto_drop_spawn(&ctx->spawn);
-out_free_inst:
-		kfree(inst);
-	}
-
-out_put_alg:
-	crypto_mod_put(alg);
-	return err;
-}
-
 static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
 				  const u8 *key, unsigned int keylen)
 {
@@ -1118,10 +925,6 @@ static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
 
 	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
 	case CRYPTO_ALG_TYPE_BLKCIPHER:
-		if ((algt->type & CRYPTO_ALG_TYPE_MASK) ==
-		    CRYPTO_ALG_TYPE_BLKCIPHER)
-			return cryptd_create_blkcipher(tmpl, tb, &queue);
-
 		return cryptd_create_skcipher(tmpl, tb, &queue);
 	case CRYPTO_ALG_TYPE_DIGEST:
 		return cryptd_create_hash(tmpl, tb, &queue);
@@ -1160,58 +963,6 @@ static struct crypto_template cryptd_tmpl = {
 	.module = THIS_MODULE,
 };
 
-struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
-						  u32 type, u32 mask)
-{
-	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
-	struct cryptd_blkcipher_ctx *ctx;
-	struct crypto_tfm *tfm;
-
-	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
-		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
-		return ERR_PTR(-EINVAL);
-	type = crypto_skcipher_type(type);
-	mask &= ~CRYPTO_ALG_TYPE_MASK;
-	mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
-	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
-	if (IS_ERR(tfm))
-		return ERR_CAST(tfm);
-	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
-		crypto_free_tfm(tfm);
-		return ERR_PTR(-EINVAL);
-	}
-
-	ctx = crypto_tfm_ctx(tfm);
-	atomic_set(&ctx->refcnt, 1);
-
-	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
-}
-EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
-
-struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
-{
-	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
-	return ctx->child;
-}
-EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);
-
-bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm)
-{
-	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
-
-	return atomic_read(&ctx->refcnt) - 1;
-}
-EXPORT_SYMBOL_GPL(cryptd_ablkcipher_queued);
-
-void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
-{
-	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
-
-	if (atomic_dec_and_test(&ctx->refcnt))
-		crypto_free_ablkcipher(&tfm->base);
-}
-EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
-
 struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
 					      u32 type, u32 mask)
 {
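
For reference, each removed ablkcipher entry point has a direct skcipher
counterpart that cryptd continues to export, as declared in
include/crypto/cryptd.h at the time of this commit:

	/* Replaces cryptd_alloc_ablkcipher(). */
	struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
						      u32 type, u32 mask);
	/* Replaces cryptd_ablkcipher_child(). */
	struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
	/* Replaces cryptd_ablkcipher_queued(): true if requests are pending. */
	bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
	/* Replaces cryptd_free_ablkcipher(). */
	void cryptd_free_skcipher(struct cryptd_skcipher *tfm);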