Diffstat (limited to 'crypto/pcrypt.c')
-rw-r--r--   crypto/pcrypt.c | 191
1 file changed, 137 insertions(+), 54 deletions(-)
diff --git a/crypto/pcrypt.c b/crypto/pcrypt.c
index 6036b6de9079..c9662e25595e 100644
--- a/crypto/pcrypt.c
+++ b/crypto/pcrypt.c
@@ -24,12 +24,38 @@
 #include <linux/init.h>
 #include <linux/module.h>
 #include <linux/slab.h>
+#include <linux/notifier.h>
 #include <crypto/pcrypt.h>
 
-static struct padata_instance *pcrypt_enc_padata;
-static struct padata_instance *pcrypt_dec_padata;
-static struct workqueue_struct *encwq;
-static struct workqueue_struct *decwq;
+struct pcrypt_instance {
+        struct padata_instance *pinst;
+        struct workqueue_struct *wq;
+
+        /*
+         * Cpumask for callback CPUs. It should be
+         * equal to the serial cpumask of the corresponding padata instance,
+         * so it is updated when padata notifies us about a serial
+         * cpumask change.
+         *
+         * cb_cpumask is protected by RCU. This fact prevents us from
+         * using cpumask_var_t directly because the actual type of
+         * cpumask_var_t depends on kernel configuration (particularly on
+         * the CONFIG_CPUMASK_OFFSTACK macro). Depending on the configuration
+         * cpumask_var_t may be either a pointer to the struct cpumask
+         * or a variable allocated on the stack. Thus we cannot safely use
+         * cpumask_var_t with RCU operations such as rcu_assign_pointer or
+         * rcu_dereference. So cpumask_var_t is wrapped with struct
+         * pcrypt_cpumask, which makes it possible to use it with RCU.
+         */
+        struct pcrypt_cpumask {
+                cpumask_var_t mask;
+        } *cb_cpumask;
+        struct notifier_block nblock;
+};
+
+static struct pcrypt_instance pencrypt;
+static struct pcrypt_instance pdecrypt;
+
 
 struct pcrypt_instance_ctx {
         struct crypto_spawn spawn;
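[Editor's note, not part of the patch] The comment in the hunk above explains why the cpumask is wrapped in struct pcrypt_cpumask: only the wrapper pointer can safely be handed to rcu_assign_pointer()/rcu_dereference(), because cpumask_var_t itself may be an on-stack array when CONFIG_CPUMASK_OFFSTACK is disabled. A minimal stand-alone sketch of that publish/read pattern follows; the wrapper name mask_wrapper, the global active_mask, and the helpers update_mask()/cpu_is_allowed() are made up for illustration and do not appear in the patch.

/* Illustrative sketch only -- not from the patch. Shows the RCU
 * publish/read pattern used for the pcrypt callback cpumask.
 */
#include <linux/cpumask.h>
#include <linux/rcupdate.h>
#include <linux/slab.h>

struct mask_wrapper {
        cpumask_var_t mask;     /* may be a pointer or an array, hence the wrapper */
};

static struct mask_wrapper *active_mask;        /* hypothetical RCU-protected global */

/* Writer side: allocate a new wrapper, copy the new mask into it, publish it
 * with rcu_assign_pointer(), wait for softirq-context readers with
 * synchronize_rcu_bh(), then free the old wrapper. Updates are assumed to be
 * serialized by the caller, as they are in pcrypt's cpumask notifier. */
static int update_mask(const struct cpumask *src)
{
        struct mask_wrapper *new_mask, *old_mask;

        new_mask = kmalloc(sizeof(*new_mask), GFP_KERNEL);
        if (!new_mask)
                return -ENOMEM;
        if (!alloc_cpumask_var(&new_mask->mask, GFP_KERNEL)) {
                kfree(new_mask);
                return -ENOMEM;
        }
        cpumask_copy(new_mask->mask, src);

        old_mask = active_mask;
        rcu_assign_pointer(active_mask, new_mask);
        synchronize_rcu_bh();

        if (old_mask) {
                free_cpumask_var(old_mask->mask);
                kfree(old_mask);
        }
        return 0;
}

/* Reader side: dereference the wrapper under rcu_read_lock_bh(), the same
 * discipline pcrypt_do_parallel() follows in the next hunk. */
static bool cpu_is_allowed(unsigned int cpu)
{
        struct mask_wrapper *m;
        bool ret;

        rcu_read_lock_bh();
        m = rcu_dereference(active_mask);
        ret = cpumask_test_cpu(cpu, m->mask);
        rcu_read_unlock_bh();

        return ret;
}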
@@ -42,25 +68,29 @@ struct pcrypt_aead_ctx {
 };
 
 static int pcrypt_do_parallel(struct padata_priv *padata, unsigned int *cb_cpu,
-                              struct padata_instance *pinst)
+                              struct pcrypt_instance *pcrypt)
 {
         unsigned int cpu_index, cpu, i;
+        struct pcrypt_cpumask *cpumask;
 
         cpu = *cb_cpu;
 
-        if (cpumask_test_cpu(cpu, cpu_active_mask))
+        rcu_read_lock_bh();
+        cpumask = rcu_dereference(pcrypt->cb_cpumask);
+        if (cpumask_test_cpu(cpu, cpumask->mask))
                 goto out;
 
-        cpu_index = cpu % cpumask_weight(cpu_active_mask);
+        cpu_index = cpu % cpumask_weight(cpumask->mask);
 
-        cpu = cpumask_first(cpu_active_mask);
+        cpu = cpumask_first(cpumask->mask);
         for (i = 0; i < cpu_index; i++)
-                cpu = cpumask_next(cpu, cpu_active_mask);
+                cpu = cpumask_next(cpu, cpumask->mask);
 
         *cb_cpu = cpu;
 
 out:
-        return padata_do_parallel(pinst, padata, cpu);
+        rcu_read_unlock_bh();
+        return padata_do_parallel(pcrypt->pinst, padata, cpu);
 }
 
 static int pcrypt_aead_setkey(struct crypto_aead *parent,
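[Editor's note, not part of the patch] When the requested callback CPU is not in the serial cpumask, pcrypt_do_parallel() above remaps it to the (cpu % weight)-th entry of that mask. A minimal stand-alone sketch of just that selection logic; the helper name remap_cb_cpu() is made up for illustration.

/* Illustrative sketch only -- not from the patch. Remaps a requested CPU
 * onto a cpumask the same way pcrypt_do_parallel() does. Example: with
 * mask = {1,3,5} (weight 3) and cpu = 7, cpu_index = 7 % 3 = 1, so the
 * second CPU in the mask, CPU 3, is returned.
 */
#include <linux/cpumask.h>

static unsigned int remap_cb_cpu(unsigned int cpu, const struct cpumask *mask)
{
        unsigned int cpu_index, i;

        if (cpumask_test_cpu(cpu, mask))
                return cpu;             /* already a valid callback CPU */

        cpu_index = cpu % cpumask_weight(mask);

        cpu = cpumask_first(mask);      /* walk to the cpu_index-th CPU */
        for (i = 0; i < cpu_index; i++)
                cpu = cpumask_next(cpu, mask);

        return cpu;
}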
@@ -142,7 +172,7 @@ static int pcrypt_aead_encrypt(struct aead_request *req)
                                   req->cryptlen, req->iv);
         aead_request_set_assoc(creq, req->assoc, req->assoclen);
 
-        err = pcrypt_do_parallel(padata, &ctx->cb_cpu, pcrypt_enc_padata);
+        err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pencrypt);
         if (!err)
                 return -EINPROGRESS;
 
@@ -184,7 +214,7 @@ static int pcrypt_aead_decrypt(struct aead_request *req)
                                   req->cryptlen, req->iv);
         aead_request_set_assoc(creq, req->assoc, req->assoclen);
 
-        err = pcrypt_do_parallel(padata, &ctx->cb_cpu, pcrypt_dec_padata);
+        err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pdecrypt);
         if (!err)
                 return -EINPROGRESS;
 
@@ -228,7 +258,7 @@ static int pcrypt_aead_givencrypt(struct aead_givcrypt_request *req)
         aead_givcrypt_set_assoc(creq, areq->assoc, areq->assoclen);
         aead_givcrypt_set_giv(creq, req->giv, req->seq);
 
-        err = pcrypt_do_parallel(padata, &ctx->cb_cpu, pcrypt_enc_padata);
+        err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pencrypt);
         if (!err)
                 return -EINPROGRESS;
 
@@ -370,6 +400,88 @@ static void pcrypt_free(struct crypto_instance *inst)
         kfree(inst);
 }
 
+static int pcrypt_cpumask_change_notify(struct notifier_block *self,
+                                        unsigned long val, void *data)
+{
+        struct pcrypt_instance *pcrypt;
+        struct pcrypt_cpumask *new_mask, *old_mask;
+
+        if (!(val & PADATA_CPU_SERIAL))
+                return 0;
+
+        pcrypt = container_of(self, struct pcrypt_instance, nblock);
+        new_mask = kmalloc(sizeof(*new_mask), GFP_KERNEL);
+        if (!new_mask)
+                return -ENOMEM;
+        if (!alloc_cpumask_var(&new_mask->mask, GFP_KERNEL)) {
+                kfree(new_mask);
+                return -ENOMEM;
+        }
+
+        old_mask = pcrypt->cb_cpumask;
+
+        padata_get_cpumask(pcrypt->pinst, PADATA_CPU_SERIAL, new_mask->mask);
+        rcu_assign_pointer(pcrypt->cb_cpumask, new_mask);
+        synchronize_rcu_bh();
+
+        free_cpumask_var(old_mask->mask);
+        kfree(old_mask);
+        return 0;
+}
+
+static int __pcrypt_init_instance(struct pcrypt_instance *pcrypt,
+                                  const char *name)
+{
+        int ret = -ENOMEM;
+        struct pcrypt_cpumask *mask;
+
+        pcrypt->wq = create_workqueue(name);
+        if (!pcrypt->wq)
+                goto err;
+
+        pcrypt->pinst = padata_alloc(pcrypt->wq);
+        if (!pcrypt->pinst)
+                goto err_destroy_workqueue;
+
+        mask = kmalloc(sizeof(*mask), GFP_KERNEL);
+        if (!mask)
+                goto err_free_padata;
+        if (!alloc_cpumask_var(&mask->mask, GFP_KERNEL)) {
+                kfree(mask);
+                goto err_free_padata;
+        }
+
+        padata_get_cpumask(pcrypt->pinst, PADATA_CPU_SERIAL, mask->mask);
+        rcu_assign_pointer(pcrypt->cb_cpumask, mask);
+
+        pcrypt->nblock.notifier_call = pcrypt_cpumask_change_notify;
+        ret = padata_register_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
+        if (ret)
+                goto err_free_cpumask;
+
+        return ret;
+err_free_cpumask:
+        free_cpumask_var(mask->mask);
+        kfree(mask);
+err_free_padata:
+        padata_free(pcrypt->pinst);
+err_destroy_workqueue:
+        destroy_workqueue(pcrypt->wq);
+err:
+        return ret;
+}
+
+static void __pcrypt_deinit_instance(struct pcrypt_instance *pcrypt)
+{
+        free_cpumask_var(pcrypt->cb_cpumask->mask);
+        kfree(pcrypt->cb_cpumask);
+
+        padata_stop(pcrypt->pinst);
+        padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
+        destroy_workqueue(pcrypt->wq);
+        padata_free(pcrypt->pinst);
+}
+
 static struct crypto_template pcrypt_tmpl = {
         .name = "pcrypt",
         .alloc = pcrypt_alloc,
@@ -379,60 +491,31 @@ static struct crypto_template pcrypt_tmpl = {
 
 static int __init pcrypt_init(void)
 {
-        int err = -ENOMEM;
-        encwq = create_workqueue("pencrypt");
-        if (!encwq)
-                goto err;
-
-        decwq = create_workqueue("pdecrypt");
-        if (!decwq)
-                goto err_destroy_encwq;
-
-
-        pcrypt_enc_padata = padata_alloc(cpu_possible_mask, encwq);
-        if (!pcrypt_enc_padata)
-                goto err_destroy_decwq;
-
-        pcrypt_dec_padata = padata_alloc(cpu_possible_mask, decwq);
-        if (!pcrypt_dec_padata)
-                goto err_free_enc_padata;
+        int err;
 
-        err = padata_start(pcrypt_enc_padata);
+        err = __pcrypt_init_instance(&pencrypt, "pencrypt");
         if (err)
-                goto err_free_dec_padata;
+                goto err;
 
-        err = padata_start(pcrypt_dec_padata);
+        err = __pcrypt_init_instance(&pdecrypt, "pdecrypt");
         if (err)
-                goto err_free_dec_padata;
-
-        return crypto_register_template(&pcrypt_tmpl);
-
-err_free_dec_padata:
-        padata_free(pcrypt_dec_padata);
+                goto err_deinit_pencrypt;
 
-err_free_enc_padata:
-        padata_free(pcrypt_enc_padata);
+        padata_start(pencrypt.pinst);
+        padata_start(pdecrypt.pinst);
 
-err_destroy_decwq:
-        destroy_workqueue(decwq);
-
-err_destroy_encwq:
-        destroy_workqueue(encwq);
+        return crypto_register_template(&pcrypt_tmpl);
 
+err_deinit_pencrypt:
+        __pcrypt_deinit_instance(&pencrypt);
 err:
         return err;
 }
 
 static void __exit pcrypt_exit(void)
 {
-        padata_stop(pcrypt_enc_padata);
-        padata_stop(pcrypt_dec_padata);
-
-        destroy_workqueue(encwq);
-        destroy_workqueue(decwq);
-
-        padata_free(pcrypt_enc_padata);
-        padata_free(pcrypt_dec_padata);
+        __pcrypt_deinit_instance(&pencrypt);
+        __pcrypt_deinit_instance(&pdecrypt);
 
         crypto_unregister_template(&pcrypt_tmpl);
 }