about summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorPaulo Flabiano Smorigo <pfsmorigo@linux.vnet.ibm.com>2017-03-01 09:00:00 -0500
committerHerbert Xu <herbert@gondor.apana.org.au>2017-03-02 05:57:31 -0500
commit5839f555fa576be57371686265206398d9ea1480 (patch)
tree991ef9e9666bdd256618d02c884f5eb18c641f6b
parentc96d0a1c47abd5c4fa544dcedb5fac4d020ac58b (diff)
crypto: vmx - Use skcipher for xts fallback
Cc: stable@vger.kernel.org # 4.10
Signed-off-by: Paulo Flabiano Smorigo <pfsmorigo@linux.vnet.ibm.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-rw-r--r--  drivers/crypto/vmx/aes_xts.c  32
1 file changed, 16 insertions(+), 16 deletions(-)
diff --git a/drivers/crypto/vmx/aes_xts.c b/drivers/crypto/vmx/aes_xts.c
index 24353ec336c5..6adc9290557a 100644
--- a/drivers/crypto/vmx/aes_xts.c
+++ b/drivers/crypto/vmx/aes_xts.c
@@ -28,11 +28,12 @@
28#include <crypto/aes.h> 28#include <crypto/aes.h>
29#include <crypto/scatterwalk.h> 29#include <crypto/scatterwalk.h>
30#include <crypto/xts.h> 30#include <crypto/xts.h>
31#include <crypto/skcipher.h>
31 32
32#include "aesp8-ppc.h" 33#include "aesp8-ppc.h"
33 34
34struct p8_aes_xts_ctx { 35struct p8_aes_xts_ctx {
35 struct crypto_blkcipher *fallback; 36 struct crypto_skcipher *fallback;
36 struct aes_key enc_key; 37 struct aes_key enc_key;
37 struct aes_key dec_key; 38 struct aes_key dec_key;
38 struct aes_key tweak_key; 39 struct aes_key tweak_key;
@@ -41,7 +42,7 @@ struct p8_aes_xts_ctx {
41static int p8_aes_xts_init(struct crypto_tfm *tfm) 42static int p8_aes_xts_init(struct crypto_tfm *tfm)
42{ 43{
43 const char *alg; 44 const char *alg;
44 struct crypto_blkcipher *fallback; 45 struct crypto_skcipher *fallback;
45 struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm); 46 struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
46 47
47 if (!(alg = crypto_tfm_alg_name(tfm))) { 48 if (!(alg = crypto_tfm_alg_name(tfm))) {
@@ -49,8 +50,8 @@ static int p8_aes_xts_init(struct crypto_tfm *tfm)
49 return -ENOENT; 50 return -ENOENT;
50 } 51 }
51 52
52 fallback = 53 fallback = crypto_alloc_skcipher(alg, 0,
53 crypto_alloc_blkcipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK); 54 CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
54 if (IS_ERR(fallback)) { 55 if (IS_ERR(fallback)) {
55 printk(KERN_ERR 56 printk(KERN_ERR
56 "Failed to allocate transformation for '%s': %ld\n", 57 "Failed to allocate transformation for '%s': %ld\n",
@@ -58,11 +59,11 @@ static int p8_aes_xts_init(struct crypto_tfm *tfm)
58 return PTR_ERR(fallback); 59 return PTR_ERR(fallback);
59 } 60 }
60 printk(KERN_INFO "Using '%s' as fallback implementation.\n", 61 printk(KERN_INFO "Using '%s' as fallback implementation.\n",
61 crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback)); 62 crypto_skcipher_driver_name(fallback));
62 63
63 crypto_blkcipher_set_flags( 64 crypto_skcipher_set_flags(
64 fallback, 65 fallback,
65 crypto_blkcipher_get_flags((struct crypto_blkcipher *)tfm)); 66 crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
66 ctx->fallback = fallback; 67 ctx->fallback = fallback;
67 68
68 return 0; 69 return 0;
@@ -73,7 +74,7 @@ static void p8_aes_xts_exit(struct crypto_tfm *tfm)
73 struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm); 74 struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
74 75
75 if (ctx->fallback) { 76 if (ctx->fallback) {
76 crypto_free_blkcipher(ctx->fallback); 77 crypto_free_skcipher(ctx->fallback);
77 ctx->fallback = NULL; 78 ctx->fallback = NULL;
78 } 79 }
79} 80}
@@ -98,7 +99,7 @@ static int p8_aes_xts_setkey(struct crypto_tfm *tfm, const u8 *key,
98 pagefault_enable(); 99 pagefault_enable();
99 preempt_enable(); 100 preempt_enable();
100 101
101 ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen); 102 ret += crypto_skcipher_setkey(ctx->fallback, key, keylen);
102 return ret; 103 return ret;
103} 104}
104 105
@@ -113,15 +114,14 @@ static int p8_aes_xts_crypt(struct blkcipher_desc *desc,
113 struct blkcipher_walk walk; 114 struct blkcipher_walk walk;
114 struct p8_aes_xts_ctx *ctx = 115 struct p8_aes_xts_ctx *ctx =
115 crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm)); 116 crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
116 struct blkcipher_desc fallback_desc = {
117 .tfm = ctx->fallback,
118 .info = desc->info,
119 .flags = desc->flags
120 };
121 117
122 if (in_interrupt()) { 118 if (in_interrupt()) {
123 ret = enc ? crypto_blkcipher_encrypt(&fallback_desc, dst, src, nbytes) : 119 SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
124 crypto_blkcipher_decrypt(&fallback_desc, dst, src, nbytes); 120 skcipher_request_set_tfm(req, ctx->fallback);
121 skcipher_request_set_callback(req, desc->flags, NULL, NULL);
122 skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
123 ret = enc? crypto_skcipher_encrypt(req) : crypto_skcipher_decrypt(req);
124 skcipher_request_zero(req);
125 } else { 125 } else {
126 preempt_disable(); 126 preempt_disable();
127 pagefault_disable(); 127 pagefault_disable();