author    Eric Biggers <ebiggers@google.com>        2019-05-20 12:44:48 -0400
committer Herbert Xu <herbert@gondor.apana.org.au>  2019-05-30 03:28:40 -0400
commit    2621a8699e81c0a4f17d7b98ef22f1f89975a7b5 (patch)
tree      b0fdfeea8fc24554caf48d43d27ee2e199b377b5
parent    1fa0a7dcf7599f318e10e42ae66a0fe670fdc7bd (diff)
crypto: vmx - convert to skcipher API
Convert the VMX implementations of AES-CBC, AES-CTR, and AES-XTS from the
deprecated "blkcipher" API to the "skcipher" API.  As part of this, I
moved the skcipher_request for the fallback algorithm off the stack and
into the request context of the parent algorithm.

I tested this in a PowerPC VM with CONFIG_CRYPTO_MANAGER_EXTRA_TESTS=y.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Tested-by: Michael Ellerman <mpe@ellerman.id.au>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
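The same pattern recurs in all three files below: the init hook allocates an
async-capable fallback and reserves room for the fallback's skcipher_request
inside the parent algorithm's request context via crypto_skcipher_set_reqsize(),
so the !crypto_simd_usable() path never needs a request on the stack.  A
minimal sketch of that pattern (the "demo_*" names are illustrative
placeholders, not part of this patch; only the API calls mirror it):

#include <crypto/internal/skcipher.h>

struct demo_ctx {
        struct crypto_skcipher *fallback;
};

static int demo_init(struct crypto_skcipher *tfm)
{
        struct demo_ctx *ctx = crypto_skcipher_ctx(tfm);

        ctx->fallback = crypto_alloc_skcipher("cbc(aes)", 0,
                                              CRYPTO_ALG_NEED_FALLBACK |
                                              CRYPTO_ALG_ASYNC);
        if (IS_ERR(ctx->fallback))
                return PTR_ERR(ctx->fallback);

        /* Reserve space in every skcipher_request for the fallback's
         * sub-request, so nothing has to live on the stack. */
        crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
                                    crypto_skcipher_reqsize(ctx->fallback));
        return 0;
}

static int demo_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct demo_ctx *ctx = crypto_skcipher_ctx(tfm);
        /* The sub-request lives in the per-request context reserved above. */
        struct skcipher_request *subreq = skcipher_request_ctx(req);

        *subreq = *req;         /* inherit callback, flags, src/dst, iv */
        skcipher_request_set_tfm(subreq, ctx->fallback);
        return crypto_skcipher_encrypt(subreq);
}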
-rw-r--r--   drivers/crypto/vmx/aes_cbc.c   |  183
-rw-r--r--   drivers/crypto/vmx/aes_ctr.c   |  165
-rw-r--r--   drivers/crypto/vmx/aes_xts.c   |  175
-rw-r--r--   drivers/crypto/vmx/aesp8-ppc.h |    2
-rw-r--r--   drivers/crypto/vmx/vmx.c       |   72
5 files changed, 252 insertions(+), 345 deletions(-)
diff --git a/drivers/crypto/vmx/aes_cbc.c b/drivers/crypto/vmx/aes_cbc.c
index dae8af3c46dc..92e75a05d6a9 100644
--- a/drivers/crypto/vmx/aes_cbc.c
+++ b/drivers/crypto/vmx/aes_cbc.c
@@ -7,64 +7,52 @@
  * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
  */
 
-#include <linux/types.h>
-#include <linux/err.h>
-#include <linux/crypto.h>
-#include <linux/delay.h>
 #include <asm/simd.h>
 #include <asm/switch_to.h>
 #include <crypto/aes.h>
 #include <crypto/internal/simd.h>
-#include <crypto/scatterwalk.h>
-#include <crypto/skcipher.h>
+#include <crypto/internal/skcipher.h>
 
 #include "aesp8-ppc.h"
 
 struct p8_aes_cbc_ctx {
-        struct crypto_sync_skcipher *fallback;
+        struct crypto_skcipher *fallback;
         struct aes_key enc_key;
         struct aes_key dec_key;
 };
 
-static int p8_aes_cbc_init(struct crypto_tfm *tfm)
+static int p8_aes_cbc_init(struct crypto_skcipher *tfm)
 {
-        const char *alg = crypto_tfm_alg_name(tfm);
-        struct crypto_sync_skcipher *fallback;
-        struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
-
-        fallback = crypto_alloc_sync_skcipher(alg, 0,
-                                              CRYPTO_ALG_NEED_FALLBACK);
+        struct p8_aes_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct crypto_skcipher *fallback;
 
+        fallback = crypto_alloc_skcipher("cbc(aes)", 0,
+                                         CRYPTO_ALG_NEED_FALLBACK |
+                                         CRYPTO_ALG_ASYNC);
         if (IS_ERR(fallback)) {
-                printk(KERN_ERR
-                       "Failed to allocate transformation for '%s': %ld\n",
-                       alg, PTR_ERR(fallback));
+                pr_err("Failed to allocate cbc(aes) fallback: %ld\n",
+                       PTR_ERR(fallback));
                 return PTR_ERR(fallback);
         }
 
-        crypto_sync_skcipher_set_flags(
-                fallback,
-                crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
+        crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
+                                    crypto_skcipher_reqsize(fallback));
         ctx->fallback = fallback;
-
         return 0;
 }
 
-static void p8_aes_cbc_exit(struct crypto_tfm *tfm)
+static void p8_aes_cbc_exit(struct crypto_skcipher *tfm)
 {
-        struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
+        struct p8_aes_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-        if (ctx->fallback) {
-                crypto_free_sync_skcipher(ctx->fallback);
-                ctx->fallback = NULL;
-        }
+        crypto_free_skcipher(ctx->fallback);
 }
 
-static int p8_aes_cbc_setkey(struct crypto_tfm *tfm, const u8 *key,
+static int p8_aes_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key,
                              unsigned int keylen)
 {
+        struct p8_aes_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
         int ret;
-        struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
 
         preempt_disable();
         pagefault_disable();
@@ -75,108 +63,71 @@ static int p8_aes_cbc_setkey(struct crypto_tfm *tfm, const u8 *key,
         pagefault_enable();
         preempt_enable();
 
-        ret |= crypto_sync_skcipher_setkey(ctx->fallback, key, keylen);
+        ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);
 
         return ret ? -EINVAL : 0;
 }
 
-static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
-                              struct scatterlist *dst,
-                              struct scatterlist *src, unsigned int nbytes)
+static int p8_aes_cbc_crypt(struct skcipher_request *req, int enc)
 {
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        const struct p8_aes_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct skcipher_walk walk;
+        unsigned int nbytes;
         int ret;
-        struct blkcipher_walk walk;
-        struct p8_aes_cbc_ctx *ctx =
-                crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
 
         if (!crypto_simd_usable()) {
-                SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
-                skcipher_request_set_sync_tfm(req, ctx->fallback);
-                skcipher_request_set_callback(req, desc->flags, NULL, NULL);
-                skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
-                ret = crypto_skcipher_encrypt(req);
-                skcipher_request_zero(req);
-        } else {
-                blkcipher_walk_init(&walk, dst, src, nbytes);
-                ret = blkcipher_walk_virt(desc, &walk);
-                while ((nbytes = walk.nbytes)) {
-                        preempt_disable();
-                        pagefault_disable();
-                        enable_kernel_vsx();
-                        aes_p8_cbc_encrypt(walk.src.virt.addr,
-                                           walk.dst.virt.addr,
-                                           nbytes & AES_BLOCK_MASK,
-                                           &ctx->enc_key, walk.iv, 1);
-                        disable_kernel_vsx();
-                        pagefault_enable();
-                        preempt_enable();
-
-                        nbytes &= AES_BLOCK_SIZE - 1;
-                        ret = blkcipher_walk_done(desc, &walk, nbytes);
-                }
+                struct skcipher_request *subreq = skcipher_request_ctx(req);
+
+                *subreq = *req;
+                skcipher_request_set_tfm(subreq, ctx->fallback);
+                return enc ? crypto_skcipher_encrypt(subreq) :
+                             crypto_skcipher_decrypt(subreq);
         }
 
+        ret = skcipher_walk_virt(&walk, req, false);
+        while ((nbytes = walk.nbytes) != 0) {
+                preempt_disable();
+                pagefault_disable();
+                enable_kernel_vsx();
+                aes_p8_cbc_encrypt(walk.src.virt.addr,
+                                   walk.dst.virt.addr,
+                                   round_down(nbytes, AES_BLOCK_SIZE),
+                                   enc ? &ctx->enc_key : &ctx->dec_key,
+                                   walk.iv, enc);
+                disable_kernel_vsx();
+                pagefault_enable();
+                preempt_enable();
+
+                ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
+        }
         return ret;
 }
 
-static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc,
-                              struct scatterlist *dst,
-                              struct scatterlist *src, unsigned int nbytes)
+static int p8_aes_cbc_encrypt(struct skcipher_request *req)
 {
-        int ret;
-        struct blkcipher_walk walk;
-        struct p8_aes_cbc_ctx *ctx =
-                crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
-
-        if (!crypto_simd_usable()) {
-                SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
-                skcipher_request_set_sync_tfm(req, ctx->fallback);
-                skcipher_request_set_callback(req, desc->flags, NULL, NULL);
-                skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
-                ret = crypto_skcipher_decrypt(req);
-                skcipher_request_zero(req);
-        } else {
-                blkcipher_walk_init(&walk, dst, src, nbytes);
-                ret = blkcipher_walk_virt(desc, &walk);
-                while ((nbytes = walk.nbytes)) {
-                        preempt_disable();
-                        pagefault_disable();
-                        enable_kernel_vsx();
-                        aes_p8_cbc_encrypt(walk.src.virt.addr,
-                                           walk.dst.virt.addr,
-                                           nbytes & AES_BLOCK_MASK,
-                                           &ctx->dec_key, walk.iv, 0);
-                        disable_kernel_vsx();
-                        pagefault_enable();
-                        preempt_enable();
-
-                        nbytes &= AES_BLOCK_SIZE - 1;
-                        ret = blkcipher_walk_done(desc, &walk, nbytes);
-                }
-        }
-
-        return ret;
+        return p8_aes_cbc_crypt(req, 1);
 }
 
+static int p8_aes_cbc_decrypt(struct skcipher_request *req)
+{
+        return p8_aes_cbc_crypt(req, 0);
+}
 
-struct crypto_alg p8_aes_cbc_alg = {
-        .cra_name = "cbc(aes)",
-        .cra_driver_name = "p8_aes_cbc",
-        .cra_module = THIS_MODULE,
-        .cra_priority = 2000,
-        .cra_type = &crypto_blkcipher_type,
-        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
-        .cra_alignmask = 0,
-        .cra_blocksize = AES_BLOCK_SIZE,
-        .cra_ctxsize = sizeof(struct p8_aes_cbc_ctx),
-        .cra_init = p8_aes_cbc_init,
-        .cra_exit = p8_aes_cbc_exit,
-        .cra_blkcipher = {
-                .ivsize = AES_BLOCK_SIZE,
-                .min_keysize = AES_MIN_KEY_SIZE,
-                .max_keysize = AES_MAX_KEY_SIZE,
-                .setkey = p8_aes_cbc_setkey,
-                .encrypt = p8_aes_cbc_encrypt,
-                .decrypt = p8_aes_cbc_decrypt,
-        },
+struct skcipher_alg p8_aes_cbc_alg = {
+        .base.cra_name = "cbc(aes)",
+        .base.cra_driver_name = "p8_aes_cbc",
+        .base.cra_module = THIS_MODULE,
+        .base.cra_priority = 2000,
+        .base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
+        .base.cra_blocksize = AES_BLOCK_SIZE,
+        .base.cra_ctxsize = sizeof(struct p8_aes_cbc_ctx),
+        .setkey = p8_aes_cbc_setkey,
+        .encrypt = p8_aes_cbc_encrypt,
+        .decrypt = p8_aes_cbc_decrypt,
+        .init = p8_aes_cbc_init,
+        .exit = p8_aes_cbc_exit,
+        .min_keysize = AES_MIN_KEY_SIZE,
+        .max_keysize = AES_MAX_KEY_SIZE,
+        .ivsize = AES_BLOCK_SIZE,
 };
diff --git a/drivers/crypto/vmx/aes_ctr.c b/drivers/crypto/vmx/aes_ctr.c
index dc3110117844..c4d2809a5d9e 100644
--- a/drivers/crypto/vmx/aes_ctr.c
+++ b/drivers/crypto/vmx/aes_ctr.c
@@ -7,62 +7,51 @@
  * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
  */
 
-#include <linux/types.h>
-#include <linux/err.h>
-#include <linux/crypto.h>
-#include <linux/delay.h>
 #include <asm/simd.h>
 #include <asm/switch_to.h>
 #include <crypto/aes.h>
 #include <crypto/internal/simd.h>
-#include <crypto/scatterwalk.h>
-#include <crypto/skcipher.h>
+#include <crypto/internal/skcipher.h>
 
 #include "aesp8-ppc.h"
 
 struct p8_aes_ctr_ctx {
-        struct crypto_sync_skcipher *fallback;
+        struct crypto_skcipher *fallback;
         struct aes_key enc_key;
 };
 
-static int p8_aes_ctr_init(struct crypto_tfm *tfm)
+static int p8_aes_ctr_init(struct crypto_skcipher *tfm)
 {
-        const char *alg = crypto_tfm_alg_name(tfm);
-        struct crypto_sync_skcipher *fallback;
-        struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
+        struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct crypto_skcipher *fallback;
 
-        fallback = crypto_alloc_sync_skcipher(alg, 0,
-                                              CRYPTO_ALG_NEED_FALLBACK);
+        fallback = crypto_alloc_skcipher("ctr(aes)", 0,
+                                         CRYPTO_ALG_NEED_FALLBACK |
+                                         CRYPTO_ALG_ASYNC);
         if (IS_ERR(fallback)) {
-                printk(KERN_ERR
-                       "Failed to allocate transformation for '%s': %ld\n",
-                       alg, PTR_ERR(fallback));
+                pr_err("Failed to allocate ctr(aes) fallback: %ld\n",
+                       PTR_ERR(fallback));
                 return PTR_ERR(fallback);
         }
 
-        crypto_sync_skcipher_set_flags(
-                fallback,
-                crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
+        crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
+                                    crypto_skcipher_reqsize(fallback));
         ctx->fallback = fallback;
-
         return 0;
 }
 
-static void p8_aes_ctr_exit(struct crypto_tfm *tfm)
+static void p8_aes_ctr_exit(struct crypto_skcipher *tfm)
 {
-        struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
+        struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-        if (ctx->fallback) {
-                crypto_free_sync_skcipher(ctx->fallback);
-                ctx->fallback = NULL;
-        }
+        crypto_free_skcipher(ctx->fallback);
 }
 
-static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
+static int p8_aes_ctr_setkey(struct crypto_skcipher *tfm, const u8 *key,
                              unsigned int keylen)
 {
+        struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
         int ret;
-        struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
         preempt_disable();
         pagefault_disable();
@@ -72,13 +61,13 @@ static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
         pagefault_enable();
         preempt_enable();
 
-        ret |= crypto_sync_skcipher_setkey(ctx->fallback, key, keylen);
+        ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);
 
         return ret ? -EINVAL : 0;
 }
 
-static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
-                             struct blkcipher_walk *walk)
+static void p8_aes_ctr_final(const struct p8_aes_ctr_ctx *ctx,
+                             struct skcipher_walk *walk)
 {
         u8 *ctrblk = walk->iv;
         u8 keystream[AES_BLOCK_SIZE];
@@ -98,77 +87,63 @@ static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
         crypto_inc(ctrblk, AES_BLOCK_SIZE);
 }
 
-static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
-                            struct scatterlist *dst,
-                            struct scatterlist *src, unsigned int nbytes)
+static int p8_aes_ctr_crypt(struct skcipher_request *req)
 {
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        const struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct skcipher_walk walk;
+        unsigned int nbytes;
         int ret;
-        u64 inc;
-        struct blkcipher_walk walk;
-        struct p8_aes_ctr_ctx *ctx =
-                crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
 
         if (!crypto_simd_usable()) {
-                SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
-                skcipher_request_set_sync_tfm(req, ctx->fallback);
-                skcipher_request_set_callback(req, desc->flags, NULL, NULL);
-                skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
-                ret = crypto_skcipher_encrypt(req);
-                skcipher_request_zero(req);
-        } else {
-                blkcipher_walk_init(&walk, dst, src, nbytes);
-                ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
-                while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
-                        preempt_disable();
-                        pagefault_disable();
-                        enable_kernel_vsx();
-                        aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
-                                                    walk.dst.virt.addr,
-                                                    (nbytes &
-                                                     AES_BLOCK_MASK) /
-                                                    AES_BLOCK_SIZE,
-                                                    &ctx->enc_key,
-                                                    walk.iv);
-                        disable_kernel_vsx();
-                        pagefault_enable();
-                        preempt_enable();
-
-                        /* We need to update IV mostly for last bytes/round */
-                        inc = (nbytes & AES_BLOCK_MASK) / AES_BLOCK_SIZE;
-                        if (inc > 0)
-                                while (inc--)
-                                        crypto_inc(walk.iv, AES_BLOCK_SIZE);
-
-                        nbytes &= AES_BLOCK_SIZE - 1;
-                        ret = blkcipher_walk_done(desc, &walk, nbytes);
-                }
-                if (walk.nbytes) {
-                        p8_aes_ctr_final(ctx, &walk);
-                        ret = blkcipher_walk_done(desc, &walk, 0);
-                }
+                struct skcipher_request *subreq = skcipher_request_ctx(req);
+
+                *subreq = *req;
+                skcipher_request_set_tfm(subreq, ctx->fallback);
+                return crypto_skcipher_encrypt(subreq);
         }
 
+        ret = skcipher_walk_virt(&walk, req, false);
+        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
+                preempt_disable();
+                pagefault_disable();
+                enable_kernel_vsx();
+                aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
+                                            walk.dst.virt.addr,
+                                            nbytes / AES_BLOCK_SIZE,
+                                            &ctx->enc_key, walk.iv);
+                disable_kernel_vsx();
+                pagefault_enable();
+                preempt_enable();
+
+                do {
+                        crypto_inc(walk.iv, AES_BLOCK_SIZE);
+                } while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE);
+
+                ret = skcipher_walk_done(&walk, nbytes);
+        }
+        if (nbytes) {
+                p8_aes_ctr_final(ctx, &walk);
+                ret = skcipher_walk_done(&walk, 0);
+        }
         return ret;
 }
 
-struct crypto_alg p8_aes_ctr_alg = {
-        .cra_name = "ctr(aes)",
-        .cra_driver_name = "p8_aes_ctr",
-        .cra_module = THIS_MODULE,
-        .cra_priority = 2000,
-        .cra_type = &crypto_blkcipher_type,
-        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
-        .cra_alignmask = 0,
-        .cra_blocksize = 1,
-        .cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
-        .cra_init = p8_aes_ctr_init,
-        .cra_exit = p8_aes_ctr_exit,
-        .cra_blkcipher = {
-                .ivsize = AES_BLOCK_SIZE,
-                .min_keysize = AES_MIN_KEY_SIZE,
-                .max_keysize = AES_MAX_KEY_SIZE,
-                .setkey = p8_aes_ctr_setkey,
-                .encrypt = p8_aes_ctr_crypt,
-                .decrypt = p8_aes_ctr_crypt,
-        },
+struct skcipher_alg p8_aes_ctr_alg = {
+        .base.cra_name = "ctr(aes)",
+        .base.cra_driver_name = "p8_aes_ctr",
+        .base.cra_module = THIS_MODULE,
+        .base.cra_priority = 2000,
+        .base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
+        .base.cra_blocksize = 1,
+        .base.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
+        .setkey = p8_aes_ctr_setkey,
+        .encrypt = p8_aes_ctr_crypt,
+        .decrypt = p8_aes_ctr_crypt,
+        .init = p8_aes_ctr_init,
+        .exit = p8_aes_ctr_exit,
+        .min_keysize = AES_MIN_KEY_SIZE,
+        .max_keysize = AES_MAX_KEY_SIZE,
+        .ivsize = AES_BLOCK_SIZE,
+        .chunksize = AES_BLOCK_SIZE,
 };
diff --git a/drivers/crypto/vmx/aes_xts.c b/drivers/crypto/vmx/aes_xts.c
index aee1339f134e..965d8e03321c 100644
--- a/drivers/crypto/vmx/aes_xts.c
+++ b/drivers/crypto/vmx/aes_xts.c
@@ -7,67 +7,56 @@
  * Author: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
  */
 
-#include <linux/types.h>
-#include <linux/err.h>
-#include <linux/crypto.h>
-#include <linux/delay.h>
 #include <asm/simd.h>
 #include <asm/switch_to.h>
 #include <crypto/aes.h>
 #include <crypto/internal/simd.h>
-#include <crypto/scatterwalk.h>
+#include <crypto/internal/skcipher.h>
 #include <crypto/xts.h>
-#include <crypto/skcipher.h>
 
 #include "aesp8-ppc.h"
 
 struct p8_aes_xts_ctx {
-        struct crypto_sync_skcipher *fallback;
+        struct crypto_skcipher *fallback;
         struct aes_key enc_key;
         struct aes_key dec_key;
         struct aes_key tweak_key;
 };
 
-static int p8_aes_xts_init(struct crypto_tfm *tfm)
+static int p8_aes_xts_init(struct crypto_skcipher *tfm)
 {
-        const char *alg = crypto_tfm_alg_name(tfm);
-        struct crypto_sync_skcipher *fallback;
-        struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
+        struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct crypto_skcipher *fallback;
 
-        fallback = crypto_alloc_sync_skcipher(alg, 0,
-                                              CRYPTO_ALG_NEED_FALLBACK);
+        fallback = crypto_alloc_skcipher("xts(aes)", 0,
+                                         CRYPTO_ALG_NEED_FALLBACK |
+                                         CRYPTO_ALG_ASYNC);
         if (IS_ERR(fallback)) {
-                printk(KERN_ERR
-                       "Failed to allocate transformation for '%s': %ld\n",
-                       alg, PTR_ERR(fallback));
+                pr_err("Failed to allocate xts(aes) fallback: %ld\n",
+                       PTR_ERR(fallback));
                 return PTR_ERR(fallback);
         }
 
-        crypto_sync_skcipher_set_flags(
-                fallback,
-                crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
+        crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
+                                    crypto_skcipher_reqsize(fallback));
         ctx->fallback = fallback;
-
         return 0;
 }
 
-static void p8_aes_xts_exit(struct crypto_tfm *tfm)
+static void p8_aes_xts_exit(struct crypto_skcipher *tfm)
 {
-        struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
+        struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 
-        if (ctx->fallback) {
-                crypto_free_sync_skcipher(ctx->fallback);
-                ctx->fallback = NULL;
-        }
+        crypto_free_skcipher(ctx->fallback);
 }
 
-static int p8_aes_xts_setkey(struct crypto_tfm *tfm, const u8 *key,
+static int p8_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
                              unsigned int keylen)
 {
+        struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
         int ret;
-        struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
 
-        ret = xts_check_key(tfm, key, keylen);
+        ret = xts_verify_key(tfm, key, keylen);
         if (ret)
                 return ret;
 
@@ -81,100 +70,90 @@ static int p8_aes_xts_setkey(struct crypto_tfm *tfm, const u8 *key,
         pagefault_enable();
         preempt_enable();
 
-        ret |= crypto_sync_skcipher_setkey(ctx->fallback, key, keylen);
+        ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);
 
         return ret ? -EINVAL : 0;
 }
 
-static int p8_aes_xts_crypt(struct blkcipher_desc *desc,
-                            struct scatterlist *dst,
-                            struct scatterlist *src,
-                            unsigned int nbytes, int enc)
+static int p8_aes_xts_crypt(struct skcipher_request *req, int enc)
 {
-        int ret;
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        const struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
+        struct skcipher_walk walk;
+        unsigned int nbytes;
         u8 tweak[AES_BLOCK_SIZE];
-        u8 *iv;
-        struct blkcipher_walk walk;
-        struct p8_aes_xts_ctx *ctx =
-                crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
+        int ret;
 
         if (!crypto_simd_usable()) {
-                SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
-                skcipher_request_set_sync_tfm(req, ctx->fallback);
-                skcipher_request_set_callback(req, desc->flags, NULL, NULL);
-                skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
-                ret = enc? crypto_skcipher_encrypt(req) : crypto_skcipher_decrypt(req);
-                skcipher_request_zero(req);
-        } else {
-                blkcipher_walk_init(&walk, dst, src, nbytes);
+                struct skcipher_request *subreq = skcipher_request_ctx(req);
+
+                *subreq = *req;
+                skcipher_request_set_tfm(subreq, ctx->fallback);
+                return enc ? crypto_skcipher_encrypt(subreq) :
+                             crypto_skcipher_decrypt(subreq);
+        }
+
+        ret = skcipher_walk_virt(&walk, req, false);
+        if (ret)
+                return ret;
+
+        preempt_disable();
+        pagefault_disable();
+        enable_kernel_vsx();
 
-                ret = blkcipher_walk_virt(desc, &walk);
+        aes_p8_encrypt(walk.iv, tweak, &ctx->tweak_key);
 
+        disable_kernel_vsx();
+        pagefault_enable();
+        preempt_enable();
+
+        while ((nbytes = walk.nbytes) != 0) {
                 preempt_disable();
                 pagefault_disable();
                 enable_kernel_vsx();
-
-                iv = walk.iv;
-                memset(tweak, 0, AES_BLOCK_SIZE);
-                aes_p8_encrypt(iv, tweak, &ctx->tweak_key);
-
+                if (enc)
+                        aes_p8_xts_encrypt(walk.src.virt.addr,
+                                           walk.dst.virt.addr,
+                                           round_down(nbytes, AES_BLOCK_SIZE),
+                                           &ctx->enc_key, NULL, tweak);
+                else
+                        aes_p8_xts_decrypt(walk.src.virt.addr,
+                                           walk.dst.virt.addr,
+                                           round_down(nbytes, AES_BLOCK_SIZE),
+                                           &ctx->dec_key, NULL, tweak);
                 disable_kernel_vsx();
                 pagefault_enable();
                 preempt_enable();
 
-                while ((nbytes = walk.nbytes)) {
-                        preempt_disable();
-                        pagefault_disable();
-                        enable_kernel_vsx();
-                        if (enc)
-                                aes_p8_xts_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
-                                                nbytes & AES_BLOCK_MASK, &ctx->enc_key, NULL, tweak);
-                        else
-                                aes_p8_xts_decrypt(walk.src.virt.addr, walk.dst.virt.addr,
-                                                nbytes & AES_BLOCK_MASK, &ctx->dec_key, NULL, tweak);
-                        disable_kernel_vsx();
-                        pagefault_enable();
-                        preempt_enable();
-
-                        nbytes &= AES_BLOCK_SIZE - 1;
-                        ret = blkcipher_walk_done(desc, &walk, nbytes);
-                }
+                ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
         }
         return ret;
 }
 
-static int p8_aes_xts_encrypt(struct blkcipher_desc *desc,
-                              struct scatterlist *dst,
-                              struct scatterlist *src, unsigned int nbytes)
+static int p8_aes_xts_encrypt(struct skcipher_request *req)
 {
-        return p8_aes_xts_crypt(desc, dst, src, nbytes, 1);
+        return p8_aes_xts_crypt(req, 1);
 }
 
-static int p8_aes_xts_decrypt(struct blkcipher_desc *desc,
-                              struct scatterlist *dst,
-                              struct scatterlist *src, unsigned int nbytes)
+static int p8_aes_xts_decrypt(struct skcipher_request *req)
 {
-        return p8_aes_xts_crypt(desc, dst, src, nbytes, 0);
+        return p8_aes_xts_crypt(req, 0);
 }
 
-struct crypto_alg p8_aes_xts_alg = {
-        .cra_name = "xts(aes)",
-        .cra_driver_name = "p8_aes_xts",
-        .cra_module = THIS_MODULE,
-        .cra_priority = 2000,
-        .cra_type = &crypto_blkcipher_type,
-        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
-        .cra_alignmask = 0,
-        .cra_blocksize = AES_BLOCK_SIZE,
-        .cra_ctxsize = sizeof(struct p8_aes_xts_ctx),
-        .cra_init = p8_aes_xts_init,
-        .cra_exit = p8_aes_xts_exit,
-        .cra_blkcipher = {
-                .ivsize = AES_BLOCK_SIZE,
-                .min_keysize = 2 * AES_MIN_KEY_SIZE,
-                .max_keysize = 2 * AES_MAX_KEY_SIZE,
-                .setkey = p8_aes_xts_setkey,
-                .encrypt = p8_aes_xts_encrypt,
-                .decrypt = p8_aes_xts_decrypt,
-        }
+struct skcipher_alg p8_aes_xts_alg = {
+        .base.cra_name = "xts(aes)",
+        .base.cra_driver_name = "p8_aes_xts",
+        .base.cra_module = THIS_MODULE,
+        .base.cra_priority = 2000,
+        .base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
+        .base.cra_blocksize = AES_BLOCK_SIZE,
+        .base.cra_ctxsize = sizeof(struct p8_aes_xts_ctx),
+        .setkey = p8_aes_xts_setkey,
+        .encrypt = p8_aes_xts_encrypt,
+        .decrypt = p8_aes_xts_decrypt,
+        .init = p8_aes_xts_init,
+        .exit = p8_aes_xts_exit,
+        .min_keysize = 2 * AES_MIN_KEY_SIZE,
+        .max_keysize = 2 * AES_MAX_KEY_SIZE,
+        .ivsize = AES_BLOCK_SIZE,
 };
diff --git a/drivers/crypto/vmx/aesp8-ppc.h b/drivers/crypto/vmx/aesp8-ppc.h
index 349646b73754..01774a4d26a2 100644
--- a/drivers/crypto/vmx/aesp8-ppc.h
+++ b/drivers/crypto/vmx/aesp8-ppc.h
@@ -2,8 +2,6 @@
 #include <linux/types.h>
 #include <crypto/aes.h>
 
-#define AES_BLOCK_MASK  (~(AES_BLOCK_SIZE-1))
-
 struct aes_key {
         u8 key[AES_MAX_KEYLENGTH];
         int rounds;
diff --git a/drivers/crypto/vmx/vmx.c b/drivers/crypto/vmx/vmx.c
index abd89c2bcec4..eff03fdf964f 100644
--- a/drivers/crypto/vmx/vmx.c
+++ b/drivers/crypto/vmx/vmx.c
@@ -15,54 +15,58 @@
 #include <linux/crypto.h>
 #include <asm/cputable.h>
 #include <crypto/internal/hash.h>
+#include <crypto/internal/skcipher.h>
 
 extern struct shash_alg p8_ghash_alg;
 extern struct crypto_alg p8_aes_alg;
-extern struct crypto_alg p8_aes_cbc_alg;
-extern struct crypto_alg p8_aes_ctr_alg;
-extern struct crypto_alg p8_aes_xts_alg;
-static struct crypto_alg *algs[] = {
-        &p8_aes_alg,
-        &p8_aes_cbc_alg,
-        &p8_aes_ctr_alg,
-        &p8_aes_xts_alg,
-        NULL,
-};
+extern struct skcipher_alg p8_aes_cbc_alg;
+extern struct skcipher_alg p8_aes_ctr_alg;
+extern struct skcipher_alg p8_aes_xts_alg;
 
 static int __init p8_init(void)
 {
-        int ret = 0;
-        struct crypto_alg **alg_it;
+        int ret;
 
-        for (alg_it = algs; *alg_it; alg_it++) {
-                ret = crypto_register_alg(*alg_it);
-                printk(KERN_INFO "crypto_register_alg '%s' = %d\n",
-                       (*alg_it)->cra_name, ret);
-                if (ret) {
-                        for (alg_it--; alg_it >= algs; alg_it--)
-                                crypto_unregister_alg(*alg_it);
-                        break;
-                }
-        }
+        ret = crypto_register_shash(&p8_ghash_alg);
         if (ret)
-                return ret;
+                goto err;
 
-        ret = crypto_register_shash(&p8_ghash_alg);
-        if (ret) {
-                for (alg_it = algs; *alg_it; alg_it++)
-                        crypto_unregister_alg(*alg_it);
-        }
+        ret = crypto_register_alg(&p8_aes_alg);
+        if (ret)
+                goto err_unregister_ghash;
+
+        ret = crypto_register_skcipher(&p8_aes_cbc_alg);
+        if (ret)
+                goto err_unregister_aes;
+
+        ret = crypto_register_skcipher(&p8_aes_ctr_alg);
+        if (ret)
+                goto err_unregister_aes_cbc;
+
+        ret = crypto_register_skcipher(&p8_aes_xts_alg);
+        if (ret)
+                goto err_unregister_aes_ctr;
+
+        return 0;
+
+err_unregister_aes_ctr:
+        crypto_unregister_skcipher(&p8_aes_ctr_alg);
+err_unregister_aes_cbc:
+        crypto_unregister_skcipher(&p8_aes_cbc_alg);
+err_unregister_aes:
+        crypto_unregister_alg(&p8_aes_alg);
+err_unregister_ghash:
+        crypto_unregister_shash(&p8_ghash_alg);
+err:
         return ret;
 }
 
 static void __exit p8_exit(void)
 {
-        struct crypto_alg **alg_it;
-
-        for (alg_it = algs; *alg_it; alg_it++) {
-                printk(KERN_INFO "Removing '%s'\n", (*alg_it)->cra_name);
-                crypto_unregister_alg(*alg_it);
-        }
+        crypto_unregister_skcipher(&p8_aes_xts_alg);
+        crypto_unregister_skcipher(&p8_aes_ctr_alg);
+        crypto_unregister_skcipher(&p8_aes_cbc_alg);
+        crypto_unregister_alg(&p8_aes_alg);
         crypto_unregister_shash(&p8_ghash_alg);
 }
 