about summary refs log tree commit diff stats
path: root/arch/x86/crypto
diff options
context:
space:
mode:
author    Jussi Kivilinna <jussi.kivilinna@mbnet.fi> 2012-06-18 07:07:34 -0400
committer Herbert Xu <herbert@gondor.apana.org.au> 2012-06-27 02:42:02 -0400
commit    414cb5e7cc6e258fe36e2c3cc3ef1ff2e246c0e3 (patch)
tree      bce5793f8d0e9791ea60d09192d74b482b9a6cfd /arch/x86/crypto
parent    964263afdcbf9d1e85c021acfff0cc68dd168475 (diff)
crypto: twofish-x86_64-3way - remove duplicated glue code and use shared glue code from glue_helper
Now that shared glue code is available, convert twofish-x86_64-3way to use it. Signed-off-by: Jussi Kivilinna <jussi.kivilinna@mbnet.fi> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'arch/x86/crypto')
-rw-r--r--  arch/x86/crypto/twofish_glue_3way.c | 365
1 file changed, 93 insertions(+), 272 deletions(-)
diff --git a/arch/x86/crypto/twofish_glue_3way.c b/arch/x86/crypto/twofish_glue_3way.c
index 77e4e55a2660..25bf5e9b0067 100644
--- a/arch/x86/crypto/twofish_glue_3way.c
+++ b/arch/x86/crypto/twofish_glue_3way.c
@@ -3,11 +3,6 @@
3 * 3 *
4 * Copyright (c) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi> 4 * Copyright (c) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
5 * 5 *
6 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
7 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
8 * CTR part based on code (crypto/ctr.c) by:
9 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
10 *
11 * This program is free software; you can redistribute it and/or modify 6 * This program is free software; you can redistribute it and/or modify
12 * it under the terms of the GNU General Public License as published by 7 * it under the terms of the GNU General Public License as published by
13 * the Free Software Foundation; either version 2 of the License, or 8 * the Free Software Foundation; either version 2 of the License, or
@@ -33,6 +28,7 @@
33#include <crypto/algapi.h> 28#include <crypto/algapi.h>
34#include <crypto/twofish.h> 29#include <crypto/twofish.h>
35#include <crypto/b128ops.h> 30#include <crypto/b128ops.h>
31#include <asm/crypto/glue_helper.h>
36#include <crypto/lrw.h> 32#include <crypto/lrw.h>
37#include <crypto/xts.h> 33#include <crypto/xts.h>
38 34
@@ -62,311 +58,136 @@ static inline void twofish_enc_blk_xor_3way(struct twofish_ctx *ctx, u8 *dst,
62 __twofish_enc_blk_3way(ctx, dst, src, true); 58 __twofish_enc_blk_3way(ctx, dst, src, true);
63} 59}
64 60
65static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk, 61static void twofish_dec_blk_cbc_3way(void *ctx, u128 *dst, const u128 *src)
66 void (*fn)(struct twofish_ctx *, u8 *, const u8 *),
67 void (*fn_3way)(struct twofish_ctx *, u8 *, const u8 *))
68{ 62{
69 struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); 63 u128 ivs[2];
70 unsigned int bsize = TF_BLOCK_SIZE;
71 unsigned int nbytes;
72 int err;
73
74 err = blkcipher_walk_virt(desc, walk);
75
76 while ((nbytes = walk->nbytes)) {
77 u8 *wsrc = walk->src.virt.addr;
78 u8 *wdst = walk->dst.virt.addr;
79
80 /* Process three block batch */
81 if (nbytes >= bsize * 3) {
82 do {
83 fn_3way(ctx, wdst, wsrc);
84 64
85 wsrc += bsize * 3; 65 ivs[0] = src[0];
86 wdst += bsize * 3; 66 ivs[1] = src[1];
87 nbytes -= bsize * 3;
88 } while (nbytes >= bsize * 3);
89 67
90 if (nbytes < bsize) 68 twofish_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src);
91 goto done;
92 }
93
94 /* Handle leftovers */
95 do {
96 fn(ctx, wdst, wsrc);
97
98 wsrc += bsize;
99 wdst += bsize;
100 nbytes -= bsize;
101 } while (nbytes >= bsize);
102
103done:
104 err = blkcipher_walk_done(desc, walk, nbytes);
105 }
106 69
107 return err; 70 u128_xor(&dst[1], &dst[1], &ivs[0]);
71 u128_xor(&dst[2], &dst[2], &ivs[1]);
108} 72}
109 73
110static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, 74static void twofish_enc_blk_ctr(void *ctx, u128 *dst, const u128 *src, u128 *iv)
111 struct scatterlist *src, unsigned int nbytes)
112{ 75{
113 struct blkcipher_walk walk; 76 be128 ctrblk;
114 77
115 blkcipher_walk_init(&walk, dst, src, nbytes); 78 if (dst != src)
116 return ecb_crypt(desc, &walk, twofish_enc_blk, twofish_enc_blk_3way); 79 *dst = *src;
117}
118 80
119static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, 81 u128_to_be128(&ctrblk, iv);
120 struct scatterlist *src, unsigned int nbytes) 82 u128_inc(iv);
121{
122 struct blkcipher_walk walk;
123 83
124 blkcipher_walk_init(&walk, dst, src, nbytes); 84 twofish_enc_blk(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
125 return ecb_crypt(desc, &walk, twofish_dec_blk, twofish_dec_blk_3way); 85 u128_xor(dst, dst, (u128 *)&ctrblk);
126} 86}
127 87
128static unsigned int __cbc_encrypt(struct blkcipher_desc *desc, 88static void twofish_enc_blk_ctr_3way(void *ctx, u128 *dst, const u128 *src,
129 struct blkcipher_walk *walk) 89 u128 *iv)
130{ 90{
131 struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); 91 be128 ctrblks[3];
132 unsigned int bsize = TF_BLOCK_SIZE;
133 unsigned int nbytes = walk->nbytes;
134 u128 *src = (u128 *)walk->src.virt.addr;
135 u128 *dst = (u128 *)walk->dst.virt.addr;
136 u128 *iv = (u128 *)walk->iv;
137
138 do {
139 u128_xor(dst, src, iv);
140 twofish_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
141 iv = dst;
142
143 src += 1;
144 dst += 1;
145 nbytes -= bsize;
146 } while (nbytes >= bsize);
147
148 u128_xor((u128 *)walk->iv, (u128 *)walk->iv, iv);
149 return nbytes;
150}
151 92
152static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, 93 if (dst != src) {
153 struct scatterlist *src, unsigned int nbytes) 94 dst[0] = src[0];
154{ 95 dst[1] = src[1];
155 struct blkcipher_walk walk; 96 dst[2] = src[2];
156 int err;
157
158 blkcipher_walk_init(&walk, dst, src, nbytes);
159 err = blkcipher_walk_virt(desc, &walk);
160
161 while ((nbytes = walk.nbytes)) {
162 nbytes = __cbc_encrypt(desc, &walk);
163 err = blkcipher_walk_done(desc, &walk, nbytes);
164 }
165
166 return err;
167}
168
169static unsigned int __cbc_decrypt(struct blkcipher_desc *desc,
170 struct blkcipher_walk *walk)
171{
172 struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
173 unsigned int bsize = TF_BLOCK_SIZE;
174 unsigned int nbytes = walk->nbytes;
175 u128 *src = (u128 *)walk->src.virt.addr;
176 u128 *dst = (u128 *)walk->dst.virt.addr;
177 u128 ivs[3 - 1];
178 u128 last_iv;
179
180 /* Start of the last block. */
181 src += nbytes / bsize - 1;
182 dst += nbytes / bsize - 1;
183
184 last_iv = *src;
185
186 /* Process three block batch */
187 if (nbytes >= bsize * 3) {
188 do {
189 nbytes -= bsize * (3 - 1);
190 src -= 3 - 1;
191 dst -= 3 - 1;
192
193 ivs[0] = src[0];
194 ivs[1] = src[1];
195
196 twofish_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src);
197
198 u128_xor(dst + 1, dst + 1, ivs + 0);
199 u128_xor(dst + 2, dst + 2, ivs + 1);
200
201 nbytes -= bsize;
202 if (nbytes < bsize)
203 goto done;
204
205 u128_xor(dst, dst, src - 1);
206 src -= 1;
207 dst -= 1;
208 } while (nbytes >= bsize * 3);
209
210 if (nbytes < bsize)
211 goto done;
212 }
213
214 /* Handle leftovers */
215 for (;;) {
216 twofish_dec_blk(ctx, (u8 *)dst, (u8 *)src);
217
218 nbytes -= bsize;
219 if (nbytes < bsize)
220 break;
221
222 u128_xor(dst, dst, src - 1);
223 src -= 1;
224 dst -= 1;
225 } 97 }
226 98
227done: 99 u128_to_be128(&ctrblks[0], iv);
228 u128_xor(dst, dst, (u128 *)walk->iv); 100 u128_inc(iv);
229 *(u128 *)walk->iv = last_iv; 101 u128_to_be128(&ctrblks[1], iv);
102 u128_inc(iv);
103 u128_to_be128(&ctrblks[2], iv);
104 u128_inc(iv);
230 105
231 return nbytes; 106 twofish_enc_blk_xor_3way(ctx, (u8 *)dst, (u8 *)ctrblks);
232} 107}
233 108
234static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, 109static const struct common_glue_ctx twofish_enc = {
235 struct scatterlist *src, unsigned int nbytes) 110 .num_funcs = 2,
236{ 111 .fpu_blocks_limit = -1,
237 struct blkcipher_walk walk; 112
238 int err; 113 .funcs = { {
239 114 .num_blocks = 3,
240 blkcipher_walk_init(&walk, dst, src, nbytes); 115 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_3way) }
241 err = blkcipher_walk_virt(desc, &walk); 116 }, {
117 .num_blocks = 1,
118 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk) }
119 } }
120};
242 121
243 while ((nbytes = walk.nbytes)) { 122static const struct common_glue_ctx twofish_ctr = {
244 nbytes = __cbc_decrypt(desc, &walk); 123 .num_funcs = 2,
245 err = blkcipher_walk_done(desc, &walk, nbytes); 124 .fpu_blocks_limit = -1,
246 } 125
126 .funcs = { {
127 .num_blocks = 3,
128 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_ctr_3way) }
129 }, {
130 .num_blocks = 1,
131 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_ctr) }
132 } }
133};
247 134
248 return err; 135static const struct common_glue_ctx twofish_dec = {
249} 136 .num_funcs = 2,
137 .fpu_blocks_limit = -1,
138
139 .funcs = { {
140 .num_blocks = 3,
141 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk_3way) }
142 }, {
143 .num_blocks = 1,
144 .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk) }
145 } }
146};
250 147
251static inline void u128_to_be128(be128 *dst, const u128 *src) 148static const struct common_glue_ctx twofish_dec_cbc = {
252{ 149 .num_funcs = 2,
253 dst->a = cpu_to_be64(src->a); 150 .fpu_blocks_limit = -1,
254 dst->b = cpu_to_be64(src->b); 151
255} 152 .funcs = { {
153 .num_blocks = 3,
154 .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_dec_blk_cbc_3way) }
155 }, {
156 .num_blocks = 1,
157 .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_dec_blk) }
158 } }
159};
256 160
257static inline void be128_to_u128(u128 *dst, const be128 *src) 161static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
162 struct scatterlist *src, unsigned int nbytes)
258{ 163{
259 dst->a = be64_to_cpu(src->a); 164 return glue_ecb_crypt_128bit(&twofish_enc, desc, dst, src, nbytes);
260 dst->b = be64_to_cpu(src->b);
261} 165}
262 166
263static inline void u128_inc(u128 *i) 167static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
168 struct scatterlist *src, unsigned int nbytes)
264{ 169{
265 i->b++; 170 return glue_ecb_crypt_128bit(&twofish_dec, desc, dst, src, nbytes);
266 if (!i->b)
267 i->a++;
268} 171}
269 172
270static void ctr_crypt_final(struct blkcipher_desc *desc, 173static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
271 struct blkcipher_walk *walk) 174 struct scatterlist *src, unsigned int nbytes)
272{ 175{
273 struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); 176 return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(twofish_enc_blk), desc,
274 u8 *ctrblk = walk->iv; 177 dst, src, nbytes);
275 u8 keystream[TF_BLOCK_SIZE];
276 u8 *src = walk->src.virt.addr;
277 u8 *dst = walk->dst.virt.addr;
278 unsigned int nbytes = walk->nbytes;
279
280 twofish_enc_blk(ctx, keystream, ctrblk);
281 crypto_xor(keystream, src, nbytes);
282 memcpy(dst, keystream, nbytes);
283
284 crypto_inc(ctrblk, TF_BLOCK_SIZE);
285} 178}
286 179
287static unsigned int __ctr_crypt(struct blkcipher_desc *desc, 180static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
288 struct blkcipher_walk *walk) 181 struct scatterlist *src, unsigned int nbytes)
289{ 182{
290 struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); 183 return glue_cbc_decrypt_128bit(&twofish_dec_cbc, desc, dst, src,
291 unsigned int bsize = TF_BLOCK_SIZE; 184 nbytes);
292 unsigned int nbytes = walk->nbytes;
293 u128 *src = (u128 *)walk->src.virt.addr;
294 u128 *dst = (u128 *)walk->dst.virt.addr;
295 u128 ctrblk;
296 be128 ctrblocks[3];
297
298 be128_to_u128(&ctrblk, (be128 *)walk->iv);
299
300 /* Process three block batch */
301 if (nbytes >= bsize * 3) {
302 do {
303 if (dst != src) {
304 dst[0] = src[0];
305 dst[1] = src[1];
306 dst[2] = src[2];
307 }
308
309 /* create ctrblks for parallel encrypt */
310 u128_to_be128(&ctrblocks[0], &ctrblk);
311 u128_inc(&ctrblk);
312 u128_to_be128(&ctrblocks[1], &ctrblk);
313 u128_inc(&ctrblk);
314 u128_to_be128(&ctrblocks[2], &ctrblk);
315 u128_inc(&ctrblk);
316
317 twofish_enc_blk_xor_3way(ctx, (u8 *)dst,
318 (u8 *)ctrblocks);
319
320 src += 3;
321 dst += 3;
322 nbytes -= bsize * 3;
323 } while (nbytes >= bsize * 3);
324
325 if (nbytes < bsize)
326 goto done;
327 }
328
329 /* Handle leftovers */
330 do {
331 if (dst != src)
332 *dst = *src;
333
334 u128_to_be128(&ctrblocks[0], &ctrblk);
335 u128_inc(&ctrblk);
336
337 twofish_enc_blk(ctx, (u8 *)ctrblocks, (u8 *)ctrblocks);
338 u128_xor(dst, dst, (u128 *)ctrblocks);
339
340 src += 1;
341 dst += 1;
342 nbytes -= bsize;
343 } while (nbytes >= bsize);
344
345done:
346 u128_to_be128((be128 *)walk->iv, &ctrblk);
347 return nbytes;
348} 185}
349 186
350static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst, 187static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
351 struct scatterlist *src, unsigned int nbytes) 188 struct scatterlist *src, unsigned int nbytes)
352{ 189{
353 struct blkcipher_walk walk; 190 return glue_ctr_crypt_128bit(&twofish_ctr, desc, dst, src, nbytes);
354 int err;
355
356 blkcipher_walk_init(&walk, dst, src, nbytes);
357 err = blkcipher_walk_virt_block(desc, &walk, TF_BLOCK_SIZE);
358
359 while ((nbytes = walk.nbytes) >= TF_BLOCK_SIZE) {
360 nbytes = __ctr_crypt(desc, &walk);
361 err = blkcipher_walk_done(desc, &walk, nbytes);
362 }
363
364 if (walk.nbytes) {
365 ctr_crypt_final(desc, &walk);
366 err = blkcipher_walk_done(desc, &walk, 0);
367 }
368
369 return err;
370} 191}
371 192
372static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes) 193static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)