about summary refs log tree commit diff stats
path: root/crypto
diff options
context:
space:
mode:
authorJussi Kivilinna <jussi.kivilinna@mbnet.fi>2011-11-08 22:56:06 -0500
committerHerbert Xu <herbert@gondor.apana.org.au>2011-11-08 22:56:06 -0500
commitce0045561e1edb92e4a509eb433ff52d3afaa258 (patch)
tree78c8a6fcd82d966aca6a48d17c9063de34e224e9 /crypto
parentf9d2691fc9a00f39b587f965c33cca012a5597bc (diff)
crypto: xts: add interface for parallelized cipher implementations
Add xts_crypt() function that can be used by cipher implementations that can benefit from parallelized cipher operations.

Signed-off-by: Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'crypto')
-rw-r--r--crypto/xts.c75
1 file changed, 73 insertions(+), 2 deletions(-)
diff --git a/crypto/xts.c b/crypto/xts.c
index 96f3f88d576e..ca1608f44cb5 100644
--- a/crypto/xts.c
+++ b/crypto/xts.c
@@ -21,11 +21,10 @@
21#include <linux/scatterlist.h> 21#include <linux/scatterlist.h>
22#include <linux/slab.h> 22#include <linux/slab.h>
23 23
24#include <crypto/xts.h>
24#include <crypto/b128ops.h> 25#include <crypto/b128ops.h>
25#include <crypto/gf128mul.h> 26#include <crypto/gf128mul.h>
26 27
27#define XTS_BLOCK_SIZE 16
28
29struct priv { 28struct priv {
30 struct crypto_cipher *child; 29 struct crypto_cipher *child;
31 struct crypto_cipher *tweak; 30 struct crypto_cipher *tweak;
@@ -167,6 +166,78 @@ static int decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
167 crypto_cipher_alg(ctx->child)->cia_decrypt); 166 crypto_cipher_alg(ctx->child)->cia_decrypt);
168} 167}
169 168
169int xts_crypt(struct blkcipher_desc *desc, struct scatterlist *sdst,
170 struct scatterlist *ssrc, unsigned int nbytes,
171 struct xts_crypt_req *req)
172{
173 const unsigned int bsize = XTS_BLOCK_SIZE;
174 const unsigned int max_blks = req->tbuflen / bsize;
175 struct blkcipher_walk walk;
176 unsigned int nblocks;
177 be128 *src, *dst, *t;
178 be128 *t_buf = req->tbuf;
179 int err, i;
180
181 BUG_ON(max_blks < 1);
182
183 blkcipher_walk_init(&walk, sdst, ssrc, nbytes);
184
185 err = blkcipher_walk_virt(desc, &walk);
186 nbytes = walk.nbytes;
187 if (!nbytes)
188 return err;
189
190 nblocks = min(nbytes / bsize, max_blks);
191 src = (be128 *)walk.src.virt.addr;
192 dst = (be128 *)walk.dst.virt.addr;
193
194 /* calculate first value of T */
195 req->tweak_fn(req->tweak_ctx, (u8 *)&t_buf[0], walk.iv);
196
197 i = 0;
198 goto first;
199
200 for (;;) {
201 do {
202 for (i = 0; i < nblocks; i++) {
203 gf128mul_x_ble(&t_buf[i], t);
204first:
205 t = &t_buf[i];
206
207 /* PP <- T xor P */
208 be128_xor(dst + i, t, src + i);
209 }
210
211 /* CC <- E(Key2,PP) */
212 req->crypt_fn(req->crypt_ctx, (u8 *)dst,
213 nblocks * bsize);
214
215 /* C <- T xor CC */
216 for (i = 0; i < nblocks; i++)
217 be128_xor(dst + i, dst + i, &t_buf[i]);
218
219 src += nblocks;
220 dst += nblocks;
221 nbytes -= nblocks * bsize;
222 nblocks = min(nbytes / bsize, max_blks);
223 } while (nblocks > 0);
224
225 *(be128 *)walk.iv = *t;
226
227 err = blkcipher_walk_done(desc, &walk, nbytes);
228 nbytes = walk.nbytes;
229 if (!nbytes)
230 break;
231
232 nblocks = min(nbytes / bsize, max_blks);
233 src = (be128 *)walk.src.virt.addr;
234 dst = (be128 *)walk.dst.virt.addr;
235 }
236
237 return err;
238}
239EXPORT_SYMBOL_GPL(xts_crypt);
240
170static int init_tfm(struct crypto_tfm *tfm) 241static int init_tfm(struct crypto_tfm *tfm)
171{ 242{
172 struct crypto_cipher *cipher; 243 struct crypto_cipher *cipher;