Diffstat (limited to 'crypto/cipher.c')
-rw-r--r--  crypto/cipher.c | 117
1 file changed, 94 insertions(+), 23 deletions(-)
diff --git a/crypto/cipher.c b/crypto/cipher.c
index b899eb97abd7..9e03701cfdcc 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -23,6 +23,28 @@
 #include "internal.h"
 #include "scatterwalk.h"
 
+struct cipher_alg_compat {
+        unsigned int cia_min_keysize;
+        unsigned int cia_max_keysize;
+        int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
+                          unsigned int keylen);
+        void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
+        void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
+
+        unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc,
+                                        u8 *dst, const u8 *src,
+                                        unsigned int nbytes);
+        unsigned int (*cia_decrypt_ecb)(const struct cipher_desc *desc,
+                                        u8 *dst, const u8 *src,
+                                        unsigned int nbytes);
+        unsigned int (*cia_encrypt_cbc)(const struct cipher_desc *desc,
+                                        u8 *dst, const u8 *src,
+                                        unsigned int nbytes);
+        unsigned int (*cia_decrypt_cbc)(const struct cipher_desc *desc,
+                                        u8 *dst, const u8 *src,
+                                        unsigned int nbytes);
+};
+
 static inline void xor_64(u8 *a, const u8 *b)
 {
         ((u32 *)a)[0] ^= ((u32 *)b)[0];
@@ -45,15 +67,10 @@ static unsigned int crypt_slow(const struct cipher_desc *desc,
         u8 buffer[bsize * 2 + alignmask];
         u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
         u8 *dst = src + bsize;
-        unsigned int n;
-
-        n = scatterwalk_copychunks(src, in, bsize, 0);
-        scatterwalk_advance(in, n);
 
+        scatterwalk_copychunks(src, in, bsize, 0);
         desc->prfn(desc, dst, src, bsize);
-
-        n = scatterwalk_copychunks(dst, out, bsize, 1);
-        scatterwalk_advance(out, n);
+        scatterwalk_copychunks(dst, out, bsize, 1);
 
         return bsize;
 }
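
crypt_slow() keeps its bounce buffers on the stack and aligns them by hand: ALIGN((unsigned long)buffer, alignmask + 1) rounds the buffer address up to the next multiple of the cipher's required alignment (alignmask being that power of two minus one), and the two aligned blocks then sit back to back. A stand-alone sketch of the same round-up; ALIGN_UP below is an illustrative stand-in, not the kernel macro:

#include <stdio.h>
#include <stdint.h>

/* Round x up to the power-of-two boundary a, same idea as the kernel's ALIGN(). */
#define ALIGN_UP(x, a) (((x) + ((a) - 1)) & ~((uintptr_t)(a) - 1))

int main(void)
{
        unsigned long alignmask = 15;          /* e.g. a cipher wanting 16-byte alignment */
        unsigned char buffer[16 * 2 + 15];     /* two 16-byte blocks, padded by alignmask */
        unsigned char *src = (unsigned char *)ALIGN_UP((uintptr_t)buffer, alignmask + 1);
        unsigned char *dst = src + 16;

        printf("buffer=%p src=%p dst=%p src 16-byte aligned: %d\n",
               (void *)buffer, (void *)src, (void *)dst,
               ((uintptr_t)src & alignmask) == 0);
        return 0;
}
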
@@ -64,12 +81,16 @@ static inline unsigned int crypt_fast(const struct cipher_desc *desc,
                                       unsigned int nbytes, u8 *tmp)
 {
         u8 *src, *dst;
+        u8 *real_src, *real_dst;
+
+        real_src = scatterwalk_map(in, 0);
+        real_dst = scatterwalk_map(out, 1);
 
-        src = in->data;
-        dst = scatterwalk_samebuf(in, out) ? src : out->data;
+        src = real_src;
+        dst = scatterwalk_samebuf(in, out) ? src : real_dst;
 
         if (tmp) {
-                memcpy(tmp, in->data, nbytes);
+                memcpy(tmp, src, nbytes);
                 src = tmp;
                 dst = tmp;
         }
@@ -77,7 +98,10 @@ static inline unsigned int crypt_fast(const struct cipher_desc *desc,
         nbytes = desc->prfn(desc, dst, src, nbytes);
 
         if (tmp)
-                memcpy(out->data, tmp, nbytes);
+                memcpy(real_dst, tmp, nbytes);
+
+        scatterwalk_unmap(real_src, 0);
+        scatterwalk_unmap(real_dst, 1);
 
         scatterwalk_advance(in, nbytes);
         scatterwalk_advance(out, nbytes);
@@ -126,9 +150,6 @@ static int crypt(const struct cipher_desc *desc,
                 tmp = (u8 *)buffer;
         }
 
-        scatterwalk_map(&walk_in, 0);
-        scatterwalk_map(&walk_out, 1);
-
         n = scatterwalk_clamp(&walk_in, n);
         n = scatterwalk_clamp(&walk_out, n);
 
@@ -145,7 +166,7 @@ static int crypt(const struct cipher_desc *desc,
                 if (!nbytes)
                         break;
 
-                crypto_yield(tfm);
+                crypto_yield(tfm->crt_flags);
         }
 
         if (buffer)
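
crypt() now hands crypto_yield() the transform's crt_flags instead of the tfm pointer, presumably so the helper can test a may-sleep flag directly rather than reaching back into the tfm. A user-space analogue of that flag-gated yield, with purely illustrative names:

#include <sched.h>

#define REQ_MAY_SLEEP 0x1u  /* illustrative stand-in for a "rescheduling allowed" flag */

/* Yield the CPU only when the caller's flags say blocking is acceptable. */
static void maybe_yield(unsigned int flags)
{
        if (flags & REQ_MAY_SLEEP)
                sched_yield();
}

int main(void)
{
        maybe_yield(REQ_MAY_SLEEP);  /* yields */
        maybe_yield(0);              /* no-op on the atomic/fast path */
        return 0;
}
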
@@ -264,12 +285,12 @@ static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
 {
         struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
 
+        tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
         if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
                 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                 return -EINVAL;
         } else
-                return cia->cia_setkey(tfm, key, keylen,
-                                       &tfm->crt_flags);
+                return cia->cia_setkey(tfm, key, keylen);
 }
 
 static int ecb_encrypt(struct crypto_tfm *tfm,
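
setkey() now clears the stale CRYPTO_TFM_RES_* bits up front, reports a bad key length through crt_flags, and calls cia_setkey() without the old flags pointer. A minimal sketch of that validate-then-delegate shape, using toy names and bounds rather than the real API:

#include <errno.h>
#include <stdio.h>

#define RES_MASK        0xfff0000u  /* illustrative stand-ins for the result-flag bits */
#define RES_BAD_KEY_LEN 0x0200000u

struct toy_cipher {
        unsigned int crt_flags;
        unsigned int min_keysize;
        unsigned int max_keysize;
        int (*setkey)(struct toy_cipher *c, const unsigned char *key,
                      unsigned int keylen);
};

/* Clear stale result bits, bounds-check the key length, then delegate. */
static int toy_setkey(struct toy_cipher *c, const unsigned char *key,
                      unsigned int keylen)
{
        c->crt_flags &= ~RES_MASK;
        if (keylen < c->min_keysize || keylen > c->max_keysize) {
                c->crt_flags |= RES_BAD_KEY_LEN;
                return -EINVAL;
        }
        return c->setkey(c, key, keylen);
}

static int accept_key(struct toy_cipher *c, const unsigned char *key,
                      unsigned int keylen)
{
        (void)c; (void)key; (void)keylen;
        return 0;
}

int main(void)
{
        struct toy_cipher c = { 0, 16, 32, accept_key };
        unsigned char key[8] = { 0 };

        printf("short key -> %d, flags 0x%x\n", toy_setkey(&c, key, 8), c.crt_flags);
        return 0;
}
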
@@ -277,7 +298,7 @@ static int ecb_encrypt(struct crypto_tfm *tfm,
                        struct scatterlist *src, unsigned int nbytes)
 {
         struct cipher_desc desc;
-        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+        struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
 
         desc.tfm = tfm;
         desc.crfn = cipher->cia_encrypt;
@@ -292,7 +313,7 @@ static int ecb_decrypt(struct crypto_tfm *tfm,
                        unsigned int nbytes)
 {
         struct cipher_desc desc;
-        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+        struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
 
         desc.tfm = tfm;
         desc.crfn = cipher->cia_decrypt;
@@ -307,7 +328,7 @@ static int cbc_encrypt(struct crypto_tfm *tfm,
                        unsigned int nbytes)
 {
         struct cipher_desc desc;
-        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+        struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
 
         desc.tfm = tfm;
         desc.crfn = cipher->cia_encrypt;
@@ -323,7 +344,7 @@ static int cbc_encrypt_iv(struct crypto_tfm *tfm,
                           unsigned int nbytes, u8 *iv)
 {
         struct cipher_desc desc;
-        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+        struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
 
         desc.tfm = tfm;
         desc.crfn = cipher->cia_encrypt;
@@ -339,7 +360,7 @@ static int cbc_decrypt(struct crypto_tfm *tfm,
                        unsigned int nbytes)
 {
         struct cipher_desc desc;
-        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+        struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
 
         desc.tfm = tfm;
         desc.crfn = cipher->cia_decrypt;
@@ -355,7 +376,7 @@ static int cbc_decrypt_iv(struct crypto_tfm *tfm,
                           unsigned int nbytes, u8 *iv)
 {
         struct cipher_desc desc;
-        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+        struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
 
         desc.tfm = tfm;
         desc.crfn = cipher->cia_decrypt;
@@ -388,17 +409,67 @@ int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags)
         return 0;
 }
 
+static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *,
+                                              const u8 *),
+                                   struct crypto_tfm *tfm,
+                                   u8 *dst, const u8 *src)
+{
+        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
+        unsigned int size = crypto_tfm_alg_blocksize(tfm);
+        u8 buffer[size + alignmask];
+        u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
+
+        memcpy(tmp, src, size);
+        fn(tfm, tmp, tmp);
+        memcpy(dst, tmp, size);
+}
+
+static void cipher_encrypt_unaligned(struct crypto_tfm *tfm,
+                                     u8 *dst, const u8 *src)
+{
+        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
+        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+
+        if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
+                cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src);
+                return;
+        }
+
+        cipher->cia_encrypt(tfm, dst, src);
+}
+
+static void cipher_decrypt_unaligned(struct crypto_tfm *tfm,
+                                     u8 *dst, const u8 *src)
+{
+        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
+        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+
+        if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
+                cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src);
+                return;
+        }
+
+        cipher->cia_decrypt(tfm, dst, src);
+}
+
 int crypto_init_cipher_ops(struct crypto_tfm *tfm)
 {
         int ret = 0;
         struct cipher_tfm *ops = &tfm->crt_cipher;
+        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
 
         ops->cit_setkey = setkey;
+        ops->cit_encrypt_one = crypto_tfm_alg_alignmask(tfm) ?
+                cipher_encrypt_unaligned : cipher->cia_encrypt;
+        ops->cit_decrypt_one = crypto_tfm_alg_alignmask(tfm) ?
+                cipher_decrypt_unaligned : cipher->cia_decrypt;
 
         switch (tfm->crt_cipher.cit_mode) {
         case CRYPTO_TFM_MODE_ECB:
                 ops->cit_encrypt = ecb_encrypt;
                 ops->cit_decrypt = ecb_decrypt;
+                ops->cit_encrypt_iv = nocrypt_iv;
+                ops->cit_decrypt_iv = nocrypt_iv;
                 break;
 
         case CRYPTO_TFM_MODE_CBC:
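
The new cit_encrypt_one/cit_decrypt_one hooks only pay for a bounce buffer when they must: if neither pointer trips the algorithm's alignmask, the block function runs directly on the caller's buffers; otherwise the data takes a detour through an aligned temporary, as cipher_crypt_unaligned() does above. A stand-alone sketch of that dispatch with toy names and a fixed 16-byte block:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define BLOCK_SIZE 16
#define ALIGNMASK  15u  /* the block function wants 16-byte aligned buffers */

/* Stand-in for a cipher's single-block routine that assumes aligned pointers. */
static void block_fn(uint8_t *dst, const uint8_t *src)
{
        for (int i = 0; i < BLOCK_SIZE; i++)
                dst[i] = src[i] ^ 0xaa;
}

/* Call block_fn() directly when aligned, else bounce through an aligned buffer. */
static void crypt_one(uint8_t *dst, const uint8_t *src)
{
        if (((uintptr_t)dst | (uintptr_t)src) & ALIGNMASK) {
                uint8_t buffer[BLOCK_SIZE + ALIGNMASK];
                uint8_t *tmp = (uint8_t *)(((uintptr_t)buffer + ALIGNMASK) &
                                           ~(uintptr_t)ALIGNMASK);

                memcpy(tmp, src, BLOCK_SIZE);
                block_fn(tmp, tmp);
                memcpy(dst, tmp, BLOCK_SIZE);
                return;
        }
        block_fn(dst, src);
}

int main(void)
{
        uint8_t raw_in[BLOCK_SIZE * 2], raw_out[BLOCK_SIZE * 2];
        /* Setting the low address bit guarantees the pointers are not 16-byte aligned. */
        uint8_t *in  = (uint8_t *)((uintptr_t)raw_in  | 1);
        uint8_t *out = (uint8_t *)((uintptr_t)raw_out | 1);

        memset(raw_in, 0x55, sizeof(raw_in));
        crypt_one(out, in);                   /* takes the bounce-buffer path */
        printf("out[0] = 0x%02x\n", out[0]);  /* 0x55 ^ 0xaa = 0xff */
        return 0;
}
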