Diffstat (limited to 'crypto')
-rw-r--r--  crypto/Kconfig       | 45
-rw-r--r--  crypto/aes.c         | 63
-rw-r--r--  crypto/anubis.c      | 39
-rw-r--r--  crypto/api.c         | 54
-rw-r--r--  crypto/blowfish.c    |  3
-rw-r--r--  crypto/cast5.c       | 47
-rw-r--r--  crypto/cast6.c       | 83
-rw-r--r--  crypto/cipher.c      |  5
-rw-r--r--  crypto/crc32c.c      |  1
-rw-r--r--  crypto/des.c         |  3
-rw-r--r--  crypto/internal.h    |  6
-rw-r--r--  crypto/khazad.c      | 46
-rw-r--r--  crypto/md4.c         |  1
-rw-r--r--  crypto/md5.c         |  1
-rw-r--r--  crypto/michael_mic.c | 40
-rw-r--r--  crypto/proc.c        |  6
-rw-r--r--  crypto/serpent.c     |  2
-rw-r--r--  crypto/sha1.c        | 66
-rw-r--r--  crypto/sha256.c      | 31
-rw-r--r--  crypto/sha512.c      | 54
-rw-r--r--  crypto/tcrypt.c      |  4
-rw-r--r--  crypto/tcrypt.h      | 64
-rw-r--r--  crypto/tea.c         | 98
-rw-r--r--  crypto/tgr192.c      | 64
-rw-r--r--  crypto/twofish.c     | 13
-rw-r--r--  crypto/wp512.c       | 32
26 files changed, 443 insertions, 428 deletions
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 89299f4ffe12..c442f2e7ce46 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -40,10 +40,11 @@ config CRYPTO_SHA1 | |||
40 | help | 40 | help |
41 | SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2). | 41 | SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2). |
42 | 42 | ||
43 | config CRYPTO_SHA1_Z990 | 43 | config CRYPTO_SHA1_S390 |
44 | tristate "SHA1 digest algorithm for IBM zSeries z990" | 44 | tristate "SHA1 digest algorithm (s390)" |
45 | depends on CRYPTO && ARCH_S390 | 45 | depends on CRYPTO && S390 |
46 | help | 46 | help |
47 | This is the s390 hardware accelerated implementation of the | ||
47 | SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2). | 48 | SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2). |
48 | 49 | ||
49 | config CRYPTO_SHA256 | 50 | config CRYPTO_SHA256 |
@@ -55,6 +56,16 @@ config CRYPTO_SHA256 | |||
55 | This version of SHA implements a 256 bit hash with 128 bits of | 56 | This version of SHA implements a 256 bit hash with 128 bits of |
56 | security against collision attacks. | 57 | security against collision attacks. |
57 | 58 | ||
59 | config CRYPTO_SHA256_S390 | ||
60 | tristate "SHA256 digest algorithm (s390)" | ||
61 | depends on CRYPTO && S390 | ||
62 | help | ||
63 | This is the s390 hardware accelerated implementation of the | ||
64 | SHA256 secure hash standard (DFIPS 180-2). | ||
65 | |||
66 | This version of SHA implements a 256 bit hash with 128 bits of | ||
67 | security against collision attacks. | ||
68 | |||
58 | config CRYPTO_SHA512 | 69 | config CRYPTO_SHA512 |
59 | tristate "SHA384 and SHA512 digest algorithms" | 70 | tristate "SHA384 and SHA512 digest algorithms" |
60 | depends on CRYPTO | 71 | depends on CRYPTO |
@@ -98,9 +109,9 @@ config CRYPTO_DES | |||
98 | help | 109 | help |
99 | DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3). | 110 | DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3). |
100 | 111 | ||
101 | config CRYPTO_DES_Z990 | 112 | config CRYPTO_DES_S390 |
102 | tristate "DES and Triple DES cipher algorithms for IBM zSeries z990" | 113 | tristate "DES and Triple DES cipher algorithms (s390)" |
103 | depends on CRYPTO && ARCH_S390 | 114 | depends on CRYPTO && S390 |
104 | help | 115 | help |
105 | DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3). | 116 | DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3). |
106 | 117 | ||
@@ -146,7 +157,7 @@ config CRYPTO_SERPENT | |||
146 | 157 | ||
147 | config CRYPTO_AES | 158 | config CRYPTO_AES |
148 | tristate "AES cipher algorithms" | 159 | tristate "AES cipher algorithms" |
149 | depends on CRYPTO && !(X86 || UML_X86) | 160 | depends on CRYPTO |
150 | help | 161 | help |
151 | AES cipher algorithms (FIPS-197). AES uses the Rijndael | 162 | AES cipher algorithms (FIPS-197). AES uses the Rijndael |
152 | algorithm. | 163 | algorithm. |
@@ -204,6 +215,26 @@ config CRYPTO_AES_X86_64 | |||
204 | 215 | ||
205 | See <http://csrc.nist.gov/encryption/aes/> for more information. | 216 | See <http://csrc.nist.gov/encryption/aes/> for more information. |
206 | 217 | ||
218 | config CRYPTO_AES_S390 | ||
219 | tristate "AES cipher algorithms (s390)" | ||
220 | depends on CRYPTO && S390 | ||
221 | help | ||
222 | This is the s390 hardware accelerated implementation of the | ||
223 | AES cipher algorithms (FIPS-197). AES uses the Rijndael | ||
224 | algorithm. | ||
225 | |||
226 | Rijndael appears to be consistently a very good performer in | ||
227 | both hardware and software across a wide range of computing | ||
228 | environments regardless of its use in feedback or non-feedback | ||
229 | modes. Its key setup time is excellent, and its key agility is | ||
230 | good. Rijndael's very low memory requirements make it very well | ||
231 | suited for restricted-space environments, in which it also | ||
232 | demonstrates excellent performance. Rijndael's operations are | ||
233 | among the easiest to defend against power and timing attacks. | ||
234 | |||
235 | On s390 the System z9-109 currently only supports the key size | ||
236 | of 128 bit. | ||
237 | |||
207 | config CRYPTO_CAST5 | 238 | config CRYPTO_CAST5 |
208 | tristate "CAST5 (CAST-128) cipher algorithm" | 239 | tristate "CAST5 (CAST-128) cipher algorithm" |
209 | depends on CRYPTO | 240 | depends on CRYPTO |
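The Kconfig changes above rename the z990-only options to plain s390 ones, add hardware-accelerated SHA256 and AES variants, and drop the !(X86 || UML_X86) exclusion from the generic AES entry, so a software and a hardware implementation of the same algorithm can now be built side by side. As a purely illustrative example (the symbols are the ones defined above; the run-time selection between them is the crypto/api.c change further down), an s390 .config could carry:

CONFIG_CRYPTO=y
# Generic C implementation, registered below as "aes-generic", priority 100.
CONFIG_CRYPTO_AES=m
# Hardware-accelerated implementation, intended to win the plain "aes"
# lookup through a higher cra_priority.
CONFIG_CRYPTO_AES_S390=m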
diff --git a/crypto/aes.c b/crypto/aes.c
index 5df92888ef5a..0a6a5c143686 100644
--- a/crypto/aes.c
+++ b/crypto/aes.c
@@ -73,9 +73,6 @@ byte(const u32 x, const unsigned n) | |||
73 | return x >> (n << 3); | 73 | return x >> (n << 3); |
74 | } | 74 | } |
75 | 75 | ||
76 | #define u32_in(x) le32_to_cpu(*(const u32 *)(x)) | ||
77 | #define u32_out(to, from) (*(u32 *)(to) = cpu_to_le32(from)) | ||
78 | |||
79 | struct aes_ctx { | 76 | struct aes_ctx { |
80 | int key_length; | 77 | int key_length; |
81 | u32 E[60]; | 78 | u32 E[60]; |
@@ -256,6 +253,7 @@ static int | |||
256 | aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags) | 253 | aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags) |
257 | { | 254 | { |
258 | struct aes_ctx *ctx = ctx_arg; | 255 | struct aes_ctx *ctx = ctx_arg; |
256 | const __le32 *key = (const __le32 *)in_key; | ||
259 | u32 i, t, u, v, w; | 257 | u32 i, t, u, v, w; |
260 | 258 | ||
261 | if (key_len != 16 && key_len != 24 && key_len != 32) { | 259 | if (key_len != 16 && key_len != 24 && key_len != 32) { |
@@ -265,10 +263,10 @@ aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags) | |||
265 | 263 | ||
266 | ctx->key_length = key_len; | 264 | ctx->key_length = key_len; |
267 | 265 | ||
268 | E_KEY[0] = u32_in (in_key); | 266 | E_KEY[0] = le32_to_cpu(key[0]); |
269 | E_KEY[1] = u32_in (in_key + 4); | 267 | E_KEY[1] = le32_to_cpu(key[1]); |
270 | E_KEY[2] = u32_in (in_key + 8); | 268 | E_KEY[2] = le32_to_cpu(key[2]); |
271 | E_KEY[3] = u32_in (in_key + 12); | 269 | E_KEY[3] = le32_to_cpu(key[3]); |
272 | 270 | ||
273 | switch (key_len) { | 271 | switch (key_len) { |
274 | case 16: | 272 | case 16: |
@@ -278,17 +276,17 @@ aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags) | |||
278 | break; | 276 | break; |
279 | 277 | ||
280 | case 24: | 278 | case 24: |
281 | E_KEY[4] = u32_in (in_key + 16); | 279 | E_KEY[4] = le32_to_cpu(key[4]); |
282 | t = E_KEY[5] = u32_in (in_key + 20); | 280 | t = E_KEY[5] = le32_to_cpu(key[5]); |
283 | for (i = 0; i < 8; ++i) | 281 | for (i = 0; i < 8; ++i) |
284 | loop6 (i); | 282 | loop6 (i); |
285 | break; | 283 | break; |
286 | 284 | ||
287 | case 32: | 285 | case 32: |
288 | E_KEY[4] = u32_in (in_key + 16); | 286 | E_KEY[4] = le32_to_cpu(key[4]); |
289 | E_KEY[5] = u32_in (in_key + 20); | 287 | E_KEY[5] = le32_to_cpu(key[5]); |
290 | E_KEY[6] = u32_in (in_key + 24); | 288 | E_KEY[6] = le32_to_cpu(key[6]); |
291 | t = E_KEY[7] = u32_in (in_key + 28); | 289 | t = E_KEY[7] = le32_to_cpu(key[7]); |
292 | for (i = 0; i < 7; ++i) | 290 | for (i = 0; i < 7; ++i) |
293 | loop8 (i); | 291 | loop8 (i); |
294 | break; | 292 | break; |
@@ -324,13 +322,15 @@ aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags) | |||
324 | static void aes_encrypt(void *ctx_arg, u8 *out, const u8 *in) | 322 | static void aes_encrypt(void *ctx_arg, u8 *out, const u8 *in) |
325 | { | 323 | { |
326 | const struct aes_ctx *ctx = ctx_arg; | 324 | const struct aes_ctx *ctx = ctx_arg; |
325 | const __le32 *src = (const __le32 *)in; | ||
326 | __le32 *dst = (__le32 *)out; | ||
327 | u32 b0[4], b1[4]; | 327 | u32 b0[4], b1[4]; |
328 | const u32 *kp = E_KEY + 4; | 328 | const u32 *kp = E_KEY + 4; |
329 | 329 | ||
330 | b0[0] = u32_in (in) ^ E_KEY[0]; | 330 | b0[0] = le32_to_cpu(src[0]) ^ E_KEY[0]; |
331 | b0[1] = u32_in (in + 4) ^ E_KEY[1]; | 331 | b0[1] = le32_to_cpu(src[1]) ^ E_KEY[1]; |
332 | b0[2] = u32_in (in + 8) ^ E_KEY[2]; | 332 | b0[2] = le32_to_cpu(src[2]) ^ E_KEY[2]; |
333 | b0[3] = u32_in (in + 12) ^ E_KEY[3]; | 333 | b0[3] = le32_to_cpu(src[3]) ^ E_KEY[3]; |
334 | 334 | ||
335 | if (ctx->key_length > 24) { | 335 | if (ctx->key_length > 24) { |
336 | f_nround (b1, b0, kp); | 336 | f_nround (b1, b0, kp); |
@@ -353,10 +353,10 @@ static void aes_encrypt(void *ctx_arg, u8 *out, const u8 *in) | |||
353 | f_nround (b1, b0, kp); | 353 | f_nround (b1, b0, kp); |
354 | f_lround (b0, b1, kp); | 354 | f_lround (b0, b1, kp); |
355 | 355 | ||
356 | u32_out (out, b0[0]); | 356 | dst[0] = cpu_to_le32(b0[0]); |
357 | u32_out (out + 4, b0[1]); | 357 | dst[1] = cpu_to_le32(b0[1]); |
358 | u32_out (out + 8, b0[2]); | 358 | dst[2] = cpu_to_le32(b0[2]); |
359 | u32_out (out + 12, b0[3]); | 359 | dst[3] = cpu_to_le32(b0[3]); |
360 | } | 360 | } |
361 | 361 | ||
362 | /* decrypt a block of text */ | 362 | /* decrypt a block of text */ |
@@ -377,14 +377,16 @@ static void aes_encrypt(void *ctx_arg, u8 *out, const u8 *in) | |||
377 | static void aes_decrypt(void *ctx_arg, u8 *out, const u8 *in) | 377 | static void aes_decrypt(void *ctx_arg, u8 *out, const u8 *in) |
378 | { | 378 | { |
379 | const struct aes_ctx *ctx = ctx_arg; | 379 | const struct aes_ctx *ctx = ctx_arg; |
380 | const __le32 *src = (const __le32 *)in; | ||
381 | __le32 *dst = (__le32 *)out; | ||
380 | u32 b0[4], b1[4]; | 382 | u32 b0[4], b1[4]; |
381 | const int key_len = ctx->key_length; | 383 | const int key_len = ctx->key_length; |
382 | const u32 *kp = D_KEY + key_len + 20; | 384 | const u32 *kp = D_KEY + key_len + 20; |
383 | 385 | ||
384 | b0[0] = u32_in (in) ^ E_KEY[key_len + 24]; | 386 | b0[0] = le32_to_cpu(src[0]) ^ E_KEY[key_len + 24]; |
385 | b0[1] = u32_in (in + 4) ^ E_KEY[key_len + 25]; | 387 | b0[1] = le32_to_cpu(src[1]) ^ E_KEY[key_len + 25]; |
386 | b0[2] = u32_in (in + 8) ^ E_KEY[key_len + 26]; | 388 | b0[2] = le32_to_cpu(src[2]) ^ E_KEY[key_len + 26]; |
387 | b0[3] = u32_in (in + 12) ^ E_KEY[key_len + 27]; | 389 | b0[3] = le32_to_cpu(src[3]) ^ E_KEY[key_len + 27]; |
388 | 390 | ||
389 | if (key_len > 24) { | 391 | if (key_len > 24) { |
390 | i_nround (b1, b0, kp); | 392 | i_nround (b1, b0, kp); |
@@ -407,18 +409,21 @@ static void aes_decrypt(void *ctx_arg, u8 *out, const u8 *in) | |||
407 | i_nround (b1, b0, kp); | 409 | i_nround (b1, b0, kp); |
408 | i_lround (b0, b1, kp); | 410 | i_lround (b0, b1, kp); |
409 | 411 | ||
410 | u32_out (out, b0[0]); | 412 | dst[0] = cpu_to_le32(b0[0]); |
411 | u32_out (out + 4, b0[1]); | 413 | dst[1] = cpu_to_le32(b0[1]); |
412 | u32_out (out + 8, b0[2]); | 414 | dst[2] = cpu_to_le32(b0[2]); |
413 | u32_out (out + 12, b0[3]); | 415 | dst[3] = cpu_to_le32(b0[3]); |
414 | } | 416 | } |
415 | 417 | ||
416 | 418 | ||
417 | static struct crypto_alg aes_alg = { | 419 | static struct crypto_alg aes_alg = { |
418 | .cra_name = "aes", | 420 | .cra_name = "aes", |
421 | .cra_driver_name = "aes-generic", | ||
422 | .cra_priority = 100, | ||
419 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, | 423 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, |
420 | .cra_blocksize = AES_BLOCK_SIZE, | 424 | .cra_blocksize = AES_BLOCK_SIZE, |
421 | .cra_ctxsize = sizeof(struct aes_ctx), | 425 | .cra_ctxsize = sizeof(struct aes_ctx), |
426 | .cra_alignmask = 3, | ||
422 | .cra_module = THIS_MODULE, | 427 | .cra_module = THIS_MODULE, |
423 | .cra_list = LIST_HEAD_INIT(aes_alg.cra_list), | 428 | .cra_list = LIST_HEAD_INIT(aes_alg.cra_list), |
424 | .cra_u = { | 429 | .cra_u = { |
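The aes.c hunks show the pattern this patch applies to most of the ciphers: the private u32_in()/u32_out() byte-shuffling macros disappear in favour of casting the block pointers to __le32 (or __be32 for big-endian ciphers) and going through le32_to_cpu()/cpu_to_le32(), while a new .cra_alignmask = 3 tells the API that the cipher wants 4-byte-aligned blocks. A minimal sketch of the resulting access pattern, written here for illustration and not taken from the patch (the helper names are invented):

#include <asm/byteorder.h>
#include <linux/types.h>

/*
 * Load/store one 32-bit little-endian word of a cipher block.  The casts
 * are only safe because .cra_alignmask = 3 guarantees that the crypto
 * layer presents 4-byte-aligned buffers to the cipher.
 */
static inline u32 aes_load_le32(const u8 *p)
{
	return le32_to_cpu(*(const __le32 *)p);
}

static inline void aes_store_le32(u8 *p, u32 v)
{
	*(__le32 *)p = cpu_to_le32(v);
}

The generic AES also gains .cra_driver_name = "aes-generic" and .cra_priority = 100, which is what the new lookup logic in crypto/api.c keys on.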
diff --git a/crypto/anubis.c b/crypto/anubis.c
index 3925eb0133cb..2c796bdb91a6 100644
--- a/crypto/anubis.c
+++ b/crypto/anubis.c
@@ -32,8 +32,10 @@ | |||
32 | #include <linux/init.h> | 32 | #include <linux/init.h> |
33 | #include <linux/module.h> | 33 | #include <linux/module.h> |
34 | #include <linux/mm.h> | 34 | #include <linux/mm.h> |
35 | #include <asm/byteorder.h> | ||
35 | #include <asm/scatterlist.h> | 36 | #include <asm/scatterlist.h> |
36 | #include <linux/crypto.h> | 37 | #include <linux/crypto.h> |
38 | #include <linux/types.h> | ||
37 | 39 | ||
38 | #define ANUBIS_MIN_KEY_SIZE 16 | 40 | #define ANUBIS_MIN_KEY_SIZE 16 |
39 | #define ANUBIS_MAX_KEY_SIZE 40 | 41 | #define ANUBIS_MAX_KEY_SIZE 40 |
@@ -461,8 +463,8 @@ static const u32 rc[] = { | |||
461 | static int anubis_setkey(void *ctx_arg, const u8 *in_key, | 463 | static int anubis_setkey(void *ctx_arg, const u8 *in_key, |
462 | unsigned int key_len, u32 *flags) | 464 | unsigned int key_len, u32 *flags) |
463 | { | 465 | { |
464 | 466 | const __be32 *key = (const __be32 *)in_key; | |
465 | int N, R, i, pos, r; | 467 | int N, R, i, r; |
466 | u32 kappa[ANUBIS_MAX_N]; | 468 | u32 kappa[ANUBIS_MAX_N]; |
467 | u32 inter[ANUBIS_MAX_N]; | 469 | u32 inter[ANUBIS_MAX_N]; |
468 | 470 | ||
@@ -483,13 +485,8 @@ static int anubis_setkey(void *ctx_arg, const u8 *in_key, | |||
483 | ctx->R = R = 8 + N; | 485 | ctx->R = R = 8 + N; |
484 | 486 | ||
485 | /* * map cipher key to initial key state (mu): */ | 487 | /* * map cipher key to initial key state (mu): */ |
486 | for (i = 0, pos = 0; i < N; i++, pos += 4) { | 488 | for (i = 0; i < N; i++) |
487 | kappa[i] = | 489 | kappa[i] = be32_to_cpu(key[i]); |
488 | (in_key[pos ] << 24) ^ | ||
489 | (in_key[pos + 1] << 16) ^ | ||
490 | (in_key[pos + 2] << 8) ^ | ||
491 | (in_key[pos + 3] ); | ||
492 | } | ||
493 | 490 | ||
494 | /* | 491 | /* |
495 | * generate R + 1 round keys: | 492 | * generate R + 1 round keys: |
@@ -578,7 +575,9 @@ static int anubis_setkey(void *ctx_arg, const u8 *in_key, | |||
578 | static void anubis_crypt(u32 roundKey[ANUBIS_MAX_ROUNDS + 1][4], | 575 | static void anubis_crypt(u32 roundKey[ANUBIS_MAX_ROUNDS + 1][4], |
579 | u8 *ciphertext, const u8 *plaintext, const int R) | 576 | u8 *ciphertext, const u8 *plaintext, const int R) |
580 | { | 577 | { |
581 | int i, pos, r; | 578 | const __be32 *src = (const __be32 *)plaintext; |
579 | __be32 *dst = (__be32 *)ciphertext; | ||
580 | int i, r; | ||
582 | u32 state[4]; | 581 | u32 state[4]; |
583 | u32 inter[4]; | 582 | u32 inter[4]; |
584 | 583 | ||
@@ -586,14 +585,8 @@ static void anubis_crypt(u32 roundKey[ANUBIS_MAX_ROUNDS + 1][4], | |||
586 | * map plaintext block to cipher state (mu) | 585 | * map plaintext block to cipher state (mu) |
587 | * and add initial round key (sigma[K^0]): | 586 | * and add initial round key (sigma[K^0]): |
588 | */ | 587 | */ |
589 | for (i = 0, pos = 0; i < 4; i++, pos += 4) { | 588 | for (i = 0; i < 4; i++) |
590 | state[i] = | 589 | state[i] = be32_to_cpu(src[i]) ^ roundKey[0][i]; |
591 | (plaintext[pos ] << 24) ^ | ||
592 | (plaintext[pos + 1] << 16) ^ | ||
593 | (plaintext[pos + 2] << 8) ^ | ||
594 | (plaintext[pos + 3] ) ^ | ||
595 | roundKey[0][i]; | ||
596 | } | ||
597 | 590 | ||
598 | /* | 591 | /* |
599 | * R - 1 full rounds: | 592 | * R - 1 full rounds: |
@@ -663,13 +656,8 @@ static void anubis_crypt(u32 roundKey[ANUBIS_MAX_ROUNDS + 1][4], | |||
663 | * map cipher state to ciphertext block (mu^{-1}): | 656 | * map cipher state to ciphertext block (mu^{-1}): |
664 | */ | 657 | */ |
665 | 658 | ||
666 | for (i = 0, pos = 0; i < 4; i++, pos += 4) { | 659 | for (i = 0; i < 4; i++) |
667 | u32 w = inter[i]; | 660 | dst[i] = cpu_to_be32(inter[i]); |
668 | ciphertext[pos ] = (u8)(w >> 24); | ||
669 | ciphertext[pos + 1] = (u8)(w >> 16); | ||
670 | ciphertext[pos + 2] = (u8)(w >> 8); | ||
671 | ciphertext[pos + 3] = (u8)(w ); | ||
672 | } | ||
673 | } | 661 | } |
674 | 662 | ||
675 | static void anubis_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | 663 | static void anubis_encrypt(void *ctx_arg, u8 *dst, const u8 *src) |
@@ -689,6 +677,7 @@ static struct crypto_alg anubis_alg = { | |||
689 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, | 677 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, |
690 | .cra_blocksize = ANUBIS_BLOCK_SIZE, | 678 | .cra_blocksize = ANUBIS_BLOCK_SIZE, |
691 | .cra_ctxsize = sizeof (struct anubis_ctx), | 679 | .cra_ctxsize = sizeof (struct anubis_ctx), |
680 | .cra_alignmask = 3, | ||
692 | .cra_module = THIS_MODULE, | 681 | .cra_module = THIS_MODULE, |
693 | .cra_list = LIST_HEAD_INIT(anubis_alg.cra_list), | 682 | .cra_list = LIST_HEAD_INIT(anubis_alg.cra_list), |
694 | .cra_u = { .cipher = { | 683 | .cra_u = { .cipher = { |
diff --git a/crypto/api.c b/crypto/api.c
index 40ae42e9b6a6..e26156f71839 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -3,6 +3,7 @@ | |||
3 | * | 3 | * |
4 | * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> | 4 | * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> |
5 | * Copyright (c) 2002 David S. Miller (davem@redhat.com) | 5 | * Copyright (c) 2002 David S. Miller (davem@redhat.com) |
6 | * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au> | ||
6 | * | 7 | * |
7 | * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no> | 8 | * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no> |
8 | * and Nettle, by Niels Möller. | 9 | * and Nettle, by Niels Möller. |
@@ -18,9 +19,11 @@ | |||
18 | #include <linux/init.h> | 19 | #include <linux/init.h> |
19 | #include <linux/crypto.h> | 20 | #include <linux/crypto.h> |
20 | #include <linux/errno.h> | 21 | #include <linux/errno.h> |
22 | #include <linux/kernel.h> | ||
21 | #include <linux/kmod.h> | 23 | #include <linux/kmod.h> |
22 | #include <linux/rwsem.h> | 24 | #include <linux/rwsem.h> |
23 | #include <linux/slab.h> | 25 | #include <linux/slab.h> |
26 | #include <linux/string.h> | ||
24 | #include "internal.h" | 27 | #include "internal.h" |
25 | 28 | ||
26 | LIST_HEAD(crypto_alg_list); | 29 | LIST_HEAD(crypto_alg_list); |
@@ -39,6 +42,7 @@ static inline void crypto_alg_put(struct crypto_alg *alg) | |||
39 | static struct crypto_alg *crypto_alg_lookup(const char *name) | 42 | static struct crypto_alg *crypto_alg_lookup(const char *name) |
40 | { | 43 | { |
41 | struct crypto_alg *q, *alg = NULL; | 44 | struct crypto_alg *q, *alg = NULL; |
45 | int best = -1; | ||
42 | 46 | ||
43 | if (!name) | 47 | if (!name) |
44 | return NULL; | 48 | return NULL; |
@@ -46,11 +50,23 @@ static struct crypto_alg *crypto_alg_lookup(const char *name) | |||
46 | down_read(&crypto_alg_sem); | 50 | down_read(&crypto_alg_sem); |
47 | 51 | ||
48 | list_for_each_entry(q, &crypto_alg_list, cra_list) { | 52 | list_for_each_entry(q, &crypto_alg_list, cra_list) { |
49 | if (!(strcmp(q->cra_name, name))) { | 53 | int exact, fuzzy; |
50 | if (crypto_alg_get(q)) | 54 | |
51 | alg = q; | 55 | exact = !strcmp(q->cra_driver_name, name); |
56 | fuzzy = !strcmp(q->cra_name, name); | ||
57 | if (!exact && !(fuzzy && q->cra_priority > best)) | ||
58 | continue; | ||
59 | |||
60 | if (unlikely(!crypto_alg_get(q))) | ||
61 | continue; | ||
62 | |||
63 | best = q->cra_priority; | ||
64 | if (alg) | ||
65 | crypto_alg_put(alg); | ||
66 | alg = q; | ||
67 | |||
68 | if (exact) | ||
52 | break; | 69 | break; |
53 | } | ||
54 | } | 70 | } |
55 | 71 | ||
56 | up_read(&crypto_alg_sem); | 72 | up_read(&crypto_alg_sem); |
@@ -207,9 +223,26 @@ void crypto_free_tfm(struct crypto_tfm *tfm) | |||
207 | kfree(tfm); | 223 | kfree(tfm); |
208 | } | 224 | } |
209 | 225 | ||
226 | static inline int crypto_set_driver_name(struct crypto_alg *alg) | ||
227 | { | ||
228 | static const char suffix[] = "-generic"; | ||
229 | char *driver_name = (char *)alg->cra_driver_name; | ||
230 | int len; | ||
231 | |||
232 | if (*driver_name) | ||
233 | return 0; | ||
234 | |||
235 | len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME); | ||
236 | if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME) | ||
237 | return -ENAMETOOLONG; | ||
238 | |||
239 | memcpy(driver_name + len, suffix, sizeof(suffix)); | ||
240 | return 0; | ||
241 | } | ||
242 | |||
210 | int crypto_register_alg(struct crypto_alg *alg) | 243 | int crypto_register_alg(struct crypto_alg *alg) |
211 | { | 244 | { |
212 | int ret = 0; | 245 | int ret; |
213 | struct crypto_alg *q; | 246 | struct crypto_alg *q; |
214 | 247 | ||
215 | if (alg->cra_alignmask & (alg->cra_alignmask + 1)) | 248 | if (alg->cra_alignmask & (alg->cra_alignmask + 1)) |
@@ -218,13 +251,20 @@ int crypto_register_alg(struct crypto_alg *alg) | |||
218 | if (alg->cra_alignmask & alg->cra_blocksize) | 251 | if (alg->cra_alignmask & alg->cra_blocksize) |
219 | return -EINVAL; | 252 | return -EINVAL; |
220 | 253 | ||
221 | if (alg->cra_blocksize > PAGE_SIZE) | 254 | if (alg->cra_blocksize > PAGE_SIZE / 8) |
255 | return -EINVAL; | ||
256 | |||
257 | if (alg->cra_priority < 0) | ||
222 | return -EINVAL; | 258 | return -EINVAL; |
223 | 259 | ||
260 | ret = crypto_set_driver_name(alg); | ||
261 | if (unlikely(ret)) | ||
262 | return ret; | ||
263 | |||
224 | down_write(&crypto_alg_sem); | 264 | down_write(&crypto_alg_sem); |
225 | 265 | ||
226 | list_for_each_entry(q, &crypto_alg_list, cra_list) { | 266 | list_for_each_entry(q, &crypto_alg_list, cra_list) { |
227 | if (!(strcmp(q->cra_name, alg->cra_name))) { | 267 | if (!strcmp(q->cra_driver_name, alg->cra_driver_name)) { |
228 | ret = -EEXIST; | 268 | ret = -EEXIST; |
229 | goto out; | 269 | goto out; |
230 | } | 270 | } |
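The api.c changes are the heart of the patch: every algorithm now carries a cra_driver_name (filled in with a "-generic" suffix by crypto_set_driver_name() if the author left it empty) and a non-negative cra_priority, registration rejects duplicate driver names, and crypto_alg_lookup() returns either an exact driver-name match or, failing that, the highest-priority algorithm whose cra_name matches. Seen from a caller, the effect is roughly the following (a hedged sketch; crypto_alloc_tfm() is the existing allocation entry point, and the names assume the generic AES from this patch plus an optional accelerated driver):

#include <linux/crypto.h>

static struct crypto_tfm *pick_aes(int want_generic)
{
	/*
	 * "aes-generic" matches cra_driver_name and pins the plain C
	 * implementation; "aes" matches cra_name and returns whichever
	 * registered implementation has the highest cra_priority, e.g. a
	 * hardware driver in preference to the software fallback.
	 */
	return crypto_alloc_tfm(want_generic ? "aes-generic" : "aes", 0);
}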
diff --git a/crypto/blowfish.c b/crypto/blowfish.c
index a8b29d54e7d8..7f710b201f20 100644
--- a/crypto/blowfish.c
+++ b/crypto/blowfish.c
@@ -19,8 +19,10 @@ | |||
19 | #include <linux/init.h> | 19 | #include <linux/init.h> |
20 | #include <linux/module.h> | 20 | #include <linux/module.h> |
21 | #include <linux/mm.h> | 21 | #include <linux/mm.h> |
22 | #include <asm/byteorder.h> | ||
22 | #include <asm/scatterlist.h> | 23 | #include <asm/scatterlist.h> |
23 | #include <linux/crypto.h> | 24 | #include <linux/crypto.h> |
25 | #include <linux/types.h> | ||
24 | 26 | ||
25 | #define BF_BLOCK_SIZE 8 | 27 | #define BF_BLOCK_SIZE 8 |
26 | #define BF_MIN_KEY_SIZE 4 | 28 | #define BF_MIN_KEY_SIZE 4 |
@@ -451,6 +453,7 @@ static struct crypto_alg alg = { | |||
451 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, | 453 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, |
452 | .cra_blocksize = BF_BLOCK_SIZE, | 454 | .cra_blocksize = BF_BLOCK_SIZE, |
453 | .cra_ctxsize = sizeof(struct bf_ctx), | 455 | .cra_ctxsize = sizeof(struct bf_ctx), |
456 | .cra_alignmask = 3, | ||
454 | .cra_module = THIS_MODULE, | 457 | .cra_module = THIS_MODULE, |
455 | .cra_list = LIST_HEAD_INIT(alg.cra_list), | 458 | .cra_list = LIST_HEAD_INIT(alg.cra_list), |
456 | .cra_u = { .cipher = { | 459 | .cra_u = { .cipher = { |
diff --git a/crypto/cast5.c b/crypto/cast5.c
index bc42f42b4fe3..8834c8580c04 100644
--- a/crypto/cast5.c
+++ b/crypto/cast5.c
@@ -21,11 +21,13 @@ | |||
21 | */ | 21 | */ |
22 | 22 | ||
23 | 23 | ||
24 | #include <asm/byteorder.h> | ||
24 | #include <linux/init.h> | 25 | #include <linux/init.h> |
25 | #include <linux/crypto.h> | 26 | #include <linux/crypto.h> |
26 | #include <linux/module.h> | 27 | #include <linux/module.h> |
27 | #include <linux/errno.h> | 28 | #include <linux/errno.h> |
28 | #include <linux/string.h> | 29 | #include <linux/string.h> |
30 | #include <linux/types.h> | ||
29 | 31 | ||
30 | #define CAST5_BLOCK_SIZE 8 | 32 | #define CAST5_BLOCK_SIZE 8 |
31 | #define CAST5_MIN_KEY_SIZE 5 | 33 | #define CAST5_MIN_KEY_SIZE 5 |
@@ -578,6 +580,8 @@ static const u32 sb8[256] = { | |||
578 | static void cast5_encrypt(void *ctx, u8 * outbuf, const u8 * inbuf) | 580 | static void cast5_encrypt(void *ctx, u8 * outbuf, const u8 * inbuf) |
579 | { | 581 | { |
580 | struct cast5_ctx *c = (struct cast5_ctx *) ctx; | 582 | struct cast5_ctx *c = (struct cast5_ctx *) ctx; |
583 | const __be32 *src = (const __be32 *)inbuf; | ||
584 | __be32 *dst = (__be32 *)outbuf; | ||
581 | u32 l, r, t; | 585 | u32 l, r, t; |
582 | u32 I; /* used by the Fx macros */ | 586 | u32 I; /* used by the Fx macros */ |
583 | u32 *Km; | 587 | u32 *Km; |
@@ -589,8 +593,8 @@ static void cast5_encrypt(void *ctx, u8 * outbuf, const u8 * inbuf) | |||
589 | /* (L0,R0) <-- (m1...m64). (Split the plaintext into left and | 593 | /* (L0,R0) <-- (m1...m64). (Split the plaintext into left and |
590 | * right 32-bit halves L0 = m1...m32 and R0 = m33...m64.) | 594 | * right 32-bit halves L0 = m1...m32 and R0 = m33...m64.) |
591 | */ | 595 | */ |
592 | l = inbuf[0] << 24 | inbuf[1] << 16 | inbuf[2] << 8 | inbuf[3]; | 596 | l = be32_to_cpu(src[0]); |
593 | r = inbuf[4] << 24 | inbuf[5] << 16 | inbuf[6] << 8 | inbuf[7]; | 597 | r = be32_to_cpu(src[1]); |
594 | 598 | ||
595 | /* (16 rounds) for i from 1 to 16, compute Li and Ri as follows: | 599 | /* (16 rounds) for i from 1 to 16, compute Li and Ri as follows: |
596 | * Li = Ri-1; | 600 | * Li = Ri-1; |
@@ -634,19 +638,15 @@ static void cast5_encrypt(void *ctx, u8 * outbuf, const u8 * inbuf) | |||
634 | 638 | ||
635 | /* c1...c64 <-- (R16,L16). (Exchange final blocks L16, R16 and | 639 | /* c1...c64 <-- (R16,L16). (Exchange final blocks L16, R16 and |
636 | * concatenate to form the ciphertext.) */ | 640 | * concatenate to form the ciphertext.) */ |
637 | outbuf[0] = (r >> 24) & 0xff; | 641 | dst[0] = cpu_to_be32(r); |
638 | outbuf[1] = (r >> 16) & 0xff; | 642 | dst[1] = cpu_to_be32(l); |
639 | outbuf[2] = (r >> 8) & 0xff; | ||
640 | outbuf[3] = r & 0xff; | ||
641 | outbuf[4] = (l >> 24) & 0xff; | ||
642 | outbuf[5] = (l >> 16) & 0xff; | ||
643 | outbuf[6] = (l >> 8) & 0xff; | ||
644 | outbuf[7] = l & 0xff; | ||
645 | } | 643 | } |
646 | 644 | ||
647 | static void cast5_decrypt(void *ctx, u8 * outbuf, const u8 * inbuf) | 645 | static void cast5_decrypt(void *ctx, u8 * outbuf, const u8 * inbuf) |
648 | { | 646 | { |
649 | struct cast5_ctx *c = (struct cast5_ctx *) ctx; | 647 | struct cast5_ctx *c = (struct cast5_ctx *) ctx; |
648 | const __be32 *src = (const __be32 *)inbuf; | ||
649 | __be32 *dst = (__be32 *)outbuf; | ||
650 | u32 l, r, t; | 650 | u32 l, r, t; |
651 | u32 I; | 651 | u32 I; |
652 | u32 *Km; | 652 | u32 *Km; |
@@ -655,8 +655,8 @@ static void cast5_decrypt(void *ctx, u8 * outbuf, const u8 * inbuf) | |||
655 | Km = c->Km; | 655 | Km = c->Km; |
656 | Kr = c->Kr; | 656 | Kr = c->Kr; |
657 | 657 | ||
658 | l = inbuf[0] << 24 | inbuf[1] << 16 | inbuf[2] << 8 | inbuf[3]; | 658 | l = be32_to_cpu(src[0]); |
659 | r = inbuf[4] << 24 | inbuf[5] << 16 | inbuf[6] << 8 | inbuf[7]; | 659 | r = be32_to_cpu(src[1]); |
660 | 660 | ||
661 | if (!(c->rr)) { | 661 | if (!(c->rr)) { |
662 | t = l; l = r; r = t ^ F1(r, Km[15], Kr[15]); | 662 | t = l; l = r; r = t ^ F1(r, Km[15], Kr[15]); |
@@ -690,14 +690,8 @@ static void cast5_decrypt(void *ctx, u8 * outbuf, const u8 * inbuf) | |||
690 | t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]); | 690 | t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]); |
691 | } | 691 | } |
692 | 692 | ||
693 | outbuf[0] = (r >> 24) & 0xff; | 693 | dst[0] = cpu_to_be32(r); |
694 | outbuf[1] = (r >> 16) & 0xff; | 694 | dst[1] = cpu_to_be32(l); |
695 | outbuf[2] = (r >> 8) & 0xff; | ||
696 | outbuf[3] = r & 0xff; | ||
697 | outbuf[4] = (l >> 24) & 0xff; | ||
698 | outbuf[5] = (l >> 16) & 0xff; | ||
699 | outbuf[6] = (l >> 8) & 0xff; | ||
700 | outbuf[7] = l & 0xff; | ||
701 | } | 695 | } |
702 | 696 | ||
703 | static void key_schedule(u32 * x, u32 * z, u32 * k) | 697 | static void key_schedule(u32 * x, u32 * z, u32 * k) |
@@ -782,7 +776,7 @@ cast5_setkey(void *ctx, const u8 * key, unsigned key_len, u32 * flags) | |||
782 | u32 x[4]; | 776 | u32 x[4]; |
783 | u32 z[4]; | 777 | u32 z[4]; |
784 | u32 k[16]; | 778 | u32 k[16]; |
785 | u8 p_key[16]; | 779 | __be32 p_key[4]; |
786 | struct cast5_ctx *c = (struct cast5_ctx *) ctx; | 780 | struct cast5_ctx *c = (struct cast5_ctx *) ctx; |
787 | 781 | ||
788 | if (key_len < 5 || key_len > 16) { | 782 | if (key_len < 5 || key_len > 16) { |
@@ -796,12 +790,10 @@ cast5_setkey(void *ctx, const u8 * key, unsigned key_len, u32 * flags) | |||
796 | memcpy(p_key, key, key_len); | 790 | memcpy(p_key, key, key_len); |
797 | 791 | ||
798 | 792 | ||
799 | x[0] = p_key[0] << 24 | p_key[1] << 16 | p_key[2] << 8 | p_key[3]; | 793 | x[0] = be32_to_cpu(p_key[0]); |
800 | x[1] = p_key[4] << 24 | p_key[5] << 16 | p_key[6] << 8 | p_key[7]; | 794 | x[1] = be32_to_cpu(p_key[1]); |
801 | x[2] = | 795 | x[2] = be32_to_cpu(p_key[2]); |
802 | p_key[8] << 24 | p_key[9] << 16 | p_key[10] << 8 | p_key[11]; | 796 | x[3] = be32_to_cpu(p_key[3]); |
803 | x[3] = | ||
804 | p_key[12] << 24 | p_key[13] << 16 | p_key[14] << 8 | p_key[15]; | ||
805 | 797 | ||
806 | key_schedule(x, z, k); | 798 | key_schedule(x, z, k); |
807 | for (i = 0; i < 16; i++) | 799 | for (i = 0; i < 16; i++) |
@@ -817,6 +809,7 @@ static struct crypto_alg alg = { | |||
817 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, | 809 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, |
818 | .cra_blocksize = CAST5_BLOCK_SIZE, | 810 | .cra_blocksize = CAST5_BLOCK_SIZE, |
819 | .cra_ctxsize = sizeof(struct cast5_ctx), | 811 | .cra_ctxsize = sizeof(struct cast5_ctx), |
812 | .cra_alignmask = 3, | ||
820 | .cra_module = THIS_MODULE, | 813 | .cra_module = THIS_MODULE, |
821 | .cra_list = LIST_HEAD_INIT(alg.cra_list), | 814 | .cra_list = LIST_HEAD_INIT(alg.cra_list), |
822 | .cra_u = { | 815 | .cra_u = { |
diff --git a/crypto/cast6.c b/crypto/cast6.c
index 3eb081073423..9e28740ba775 100644
--- a/crypto/cast6.c
+++ b/crypto/cast6.c
@@ -18,11 +18,13 @@ | |||
18 | */ | 18 | */ |
19 | 19 | ||
20 | 20 | ||
21 | #include <asm/byteorder.h> | ||
21 | #include <linux/init.h> | 22 | #include <linux/init.h> |
22 | #include <linux/crypto.h> | 23 | #include <linux/crypto.h> |
23 | #include <linux/module.h> | 24 | #include <linux/module.h> |
24 | #include <linux/errno.h> | 25 | #include <linux/errno.h> |
25 | #include <linux/string.h> | 26 | #include <linux/string.h> |
27 | #include <linux/types.h> | ||
26 | 28 | ||
27 | #define CAST6_BLOCK_SIZE 16 | 29 | #define CAST6_BLOCK_SIZE 16 |
28 | #define CAST6_MIN_KEY_SIZE 16 | 30 | #define CAST6_MIN_KEY_SIZE 16 |
@@ -384,7 +386,7 @@ cast6_setkey(void *ctx, const u8 * in_key, unsigned key_len, u32 * flags) | |||
384 | { | 386 | { |
385 | int i; | 387 | int i; |
386 | u32 key[8]; | 388 | u32 key[8]; |
387 | u8 p_key[32]; /* padded key */ | 389 | __be32 p_key[8]; /* padded key */ |
388 | struct cast6_ctx *c = (struct cast6_ctx *) ctx; | 390 | struct cast6_ctx *c = (struct cast6_ctx *) ctx; |
389 | 391 | ||
390 | if (key_len < 16 || key_len > 32 || key_len % 4 != 0) { | 392 | if (key_len < 16 || key_len > 32 || key_len % 4 != 0) { |
@@ -395,14 +397,14 @@ cast6_setkey(void *ctx, const u8 * in_key, unsigned key_len, u32 * flags) | |||
395 | memset (p_key, 0, 32); | 397 | memset (p_key, 0, 32); |
396 | memcpy (p_key, in_key, key_len); | 398 | memcpy (p_key, in_key, key_len); |
397 | 399 | ||
398 | key[0] = p_key[0] << 24 | p_key[1] << 16 | p_key[2] << 8 | p_key[3]; /* A */ | 400 | key[0] = be32_to_cpu(p_key[0]); /* A */ |
399 | key[1] = p_key[4] << 24 | p_key[5] << 16 | p_key[6] << 8 | p_key[7]; /* B */ | 401 | key[1] = be32_to_cpu(p_key[1]); /* B */ |
400 | key[2] = p_key[8] << 24 | p_key[9] << 16 | p_key[10] << 8 | p_key[11]; /* C */ | 402 | key[2] = be32_to_cpu(p_key[2]); /* C */ |
401 | key[3] = p_key[12] << 24 | p_key[13] << 16 | p_key[14] << 8 | p_key[15]; /* D */ | 403 | key[3] = be32_to_cpu(p_key[3]); /* D */ |
402 | key[4] = p_key[16] << 24 | p_key[17] << 16 | p_key[18] << 8 | p_key[19]; /* E */ | 404 | key[4] = be32_to_cpu(p_key[4]); /* E */ |
403 | key[5] = p_key[20] << 24 | p_key[21] << 16 | p_key[22] << 8 | p_key[23]; /* F */ | 405 | key[5] = be32_to_cpu(p_key[5]); /* F */ |
404 | key[6] = p_key[24] << 24 | p_key[25] << 16 | p_key[26] << 8 | p_key[27]; /* G */ | 406 | key[6] = be32_to_cpu(p_key[6]); /* G */ |
405 | key[7] = p_key[28] << 24 | p_key[29] << 16 | p_key[30] << 8 | p_key[31]; /* H */ | 407 | key[7] = be32_to_cpu(p_key[7]); /* H */ |
406 | 408 | ||
407 | 409 | ||
408 | 410 | ||
@@ -444,14 +446,16 @@ static inline void QBAR (u32 * block, u8 * Kr, u32 * Km) { | |||
444 | 446 | ||
445 | static void cast6_encrypt (void * ctx, u8 * outbuf, const u8 * inbuf) { | 447 | static void cast6_encrypt (void * ctx, u8 * outbuf, const u8 * inbuf) { |
446 | struct cast6_ctx * c = (struct cast6_ctx *)ctx; | 448 | struct cast6_ctx * c = (struct cast6_ctx *)ctx; |
449 | const __be32 *src = (const __be32 *)inbuf; | ||
450 | __be32 *dst = (__be32 *)outbuf; | ||
447 | u32 block[4]; | 451 | u32 block[4]; |
448 | u32 * Km; | 452 | u32 * Km; |
449 | u8 * Kr; | 453 | u8 * Kr; |
450 | 454 | ||
451 | block[0] = inbuf[0] << 24 | inbuf[1] << 16 | inbuf[2] << 8 | inbuf[3]; | 455 | block[0] = be32_to_cpu(src[0]); |
452 | block[1] = inbuf[4] << 24 | inbuf[5] << 16 | inbuf[6] << 8 | inbuf[7]; | 456 | block[1] = be32_to_cpu(src[1]); |
453 | block[2] = inbuf[8] << 24 | inbuf[9] << 16 | inbuf[10] << 8 | inbuf[11]; | 457 | block[2] = be32_to_cpu(src[2]); |
454 | block[3] = inbuf[12] << 24 | inbuf[13] << 16 | inbuf[14] << 8 | inbuf[15]; | 458 | block[3] = be32_to_cpu(src[3]); |
455 | 459 | ||
456 | Km = c->Km[0]; Kr = c->Kr[0]; Q (block, Kr, Km); | 460 | Km = c->Km[0]; Kr = c->Kr[0]; Q (block, Kr, Km); |
457 | Km = c->Km[1]; Kr = c->Kr[1]; Q (block, Kr, Km); | 461 | Km = c->Km[1]; Kr = c->Kr[1]; Q (block, Kr, Km); |
@@ -465,35 +469,25 @@ static void cast6_encrypt (void * ctx, u8 * outbuf, const u8 * inbuf) { | |||
465 | Km = c->Km[9]; Kr = c->Kr[9]; QBAR (block, Kr, Km); | 469 | Km = c->Km[9]; Kr = c->Kr[9]; QBAR (block, Kr, Km); |
466 | Km = c->Km[10]; Kr = c->Kr[10]; QBAR (block, Kr, Km); | 470 | Km = c->Km[10]; Kr = c->Kr[10]; QBAR (block, Kr, Km); |
467 | Km = c->Km[11]; Kr = c->Kr[11]; QBAR (block, Kr, Km); | 471 | Km = c->Km[11]; Kr = c->Kr[11]; QBAR (block, Kr, Km); |
468 | 472 | ||
469 | outbuf[0] = (block[0] >> 24) & 0xff; | 473 | dst[0] = cpu_to_be32(block[0]); |
470 | outbuf[1] = (block[0] >> 16) & 0xff; | 474 | dst[1] = cpu_to_be32(block[1]); |
471 | outbuf[2] = (block[0] >> 8) & 0xff; | 475 | dst[2] = cpu_to_be32(block[2]); |
472 | outbuf[3] = block[0] & 0xff; | 476 | dst[3] = cpu_to_be32(block[3]); |
473 | outbuf[4] = (block[1] >> 24) & 0xff; | ||
474 | outbuf[5] = (block[1] >> 16) & 0xff; | ||
475 | outbuf[6] = (block[1] >> 8) & 0xff; | ||
476 | outbuf[7] = block[1] & 0xff; | ||
477 | outbuf[8] = (block[2] >> 24) & 0xff; | ||
478 | outbuf[9] = (block[2] >> 16) & 0xff; | ||
479 | outbuf[10] = (block[2] >> 8) & 0xff; | ||
480 | outbuf[11] = block[2] & 0xff; | ||
481 | outbuf[12] = (block[3] >> 24) & 0xff; | ||
482 | outbuf[13] = (block[3] >> 16) & 0xff; | ||
483 | outbuf[14] = (block[3] >> 8) & 0xff; | ||
484 | outbuf[15] = block[3] & 0xff; | ||
485 | } | 477 | } |
486 | 478 | ||
487 | static void cast6_decrypt (void * ctx, u8 * outbuf, const u8 * inbuf) { | 479 | static void cast6_decrypt (void * ctx, u8 * outbuf, const u8 * inbuf) { |
488 | struct cast6_ctx * c = (struct cast6_ctx *)ctx; | 480 | struct cast6_ctx * c = (struct cast6_ctx *)ctx; |
481 | const __be32 *src = (const __be32 *)inbuf; | ||
482 | __be32 *dst = (__be32 *)outbuf; | ||
489 | u32 block[4]; | 483 | u32 block[4]; |
490 | u32 * Km; | 484 | u32 * Km; |
491 | u8 * Kr; | 485 | u8 * Kr; |
492 | 486 | ||
493 | block[0] = inbuf[0] << 24 | inbuf[1] << 16 | inbuf[2] << 8 | inbuf[3]; | 487 | block[0] = be32_to_cpu(src[0]); |
494 | block[1] = inbuf[4] << 24 | inbuf[5] << 16 | inbuf[6] << 8 | inbuf[7]; | 488 | block[1] = be32_to_cpu(src[1]); |
495 | block[2] = inbuf[8] << 24 | inbuf[9] << 16 | inbuf[10] << 8 | inbuf[11]; | 489 | block[2] = be32_to_cpu(src[2]); |
496 | block[3] = inbuf[12] << 24 | inbuf[13] << 16 | inbuf[14] << 8 | inbuf[15]; | 490 | block[3] = be32_to_cpu(src[3]); |
497 | 491 | ||
498 | Km = c->Km[11]; Kr = c->Kr[11]; Q (block, Kr, Km); | 492 | Km = c->Km[11]; Kr = c->Kr[11]; Q (block, Kr, Km); |
499 | Km = c->Km[10]; Kr = c->Kr[10]; Q (block, Kr, Km); | 493 | Km = c->Km[10]; Kr = c->Kr[10]; Q (block, Kr, Km); |
@@ -508,22 +502,10 @@ static void cast6_decrypt (void * ctx, u8 * outbuf, const u8 * inbuf) { | |||
508 | Km = c->Km[1]; Kr = c->Kr[1]; QBAR (block, Kr, Km); | 502 | Km = c->Km[1]; Kr = c->Kr[1]; QBAR (block, Kr, Km); |
509 | Km = c->Km[0]; Kr = c->Kr[0]; QBAR (block, Kr, Km); | 503 | Km = c->Km[0]; Kr = c->Kr[0]; QBAR (block, Kr, Km); |
510 | 504 | ||
511 | outbuf[0] = (block[0] >> 24) & 0xff; | 505 | dst[0] = cpu_to_be32(block[0]); |
512 | outbuf[1] = (block[0] >> 16) & 0xff; | 506 | dst[1] = cpu_to_be32(block[1]); |
513 | outbuf[2] = (block[0] >> 8) & 0xff; | 507 | dst[2] = cpu_to_be32(block[2]); |
514 | outbuf[3] = block[0] & 0xff; | 508 | dst[3] = cpu_to_be32(block[3]); |
515 | outbuf[4] = (block[1] >> 24) & 0xff; | ||
516 | outbuf[5] = (block[1] >> 16) & 0xff; | ||
517 | outbuf[6] = (block[1] >> 8) & 0xff; | ||
518 | outbuf[7] = block[1] & 0xff; | ||
519 | outbuf[8] = (block[2] >> 24) & 0xff; | ||
520 | outbuf[9] = (block[2] >> 16) & 0xff; | ||
521 | outbuf[10] = (block[2] >> 8) & 0xff; | ||
522 | outbuf[11] = block[2] & 0xff; | ||
523 | outbuf[12] = (block[3] >> 24) & 0xff; | ||
524 | outbuf[13] = (block[3] >> 16) & 0xff; | ||
525 | outbuf[14] = (block[3] >> 8) & 0xff; | ||
526 | outbuf[15] = block[3] & 0xff; | ||
527 | } | 509 | } |
528 | 510 | ||
529 | static struct crypto_alg alg = { | 511 | static struct crypto_alg alg = { |
@@ -531,6 +513,7 @@ static struct crypto_alg alg = { | |||
531 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, | 513 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, |
532 | .cra_blocksize = CAST6_BLOCK_SIZE, | 514 | .cra_blocksize = CAST6_BLOCK_SIZE, |
533 | .cra_ctxsize = sizeof(struct cast6_ctx), | 515 | .cra_ctxsize = sizeof(struct cast6_ctx), |
516 | .cra_alignmask = 3, | ||
534 | .cra_module = THIS_MODULE, | 517 | .cra_module = THIS_MODULE, |
535 | .cra_list = LIST_HEAD_INIT(alg.cra_list), | 518 | .cra_list = LIST_HEAD_INIT(alg.cra_list), |
536 | .cra_u = { | 519 | .cra_u = { |
diff --git a/crypto/cipher.c b/crypto/cipher.c
index dfd4bcfc5975..65bcea0cd17c 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -212,9 +212,10 @@ static unsigned int cbc_process_decrypt(const struct cipher_desc *desc, | |||
212 | struct crypto_tfm *tfm = desc->tfm; | 212 | struct crypto_tfm *tfm = desc->tfm; |
213 | void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block; | 213 | void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block; |
214 | int bsize = crypto_tfm_alg_blocksize(tfm); | 214 | int bsize = crypto_tfm_alg_blocksize(tfm); |
215 | unsigned long alignmask = crypto_tfm_alg_alignmask(desc->tfm); | ||
215 | 216 | ||
216 | u8 stack[src == dst ? bsize : 0]; | 217 | u8 stack[src == dst ? bsize + alignmask : 0]; |
217 | u8 *buf = stack; | 218 | u8 *buf = (u8 *)ALIGN((unsigned long)stack, alignmask + 1); |
218 | u8 **dst_p = src == dst ? &buf : &dst; | 219 | u8 **dst_p = src == dst ? &buf : &dst; |
219 | 220 | ||
220 | void (*fn)(void *, u8 *, const u8 *) = desc->crfn; | 221 | void (*fn)(void *, u8 *, const u8 *) = desc->crfn; |
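The cipher.c hunk is the other half of the alignmask story: when decrypting in place, cbc_process_decrypt() still bounces the block through an on-stack buffer, but the buffer is now over-allocated by the algorithm's alignmask and the pointer rounded up, so the underlying cipher really does see an aligned block. The arithmetic only works because crypto_register_alg() (above) rejects any alignmask that is not of the form 2^n - 1. A small illustrative sketch with assumed concrete numbers, not code from the patch:

#include <linux/kernel.h>	/* ALIGN() */
#include <linux/types.h>

static void bounce_buffer_example(void)
{
	/* Assume a 16-byte block cipher with .cra_alignmask = 3. */
	const unsigned long alignmask = 3;
	u8 stack[16 + 3];	/* block size plus alignmask bytes of slack */
	u8 *buf = (u8 *)ALIGN((unsigned long)stack, alignmask + 1);

	/*
	 * buf points at the first 4-byte-aligned byte inside stack[], and
	 * at least 16 bytes remain available behind it.
	 */
	(void)buf;
}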
diff --git a/crypto/crc32c.c b/crypto/crc32c.c
index 256956cd9377..953362423a5c 100644
--- a/crypto/crc32c.c
+++ b/crypto/crc32c.c
@@ -16,6 +16,7 @@ | |||
16 | #include <linux/string.h> | 16 | #include <linux/string.h> |
17 | #include <linux/crypto.h> | 17 | #include <linux/crypto.h> |
18 | #include <linux/crc32c.h> | 18 | #include <linux/crc32c.h> |
19 | #include <linux/types.h> | ||
19 | #include <asm/byteorder.h> | 20 | #include <asm/byteorder.h> |
20 | 21 | ||
21 | #define CHKSUM_BLOCK_SIZE 32 | 22 | #define CHKSUM_BLOCK_SIZE 32 |
diff --git a/crypto/des.c b/crypto/des.c
index a3c863dddded..7bb548653dc6 100644
--- a/crypto/des.c
+++ b/crypto/des.c
@@ -12,11 +12,13 @@ | |||
12 | * | 12 | * |
13 | */ | 13 | */ |
14 | 14 | ||
15 | #include <asm/byteorder.h> | ||
15 | #include <linux/bitops.h> | 16 | #include <linux/bitops.h> |
16 | #include <linux/init.h> | 17 | #include <linux/init.h> |
17 | #include <linux/module.h> | 18 | #include <linux/module.h> |
18 | #include <linux/errno.h> | 19 | #include <linux/errno.h> |
19 | #include <linux/crypto.h> | 20 | #include <linux/crypto.h> |
21 | #include <linux/types.h> | ||
20 | 22 | ||
21 | #define DES_KEY_SIZE 8 | 23 | #define DES_KEY_SIZE 8 |
22 | #define DES_EXPKEY_WORDS 32 | 24 | #define DES_EXPKEY_WORDS 32 |
@@ -947,6 +949,7 @@ static struct crypto_alg des_alg = { | |||
947 | .cra_blocksize = DES_BLOCK_SIZE, | 949 | .cra_blocksize = DES_BLOCK_SIZE, |
948 | .cra_ctxsize = sizeof(struct des_ctx), | 950 | .cra_ctxsize = sizeof(struct des_ctx), |
949 | .cra_module = THIS_MODULE, | 951 | .cra_module = THIS_MODULE, |
952 | .cra_alignmask = 3, | ||
950 | .cra_list = LIST_HEAD_INIT(des_alg.cra_list), | 953 | .cra_list = LIST_HEAD_INIT(des_alg.cra_list), |
951 | .cra_u = { .cipher = { | 954 | .cra_u = { .cipher = { |
952 | .cia_min_keysize = DES_KEY_SIZE, | 955 | .cia_min_keysize = DES_KEY_SIZE, |
diff --git a/crypto/internal.h b/crypto/internal.h
index 37aa652ce5ce..959e602909a6 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -2,6 +2,7 @@ | |||
2 | * Cryptographic API. | 2 | * Cryptographic API. |
3 | * | 3 | * |
4 | * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> | 4 | * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> |
5 | * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au> | ||
5 | * | 6 | * |
6 | * This program is free software; you can redistribute it and/or modify it | 7 | * This program is free software; you can redistribute it and/or modify it |
7 | * under the terms of the GNU General Public License as published by the Free | 8 | * under the terms of the GNU General Public License as published by the Free |
@@ -16,10 +17,15 @@ | |||
16 | #include <linux/highmem.h> | 17 | #include <linux/highmem.h> |
17 | #include <linux/interrupt.h> | 18 | #include <linux/interrupt.h> |
18 | #include <linux/init.h> | 19 | #include <linux/init.h> |
20 | #include <linux/list.h> | ||
19 | #include <linux/kernel.h> | 21 | #include <linux/kernel.h> |
22 | #include <linux/rwsem.h> | ||
20 | #include <linux/slab.h> | 23 | #include <linux/slab.h> |
21 | #include <asm/kmap_types.h> | 24 | #include <asm/kmap_types.h> |
22 | 25 | ||
26 | extern struct list_head crypto_alg_list; | ||
27 | extern struct rw_semaphore crypto_alg_sem; | ||
28 | |||
23 | extern enum km_type crypto_km_types[]; | 29 | extern enum km_type crypto_km_types[]; |
24 | 30 | ||
25 | static inline enum km_type crypto_kmap_type(int out) | 31 | static inline enum km_type crypto_kmap_type(int out) |
diff --git a/crypto/khazad.c b/crypto/khazad.c
index 738cb0dd1e7c..807f2bf4ea24 100644
--- a/crypto/khazad.c
+++ b/crypto/khazad.c
@@ -22,8 +22,10 @@ | |||
22 | #include <linux/init.h> | 22 | #include <linux/init.h> |
23 | #include <linux/module.h> | 23 | #include <linux/module.h> |
24 | #include <linux/mm.h> | 24 | #include <linux/mm.h> |
25 | #include <asm/byteorder.h> | ||
25 | #include <asm/scatterlist.h> | 26 | #include <asm/scatterlist.h> |
26 | #include <linux/crypto.h> | 27 | #include <linux/crypto.h> |
28 | #include <linux/types.h> | ||
27 | 29 | ||
28 | #define KHAZAD_KEY_SIZE 16 | 30 | #define KHAZAD_KEY_SIZE 16 |
29 | #define KHAZAD_BLOCK_SIZE 8 | 31 | #define KHAZAD_BLOCK_SIZE 8 |
@@ -755,8 +757,8 @@ static const u64 c[KHAZAD_ROUNDS + 1] = { | |||
755 | static int khazad_setkey(void *ctx_arg, const u8 *in_key, | 757 | static int khazad_setkey(void *ctx_arg, const u8 *in_key, |
756 | unsigned int key_len, u32 *flags) | 758 | unsigned int key_len, u32 *flags) |
757 | { | 759 | { |
758 | |||
759 | struct khazad_ctx *ctx = ctx_arg; | 760 | struct khazad_ctx *ctx = ctx_arg; |
761 | const __be64 *key = (const __be64 *)in_key; | ||
760 | int r; | 762 | int r; |
761 | const u64 *S = T7; | 763 | const u64 *S = T7; |
762 | u64 K2, K1; | 764 | u64 K2, K1; |
@@ -767,22 +769,8 @@ static int khazad_setkey(void *ctx_arg, const u8 *in_key, | |||
767 | return -EINVAL; | 769 | return -EINVAL; |
768 | } | 770 | } |
769 | 771 | ||
770 | K2 = ((u64)in_key[ 0] << 56) ^ | 772 | K2 = be64_to_cpu(key[0]); |
771 | ((u64)in_key[ 1] << 48) ^ | 773 | K1 = be64_to_cpu(key[1]); |
772 | ((u64)in_key[ 2] << 40) ^ | ||
773 | ((u64)in_key[ 3] << 32) ^ | ||
774 | ((u64)in_key[ 4] << 24) ^ | ||
775 | ((u64)in_key[ 5] << 16) ^ | ||
776 | ((u64)in_key[ 6] << 8) ^ | ||
777 | ((u64)in_key[ 7] ); | ||
778 | K1 = ((u64)in_key[ 8] << 56) ^ | ||
779 | ((u64)in_key[ 9] << 48) ^ | ||
780 | ((u64)in_key[10] << 40) ^ | ||
781 | ((u64)in_key[11] << 32) ^ | ||
782 | ((u64)in_key[12] << 24) ^ | ||
783 | ((u64)in_key[13] << 16) ^ | ||
784 | ((u64)in_key[14] << 8) ^ | ||
785 | ((u64)in_key[15] ); | ||
786 | 774 | ||
787 | /* setup the encrypt key */ | 775 | /* setup the encrypt key */ |
788 | for (r = 0; r <= KHAZAD_ROUNDS; r++) { | 776 | for (r = 0; r <= KHAZAD_ROUNDS; r++) { |
@@ -820,19 +808,12 @@ static int khazad_setkey(void *ctx_arg, const u8 *in_key, | |||
820 | static void khazad_crypt(const u64 roundKey[KHAZAD_ROUNDS + 1], | 808 | static void khazad_crypt(const u64 roundKey[KHAZAD_ROUNDS + 1], |
821 | u8 *ciphertext, const u8 *plaintext) | 809 | u8 *ciphertext, const u8 *plaintext) |
822 | { | 810 | { |
823 | 811 | const __be64 *src = (const __be64 *)plaintext; | |
812 | __be64 *dst = (__be64 *)ciphertext; | ||
824 | int r; | 813 | int r; |
825 | u64 state; | 814 | u64 state; |
826 | 815 | ||
827 | state = ((u64)plaintext[0] << 56) ^ | 816 | state = be64_to_cpu(*src) ^ roundKey[0]; |
828 | ((u64)plaintext[1] << 48) ^ | ||
829 | ((u64)plaintext[2] << 40) ^ | ||
830 | ((u64)plaintext[3] << 32) ^ | ||
831 | ((u64)plaintext[4] << 24) ^ | ||
832 | ((u64)plaintext[5] << 16) ^ | ||
833 | ((u64)plaintext[6] << 8) ^ | ||
834 | ((u64)plaintext[7] ) ^ | ||
835 | roundKey[0]; | ||
836 | 817 | ||
837 | for (r = 1; r < KHAZAD_ROUNDS; r++) { | 818 | for (r = 1; r < KHAZAD_ROUNDS; r++) { |
838 | state = T0[(int)(state >> 56) ] ^ | 819 | state = T0[(int)(state >> 56) ] ^ |
@@ -856,15 +837,7 @@ static void khazad_crypt(const u64 roundKey[KHAZAD_ROUNDS + 1], | |||
856 | (T7[(int)(state ) & 0xff] & 0x00000000000000ffULL) ^ | 837 | (T7[(int)(state ) & 0xff] & 0x00000000000000ffULL) ^ |
857 | roundKey[KHAZAD_ROUNDS]; | 838 | roundKey[KHAZAD_ROUNDS]; |
858 | 839 | ||
859 | ciphertext[0] = (u8)(state >> 56); | 840 | *dst = cpu_to_be64(state); |
860 | ciphertext[1] = (u8)(state >> 48); | ||
861 | ciphertext[2] = (u8)(state >> 40); | ||
862 | ciphertext[3] = (u8)(state >> 32); | ||
863 | ciphertext[4] = (u8)(state >> 24); | ||
864 | ciphertext[5] = (u8)(state >> 16); | ||
865 | ciphertext[6] = (u8)(state >> 8); | ||
866 | ciphertext[7] = (u8)(state ); | ||
867 | |||
868 | } | 841 | } |
869 | 842 | ||
870 | static void khazad_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | 843 | static void khazad_encrypt(void *ctx_arg, u8 *dst, const u8 *src) |
@@ -884,6 +857,7 @@ static struct crypto_alg khazad_alg = { | |||
884 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, | 857 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, |
885 | .cra_blocksize = KHAZAD_BLOCK_SIZE, | 858 | .cra_blocksize = KHAZAD_BLOCK_SIZE, |
886 | .cra_ctxsize = sizeof (struct khazad_ctx), | 859 | .cra_ctxsize = sizeof (struct khazad_ctx), |
860 | .cra_alignmask = 7, | ||
887 | .cra_module = THIS_MODULE, | 861 | .cra_module = THIS_MODULE, |
888 | .cra_list = LIST_HEAD_INIT(khazad_alg.cra_list), | 862 | .cra_list = LIST_HEAD_INIT(khazad_alg.cra_list), |
889 | .cra_u = { .cipher = { | 863 | .cra_u = { .cipher = { |
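khazad.c gets the same conversion as the 32-bit ciphers, just one word wider: key and block are handled as whole big-endian 64-bit quantities, so the casts target __be64, the helpers are be64_to_cpu()/cpu_to_be64(), and .cra_alignmask becomes 7 for 8-byte alignment. A minimal sketch of that variant, again illustrative rather than lifted from the patch (helper names invented):

#include <asm/byteorder.h>
#include <linux/types.h>

/* Safe only because .cra_alignmask = 7 guarantees 8-byte-aligned blocks. */
static inline u64 khazad_load_be64(const u8 *p)
{
	return be64_to_cpu(*(const __be64 *)p);
}

static inline void khazad_store_be64(u8 *p, u64 v)
{
	*(__be64 *)p = cpu_to_be64(v);
}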
diff --git a/crypto/md4.c b/crypto/md4.c
index bef6a9e5ac9b..a2d6df5c0f8c 100644
--- a/crypto/md4.c
+++ b/crypto/md4.c
@@ -24,6 +24,7 @@ | |||
24 | #include <linux/crypto.h> | 24 | #include <linux/crypto.h> |
25 | #include <linux/kernel.h> | 25 | #include <linux/kernel.h> |
26 | #include <linux/string.h> | 26 | #include <linux/string.h> |
27 | #include <linux/types.h> | ||
27 | #include <asm/byteorder.h> | 28 | #include <asm/byteorder.h> |
28 | 29 | ||
29 | #define MD4_DIGEST_SIZE 16 | 30 | #define MD4_DIGEST_SIZE 16 |
diff --git a/crypto/md5.c b/crypto/md5.c
index 1ed45f9c263e..7f041aef5da2 100644
--- a/crypto/md5.c
+++ b/crypto/md5.c
@@ -19,6 +19,7 @@ | |||
19 | #include <linux/module.h> | 19 | #include <linux/module.h> |
20 | #include <linux/string.h> | 20 | #include <linux/string.h> |
21 | #include <linux/crypto.h> | 21 | #include <linux/crypto.h> |
22 | #include <linux/types.h> | ||
22 | #include <asm/byteorder.h> | 23 | #include <asm/byteorder.h> |
23 | 24 | ||
24 | #define MD5_DIGEST_SIZE 16 | 25 | #define MD5_DIGEST_SIZE 16 |
diff --git a/crypto/michael_mic.c b/crypto/michael_mic.c
index a470bcb3693e..4f6ab23e14ad 100644
--- a/crypto/michael_mic.c
+++ b/crypto/michael_mic.c
@@ -10,10 +10,12 @@ | |||
10 | * published by the Free Software Foundation. | 10 | * published by the Free Software Foundation. |
11 | */ | 11 | */ |
12 | 12 | ||
13 | #include <asm/byteorder.h> | ||
13 | #include <linux/init.h> | 14 | #include <linux/init.h> |
14 | #include <linux/module.h> | 15 | #include <linux/module.h> |
15 | #include <linux/string.h> | 16 | #include <linux/string.h> |
16 | #include <linux/crypto.h> | 17 | #include <linux/crypto.h> |
18 | #include <linux/types.h> | ||
17 | 19 | ||
18 | 20 | ||
19 | struct michael_mic_ctx { | 21 | struct michael_mic_ctx { |
@@ -43,21 +45,6 @@ do { \ | |||
43 | } while (0) | 45 | } while (0) |
44 | 46 | ||
45 | 47 | ||
46 | static inline u32 get_le32(const u8 *p) | ||
47 | { | ||
48 | return p[0] | (p[1] << 8) | (p[2] << 16) | (p[3] << 24); | ||
49 | } | ||
50 | |||
51 | |||
52 | static inline void put_le32(u8 *p, u32 v) | ||
53 | { | ||
54 | p[0] = v; | ||
55 | p[1] = v >> 8; | ||
56 | p[2] = v >> 16; | ||
57 | p[3] = v >> 24; | ||
58 | } | ||
59 | |||
60 | |||
61 | static void michael_init(void *ctx) | 48 | static void michael_init(void *ctx) |
62 | { | 49 | { |
63 | struct michael_mic_ctx *mctx = ctx; | 50 | struct michael_mic_ctx *mctx = ctx; |
@@ -68,6 +55,7 @@ static void michael_init(void *ctx) | |||
68 | static void michael_update(void *ctx, const u8 *data, unsigned int len) | 55 | static void michael_update(void *ctx, const u8 *data, unsigned int len) |
69 | { | 56 | { |
70 | struct michael_mic_ctx *mctx = ctx; | 57 | struct michael_mic_ctx *mctx = ctx; |
58 | const __le32 *src; | ||
71 | 59 | ||
72 | if (mctx->pending_len) { | 60 | if (mctx->pending_len) { |
73 | int flen = 4 - mctx->pending_len; | 61 | int flen = 4 - mctx->pending_len; |
@@ -81,21 +69,23 @@ static void michael_update(void *ctx, const u8 *data, unsigned int len) | |||
81 | if (mctx->pending_len < 4) | 69 | if (mctx->pending_len < 4) |
82 | return; | 70 | return; |
83 | 71 | ||
84 | mctx->l ^= get_le32(mctx->pending); | 72 | src = (const __le32 *)mctx->pending; |
73 | mctx->l ^= le32_to_cpup(src); | ||
85 | michael_block(mctx->l, mctx->r); | 74 | michael_block(mctx->l, mctx->r); |
86 | mctx->pending_len = 0; | 75 | mctx->pending_len = 0; |
87 | } | 76 | } |
88 | 77 | ||
78 | src = (const __le32 *)data; | ||
79 | |||
89 | while (len >= 4) { | 80 | while (len >= 4) { |
90 | mctx->l ^= get_le32(data); | 81 | mctx->l ^= le32_to_cpup(src++); |
91 | michael_block(mctx->l, mctx->r); | 82 | michael_block(mctx->l, mctx->r); |
92 | data += 4; | ||
93 | len -= 4; | 83 | len -= 4; |
94 | } | 84 | } |
95 | 85 | ||
96 | if (len > 0) { | 86 | if (len > 0) { |
97 | mctx->pending_len = len; | 87 | mctx->pending_len = len; |
98 | memcpy(mctx->pending, data, len); | 88 | memcpy(mctx->pending, src, len); |
99 | } | 89 | } |
100 | } | 90 | } |
101 | 91 | ||
@@ -104,6 +94,7 @@ static void michael_final(void *ctx, u8 *out) | |||
104 | { | 94 | { |
105 | struct michael_mic_ctx *mctx = ctx; | 95 | struct michael_mic_ctx *mctx = ctx; |
106 | u8 *data = mctx->pending; | 96 | u8 *data = mctx->pending; |
97 | __le32 *dst = (__le32 *)out; | ||
107 | 98 | ||
108 | /* Last block and padding (0x5a, 4..7 x 0) */ | 99 | /* Last block and padding (0x5a, 4..7 x 0) */ |
109 | switch (mctx->pending_len) { | 100 | switch (mctx->pending_len) { |
@@ -125,8 +116,8 @@ static void michael_final(void *ctx, u8 *out) | |||
125 | /* l ^= 0; */ | 116 | /* l ^= 0; */ |
126 | michael_block(mctx->l, mctx->r); | 117 | michael_block(mctx->l, mctx->r); |
127 | 118 | ||
128 | put_le32(out, mctx->l); | 119 | dst[0] = cpu_to_le32(mctx->l); |
129 | put_le32(out + 4, mctx->r); | 120 | dst[1] = cpu_to_le32(mctx->r); |
130 | } | 121 | } |
131 | 122 | ||
132 | 123 | ||
@@ -134,13 +125,16 @@ static int michael_setkey(void *ctx, const u8 *key, unsigned int keylen, | |||
134 | u32 *flags) | 125 | u32 *flags) |
135 | { | 126 | { |
136 | struct michael_mic_ctx *mctx = ctx; | 127 | struct michael_mic_ctx *mctx = ctx; |
128 | const __le32 *data = (const __le32 *)key; | ||
129 | |||
137 | if (keylen != 8) { | 130 | if (keylen != 8) { |
138 | if (flags) | 131 | if (flags) |
139 | *flags = CRYPTO_TFM_RES_BAD_KEY_LEN; | 132 | *flags = CRYPTO_TFM_RES_BAD_KEY_LEN; |
140 | return -EINVAL; | 133 | return -EINVAL; |
141 | } | 134 | } |
142 | mctx->l = get_le32(key); | 135 | |
143 | mctx->r = get_le32(key + 4); | 136 | mctx->l = le32_to_cpu(data[0]); |
137 | mctx->r = le32_to_cpu(data[1]); | ||
144 | return 0; | 138 | return 0; |
145 | } | 139 | } |
146 | 140 | ||
diff --git a/crypto/proc.c b/crypto/proc.c
index 630ba91c08f1..c0a5dd7ce2cc 100644
--- a/crypto/proc.c
+++ b/crypto/proc.c
@@ -4,6 +4,7 @@ | |||
4 | * Procfs information. | 4 | * Procfs information. |
5 | * | 5 | * |
6 | * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> | 6 | * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> |
7 | * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au> | ||
7 | * | 8 | * |
8 | * This program is free software; you can redistribute it and/or modify it | 9 | * This program is free software; you can redistribute it and/or modify it |
9 | * under the terms of the GNU General Public License as published by the Free | 10 | * under the terms of the GNU General Public License as published by the Free |
@@ -18,9 +19,6 @@ | |||
18 | #include <linux/seq_file.h> | 19 | #include <linux/seq_file.h> |
19 | #include "internal.h" | 20 | #include "internal.h" |
20 | 21 | ||
21 | extern struct list_head crypto_alg_list; | ||
22 | extern struct rw_semaphore crypto_alg_sem; | ||
23 | |||
24 | static void *c_start(struct seq_file *m, loff_t *pos) | 22 | static void *c_start(struct seq_file *m, loff_t *pos) |
25 | { | 23 | { |
26 | struct list_head *v; | 24 | struct list_head *v; |
@@ -53,7 +51,9 @@ static int c_show(struct seq_file *m, void *p) | |||
53 | struct crypto_alg *alg = (struct crypto_alg *)p; | 51 | struct crypto_alg *alg = (struct crypto_alg *)p; |
54 | 52 | ||
55 | seq_printf(m, "name : %s\n", alg->cra_name); | 53 | seq_printf(m, "name : %s\n", alg->cra_name); |
54 | seq_printf(m, "driver : %s\n", alg->cra_driver_name); | ||
56 | seq_printf(m, "module : %s\n", module_name(alg->cra_module)); | 55 | seq_printf(m, "module : %s\n", module_name(alg->cra_module)); |
56 | seq_printf(m, "priority : %d\n", alg->cra_priority); | ||
57 | 57 | ||
58 | switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) { | 58 | switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) { |
59 | case CRYPTO_ALG_TYPE_CIPHER: | 59 | case CRYPTO_ALG_TYPE_CIPHER: |
diff --git a/crypto/serpent.c b/crypto/serpent.c
index 3cf2c5067eea..52ad1a492620 100644
--- a/crypto/serpent.c
+++ b/crypto/serpent.c
@@ -20,6 +20,7 @@ | |||
20 | #include <linux/errno.h> | 20 | #include <linux/errno.h> |
21 | #include <asm/byteorder.h> | 21 | #include <asm/byteorder.h> |
22 | #include <linux/crypto.h> | 22 | #include <linux/crypto.h> |
23 | #include <linux/types.h> | ||
23 | 24 | ||
24 | /* Key is padded to the maximum of 256 bits before round key generation. | 25 | /* Key is padded to the maximum of 256 bits before round key generation. |
25 | * Any key length <= 256 bits (32 bytes) is allowed by the algorithm. | 26 | * Any key length <= 256 bits (32 bytes) is allowed by the algorithm. |
@@ -552,6 +553,7 @@ static struct crypto_alg tnepres_alg = { | |||
552 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, | 553 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, |
553 | .cra_blocksize = SERPENT_BLOCK_SIZE, | 554 | .cra_blocksize = SERPENT_BLOCK_SIZE, |
554 | .cra_ctxsize = sizeof(struct serpent_ctx), | 555 | .cra_ctxsize = sizeof(struct serpent_ctx), |
556 | .cra_alignmask = 3, | ||
555 | .cra_module = THIS_MODULE, | 557 | .cra_module = THIS_MODULE, |
556 | .cra_list = LIST_HEAD_INIT(serpent_alg.cra_list), | 558 | .cra_list = LIST_HEAD_INIT(serpent_alg.cra_list), |
557 | .cra_u = { .cipher = { | 559 | .cra_u = { .cipher = { |
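The new .cra_alignmask = 3 advertises that the cipher reads and writes its data as 32-bit words, so buffers must be 4-byte aligned; the crypto layer uses the mask to align or re-buffer data before calling into the cipher. Roughly, the alignment arithmetic a mask of 3 implies is (an illustrative sketch, not the kernel's own code):

/* Round addr up to the (alignmask + 1)-byte boundary the cipher needs;
 * with alignmask == 3 this is 4-byte alignment. */
static inline unsigned long cipher_align(unsigned long addr, unsigned int alignmask)
{
	return (addr + alignmask) & ~(unsigned long)alignmask;
}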
diff --git a/crypto/sha1.c b/crypto/sha1.c index 4016f3b8ce9b..21571ed35b7e 100644 --- a/crypto/sha1.c +++ b/crypto/sha1.c | |||
@@ -21,6 +21,7 @@ | |||
21 | #include <linux/mm.h> | 21 | #include <linux/mm.h> |
22 | #include <linux/crypto.h> | 22 | #include <linux/crypto.h> |
23 | #include <linux/cryptohash.h> | 23 | #include <linux/cryptohash.h> |
24 | #include <linux/types.h> | ||
24 | #include <asm/scatterlist.h> | 25 | #include <asm/scatterlist.h> |
25 | #include <asm/byteorder.h> | 26 | #include <asm/byteorder.h> |
26 | 27 | ||
@@ -48,23 +49,33 @@ static void sha1_init(void *ctx) | |||
48 | static void sha1_update(void *ctx, const u8 *data, unsigned int len) | 49 | static void sha1_update(void *ctx, const u8 *data, unsigned int len) |
49 | { | 50 | { |
50 | struct sha1_ctx *sctx = ctx; | 51 | struct sha1_ctx *sctx = ctx; |
51 | unsigned int i, j; | 52 | unsigned int partial, done; |
52 | u32 temp[SHA_WORKSPACE_WORDS]; | 53 | const u8 *src; |
53 | 54 | ||
54 | j = (sctx->count >> 3) & 0x3f; | 55 | partial = sctx->count & 0x3f; |
55 | sctx->count += len << 3; | 56 | sctx->count += len; |
57 | done = 0; | ||
58 | src = data; | ||
56 | 59 | ||
57 | if ((j + len) > 63) { | 60 | if ((partial + len) > 63) { |
58 | memcpy(&sctx->buffer[j], data, (i = 64-j)); | 61 | u32 temp[SHA_WORKSPACE_WORDS]; |
59 | sha_transform(sctx->state, sctx->buffer, temp); | 62 | |
60 | for ( ; i + 63 < len; i += 64) { | 63 | if (partial) { |
61 | sha_transform(sctx->state, &data[i], temp); | 64 | done = -partial; |
65 | memcpy(sctx->buffer + partial, data, done + 64); | ||
66 | src = sctx->buffer; | ||
62 | } | 67 | } |
63 | j = 0; | 68 | |
69 | do { | ||
70 | sha_transform(sctx->state, src, temp); | ||
71 | done += 64; | ||
72 | src = data + done; | ||
73 | } while (done + 63 < len); | ||
74 | |||
75 | memset(temp, 0, sizeof(temp)); | ||
76 | partial = 0; | ||
64 | } | 77 | } |
65 | else i = 0; | 78 | memcpy(sctx->buffer + partial, src, len - done); |
66 | memset(temp, 0, sizeof(temp)); | ||
67 | memcpy(&sctx->buffer[j], &data[i], len - i); | ||
68 | } | 79 | } |
69 | 80 | ||
70 | 81 | ||
@@ -72,37 +83,24 @@ static void sha1_update(void *ctx, const u8 *data, unsigned int len) | |||
72 | static void sha1_final(void* ctx, u8 *out) | 83 | static void sha1_final(void* ctx, u8 *out) |
73 | { | 84 | { |
74 | struct sha1_ctx *sctx = ctx; | 85 | struct sha1_ctx *sctx = ctx; |
75 | u32 i, j, index, padlen; | 86 | __be32 *dst = (__be32 *)out; |
76 | u64 t; | 87 | u32 i, index, padlen; |
77 | u8 bits[8] = { 0, }; | 88 | __be64 bits; |
78 | static const u8 padding[64] = { 0x80, }; | 89 | static const u8 padding[64] = { 0x80, }; |
79 | 90 | ||
80 | t = sctx->count; | 91 | bits = cpu_to_be64(sctx->count << 3); |
81 | bits[7] = 0xff & t; t>>=8; | ||
82 | bits[6] = 0xff & t; t>>=8; | ||
83 | bits[5] = 0xff & t; t>>=8; | ||
84 | bits[4] = 0xff & t; t>>=8; | ||
85 | bits[3] = 0xff & t; t>>=8; | ||
86 | bits[2] = 0xff & t; t>>=8; | ||
87 | bits[1] = 0xff & t; t>>=8; | ||
88 | bits[0] = 0xff & t; | ||
89 | 92 | ||
90 | /* Pad out to 56 mod 64 */ | 93 | /* Pad out to 56 mod 64 */ |
91 | index = (sctx->count >> 3) & 0x3f; | 94 | index = sctx->count & 0x3f; |
92 | padlen = (index < 56) ? (56 - index) : ((64+56) - index); | 95 | padlen = (index < 56) ? (56 - index) : ((64+56) - index); |
93 | sha1_update(sctx, padding, padlen); | 96 | sha1_update(sctx, padding, padlen); |
94 | 97 | ||
95 | /* Append length */ | 98 | /* Append length */ |
96 | sha1_update(sctx, bits, sizeof bits); | 99 | sha1_update(sctx, (const u8 *)&bits, sizeof(bits)); |
97 | 100 | ||
98 | /* Store state in digest */ | 101 | /* Store state in digest */ |
99 | for (i = j = 0; i < 5; i++, j += 4) { | 102 | for (i = 0; i < 5; i++) |
100 | u32 t2 = sctx->state[i]; | 103 | dst[i] = cpu_to_be32(sctx->state[i]); |
101 | out[j+3] = t2 & 0xff; t2>>=8; | ||
102 | out[j+2] = t2 & 0xff; t2>>=8; | ||
103 | out[j+1] = t2 & 0xff; t2>>=8; | ||
104 | out[j ] = t2 & 0xff; | ||
105 | } | ||
106 | 104 | ||
107 | /* Wipe context */ | 105 | /* Wipe context */ |
108 | memset(sctx, 0, sizeof *sctx); | 106 | memset(sctx, 0, sizeof *sctx); |
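The rewritten sha1_update() keeps sctx->count in bytes and drives everything from a single partial-block offset. As a worked example of the new flow, suppose 48 bytes are already buffered (partial = 48) and a 100-byte update arrives: done starts at -48, so memcpy(sctx->buffer + 48, data, done + 64) copies the 16 bytes needed to complete the buffered block, which is hashed first; the do/while loop then hashes data[16..79] straight from the caller's buffer and stops with done = 80; the remaining 20 bytes are copied into sctx->buffer for the next call and the workspace is wiped. sha1_final() compensates for the bytes-based counter by shifting the count left by three when it appends the bit length.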
diff --git a/crypto/sha256.c b/crypto/sha256.c index c78da50a9b7a..9d5ef674d6a9 100644 --- a/crypto/sha256.c +++ b/crypto/sha256.c | |||
@@ -20,6 +20,7 @@ | |||
20 | #include <linux/module.h> | 20 | #include <linux/module.h> |
21 | #include <linux/mm.h> | 21 | #include <linux/mm.h> |
22 | #include <linux/crypto.h> | 22 | #include <linux/crypto.h> |
23 | #include <linux/types.h> | ||
23 | #include <asm/scatterlist.h> | 24 | #include <asm/scatterlist.h> |
24 | #include <asm/byteorder.h> | 25 | #include <asm/byteorder.h> |
25 | 26 | ||
@@ -279,22 +280,15 @@ static void sha256_update(void *ctx, const u8 *data, unsigned int len) | |||
279 | static void sha256_final(void* ctx, u8 *out) | 280 | static void sha256_final(void* ctx, u8 *out) |
280 | { | 281 | { |
281 | struct sha256_ctx *sctx = ctx; | 282 | struct sha256_ctx *sctx = ctx; |
282 | u8 bits[8]; | 283 | __be32 *dst = (__be32 *)out; |
283 | unsigned int index, pad_len, t; | 284 | __be32 bits[2]; |
284 | int i, j; | 285 | unsigned int index, pad_len; |
286 | int i; | ||
285 | static const u8 padding[64] = { 0x80, }; | 287 | static const u8 padding[64] = { 0x80, }; |
286 | 288 | ||
287 | /* Save number of bits */ | 289 | /* Save number of bits */ |
288 | t = sctx->count[0]; | 290 | bits[1] = cpu_to_be32(sctx->count[0]); |
289 | bits[7] = t; t >>= 8; | 291 | bits[0] = cpu_to_be32(sctx->count[1]); |
290 | bits[6] = t; t >>= 8; | ||
291 | bits[5] = t; t >>= 8; | ||
292 | bits[4] = t; | ||
293 | t = sctx->count[1]; | ||
294 | bits[3] = t; t >>= 8; | ||
295 | bits[2] = t; t >>= 8; | ||
296 | bits[1] = t; t >>= 8; | ||
297 | bits[0] = t; | ||
298 | 292 | ||
299 | /* Pad out to 56 mod 64. */ | 293 | /* Pad out to 56 mod 64. */ |
300 | index = (sctx->count[0] >> 3) & 0x3f; | 294 | index = (sctx->count[0] >> 3) & 0x3f; |
@@ -302,16 +296,11 @@ static void sha256_final(void* ctx, u8 *out) | |||
302 | sha256_update(sctx, padding, pad_len); | 296 | sha256_update(sctx, padding, pad_len); |
303 | 297 | ||
304 | /* Append length (before padding) */ | 298 | /* Append length (before padding) */ |
305 | sha256_update(sctx, bits, 8); | 299 | sha256_update(sctx, (const u8 *)bits, sizeof(bits)); |
306 | 300 | ||
307 | /* Store state in digest */ | 301 | /* Store state in digest */ |
308 | for (i = j = 0; i < 8; i++, j += 4) { | 302 | for (i = 0; i < 8; i++) |
309 | t = sctx->state[i]; | 303 | dst[i] = cpu_to_be32(sctx->state[i]); |
310 | out[j+3] = t; t >>= 8; | ||
311 | out[j+2] = t; t >>= 8; | ||
312 | out[j+1] = t; t >>= 8; | ||
313 | out[j ] = t; | ||
314 | } | ||
315 | 304 | ||
316 | /* Zeroize sensitive information. */ | 305 | /* Zeroize sensitive information. */ |
317 | memset(sctx, 0, sizeof(*sctx)); | 306 | memset(sctx, 0, sizeof(*sctx)); |
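In sha256_final(), sctx->count[0] holds the low 32 bits of the message bit count and count[1] the high 32 bits, so storing bits[0] = cpu_to_be32(count[1]) followed by bits[1] = cpu_to_be32(count[0]) writes the same big-endian 64-bit length the old byte-at-a-time code produced. An equivalent single store (illustrative only, not what the patch uses) would be:

*(__be64 *)bits = cpu_to_be64(((u64)sctx->count[1] << 32) | sctx->count[0]);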
diff --git a/crypto/sha512.c b/crypto/sha512.c index c663438322e9..3e6e9392310c 100644 --- a/crypto/sha512.c +++ b/crypto/sha512.c | |||
@@ -17,6 +17,7 @@ | |||
17 | #include <linux/mm.h> | 17 | #include <linux/mm.h> |
18 | #include <linux/init.h> | 18 | #include <linux/init.h> |
19 | #include <linux/crypto.h> | 19 | #include <linux/crypto.h> |
20 | #include <linux/types.h> | ||
20 | 21 | ||
21 | #include <asm/scatterlist.h> | 22 | #include <asm/scatterlist.h> |
22 | #include <asm/byteorder.h> | 23 | #include <asm/byteorder.h> |
@@ -235,39 +236,17 @@ static void | |||
235 | sha512_final(void *ctx, u8 *hash) | 236 | sha512_final(void *ctx, u8 *hash) |
236 | { | 237 | { |
237 | struct sha512_ctx *sctx = ctx; | 238 | struct sha512_ctx *sctx = ctx; |
238 | |||
239 | static u8 padding[128] = { 0x80, }; | 239 | static u8 padding[128] = { 0x80, }; |
240 | 240 | __be64 *dst = (__be64 *)hash; | |
241 | u32 t; | 241 | __be32 bits[4]; |
242 | u64 t2; | ||
243 | u8 bits[128]; | ||
244 | unsigned int index, pad_len; | 242 | unsigned int index, pad_len; |
245 | int i, j; | 243 | int i; |
246 | |||
247 | index = pad_len = t = i = j = 0; | ||
248 | t2 = 0; | ||
249 | 244 | ||
250 | /* Save number of bits */ | 245 | /* Save number of bits */ |
251 | t = sctx->count[0]; | 246 | bits[3] = cpu_to_be32(sctx->count[0]); |
252 | bits[15] = t; t>>=8; | 247 | bits[2] = cpu_to_be32(sctx->count[1]); |
253 | bits[14] = t; t>>=8; | 248 | bits[1] = cpu_to_be32(sctx->count[2]); |
254 | bits[13] = t; t>>=8; | 249 | bits[0] = cpu_to_be32(sctx->count[3]); |
255 | bits[12] = t; | ||
256 | t = sctx->count[1]; | ||
257 | bits[11] = t; t>>=8; | ||
258 | bits[10] = t; t>>=8; | ||
259 | bits[9 ] = t; t>>=8; | ||
260 | bits[8 ] = t; | ||
261 | t = sctx->count[2]; | ||
262 | bits[7 ] = t; t>>=8; | ||
263 | bits[6 ] = t; t>>=8; | ||
264 | bits[5 ] = t; t>>=8; | ||
265 | bits[4 ] = t; | ||
266 | t = sctx->count[3]; | ||
267 | bits[3 ] = t; t>>=8; | ||
268 | bits[2 ] = t; t>>=8; | ||
269 | bits[1 ] = t; t>>=8; | ||
270 | bits[0 ] = t; | ||
271 | 250 | ||
272 | /* Pad out to 112 mod 128. */ | 251 | /* Pad out to 112 mod 128. */ |
273 | index = (sctx->count[0] >> 3) & 0x7f; | 252 | index = (sctx->count[0] >> 3) & 0x7f; |
@@ -275,21 +254,12 @@ sha512_final(void *ctx, u8 *hash) | |||
275 | sha512_update(sctx, padding, pad_len); | 254 | sha512_update(sctx, padding, pad_len); |
276 | 255 | ||
277 | /* Append length (before padding) */ | 256 | /* Append length (before padding) */ |
278 | sha512_update(sctx, bits, 16); | 257 | sha512_update(sctx, (const u8 *)bits, sizeof(bits)); |
279 | 258 | ||
280 | /* Store state in digest */ | 259 | /* Store state in digest */ |
281 | for (i = j = 0; i < 8; i++, j += 8) { | 260 | for (i = 0; i < 8; i++) |
282 | t2 = sctx->state[i]; | 261 | dst[i] = cpu_to_be64(sctx->state[i]); |
283 | hash[j+7] = (char)t2 & 0xff; t2>>=8; | 262 | |
284 | hash[j+6] = (char)t2 & 0xff; t2>>=8; | ||
285 | hash[j+5] = (char)t2 & 0xff; t2>>=8; | ||
286 | hash[j+4] = (char)t2 & 0xff; t2>>=8; | ||
287 | hash[j+3] = (char)t2 & 0xff; t2>>=8; | ||
288 | hash[j+2] = (char)t2 & 0xff; t2>>=8; | ||
289 | hash[j+1] = (char)t2 & 0xff; t2>>=8; | ||
290 | hash[j ] = (char)t2 & 0xff; | ||
291 | } | ||
292 | |||
293 | /* Zeroize sensitive information. */ | 263 | /* Zeroize sensitive information. */ |
294 | memset(sctx, 0, sizeof(struct sha512_ctx)); | 264 | memset(sctx, 0, sizeof(struct sha512_ctx)); |
295 | } | 265 | } |
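cpu_to_be64() replaces the per-byte stores of the SHA-512 state words: it is a byte swap on little-endian hosts and a no-op on big-endian ones, so dst[i] ends up with exactly the bytes the removed loop emitted. A portable user-space equivalent of that store (a sketch, not the kernel implementation):

#include <stdint.h>

/* Write v to p in big-endian byte order, independent of host endianness. */
static void store_be64(uint8_t *p, uint64_t v)
{
	int i;

	for (i = 0; i < 8; i++)
		p[i] = (uint8_t)(v >> (56 - 8 * i));
}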
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index 53f4ee804bdb..49e344f00806 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c | |||
@@ -805,6 +805,8 @@ static void do_test(void) | |||
805 | //AES | 805 | //AES |
806 | test_cipher ("aes", MODE_ECB, ENCRYPT, aes_enc_tv_template, AES_ENC_TEST_VECTORS); | 806 | test_cipher ("aes", MODE_ECB, ENCRYPT, aes_enc_tv_template, AES_ENC_TEST_VECTORS); |
807 | test_cipher ("aes", MODE_ECB, DECRYPT, aes_dec_tv_template, AES_DEC_TEST_VECTORS); | 807 | test_cipher ("aes", MODE_ECB, DECRYPT, aes_dec_tv_template, AES_DEC_TEST_VECTORS); |
808 | test_cipher ("aes", MODE_CBC, ENCRYPT, aes_cbc_enc_tv_template, AES_CBC_ENC_TEST_VECTORS); | ||
809 | test_cipher ("aes", MODE_CBC, DECRYPT, aes_cbc_dec_tv_template, AES_CBC_DEC_TEST_VECTORS); | ||
808 | 810 | ||
809 | //CAST5 | 811 | //CAST5 |
810 | test_cipher ("cast5", MODE_ECB, ENCRYPT, cast5_enc_tv_template, CAST5_ENC_TEST_VECTORS); | 812 | test_cipher ("cast5", MODE_ECB, ENCRYPT, cast5_enc_tv_template, CAST5_ENC_TEST_VECTORS); |
@@ -910,6 +912,8 @@ static void do_test(void) | |||
910 | case 10: | 912 | case 10: |
911 | test_cipher ("aes", MODE_ECB, ENCRYPT, aes_enc_tv_template, AES_ENC_TEST_VECTORS); | 913 | test_cipher ("aes", MODE_ECB, ENCRYPT, aes_enc_tv_template, AES_ENC_TEST_VECTORS); |
912 | test_cipher ("aes", MODE_ECB, DECRYPT, aes_dec_tv_template, AES_DEC_TEST_VECTORS); | 914 | test_cipher ("aes", MODE_ECB, DECRYPT, aes_dec_tv_template, AES_DEC_TEST_VECTORS); |
915 | test_cipher ("aes", MODE_CBC, ENCRYPT, aes_cbc_enc_tv_template, AES_CBC_ENC_TEST_VECTORS); | ||
916 | test_cipher ("aes", MODE_CBC, DECRYPT, aes_cbc_dec_tv_template, AES_CBC_DEC_TEST_VECTORS); | ||
913 | break; | 917 | break; |
914 | 918 | ||
915 | case 11: | 919 | case 11: |
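Both the run-everything path and the AES-specific case 10 now exercise aes-cbc against the new vectors. If memory serves, tcrypt selects its test set through a mode module parameter, so the AES subset alone should be runnable with something like modprobe tcrypt mode=10.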
diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h index 522ffd4b6f43..733d07ed75e9 100644 --- a/crypto/tcrypt.h +++ b/crypto/tcrypt.h | |||
@@ -1836,6 +1836,8 @@ static struct cipher_testvec cast6_dec_tv_template[] = { | |||
1836 | */ | 1836 | */ |
1837 | #define AES_ENC_TEST_VECTORS 3 | 1837 | #define AES_ENC_TEST_VECTORS 3 |
1838 | #define AES_DEC_TEST_VECTORS 3 | 1838 | #define AES_DEC_TEST_VECTORS 3 |
1839 | #define AES_CBC_ENC_TEST_VECTORS 2 | ||
1840 | #define AES_CBC_DEC_TEST_VECTORS 2 | ||
1839 | 1841 | ||
1840 | static struct cipher_testvec aes_enc_tv_template[] = { | 1842 | static struct cipher_testvec aes_enc_tv_template[] = { |
1841 | { /* From FIPS-197 */ | 1843 | { /* From FIPS-197 */ |
@@ -1911,6 +1913,68 @@ static struct cipher_testvec aes_dec_tv_template[] = { | |||
1911 | }, | 1913 | }, |
1912 | }; | 1914 | }; |
1913 | 1915 | ||
1916 | static struct cipher_testvec aes_cbc_enc_tv_template[] = { | ||
1917 | { /* From RFC 3602 */ | ||
1918 | .key = { 0x06, 0xa9, 0x21, 0x40, 0x36, 0xb8, 0xa1, 0x5b, | ||
1919 | 0x51, 0x2e, 0x03, 0xd5, 0x34, 0x12, 0x00, 0x06 }, | ||
1920 | .klen = 16, | ||
1921 | .iv = { 0x3d, 0xaf, 0xba, 0x42, 0x9d, 0x9e, 0xb4, 0x30, | ||
1922 | 0xb4, 0x22, 0xda, 0x80, 0x2c, 0x9f, 0xac, 0x41 }, | ||
1923 | .input = { "Single block msg" }, | ||
1924 | .ilen = 16, | ||
1925 | .result = { 0xe3, 0x53, 0x77, 0x9c, 0x10, 0x79, 0xae, 0xb8, | ||
1926 | 0x27, 0x08, 0x94, 0x2d, 0xbe, 0x77, 0x18, 0x1a }, | ||
1927 | .rlen = 16, | ||
1928 | }, { | ||
1929 | .key = { 0xc2, 0x86, 0x69, 0x6d, 0x88, 0x7c, 0x9a, 0xa0, | ||
1930 | 0x61, 0x1b, 0xbb, 0x3e, 0x20, 0x25, 0xa4, 0x5a }, | ||
1931 | .klen = 16, | ||
1932 | .iv = { 0x56, 0x2e, 0x17, 0x99, 0x6d, 0x09, 0x3d, 0x28, | ||
1933 | 0xdd, 0xb3, 0xba, 0x69, 0x5a, 0x2e, 0x6f, 0x58 }, | ||
1934 | .input = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, | ||
1935 | 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, | ||
1936 | 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, | ||
1937 | 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f }, | ||
1938 | .ilen = 32, | ||
1939 | .result = { 0xd2, 0x96, 0xcd, 0x94, 0xc2, 0xcc, 0xcf, 0x8a, | ||
1940 | 0x3a, 0x86, 0x30, 0x28, 0xb5, 0xe1, 0xdc, 0x0a, | ||
1941 | 0x75, 0x86, 0x60, 0x2d, 0x25, 0x3c, 0xff, 0xf9, | ||
1942 | 0x1b, 0x82, 0x66, 0xbe, 0xa6, 0xd6, 0x1a, 0xb1 }, | ||
1943 | .rlen = 32, | ||
1944 | }, | ||
1945 | }; | ||
1946 | |||
1947 | static struct cipher_testvec aes_cbc_dec_tv_template[] = { | ||
1948 | { /* From RFC 3602 */ | ||
1949 | .key = { 0x06, 0xa9, 0x21, 0x40, 0x36, 0xb8, 0xa1, 0x5b, | ||
1950 | 0x51, 0x2e, 0x03, 0xd5, 0x34, 0x12, 0x00, 0x06 }, | ||
1951 | .klen = 16, | ||
1952 | .iv = { 0x3d, 0xaf, 0xba, 0x42, 0x9d, 0x9e, 0xb4, 0x30, | ||
1953 | 0xb4, 0x22, 0xda, 0x80, 0x2c, 0x9f, 0xac, 0x41 }, | ||
1954 | .input = { 0xe3, 0x53, 0x77, 0x9c, 0x10, 0x79, 0xae, 0xb8, | ||
1955 | 0x27, 0x08, 0x94, 0x2d, 0xbe, 0x77, 0x18, 0x1a }, | ||
1956 | .ilen = 16, | ||
1957 | .result = { "Single block msg" }, | ||
1958 | .rlen = 16, | ||
1959 | }, { | ||
1960 | .key = { 0xc2, 0x86, 0x69, 0x6d, 0x88, 0x7c, 0x9a, 0xa0, | ||
1961 | 0x61, 0x1b, 0xbb, 0x3e, 0x20, 0x25, 0xa4, 0x5a }, | ||
1962 | .klen = 16, | ||
1963 | .iv = { 0x56, 0x2e, 0x17, 0x99, 0x6d, 0x09, 0x3d, 0x28, | ||
1964 | 0xdd, 0xb3, 0xba, 0x69, 0x5a, 0x2e, 0x6f, 0x58 }, | ||
1965 | .input = { 0xd2, 0x96, 0xcd, 0x94, 0xc2, 0xcc, 0xcf, 0x8a, | ||
1966 | 0x3a, 0x86, 0x30, 0x28, 0xb5, 0xe1, 0xdc, 0x0a, | ||
1967 | 0x75, 0x86, 0x60, 0x2d, 0x25, 0x3c, 0xff, 0xf9, | ||
1968 | 0x1b, 0x82, 0x66, 0xbe, 0xa6, 0xd6, 0x1a, 0xb1 }, | ||
1969 | .ilen = 32, | ||
1970 | .result = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, | ||
1971 | 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, | ||
1972 | 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, | ||
1973 | 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f }, | ||
1974 | .rlen = 32, | ||
1975 | }, | ||
1976 | }; | ||
1977 | |||
1914 | /* Cast5 test vectors from RFC 2144 */ | 1978 | /* Cast5 test vectors from RFC 2144 */ |
1915 | #define CAST5_ENC_TEST_VECTORS 3 | 1979 | #define CAST5_ENC_TEST_VECTORS 3 |
1916 | #define CAST5_DEC_TEST_VECTORS 3 | 1980 | #define CAST5_DEC_TEST_VECTORS 3 |
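The aes_cbc_enc_tv_template/aes_cbc_dec_tv_template entries added above are two of the AES-128 test cases from RFC 3602. CBC chains blocks as C_i = E_K(P_i XOR C_(i-1)) with C_0 = IV, which is why the decrypt template is an exact mirror of the encrypt one: same key and IV, with input and result swapped.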
diff --git a/crypto/tea.c b/crypto/tea.c index 5924efdd3a16..a6a02b30e470 100644 --- a/crypto/tea.c +++ b/crypto/tea.c | |||
@@ -22,8 +22,10 @@ | |||
22 | #include <linux/init.h> | 22 | #include <linux/init.h> |
23 | #include <linux/module.h> | 23 | #include <linux/module.h> |
24 | #include <linux/mm.h> | 24 | #include <linux/mm.h> |
25 | #include <asm/byteorder.h> | ||
25 | #include <asm/scatterlist.h> | 26 | #include <asm/scatterlist.h> |
26 | #include <linux/crypto.h> | 27 | #include <linux/crypto.h> |
28 | #include <linux/types.h> | ||
27 | 29 | ||
28 | #define TEA_KEY_SIZE 16 | 30 | #define TEA_KEY_SIZE 16 |
29 | #define TEA_BLOCK_SIZE 8 | 31 | #define TEA_BLOCK_SIZE 8 |
@@ -35,9 +37,6 @@ | |||
35 | #define XTEA_ROUNDS 32 | 37 | #define XTEA_ROUNDS 32 |
36 | #define XTEA_DELTA 0x9e3779b9 | 38 | #define XTEA_DELTA 0x9e3779b9 |
37 | 39 | ||
38 | #define u32_in(x) le32_to_cpu(*(const __le32 *)(x)) | ||
39 | #define u32_out(to, from) (*(__le32 *)(to) = cpu_to_le32(from)) | ||
40 | |||
41 | struct tea_ctx { | 40 | struct tea_ctx { |
42 | u32 KEY[4]; | 41 | u32 KEY[4]; |
43 | }; | 42 | }; |
@@ -49,8 +48,8 @@ struct xtea_ctx { | |||
49 | static int tea_setkey(void *ctx_arg, const u8 *in_key, | 48 | static int tea_setkey(void *ctx_arg, const u8 *in_key, |
50 | unsigned int key_len, u32 *flags) | 49 | unsigned int key_len, u32 *flags) |
51 | { | 50 | { |
52 | |||
53 | struct tea_ctx *ctx = ctx_arg; | 51 | struct tea_ctx *ctx = ctx_arg; |
52 | const __le32 *key = (const __le32 *)in_key; | ||
54 | 53 | ||
55 | if (key_len != 16) | 54 | if (key_len != 16) |
56 | { | 55 | { |
@@ -58,10 +57,10 @@ static int tea_setkey(void *ctx_arg, const u8 *in_key, | |||
58 | return -EINVAL; | 57 | return -EINVAL; |
59 | } | 58 | } |
60 | 59 | ||
61 | ctx->KEY[0] = u32_in (in_key); | 60 | ctx->KEY[0] = le32_to_cpu(key[0]); |
62 | ctx->KEY[1] = u32_in (in_key + 4); | 61 | ctx->KEY[1] = le32_to_cpu(key[1]); |
63 | ctx->KEY[2] = u32_in (in_key + 8); | 62 | ctx->KEY[2] = le32_to_cpu(key[2]); |
64 | ctx->KEY[3] = u32_in (in_key + 12); | 63 | ctx->KEY[3] = le32_to_cpu(key[3]); |
65 | 64 | ||
66 | return 0; | 65 | return 0; |
67 | 66 | ||
@@ -73,9 +72,11 @@ static void tea_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | |||
73 | u32 k0, k1, k2, k3; | 72 | u32 k0, k1, k2, k3; |
74 | 73 | ||
75 | struct tea_ctx *ctx = ctx_arg; | 74 | struct tea_ctx *ctx = ctx_arg; |
75 | const __le32 *in = (const __le32 *)src; | ||
76 | __le32 *out = (__le32 *)dst; | ||
76 | 77 | ||
77 | y = u32_in (src); | 78 | y = le32_to_cpu(in[0]); |
78 | z = u32_in (src + 4); | 79 | z = le32_to_cpu(in[1]); |
79 | 80 | ||
80 | k0 = ctx->KEY[0]; | 81 | k0 = ctx->KEY[0]; |
81 | k1 = ctx->KEY[1]; | 82 | k1 = ctx->KEY[1]; |
@@ -90,19 +91,20 @@ static void tea_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | |||
90 | z += ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3); | 91 | z += ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3); |
91 | } | 92 | } |
92 | 93 | ||
93 | u32_out (dst, y); | 94 | out[0] = cpu_to_le32(y); |
94 | u32_out (dst + 4, z); | 95 | out[1] = cpu_to_le32(z); |
95 | } | 96 | } |
96 | 97 | ||
97 | static void tea_decrypt(void *ctx_arg, u8 *dst, const u8 *src) | 98 | static void tea_decrypt(void *ctx_arg, u8 *dst, const u8 *src) |
98 | { | 99 | { |
99 | u32 y, z, n, sum; | 100 | u32 y, z, n, sum; |
100 | u32 k0, k1, k2, k3; | 101 | u32 k0, k1, k2, k3; |
101 | |||
102 | struct tea_ctx *ctx = ctx_arg; | 102 | struct tea_ctx *ctx = ctx_arg; |
103 | const __le32 *in = (const __le32 *)src; | ||
104 | __le32 *out = (__le32 *)dst; | ||
103 | 105 | ||
104 | y = u32_in (src); | 106 | y = le32_to_cpu(in[0]); |
105 | z = u32_in (src + 4); | 107 | z = le32_to_cpu(in[1]); |
106 | 108 | ||
107 | k0 = ctx->KEY[0]; | 109 | k0 = ctx->KEY[0]; |
108 | k1 = ctx->KEY[1]; | 110 | k1 = ctx->KEY[1]; |
@@ -119,16 +121,15 @@ static void tea_decrypt(void *ctx_arg, u8 *dst, const u8 *src) | |||
119 | sum -= TEA_DELTA; | 121 | sum -= TEA_DELTA; |
120 | } | 122 | } |
121 | 123 | ||
122 | u32_out (dst, y); | 124 | out[0] = cpu_to_le32(y); |
123 | u32_out (dst + 4, z); | 125 | out[1] = cpu_to_le32(z); |
124 | |||
125 | } | 126 | } |
126 | 127 | ||
127 | static int xtea_setkey(void *ctx_arg, const u8 *in_key, | 128 | static int xtea_setkey(void *ctx_arg, const u8 *in_key, |
128 | unsigned int key_len, u32 *flags) | 129 | unsigned int key_len, u32 *flags) |
129 | { | 130 | { |
130 | |||
131 | struct xtea_ctx *ctx = ctx_arg; | 131 | struct xtea_ctx *ctx = ctx_arg; |
132 | const __le32 *key = (const __le32 *)in_key; | ||
132 | 133 | ||
133 | if (key_len != 16) | 134 | if (key_len != 16) |
134 | { | 135 | { |
@@ -136,10 +137,10 @@ static int xtea_setkey(void *ctx_arg, const u8 *in_key, | |||
136 | return -EINVAL; | 137 | return -EINVAL; |
137 | } | 138 | } |
138 | 139 | ||
139 | ctx->KEY[0] = u32_in (in_key); | 140 | ctx->KEY[0] = le32_to_cpu(key[0]); |
140 | ctx->KEY[1] = u32_in (in_key + 4); | 141 | ctx->KEY[1] = le32_to_cpu(key[1]); |
141 | ctx->KEY[2] = u32_in (in_key + 8); | 142 | ctx->KEY[2] = le32_to_cpu(key[2]); |
142 | ctx->KEY[3] = u32_in (in_key + 12); | 143 | ctx->KEY[3] = le32_to_cpu(key[3]); |
143 | 144 | ||
144 | return 0; | 145 | return 0; |
145 | 146 | ||
@@ -147,14 +148,15 @@ static int xtea_setkey(void *ctx_arg, const u8 *in_key, | |||
147 | 148 | ||
148 | static void xtea_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | 149 | static void xtea_encrypt(void *ctx_arg, u8 *dst, const u8 *src) |
149 | { | 150 | { |
150 | |||
151 | u32 y, z, sum = 0; | 151 | u32 y, z, sum = 0; |
152 | u32 limit = XTEA_DELTA * XTEA_ROUNDS; | 152 | u32 limit = XTEA_DELTA * XTEA_ROUNDS; |
153 | 153 | ||
154 | struct xtea_ctx *ctx = ctx_arg; | 154 | struct xtea_ctx *ctx = ctx_arg; |
155 | const __le32 *in = (const __le32 *)src; | ||
156 | __le32 *out = (__le32 *)dst; | ||
155 | 157 | ||
156 | y = u32_in (src); | 158 | y = le32_to_cpu(in[0]); |
157 | z = u32_in (src + 4); | 159 | z = le32_to_cpu(in[1]); |
158 | 160 | ||
159 | while (sum != limit) { | 161 | while (sum != limit) { |
160 | y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum&3]); | 162 | y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum&3]); |
@@ -162,19 +164,19 @@ static void xtea_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | |||
162 | z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum>>11 &3]); | 164 | z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum>>11 &3]); |
163 | } | 165 | } |
164 | 166 | ||
165 | u32_out (dst, y); | 167 | out[0] = cpu_to_le32(y); |
166 | u32_out (dst + 4, z); | 168 | out[1] = cpu_to_le32(z); |
167 | |||
168 | } | 169 | } |
169 | 170 | ||
170 | static void xtea_decrypt(void *ctx_arg, u8 *dst, const u8 *src) | 171 | static void xtea_decrypt(void *ctx_arg, u8 *dst, const u8 *src) |
171 | { | 172 | { |
172 | |||
173 | u32 y, z, sum; | 173 | u32 y, z, sum; |
174 | struct tea_ctx *ctx = ctx_arg; | 174 | struct tea_ctx *ctx = ctx_arg; |
175 | const __le32 *in = (const __le32 *)src; | ||
176 | __le32 *out = (__le32 *)dst; | ||
175 | 177 | ||
176 | y = u32_in (src); | 178 | y = le32_to_cpu(in[0]); |
177 | z = u32_in (src + 4); | 179 | z = le32_to_cpu(in[1]); |
178 | 180 | ||
179 | sum = XTEA_DELTA * XTEA_ROUNDS; | 181 | sum = XTEA_DELTA * XTEA_ROUNDS; |
180 | 182 | ||
@@ -184,22 +186,22 @@ static void xtea_decrypt(void *ctx_arg, u8 *dst, const u8 *src) | |||
184 | y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]); | 186 | y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]); |
185 | } | 187 | } |
186 | 188 | ||
187 | u32_out (dst, y); | 189 | out[0] = cpu_to_le32(y); |
188 | u32_out (dst + 4, z); | 190 | out[1] = cpu_to_le32(z); |
189 | |||
190 | } | 191 | } |
191 | 192 | ||
192 | 193 | ||
193 | static void xeta_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | 194 | static void xeta_encrypt(void *ctx_arg, u8 *dst, const u8 *src) |
194 | { | 195 | { |
195 | |||
196 | u32 y, z, sum = 0; | 196 | u32 y, z, sum = 0; |
197 | u32 limit = XTEA_DELTA * XTEA_ROUNDS; | 197 | u32 limit = XTEA_DELTA * XTEA_ROUNDS; |
198 | 198 | ||
199 | struct xtea_ctx *ctx = ctx_arg; | 199 | struct xtea_ctx *ctx = ctx_arg; |
200 | const __le32 *in = (const __le32 *)src; | ||
201 | __le32 *out = (__le32 *)dst; | ||
200 | 202 | ||
201 | y = u32_in (src); | 203 | y = le32_to_cpu(in[0]); |
202 | z = u32_in (src + 4); | 204 | z = le32_to_cpu(in[1]); |
203 | 205 | ||
204 | while (sum != limit) { | 206 | while (sum != limit) { |
205 | y += (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum&3]; | 207 | y += (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum&3]; |
@@ -207,19 +209,19 @@ static void xeta_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | |||
207 | z += (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum>>11 &3]; | 209 | z += (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum>>11 &3]; |
208 | } | 210 | } |
209 | 211 | ||
210 | u32_out (dst, y); | 212 | out[0] = cpu_to_le32(y); |
211 | u32_out (dst + 4, z); | 213 | out[1] = cpu_to_le32(z); |
212 | |||
213 | } | 214 | } |
214 | 215 | ||
215 | static void xeta_decrypt(void *ctx_arg, u8 *dst, const u8 *src) | 216 | static void xeta_decrypt(void *ctx_arg, u8 *dst, const u8 *src) |
216 | { | 217 | { |
217 | |||
218 | u32 y, z, sum; | 218 | u32 y, z, sum; |
219 | struct tea_ctx *ctx = ctx_arg; | 219 | struct tea_ctx *ctx = ctx_arg; |
220 | const __le32 *in = (const __le32 *)src; | ||
221 | __le32 *out = (__le32 *)dst; | ||
220 | 222 | ||
221 | y = u32_in (src); | 223 | y = le32_to_cpu(in[0]); |
222 | z = u32_in (src + 4); | 224 | z = le32_to_cpu(in[1]); |
223 | 225 | ||
224 | sum = XTEA_DELTA * XTEA_ROUNDS; | 226 | sum = XTEA_DELTA * XTEA_ROUNDS; |
225 | 227 | ||
@@ -229,9 +231,8 @@ static void xeta_decrypt(void *ctx_arg, u8 *dst, const u8 *src) | |||
229 | y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3]; | 231 | y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3]; |
230 | } | 232 | } |
231 | 233 | ||
232 | u32_out (dst, y); | 234 | out[0] = cpu_to_le32(y); |
233 | u32_out (dst + 4, z); | 235 | out[1] = cpu_to_le32(z); |
234 | |||
235 | } | 236 | } |
236 | 237 | ||
237 | static struct crypto_alg tea_alg = { | 238 | static struct crypto_alg tea_alg = { |
@@ -239,6 +240,7 @@ static struct crypto_alg tea_alg = { | |||
239 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, | 240 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, |
240 | .cra_blocksize = TEA_BLOCK_SIZE, | 241 | .cra_blocksize = TEA_BLOCK_SIZE, |
241 | .cra_ctxsize = sizeof (struct tea_ctx), | 242 | .cra_ctxsize = sizeof (struct tea_ctx), |
243 | .cra_alignmask = 3, | ||
242 | .cra_module = THIS_MODULE, | 244 | .cra_module = THIS_MODULE, |
243 | .cra_list = LIST_HEAD_INIT(tea_alg.cra_list), | 245 | .cra_list = LIST_HEAD_INIT(tea_alg.cra_list), |
244 | .cra_u = { .cipher = { | 246 | .cra_u = { .cipher = { |
@@ -254,6 +256,7 @@ static struct crypto_alg xtea_alg = { | |||
254 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, | 256 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, |
255 | .cra_blocksize = XTEA_BLOCK_SIZE, | 257 | .cra_blocksize = XTEA_BLOCK_SIZE, |
256 | .cra_ctxsize = sizeof (struct xtea_ctx), | 258 | .cra_ctxsize = sizeof (struct xtea_ctx), |
259 | .cra_alignmask = 3, | ||
257 | .cra_module = THIS_MODULE, | 260 | .cra_module = THIS_MODULE, |
258 | .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list), | 261 | .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list), |
259 | .cra_u = { .cipher = { | 262 | .cra_u = { .cipher = { |
@@ -269,6 +272,7 @@ static struct crypto_alg xeta_alg = { | |||
269 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, | 272 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, |
270 | .cra_blocksize = XTEA_BLOCK_SIZE, | 273 | .cra_blocksize = XTEA_BLOCK_SIZE, |
271 | .cra_ctxsize = sizeof (struct xtea_ctx), | 274 | .cra_ctxsize = sizeof (struct xtea_ctx), |
275 | .cra_alignmask = 3, | ||
272 | .cra_module = THIS_MODULE, | 276 | .cra_module = THIS_MODULE, |
273 | .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list), | 277 | .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list), |
274 | .cra_u = { .cipher = { | 278 | .cra_u = { .cipher = { |
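Only the byte-order handling changes in tea.c; the TEA/XTEA/XETA round functions themselves are untouched, and the new .cra_alignmask = 3 entries tell the crypto layer to hand these ciphers buffers that are safe for the __le32 casts on src and dst. For reference, a self-contained user-space sketch of one TEA block encryption with the same round structure as tea_encrypt() above (illustrative types and names):

#include <stdint.h>

#define TEA_DELTA  0x9e3779b9
#define TEA_ROUNDS 32

/* Encrypt one 64-bit block v[0..1] in place with the 128-bit key k[0..3],
 * using the same rounds as tea_encrypt() in the patch above. */
static void tea_encrypt_block(uint32_t v[2], const uint32_t k[4])
{
	uint32_t y = v[0], z = v[1], sum = 0;
	int n;

	for (n = 0; n < TEA_ROUNDS; n++) {
		sum += TEA_DELTA;
		y += ((z << 4) + k[0]) ^ (z + sum) ^ ((z >> 5) + k[1]);
		z += ((y << 4) + k[2]) ^ (y + sum) ^ ((y >> 5) + k[3]);
	}

	v[0] = y;
	v[1] = z;
}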
diff --git a/crypto/tgr192.c b/crypto/tgr192.c index f0a45cf716d0..2d8e44f6fbe9 100644 --- a/crypto/tgr192.c +++ b/crypto/tgr192.c | |||
@@ -24,8 +24,10 @@ | |||
24 | #include <linux/init.h> | 24 | #include <linux/init.h> |
25 | #include <linux/module.h> | 25 | #include <linux/module.h> |
26 | #include <linux/mm.h> | 26 | #include <linux/mm.h> |
27 | #include <asm/byteorder.h> | ||
27 | #include <asm/scatterlist.h> | 28 | #include <asm/scatterlist.h> |
28 | #include <linux/crypto.h> | 29 | #include <linux/crypto.h> |
30 | #include <linux/types.h> | ||
29 | 31 | ||
30 | #define TGR192_DIGEST_SIZE 24 | 32 | #define TGR192_DIGEST_SIZE 24 |
31 | #define TGR160_DIGEST_SIZE 20 | 33 | #define TGR160_DIGEST_SIZE 20 |
@@ -467,18 +469,10 @@ static void tgr192_transform(struct tgr192_ctx *tctx, const u8 * data) | |||
467 | u64 a, b, c, aa, bb, cc; | 469 | u64 a, b, c, aa, bb, cc; |
468 | u64 x[8]; | 470 | u64 x[8]; |
469 | int i; | 471 | int i; |
470 | const u8 *ptr = data; | 472 | const __le64 *ptr = (const __le64 *)data; |
471 | 473 | ||
472 | for (i = 0; i < 8; i++, ptr += 8) { | 474 | for (i = 0; i < 8; i++) |
473 | x[i] = (((u64)ptr[7] ) << 56) ^ | 475 | x[i] = le64_to_cpu(ptr[i]); |
474 | (((u64)ptr[6] & 0xffL) << 48) ^ | ||
475 | (((u64)ptr[5] & 0xffL) << 40) ^ | ||
476 | (((u64)ptr[4] & 0xffL) << 32) ^ | ||
477 | (((u64)ptr[3] & 0xffL) << 24) ^ | ||
478 | (((u64)ptr[2] & 0xffL) << 16) ^ | ||
479 | (((u64)ptr[1] & 0xffL) << 8) ^ | ||
480 | (((u64)ptr[0] & 0xffL) ); | ||
481 | } | ||
482 | 476 | ||
483 | /* save */ | 477 | /* save */ |
484 | a = aa = tctx->a; | 478 | a = aa = tctx->a; |
@@ -558,9 +552,10 @@ static void tgr192_update(void *ctx, const u8 * inbuf, unsigned int len) | |||
558 | static void tgr192_final(void *ctx, u8 * out) | 552 | static void tgr192_final(void *ctx, u8 * out) |
559 | { | 553 | { |
560 | struct tgr192_ctx *tctx = ctx; | 554 | struct tgr192_ctx *tctx = ctx; |
555 | __be64 *dst = (__be64 *)out; | ||
556 | __be64 *be64p; | ||
557 | __le32 *le32p; | ||
561 | u32 t, msb, lsb; | 558 | u32 t, msb, lsb; |
562 | u8 *p; | ||
563 | int i, j; | ||
564 | 559 | ||
565 | tgr192_update(tctx, NULL, 0); /* flush */ ; | 560 | tgr192_update(tctx, NULL, 0); /* flush */ ; |
566 | 561 | ||
@@ -594,41 +589,16 @@ static void tgr192_final(void *ctx, u8 * out) | |||
594 | memset(tctx->hash, 0, 56); /* fill next block with zeroes */ | 589 | memset(tctx->hash, 0, 56); /* fill next block with zeroes */ |
595 | } | 590 | } |
596 | /* append the 64 bit count */ | 591 | /* append the 64 bit count */ |
597 | tctx->hash[56] = lsb; | 592 | le32p = (__le32 *)&tctx->hash[56]; |
598 | tctx->hash[57] = lsb >> 8; | 593 | le32p[0] = cpu_to_le32(lsb); |
599 | tctx->hash[58] = lsb >> 16; | 594 | le32p[1] = cpu_to_le32(msb); |
600 | tctx->hash[59] = lsb >> 24; | 595 | |
601 | tctx->hash[60] = msb; | ||
602 | tctx->hash[61] = msb >> 8; | ||
603 | tctx->hash[62] = msb >> 16; | ||
604 | tctx->hash[63] = msb >> 24; | ||
605 | tgr192_transform(tctx, tctx->hash); | 596 | tgr192_transform(tctx, tctx->hash); |
606 | 597 | ||
607 | p = tctx->hash; | 598 | be64p = (__be64 *)tctx->hash; |
608 | *p++ = tctx->a >> 56; *p++ = tctx->a >> 48; *p++ = tctx->a >> 40; | 599 | dst[0] = be64p[0] = cpu_to_be64(tctx->a); |
609 | *p++ = tctx->a >> 32; *p++ = tctx->a >> 24; *p++ = tctx->a >> 16; | 600 | dst[1] = be64p[1] = cpu_to_be64(tctx->b); |
610 | *p++ = tctx->a >> 8; *p++ = tctx->a;\ | 601 | dst[2] = be64p[2] = cpu_to_be64(tctx->c); |
611 | *p++ = tctx->b >> 56; *p++ = tctx->b >> 48; *p++ = tctx->b >> 40; | ||
612 | *p++ = tctx->b >> 32; *p++ = tctx->b >> 24; *p++ = tctx->b >> 16; | ||
613 | *p++ = tctx->b >> 8; *p++ = tctx->b; | ||
614 | *p++ = tctx->c >> 56; *p++ = tctx->c >> 48; *p++ = tctx->c >> 40; | ||
615 | *p++ = tctx->c >> 32; *p++ = tctx->c >> 24; *p++ = tctx->c >> 16; | ||
616 | *p++ = tctx->c >> 8; *p++ = tctx->c; | ||
617 | |||
618 | |||
619 | /* unpack the hash */ | ||
620 | j = 7; | ||
621 | for (i = 0; i < 8; i++) { | ||
622 | out[j--] = (tctx->a >> 8 * i) & 0xff; | ||
623 | } | ||
624 | j = 15; | ||
625 | for (i = 0; i < 8; i++) { | ||
626 | out[j--] = (tctx->b >> 8 * i) & 0xff; | ||
627 | } | ||
628 | j = 23; | ||
629 | for (i = 0; i < 8; i++) { | ||
630 | out[j--] = (tctx->c >> 8 * i) & 0xff; | ||
631 | } | ||
632 | } | 602 | } |
633 | 603 | ||
634 | static void tgr160_final(void *ctx, u8 * out) | 604 | static void tgr160_final(void *ctx, u8 * out) |
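In tgr192_final(), the two cpu_to_le32() stores at &tctx->hash[56] lay down the same little-endian 64-bit bit count as the eight byte assignments they replace; an equivalent single store (illustrative only, not what the patch does) would be:

*(__le64 *)&tctx->hash[56] = cpu_to_le64(((u64)msb << 32) | lsb);

The digest itself is now produced with three cpu_to_be64() stores, written both to the output buffer and back into tctx->hash.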
diff --git a/crypto/twofish.c b/crypto/twofish.c index 4efff8cf9958..a26d885486fb 100644 --- a/crypto/twofish.c +++ b/crypto/twofish.c | |||
@@ -37,6 +37,8 @@ | |||
37 | * Abstract Algebra_ by Joseph A. Gallian, especially chapter 22 in the | 37 | * Abstract Algebra_ by Joseph A. Gallian, especially chapter 22 in the |
38 | * Third Edition. | 38 | * Third Edition. |
39 | */ | 39 | */ |
40 | |||
41 | #include <asm/byteorder.h> | ||
40 | #include <linux/module.h> | 42 | #include <linux/module.h> |
41 | #include <linux/init.h> | 43 | #include <linux/init.h> |
42 | #include <linux/types.h> | 44 | #include <linux/types.h> |
@@ -621,13 +623,11 @@ static const u8 calc_sb_tbl[512] = { | |||
621 | * whitening subkey number m. */ | 623 | * whitening subkey number m. */ |
622 | 624 | ||
623 | #define INPACK(n, x, m) \ | 625 | #define INPACK(n, x, m) \ |
624 | x = in[4 * (n)] ^ (in[4 * (n) + 1] << 8) \ | 626 | x = le32_to_cpu(src[n]) ^ ctx->w[m] |
625 | ^ (in[4 * (n) + 2] << 16) ^ (in[4 * (n) + 3] << 24) ^ ctx->w[m] | ||
626 | 627 | ||
627 | #define OUTUNPACK(n, x, m) \ | 628 | #define OUTUNPACK(n, x, m) \ |
628 | x ^= ctx->w[m]; \ | 629 | x ^= ctx->w[m]; \ |
629 | out[4 * (n)] = x; out[4 * (n) + 1] = x >> 8; \ | 630 | dst[n] = cpu_to_le32(x) |
630 | out[4 * (n) + 2] = x >> 16; out[4 * (n) + 3] = x >> 24 | ||
631 | 631 | ||
632 | #define TF_MIN_KEY_SIZE 16 | 632 | #define TF_MIN_KEY_SIZE 16 |
633 | #define TF_MAX_KEY_SIZE 32 | 633 | #define TF_MAX_KEY_SIZE 32 |
@@ -804,6 +804,8 @@ static int twofish_setkey(void *cx, const u8 *key, | |||
804 | static void twofish_encrypt(void *cx, u8 *out, const u8 *in) | 804 | static void twofish_encrypt(void *cx, u8 *out, const u8 *in) |
805 | { | 805 | { |
806 | struct twofish_ctx *ctx = cx; | 806 | struct twofish_ctx *ctx = cx; |
807 | const __le32 *src = (const __le32 *)in; | ||
808 | __le32 *dst = (__le32 *)out; | ||
807 | 809 | ||
808 | /* The four 32-bit chunks of the text. */ | 810 | /* The four 32-bit chunks of the text. */ |
809 | u32 a, b, c, d; | 811 | u32 a, b, c, d; |
@@ -839,6 +841,8 @@ static void twofish_encrypt(void *cx, u8 *out, const u8 *in) | |||
839 | static void twofish_decrypt(void *cx, u8 *out, const u8 *in) | 841 | static void twofish_decrypt(void *cx, u8 *out, const u8 *in) |
840 | { | 842 | { |
841 | struct twofish_ctx *ctx = cx; | 843 | struct twofish_ctx *ctx = cx; |
844 | const __le32 *src = (const __le32 *)in; | ||
845 | __le32 *dst = (__le32 *)out; | ||
842 | 846 | ||
843 | /* The four 32-bit chunks of the text. */ | 847 | /* The four 32-bit chunks of the text. */ |
844 | u32 a, b, c, d; | 848 | u32 a, b, c, d; |
@@ -875,6 +879,7 @@ static struct crypto_alg alg = { | |||
875 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, | 879 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, |
876 | .cra_blocksize = TF_BLOCK_SIZE, | 880 | .cra_blocksize = TF_BLOCK_SIZE, |
877 | .cra_ctxsize = sizeof(struct twofish_ctx), | 881 | .cra_ctxsize = sizeof(struct twofish_ctx), |
882 | .cra_alignmask = 3, | ||
878 | .cra_module = THIS_MODULE, | 883 | .cra_module = THIS_MODULE, |
879 | .cra_list = LIST_HEAD_INIT(alg.cra_list), | 884 | .cra_list = LIST_HEAD_INIT(alg.cra_list), |
880 | .cra_u = { .cipher = { | 885 | .cra_u = { .cipher = { |
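With the slimmed-down macros, twofish now moves data as __le32 words (hence the new .cra_alignmask = 3). A use such as INPACK(0, a, 0) in the encrypt path expands to

a = le32_to_cpu(src[0]) ^ ctx->w[0];

and the matching OUTUNPACK(n, x, m) to

x ^= ctx->w[m];
dst[n] = cpu_to_le32(x);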
diff --git a/crypto/wp512.c b/crypto/wp512.c index fd6e20e1f291..b226a126cfae 100644 --- a/crypto/wp512.c +++ b/crypto/wp512.c | |||
@@ -22,8 +22,10 @@ | |||
22 | #include <linux/init.h> | 22 | #include <linux/init.h> |
23 | #include <linux/module.h> | 23 | #include <linux/module.h> |
24 | #include <linux/mm.h> | 24 | #include <linux/mm.h> |
25 | #include <asm/byteorder.h> | ||
25 | #include <asm/scatterlist.h> | 26 | #include <asm/scatterlist.h> |
26 | #include <linux/crypto.h> | 27 | #include <linux/crypto.h> |
28 | #include <linux/types.h> | ||
27 | 29 | ||
28 | #define WP512_DIGEST_SIZE 64 | 30 | #define WP512_DIGEST_SIZE 64 |
29 | #define WP384_DIGEST_SIZE 48 | 31 | #define WP384_DIGEST_SIZE 48 |
@@ -778,19 +780,10 @@ static void wp512_process_buffer(struct wp512_ctx *wctx) { | |||
778 | u64 block[8]; /* mu(buffer) */ | 780 | u64 block[8]; /* mu(buffer) */ |
779 | u64 state[8]; /* the cipher state */ | 781 | u64 state[8]; /* the cipher state */ |
780 | u64 L[8]; | 782 | u64 L[8]; |
781 | u8 *buffer = wctx->buffer; | 783 | const __be64 *buffer = (const __be64 *)wctx->buffer; |
782 | 784 | ||
783 | for (i = 0; i < 8; i++, buffer += 8) { | 785 | for (i = 0; i < 8; i++) |
784 | block[i] = | 786 | block[i] = be64_to_cpu(buffer[i]); |
785 | (((u64)buffer[0] ) << 56) ^ | ||
786 | (((u64)buffer[1] & 0xffL) << 48) ^ | ||
787 | (((u64)buffer[2] & 0xffL) << 40) ^ | ||
788 | (((u64)buffer[3] & 0xffL) << 32) ^ | ||
789 | (((u64)buffer[4] & 0xffL) << 24) ^ | ||
790 | (((u64)buffer[5] & 0xffL) << 16) ^ | ||
791 | (((u64)buffer[6] & 0xffL) << 8) ^ | ||
792 | (((u64)buffer[7] & 0xffL) ); | ||
793 | } | ||
794 | 787 | ||
795 | state[0] = block[0] ^ (K[0] = wctx->hash[0]); | 788 | state[0] = block[0] ^ (K[0] = wctx->hash[0]); |
796 | state[1] = block[1] ^ (K[1] = wctx->hash[1]); | 789 | state[1] = block[1] ^ (K[1] = wctx->hash[1]); |
@@ -1069,7 +1062,7 @@ static void wp512_final(void *ctx, u8 *out) | |||
1069 | u8 *bitLength = wctx->bitLength; | 1062 | u8 *bitLength = wctx->bitLength; |
1070 | int bufferBits = wctx->bufferBits; | 1063 | int bufferBits = wctx->bufferBits; |
1071 | int bufferPos = wctx->bufferPos; | 1064 | int bufferPos = wctx->bufferPos; |
1072 | u8 *digest = out; | 1065 | __be64 *digest = (__be64 *)out; |
1073 | 1066 | ||
1074 | buffer[bufferPos] |= 0x80U >> (bufferBits & 7); | 1067 | buffer[bufferPos] |= 0x80U >> (bufferBits & 7); |
1075 | bufferPos++; | 1068 | bufferPos++; |
@@ -1088,17 +1081,8 @@ static void wp512_final(void *ctx, u8 *out) | |||
1088 | memcpy(&buffer[WP512_BLOCK_SIZE - WP512_LENGTHBYTES], | 1081 | memcpy(&buffer[WP512_BLOCK_SIZE - WP512_LENGTHBYTES], |
1089 | bitLength, WP512_LENGTHBYTES); | 1082 | bitLength, WP512_LENGTHBYTES); |
1090 | wp512_process_buffer(wctx); | 1083 | wp512_process_buffer(wctx); |
1091 | for (i = 0; i < WP512_DIGEST_SIZE/8; i++) { | 1084 | for (i = 0; i < WP512_DIGEST_SIZE/8; i++) |
1092 | digest[0] = (u8)(wctx->hash[i] >> 56); | 1085 | digest[i] = cpu_to_be64(wctx->hash[i]); |
1093 | digest[1] = (u8)(wctx->hash[i] >> 48); | ||
1094 | digest[2] = (u8)(wctx->hash[i] >> 40); | ||
1095 | digest[3] = (u8)(wctx->hash[i] >> 32); | ||
1096 | digest[4] = (u8)(wctx->hash[i] >> 24); | ||
1097 | digest[5] = (u8)(wctx->hash[i] >> 16); | ||
1098 | digest[6] = (u8)(wctx->hash[i] >> 8); | ||
1099 | digest[7] = (u8)(wctx->hash[i] ); | ||
1100 | digest += 8; | ||
1101 | } | ||
1102 | wctx->bufferBits = bufferBits; | 1086 | wctx->bufferBits = bufferBits; |
1103 | wctx->bufferPos = bufferPos; | 1087 | wctx->bufferPos = bufferPos; |
1104 | } | 1088 | } |
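Both ends of the Whirlpool data path now use 64-bit big-endian accessors: wp512_process_buffer() loads the eight message words with be64_to_cpu(), and wp512_final() emits the eight state words with cpu_to_be64(), eight words of eight bytes giving the same 64-byte WP512_DIGEST_SIZE output as the removed byte-assembly loops.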